Phase 3: Outcome storage
- Add RandomNumberOutcome model to models.py
- Update worker.py to execute the job logic (sketched below):
  - Generate a random number 0-100
  - Record execution duration
  - Store the outcome in the database
- Add test_jobs.py with unit tests for the job handler logic
parent 6ca0ae88dd
commit 7beb213cf5
3 changed files with 257 additions and 10 deletions
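Only the test file is shown in this diff; the models.py and worker.py changes themselves are not included. Going by what the tests below assert (positional arguments in the order (query, job_id, user_id, value, duration_ms, status), a value in [0, 100], an integer duration, status "completed", and no insert when the payload or user_id is missing), a minimal sketch of the handler might look like the following. The outcome table and column names, and the asyncpg-style pool, are assumptions rather than the actual implementation.

# Illustrative sketch only; not the actual worker.py from this commit.
# The random_number_outcomes table name and the asyncpg-style pool API are assumptions.
import json
import random
import time


async def process_random_number_job(job, pool) -> None:
    """Generate a random number for the job and store the outcome row."""
    if not job.payload:
        return  # empty payload: nothing to record

    payload = json.loads(job.payload)
    user_id = payload.get("user_id")
    if user_id is None:
        return  # missing user_id: skip the insert

    start = time.monotonic()
    value = random.randint(0, 100)  # inclusive, matches the [0, 100] assertion
    duration_ms = int((time.monotonic() - start) * 1000)

    async with pool.acquire() as conn:
        await conn.execute(
            """
            INSERT INTO random_number_outcomes
                (job_id, user_id, value, duration_ms, status)
            VALUES ($1, $2, $3, $4, $5)
            """,
            job.id,
            user_id,
            value,
            duration_ms,
            "completed",
        )

Timing only the random draw is a simplification here; the real handler presumably measures the duration of the full job execution.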
backend/tests/test_jobs.py (new file, 173 additions)
@@ -0,0 +1,173 @@
"""Tests for job handler logic."""

import json
from contextlib import asynccontextmanager
from unittest.mock import AsyncMock, MagicMock

import pytest


def create_mock_pool(mock_conn: AsyncMock) -> MagicMock:
    """Create a mock asyncpg pool with proper async context manager behavior."""
    mock_pool = MagicMock()

    @asynccontextmanager
    async def mock_acquire():
        yield mock_conn

    mock_pool.acquire = mock_acquire
    return mock_pool


class TestRandomNumberJobHandler:
    """Tests for the random number job handler logic."""

    @pytest.mark.asyncio
    async def test_generates_random_number_in_range(self):
        """Verify random number is in range [0, 100]."""
        from worker import process_random_number_job

        # Create mock job
        job = MagicMock()
        job.id = 123
        job.payload = json.dumps({"user_id": 1}).encode()

        # Create mock db pool
        mock_conn = AsyncMock()
        mock_pool = create_mock_pool(mock_conn)

        # Run the job handler
        await process_random_number_job(job, mock_pool)

        # Verify execute was called
        mock_conn.execute.assert_called_once()
        call_args = mock_conn.execute.call_args

        # Extract the value argument (position 3 in the args)
        # Args: (query, job_id, user_id, value, duration_ms, status)
        value = call_args[0][3]

        assert 0 <= value <= 100, f"Value {value} is not in range [0, 100]"

    @pytest.mark.asyncio
    async def test_stores_correct_user_id(self):
        """Verify the correct user_id is stored in the outcome."""
        from worker import process_random_number_job

        user_id = 42

        job = MagicMock()
        job.id = 123
        job.payload = json.dumps({"user_id": user_id}).encode()

        mock_conn = AsyncMock()
        mock_pool = create_mock_pool(mock_conn)

        await process_random_number_job(job, mock_pool)

        mock_conn.execute.assert_called_once()
        call_args = mock_conn.execute.call_args

        # Args: (query, job_id, user_id, value, duration_ms, status)
        stored_user_id = call_args[0][2]
        assert stored_user_id == user_id

    @pytest.mark.asyncio
    async def test_stores_job_id(self):
        """Verify the job_id is stored in the outcome."""
        from worker import process_random_number_job

        job_id = 456

        job = MagicMock()
        job.id = job_id
        job.payload = json.dumps({"user_id": 1}).encode()

        mock_conn = AsyncMock()
        mock_pool = create_mock_pool(mock_conn)

        await process_random_number_job(job, mock_pool)

        mock_conn.execute.assert_called_once()
        call_args = mock_conn.execute.call_args

        # Args: (query, job_id, user_id, value, duration_ms, status)
        stored_job_id = call_args[0][1]
        assert stored_job_id == job_id

    @pytest.mark.asyncio
    async def test_stores_status_completed(self):
        """Verify the status is set to 'completed'."""
        from worker import process_random_number_job

        job = MagicMock()
        job.id = 123
        job.payload = json.dumps({"user_id": 1}).encode()

        mock_conn = AsyncMock()
        mock_pool = create_mock_pool(mock_conn)

        await process_random_number_job(job, mock_pool)

        mock_conn.execute.assert_called_once()
        call_args = mock_conn.execute.call_args

        # Args: (query, job_id, user_id, value, duration_ms, status)
        status = call_args[0][5]
        assert status == "completed"

    @pytest.mark.asyncio
    async def test_records_duration_ms(self):
        """Verify duration_ms is recorded (should be >= 0)."""
        from worker import process_random_number_job

        job = MagicMock()
        job.id = 123
        job.payload = json.dumps({"user_id": 1}).encode()

        mock_conn = AsyncMock()
        mock_pool = create_mock_pool(mock_conn)

        await process_random_number_job(job, mock_pool)

        mock_conn.execute.assert_called_once()
        call_args = mock_conn.execute.call_args

        # Args: (query, job_id, user_id, value, duration_ms, status)
        duration_ms = call_args[0][4]
        assert isinstance(duration_ms, int)
        assert duration_ms >= 0

    @pytest.mark.asyncio
    async def test_missing_user_id_does_not_insert(self):
        """Verify no insert happens if user_id is missing from payload."""
        from worker import process_random_number_job

        job = MagicMock()
        job.id = 123
        job.payload = json.dumps({}).encode()  # Missing user_id

        mock_conn = AsyncMock()
        mock_pool = create_mock_pool(mock_conn)

        await process_random_number_job(job, mock_pool)

        # Should not have called execute
        mock_conn.execute.assert_not_called()

    @pytest.mark.asyncio
    async def test_empty_payload_does_not_insert(self):
        """Verify no insert happens with empty payload."""
        from worker import process_random_number_job

        job = MagicMock()
        job.id = 123
        job.payload = None

        mock_conn = AsyncMock()
        mock_pool = create_mock_pool(mock_conn)

        await process_random_number_job(job, mock_pool)

        # Should not have called execute
        mock_conn.execute.assert_not_called()
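A note on running these tests: the @pytest.mark.asyncio marker requires the pytest-asyncio plugin, and the bare "from worker import process_random_number_job" imports assume the directory containing worker.py is importable when the suite runs (for example via a conftest.py or a PYTHONPATH entry; the project's actual configuration is not shown in this commit). One possible way to run just this file, assuming those pieces are in place:

# Hypothetical invocation; assumes pytest and pytest-asyncio are installed
# and that the backend directory (containing worker.py) is on sys.path.
import pytest

raise SystemExit(pytest.main(["backend/tests/test_jobs.py", "-v"]))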