Phase 2: Job enqueueing from counter

- Add backend/jobs.py with enqueue_random_number_job function
- Modify counter increment endpoint to enqueue job after incrementing
- Add mock_enqueue_job fixture to conftest.py for all tests
- Add test_increment_enqueues_job_with_user_id to verify correct user_id
- Job is enqueued synchronously; failure causes request to fail
This commit is contained in:
counterweight 2025-12-21 22:44:31 +01:00
parent 10c0316603
commit 6ca0ae88dd
Signed by: counterweight
GPG key ID: 883EDBAA726BD96C
4 changed files with 98 additions and 2 deletions

39
backend/jobs.py Normal file
View file

@ -0,0 +1,39 @@
"""Job definitions and enqueueing utilities using pgqueuer."""
import json
import os
import asyncpg
from pgqueuer.queries import Queries
# Job type constants
# Queue name under which random-number jobs are enqueued; workers must
# subscribe to this exact string.
JOB_RANDOM_NUMBER = "random_number"
# SQLAlchemy uses postgresql+asyncpg://, but asyncpg needs postgresql://
# DATABASE_URL may come from the environment (deployment) or fall back to
# the local dev default below.
_raw_url = os.getenv(
    "DATABASE_URL", "postgresql+asyncpg://postgres:postgres@localhost:5432/arbret"
)
# Strip the SQLAlchemy dialect prefix so the URL is valid for a raw
# asyncpg.connect() call.
DATABASE_URL = _raw_url.replace("postgresql+asyncpg://", "postgresql://")
async def enqueue_random_number_job(user_id: int) -> int:
    """Push a ``random_number`` job onto the pgqueuer queue.

    Opens a short-lived asyncpg connection for the enqueue and always
    closes it afterwards, even when enqueueing raises.

    Args:
        user_id: ID of the user who triggered the job; embedded in the
            job payload as ``{"user_id": <id>}``.

    Returns:
        The ID of the newly enqueued job.

    Raises:
        Exception: Propagated unchanged if connecting or enqueueing fails.
    """
    connection = await asyncpg.connect(DATABASE_URL)
    try:
        q = Queries.from_asyncpg_connection(connection)
        body = json.dumps({"user_id": user_id}).encode()
        ids = await q.enqueue(JOB_RANDOM_NUMBER, body)
        return ids[0]
    finally:
        # Close unconditionally so a failed enqueue never leaks a connection.
        await connection.close()