Phase 1: Add pgqueuer infrastructure and worker skeleton
- Add pgqueuer dependency to pyproject.toml
- Create worker.py with basic setup:
  - Independent database connection using asyncpg
  - Install pgqueuer schema on startup
  - Register dummy job handler
  - Start consumer loop
- Add 'make worker' command
- Update 'make dev' to run worker alongside backend/frontend

Validation:
- Worker starts successfully
- pgqueuer tables exist in database
- All existing tests pass
This commit is contained in:
parent
607f872c71
commit
15bae15731
4 changed files with 69 additions and 1 deletions
|
|
@ -11,3 +11,4 @@ Use the `TEST` variable to select specific tests:
|
|||
- Backend: `make test-backend TEST="tests/test_booking.py"` or `TEST="tests/test_booking.py::TestClass::test_method"`
|
||||
- Frontend: `make test-frontend TEST="app/login"` (file pattern)
|
||||
- E2E: `make test-e2e TEST="auth"` (matches e2e/auth.spec.ts)
|
||||
- Don't do `2>&1 | tail`. Let the output hit the console when running the tests.
|
||||
|
|
|
|||
6
Makefile
6
Makefile
|
|
@ -1,4 +1,4 @@
|
|||
.PHONY: install-backend install-frontend install setup-hooks backend frontend db db-stop db-ready db-seed dev test test-backend test-frontend test-e2e typecheck generate-types generate-types-standalone check-types-fresh check-constants lint-backend format-backend fix-backend security-backend lint-frontend fix-frontend format-frontend pre-commit lint
|
||||
.PHONY: install-backend install-frontend install setup-hooks backend frontend worker db db-stop db-ready db-seed dev test test-backend test-frontend test-e2e typecheck generate-types generate-types-standalone check-types-fresh check-constants lint-backend format-backend fix-backend security-backend lint-frontend fix-frontend format-frontend pre-commit lint
|
||||
|
||||
-include .env
|
||||
export
|
||||
|
|
@ -20,6 +20,9 @@ backend:
|
|||
frontend:
|
||||
cd frontend && npm run dev
|
||||
|
||||
worker:
|
||||
cd backend && uv run python worker.py
|
||||
|
||||
db:
|
||||
docker compose up -d db
|
||||
|
||||
|
|
@ -46,6 +49,7 @@ dev:
|
|||
$(MAKE) db-seed
|
||||
cd backend && uv run uvicorn main:app --reload & \
|
||||
cd frontend && npm run dev & \
|
||||
cd backend && uv run python worker.py & \
|
||||
wait
|
||||
|
||||
# TEST variable can be used to select specific tests:
|
||||
|
|
|
|||
|
|
@ -11,6 +11,7 @@ dependencies = [
|
|||
"python-jose[cryptography]>=3.3.0",
|
||||
"email-validator>=2.0.0",
|
||||
"bech32>=1.2.0",
|
||||
"pgqueuer>=0.1.0",
|
||||
]
|
||||
|
||||
[dependency-groups]
|
||||
|
|
|
|||
62
backend/worker.py
Normal file
62
backend/worker.py
Normal file
|
|
@ -0,0 +1,62 @@
|
|||
"""Worker process for processing async jobs using pgqueuer."""
|
||||
|
||||
import asyncio
|
||||
import logging
|
||||
import os
|
||||
|
||||
import asyncpg
|
||||
from pgqueuer import AsyncpgDriver, PgQueuer, Queries
|
||||
|
||||
logging.basicConfig(level=logging.INFO)
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# Get DATABASE_URL and convert from SQLAlchemy format (postgresql+asyncpg://)
|
||||
# to asyncpg format (postgresql://)
|
||||
_raw_db_url = os.getenv(
|
||||
"DATABASE_URL", "postgresql+asyncpg://postgres:postgres@localhost:5432/arbret"
|
||||
)
|
||||
DATABASE_URL = _raw_db_url.replace("postgresql+asyncpg://", "postgresql://")
|
||||
|
||||
|
||||
async def main() -> None:
|
||||
"""Main worker loop."""
|
||||
logger.info("Starting worker...")
|
||||
|
||||
# Connect to database independently
|
||||
conn = await asyncpg.connect(DATABASE_URL)
|
||||
driver = AsyncpgDriver(conn)
|
||||
|
||||
# Install pgqueuer schema (creates tables if they don't exist)
|
||||
queries = Queries.from_asyncpg_connection(conn)
|
||||
try:
|
||||
await queries.install()
|
||||
logger.info("pgqueuer schema installed")
|
||||
except Exception as e:
|
||||
# Schema might already exist, which is fine
|
||||
if "already exists" in str(e).lower():
|
||||
logger.info("pgqueuer schema already exists")
|
||||
else:
|
||||
raise
|
||||
|
||||
# Initialize pgqueuer
|
||||
pgq = PgQueuer(connection=driver)
|
||||
|
||||
# Register job handlers using entrypoint decorator
|
||||
@pgq.entrypoint("dummy") # type: ignore[type-var]
|
||||
async def dummy_job_handler(payload: dict) -> None:
|
||||
"""Dummy job handler for testing."""
|
||||
logger.info(f"Processing dummy job with payload: {payload}")
|
||||
|
||||
logger.info("Worker started, waiting for jobs...")
|
||||
|
||||
# Start consuming jobs
|
||||
try:
|
||||
await pgq.run()
|
||||
except KeyboardInterrupt:
|
||||
logger.info("Worker shutting down...")
|
||||
finally:
|
||||
await conn.close()
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
asyncio.run(main())
|
||||
Loading…
Add table
Add a link
Reference in a new issue