"""Background job worker using pgqueuer."""

import asyncio
import logging
import os

import asyncpg
from pgqueuer import Job, QueueManager
from pgqueuer.db import AsyncpgDriver
from pgqueuer.queries import Queries

logging.basicConfig(
    level=logging.INFO,
    format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
)
logger = logging.getLogger("worker")

# SQLAlchemy uses postgresql+asyncpg://, but asyncpg needs postgresql://
_raw_url = os.getenv(
    "DATABASE_URL", "postgresql+asyncpg://postgres:postgres@localhost:5432/arbret"
)
DATABASE_URL = _raw_url.replace("postgresql+asyncpg://", "postgresql://")
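# e.g. the default URL above becomes "postgresql://postgres:postgres@localhost:5432/arbret"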


async def install_schema() -> None:
    """Install pgqueuer schema if not already present."""
    conn = await asyncpg.connect(DATABASE_URL)
    try:
        queries = Queries.from_asyncpg_connection(conn)
        # Check if schema is already installed by looking for the main table
        if not await queries.has_table("pgqueuer"):
            await queries.install()
            logger.info("pgqueuer schema installed")
        else:
            logger.info("pgqueuer schema already exists")
    finally:
        await conn.close()


def register_job_handlers(qm: QueueManager) -> None:
    """Register all job handlers with the queue manager."""

    @qm.entrypoint("random_number")
    async def process_random_number(job: Job) -> None:
        """Process a random number job (placeholder - just logs for now)."""
        payload_str = job.payload.decode() if job.payload else ""
        logger.info(f"Processing random_number job {job.id}: {payload_str}")


async def main() -> None:
    """Main worker entry point."""
    logger.info("Installing pgqueuer schema...")
    await install_schema()

    logger.info("Connecting to database...")
    conn = await asyncpg.connect(DATABASE_URL)
    try:
        driver = AsyncpgDriver(conn)
        qm = QueueManager(driver)

        # Register job handlers
        register_job_handlers(qm)

        logger.info("Worker started, waiting for jobs...")
        await qm.run()
    finally:
        await conn.close()
        logger.info("Worker stopped")


if __name__ == "__main__":
    asyncio.run(main())