Extract duplicated DATABASE_URL parsing to database.py

- Added ASYNCPG_DATABASE_URL constant in database.py
- Updated jobs.py to import from database module
- Updated worker.py to import from database module
- Removed duplicate URL parsing logic from both files
This commit is contained in:
counterweight 2025-12-21 23:16:29 +01:00
parent 405dfd526e
commit a8ad6e6384
Signed by: counterweight
GPG key ID: 883EDBAA726BD96C
3 changed files with 11 additions and 18 deletions

View file

@@ -7,6 +7,9 @@ DATABASE_URL = os.getenv(
 DATABASE_URL = os.getenv(
     "DATABASE_URL", "postgresql+asyncpg://postgres:postgres@localhost:5432/arbret"
 )
+# asyncpg needs postgresql:// instead of postgresql+asyncpg://
+ASYNCPG_DATABASE_URL = DATABASE_URL.replace("postgresql+asyncpg://", "postgresql://")
+
 engine = create_async_engine(DATABASE_URL)
 async_session = async_sessionmaker(engine, expire_on_commit=False)

View file

@@ -1,20 +1,15 @@
 """Job definitions and enqueueing utilities using pgqueuer."""

 import json
-import os

 import asyncpg
 from pgqueuer.queries import Queries

+from database import ASYNCPG_DATABASE_URL
+
 # Job type constants
 JOB_RANDOM_NUMBER = "random_number"

-# SQLAlchemy uses postgresql+asyncpg://, but asyncpg needs postgresql://
-_raw_url = os.getenv(
-    "DATABASE_URL", "postgresql+asyncpg://postgres:postgres@localhost:5432/arbret"
-)
-DATABASE_URL = _raw_url.replace("postgresql+asyncpg://", "postgresql://")
-
 # Connection pool for job enqueueing (lazy initialized)
 _pool: asyncpg.Pool | None = None
@@ -23,7 +18,7 @@ async def get_job_pool() -> asyncpg.Pool:
     """Get or create the connection pool for job enqueueing."""
     global _pool
     if _pool is None:
-        _pool = await asyncpg.create_pool(DATABASE_URL, min_size=1, max_size=5)
+        _pool = await asyncpg.create_pool(ASYNCPG_DATABASE_URL, min_size=1, max_size=5)
     return _pool

View file

@@ -3,7 +3,6 @@
 import asyncio
 import json
 import logging
-import os
 import random
 import time
@@ -12,22 +11,18 @@ from pgqueuer import Job, QueueManager
 from pgqueuer.db import AsyncpgDriver
 from pgqueuer.queries import Queries

+from database import ASYNCPG_DATABASE_URL
+
 logging.basicConfig(
     level=logging.INFO,
     format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
 )
 logger = logging.getLogger("worker")

-# SQLAlchemy uses postgresql+asyncpg://, but asyncpg needs postgresql://
-_raw_url = os.getenv(
-    "DATABASE_URL", "postgresql+asyncpg://postgres:postgres@localhost:5432/arbret"
-)
-DATABASE_URL = _raw_url.replace("postgresql+asyncpg://", "postgresql://")
-

 async def install_schema() -> None:
     """Install pgqueuer schema if not already present."""
-    conn = await asyncpg.connect(DATABASE_URL)
+    conn = await asyncpg.connect(ASYNCPG_DATABASE_URL)
     try:
         queries = Queries.from_asyncpg_connection(conn)
         # Check if schema is already installed by looking for the main table
@@ -103,9 +98,9 @@ async def main() -> None:
     logger.info("Connecting to database...")

     # Connection for pgqueuer
-    queue_conn = await asyncpg.connect(DATABASE_URL)
+    queue_conn = await asyncpg.connect(ASYNCPG_DATABASE_URL)
     # Connection pool for application data
-    db_pool = await asyncpg.create_pool(DATABASE_URL, min_size=1, max_size=5)
+    db_pool = await asyncpg.create_pool(ASYNCPG_DATABASE_URL, min_size=1, max_size=5)

     try:
         driver = AsyncpgDriver(queue_conn)