Extract duplicated DATABASE_URL parsing to database.py
- Added ASYNCPG_DATABASE_URL constant in database.py
- Updated jobs.py to import from database module
- Updated worker.py to import from database module
- Removed duplicate URL parsing logic from both files
This commit is contained in:
parent
405dfd526e
commit
a8ad6e6384
3 changed files with 11 additions and 18 deletions
|
|
@ -1,20 +1,15 @@
|
|||
"""Job definitions and enqueueing utilities using pgqueuer."""
|
||||
|
||||
import asyncio
import json
import os

import asyncpg
from pgqueuer.queries import Queries

from database import ASYNCPG_DATABASE_URL
||||
# Job type constants — the entrypoint names that workers register
# handlers for and that enqueuers pass when submitting a job.
JOB_RANDOM_NUMBER = "random_number"

# Connection pool for job enqueueing (lazy initialized).
# Created on first call to get_job_pool() and shared for the lifetime
# of the process; None until then.
_pool: asyncpg.Pool | None = None
|
# Guards lazy pool creation: without it, two coroutines that call
# get_job_pool() concurrently before the pool exists could each create
# a pool, and the loser's pool would leak (never closed).
_pool_lock = asyncio.Lock()


async def get_job_pool() -> asyncpg.Pool:
    """Get or create the shared connection pool for job enqueueing.

    The pool is created lazily on first use and reused for the lifetime
    of the process. Creation is double-checked under an asyncio.Lock so
    concurrent first callers cannot create duplicate pools.

    Returns:
        asyncpg.Pool: the process-wide pool connected via
        ASYNCPG_DATABASE_URL (the asyncpg-compatible DSN from database.py).
    """
    global _pool
    if _pool is None:
        async with _pool_lock:
            # Re-check inside the lock: another coroutine may have
            # initialized the pool while we were waiting.
            if _pool is None:
                _pool = await asyncpg.create_pool(
                    ASYNCPG_DATABASE_URL, min_size=1, max_size=5
                )
    return _pool
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue