refactor(log): refactor the whole project
format: `{time:YYYY-MM-DD HH:mm:ss} [{level}] | {name} | {message}`
`{name}` is one of:
- Uvicorn: logs from the uvicorn server (#228B22)
- Service: logs from `app.service` classes (blue)
- Fetcher: logs from fetchers (magenta)
- Task: logs from `app.tasks` (#FFD700)
- System: logs from `system_logger` (red)
- Normal: logs from `log(name)` (#FFC1C1)
- Default: the module name of the caller
If you are writing services or tasks, just use `logger` directly; it is wrapped with the name `Service` or `Task` automatically.
For fetcher logs, system-related logs, or normal logs, create a logger with `logger = fetcher_logger(name)`, `system_logger(name)`, or `log(name)`.
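The project logs through loguru (the diff below uses `logger.success`, a loguru-specific level), so the naming scheme can be read roughly as in the sketch below. This is only an illustration: the helper bodies, the use of `logger.patch()`, and the rendering of the per-fetcher/system sub-name are assumptions, and the per-name colors are omitted.

```python
# Minimal sketch of the named-logger scheme, assuming loguru.
# Helper bodies are illustrative, not the project's actual implementation.
import sys

from loguru import logger

LOG_FORMAT = "{time:YYYY-MM-DD HH:mm:ss} [{level}] | {name} | {message}"

logger.remove()
logger.add(sys.stderr, format=LOG_FORMAT)  # {name} defaults to the caller's module


def log(name: str):
    """'Normal' logs: replace the {name} field with a caller-chosen tag."""
    return logger.patch(lambda record: record.update(name=name))


def fetcher_logger(name: str):
    """Fetcher logs; how the per-fetcher sub-name is rendered is an assumption."""
    return logger.patch(lambda record: record.update(name=f"Fetcher:{name}"))


def system_logger(name: str):
    """System-related logs."""
    return logger.patch(lambda record: record.update(name=f"System:{name}"))


# Hypothetical usage:
# fetcher_logger("example").info("fetch finished")
# -> 2025-01-01 12:00:00 [INFO] | Fetcher:example | fetch finished
```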
@@ -3,7 +3,7 @@ from __future__ import annotations

 from datetime import datetime, timedelta

 from app.dependencies.scheduler import get_scheduler
-from app.service.beatmapset_update_service import service
+from app.service.beatmapset_update_service import get_beatmapset_update_service
 from app.utils import bg_tasks

 SCHEDULER_INTERVAL_MINUTES = 2
@@ -16,6 +16,6 @@ SCHEDULER_INTERVAL_MINUTES = 2
     next_run_time=datetime.now() + timedelta(minutes=1),
 )
 async def beatmapset_update_job():
-    if service is not None:
-        bg_tasks.add_task(service.add_missing_beatmapsets)
-        await service._update_beatmaps()
+    service = get_beatmapset_update_service()
+    bg_tasks.add_task(service.add_missing_beatmapsets)
+    await service._update_beatmaps()
@@ -80,9 +80,7 @@ async def daily_challenge_job():
         allowed_mods = await redis.hget(key, "allowed_mods")  # pyright: ignore[reportGeneralTypeIssues]

         if beatmap is None or ruleset_id is None:
-            logger.warning(
-                f"[DailyChallenge] Missing required data for daily challenge {now}. Will try again in 5 minutes."
-            )
+            logger.warning(f"Missing required data for daily challenge {now}. Will try again in 5 minutes.")
             get_scheduler().add_job(
                 daily_challenge_job,
                 "date",
@@ -111,12 +109,12 @@ async def daily_challenge_job():
             duration=int((next_day - now - timedelta(minutes=2)).total_seconds() / 60),
         )
         await MetadataHubs.broadcast_call("DailyChallengeUpdated", DailyChallengeInfo(room_id=room.id))
-        logger.success(f"[DailyChallenge] Added today's daily challenge: {beatmap=}, {ruleset_id=}, {required_mods=}")
+        logger.success(f"Added today's daily challenge: {beatmap=}, {ruleset_id=}, {required_mods=}")
         return
     except (ValueError, json.JSONDecodeError) as e:
-        logger.warning(f"[DailyChallenge] Error processing daily challenge data: {e} Will try again in 5 minutes.")
+        logger.warning(f"Error processing daily challenge data: {e} Will try again in 5 minutes.")
     except Exception as e:
-        logger.exception(f"[DailyChallenge] Unexpected error in daily challenge job: {e} Will try again in 5 minutes.")
+        logger.exception(f"Unexpected error in daily challenge job: {e} Will try again in 5 minutes.")
     get_scheduler().add_job(
         daily_challenge_job,
         "date",