diff --git a/app/auth.py b/app/auth.py
index 8a8bf01..6e43f8b 100644
--- a/app/auth.py
+++ b/app/auth.py
@@ -13,7 +13,7 @@ from app.database import (
User,
)
from app.database.auth import TotpKeys
-from app.log import logger
+from app.log import log
from app.models.totp import FinishStatus, StartCreateTotpKeyResp
from app.utils import utcnow
@@ -31,6 +31,8 @@ pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto")
 # bcrypt cache (simulates application-state caching)
bcrypt_cache = {}
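+# Module-level logger bound to the "Auth" display name; dynamic_format in app/log.py renders it per record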
+logger = log("Auth")
+
def validate_username(username: str) -> list[str]:
"""验证用户名"""
@@ -253,7 +255,7 @@ async def store_token(
tokens_to_delete = active_tokens[max_tokens_per_client - 1 :]
for token in tokens_to_delete:
await db.delete(token)
- logger.info(f"[Auth] Cleaned up {len(tokens_to_delete)} old tokens for user {user_id}")
+ logger.info(f"Cleaned up {len(tokens_to_delete)} old tokens for user {user_id}")
     # Check for a duplicate access_token
duplicate_token = (await db.exec(select(OAuthToken).where(OAuthToken.access_token == access_token))).first()
@@ -274,9 +276,7 @@ async def store_token(
await db.commit()
await db.refresh(token_record)
- logger.info(
- f"[Auth] Created new token for user {user_id}, client {client_id} (multi-device: {allow_multiple_devices})"
- )
+ logger.info(f"Created new token for user {user_id}, client {client_id} (multi-device: {allow_multiple_devices})")
return token_record
diff --git a/app/calculator.py b/app/calculator.py
index 36b5f2b..a408fcf 100644
--- a/app/calculator.py
+++ b/app/calculator.py
@@ -7,7 +7,7 @@ import math
from typing import TYPE_CHECKING
from app.config import settings
-from app.log import logger
+from app.log import log
from app.models.beatmap import BeatmapAttributes
from app.models.mods import APIMod, parse_enum_to_str
from app.models.score import GameMode
@@ -18,6 +18,8 @@ from redis.asyncio import Redis
from sqlmodel import col, exists, select
from sqlmodel.ext.asyncio.session import AsyncSession
+logger = log("Calculator")
+
try:
import rosu_pp_py as rosu
except ImportError:
diff --git a/app/database/score.py b/app/database/score.py
index 1aaf8b8..fd9668f 100644
--- a/app/database/score.py
+++ b/app/database/score.py
@@ -16,7 +16,7 @@ from app.calculator import (
from app.config import settings
from app.database.team import TeamMember
from app.dependencies.database import get_redis
-from app.log import logger
+from app.log import log
from app.models.beatmap import BeatmapRankStatus
from app.models.model import (
CurrentUserAttributes,
@@ -74,6 +74,8 @@ from sqlmodel.ext.asyncio.session import AsyncSession
if TYPE_CHECKING:
from app.fetcher import Fetcher
+logger = log("Score")
+
class ScoreBase(AsyncAttrs, SQLModel, UTCBaseModel):
     # Basic fields
@@ -854,8 +856,7 @@ async def process_score(
) -> Score:
gamemode = GameMode.from_int(info.ruleset_id).to_special_mode(info.mods)
logger.info(
- "[Score] Creating score for user {user_id} | beatmap={beatmap_id} "
- "ruleset={ruleset} passed={passed} total={total}",
+ "Creating score for user {user_id} | beatmap={beatmap_id} ruleset={ruleset} passed={passed} total={total}",
user_id=user.id,
beatmap_id=beatmap_id,
ruleset=gamemode,
@@ -897,7 +898,7 @@ async def process_score(
)
session.add(score)
logger.debug(
- "[Score] Score staged for commit | token={token} mods={mods} total_hits={hits}",
+ "Score staged for commit | token={token} mods={mods} total_hits={hits}",
token=score_token.id,
mods=info.mods,
hits=sum(info.statistics.values()) if info.statistics else 0,
@@ -910,7 +911,7 @@ async def process_score(
async def _process_score_pp(score: Score, session: AsyncSession, redis: Redis, fetcher: "Fetcher"):
if score.pp != 0:
logger.debug(
- "[Score] Skipping PP calculation for score {score_id} | already set {pp:.2f}",
+ "Skipping PP calculation for score {score_id} | already set {pp:.2f}",
score_id=score.id,
pp=score.pp,
)
@@ -918,7 +919,7 @@ async def _process_score_pp(score: Score, session: AsyncSession, redis: Redis, f
can_get_pp = score.passed and score.ranked and mods_can_get_pp(int(score.gamemode), score.mods)
if not can_get_pp:
logger.debug(
- "[Score] Skipping PP calculation for score {score_id} | passed={passed} ranked={ranked} mods={mods}",
+ "Skipping PP calculation for score {score_id} | passed={passed} ranked={ranked} mods={mods}",
score_id=score.id,
passed=score.passed,
ranked=score.ranked,
@@ -928,10 +929,10 @@ async def _process_score_pp(score: Score, session: AsyncSession, redis: Redis, f
pp, successed = await pre_fetch_and_calculate_pp(score, session, redis, fetcher)
if not successed:
await redis.rpush("score:need_recalculate", score.id) # pyright: ignore[reportGeneralTypeIssues]
- logger.warning("[Score] Queued score {score_id} for PP recalculation", score_id=score.id)
+ logger.warning("Queued score {score_id} for PP recalculation", score_id=score.id)
return
score.pp = pp
- logger.info("[Score] Calculated PP for score {score_id} | pp={pp:.2f}", score_id=score.id, pp=pp)
+ logger.info("Calculated PP for score {score_id} | pp={pp:.2f}", score_id=score.id, pp=pp)
user_id = score.user_id
beatmap_id = score.beatmap_id
previous_pp_best = await get_user_best_pp_in_beatmap(session, beatmap_id, user_id, score.gamemode)
@@ -947,7 +948,7 @@ async def _process_score_pp(score: Score, session: AsyncSession, redis: Redis, f
session.add(best_score)
await session.delete(previous_pp_best) if previous_pp_best else None
logger.info(
- "[Score] Updated PP best for user {user_id} | score_id={score_id} pp={pp:.2f}",
+ "Updated PP best for user {user_id} | score_id={score_id} pp={pp:.2f}",
user_id=user_id,
score_id=score.id,
pp=score.pp,
@@ -966,15 +967,14 @@ async def _process_score_events(score: Score, session: AsyncSession):
if rank_global == 0 or total_users == 0:
logger.debug(
- "[Score] Skipping event creation for score {score_id} | "
- "rank_global={rank_global} total_users={total_users}",
+ "Skipping event creation for score {score_id} | rank_global={rank_global} total_users={total_users}",
score_id=score.id,
rank_global=rank_global,
total_users=total_users,
)
return
logger.debug(
- "[Score] Processing events for score {score_id} | rank_global={rank_global} total_users={total_users}",
+ "Processing events for score {score_id} | rank_global={rank_global} total_users={total_users}",
score_id=score.id,
rank_global=rank_global,
total_users=total_users,
@@ -1003,7 +1003,7 @@ async def _process_score_events(score: Score, session: AsyncSession):
}
session.add(rank_event)
logger.info(
- "[Score] Registered rank event for user {user_id} | score_id={score_id} rank={rank}",
+ "Registered rank event for user {user_id} | score_id={score_id} rank={rank}",
user_id=score.user_id,
score_id=score.id,
rank=rank_global,
@@ -1045,12 +1045,12 @@ async def _process_score_events(score: Score, session: AsyncSession):
}
session.add(rank_lost_event)
logger.info(
- "[Score] Registered rank lost event | displaced_user={user_id} new_score_id={score_id}",
+ "Registered rank lost event | displaced_user={user_id} new_score_id={score_id}",
user_id=displaced_score.user_id,
score_id=score.id,
)
logger.debug(
- "[Score] Event processing committed for score {score_id}",
+ "Event processing committed for score {score_id}",
score_id=score.id,
)
@@ -1074,7 +1074,7 @@ async def _process_statistics(
session, score.beatmap_id, user.id, mod_for_save, score.gamemode
)
logger.debug(
- "[Score] Existing best scores for user {user_id} | global={global_id} mod={mod_id}",
+ "Existing best scores for user {user_id} | global={global_id} mod={mod_id}",
user_id=user.id,
global_id=previous_score_best.score_id if previous_score_best else None,
mod_id=previous_score_best_mod.score_id if previous_score_best_mod else None,
@@ -1104,7 +1104,7 @@ async def _process_statistics(
statistics.total_score += score.total_score
difference = score.total_score - previous_score_best.total_score if previous_score_best else score.total_score
logger.debug(
- "[Score] Score delta computed for {score_id}: {difference}",
+ "Score delta computed for {score_id}: {difference}",
score_id=score.id,
difference=difference,
)
@@ -1151,7 +1151,7 @@ async def _process_statistics(
)
)
logger.info(
- "[Score] Created new best score entry for user {user_id} | score_id={score_id} mods={mods}",
+ "Created new best score entry for user {user_id} | score_id={score_id} mods={mods}",
user_id=user.id,
score_id=score.id,
mods=mod_for_save,
@@ -1163,7 +1163,7 @@ async def _process_statistics(
previous_score_best.rank = score.rank
previous_score_best.score_id = score.id
logger.info(
- "[Score] Updated existing best score for user {user_id} | score_id={score_id} total={total}",
+ "Updated existing best score for user {user_id} | score_id={score_id} total={total}",
user_id=user.id,
score_id=score.id,
total=score.total_score,
@@ -1175,7 +1175,7 @@ async def _process_statistics(
if difference > 0:
             # The if below always fires: point the best entry at this score and delete the old row to avoid a duplicate score_id
logger.info(
- "[Score] Replacing global best score for user {user_id} | old_score_id={old_score_id}",
+ "Replacing global best score for user {user_id} | old_score_id={old_score_id}",
user_id=user.id,
old_score_id=previous_score_best.score_id,
)
@@ -1188,7 +1188,7 @@ async def _process_statistics(
previous_score_best_mod.rank = score.rank
previous_score_best_mod.score_id = score.id
logger.info(
- "[Score] Replaced mod-specific best for user {user_id} | mods={mods} score_id={score_id}",
+ "Replaced mod-specific best for user {user_id} | mods={mods} score_id={score_id}",
user_id=user.id,
mods=mod_for_save,
score_id=score.id,
@@ -1202,14 +1202,14 @@ async def _process_statistics(
mouthly_playcount.count += 1
statistics.play_time += playtime
logger.debug(
- "[Score] Recorded playtime {playtime}s for score {score_id} (user {user_id})",
+ "Recorded playtime {playtime}s for score {score_id} (user {user_id})",
playtime=playtime,
score_id=score.id,
user_id=user.id,
)
else:
logger.debug(
- "[Score] Playtime {playtime}s for score {score_id} did not meet validity checks",
+ "Playtime {playtime}s for score {score_id} did not meet validity checks",
playtime=playtime,
score_id=score.id,
)
@@ -1242,7 +1242,7 @@ async def _process_statistics(
if add_to_db:
session.add(mouthly_playcount)
logger.debug(
- "[Score] Created monthly playcount record for user {user_id} ({year}-{month})",
+ "Created monthly playcount record for user {user_id} ({year}-{month})",
user_id=user.id,
year=mouthly_playcount.year,
month=mouthly_playcount.month,
@@ -1262,7 +1262,7 @@ async def process_user(
score_id = score.id
user_id = user.id
logger.info(
- "[Score] Processing score {score_id} for user {user_id} on beatmap {beatmap_id}",
+ "Processing score {score_id} for user {user_id} on beatmap {beatmap_id}",
score_id=score_id,
user_id=user_id,
beatmap_id=score.beatmap_id,
@@ -1287,14 +1287,14 @@ async def process_user(
score_ = (await session.exec(select(Score).where(Score.id == score_id).options(joinedload(Score.beatmap)))).first()
if score_ is None:
logger.warning(
- "[Score] Score {score_id} disappeared after commit, skipping event processing",
+ "Score {score_id} disappeared after commit, skipping event processing",
score_id=score_id,
)
return
await _process_score_events(score_, session)
await session.commit()
logger.info(
- "[Score] Finished processing score {score_id} for user {user_id}",
+ "Finished processing score {score_id} for user {user_id}",
score_id=score_id,
user_id=user_id,
)
diff --git a/app/dependencies/fetcher.py b/app/dependencies/fetcher.py
index ccc3f06..be75cc3 100644
--- a/app/dependencies/fetcher.py
+++ b/app/dependencies/fetcher.py
@@ -5,7 +5,7 @@ from typing import Annotated
from app.config import settings
from app.dependencies.database import get_redis
from app.fetcher import Fetcher as OriginFetcher
-from app.log import logger
+from app.log import fetcher_logger
from fastapi import Depends
@@ -29,7 +29,9 @@ async def get_fetcher() -> OriginFetcher:
if refresh_token:
fetcher.refresh_token = str(refresh_token)
if not fetcher.access_token or not fetcher.refresh_token:
- logger.opt(colors=True).info(f"Login to initialize fetcher: {fetcher.authorize_url}")
+ fetcher_logger("Fetcher").opt(colors=True).info(
+ f"Login to initialize fetcher: {fetcher.authorize_url}"
+ )
return fetcher
diff --git a/app/fetcher/_base.py b/app/fetcher/_base.py
index 86fc646..cf41be0 100644
--- a/app/fetcher/_base.py
+++ b/app/fetcher/_base.py
@@ -5,7 +5,7 @@ import time
from urllib.parse import quote
from app.dependencies.database import get_redis
-from app.log import logger
+from app.log import fetcher_logger
from httpx import AsyncClient
@@ -16,6 +16,9 @@ class TokenAuthError(Exception):
pass
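+# fetcher_logger presumably binds the "fetcher" extra that dynamic_format reads before falling back to the caller's module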
+logger = fetcher_logger("Fetcher")
+
+
class BaseFetcher:
def __init__(
self,
diff --git a/app/helpers/geoip_helper.py b/app/helpers/geoip_helper.py
index c0b822e..3d65371 100644
--- a/app/helpers/geoip_helper.py
+++ b/app/helpers/geoip_helper.py
@@ -55,7 +55,7 @@ class GeoIPHelper:
    - The temporary directory is cleaned up automatically on exit
"""
if not self.license_key:
- raise ValueError("缺少 MaxMind License Key,请传入或设置环境变量 MAXMIND_LICENSE_KEY")
+ raise ValueError("MaxMind License Key is missing. Please configure it via env MAXMIND_LICENSE_KEY.")
url = f"{BASE_URL}?edition_id={edition_id}&license_key={self.license_key}&suffix=tar.gz"
diff --git a/app/log.py b/app/log.py
index 6a8c478..e171a0d 100644
--- a/app/log.py
+++ b/app/log.py
@@ -39,16 +39,18 @@ class InterceptHandler(logging.Handler):
depth += 1
message = record.getMessage()
-
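+        # Default to the base loguru logger; uvicorn records are re-bound below so dynamic_format labels them "Uvicorn"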
+        _logger = logger
if record.name == "uvicorn.access":
message = self._format_uvicorn_access_log(message)
color = True
+ _logger = uvicorn_logger()
elif record.name == "uvicorn.error":
message = self._format_uvicorn_error_log(message)
+ _logger = uvicorn_logger()
color = True
else:
color = False
- logger.opt(depth=depth, exception=record.exc_info, colors=color).log(level, message)
+ _logger.opt(depth=depth, exception=record.exc_info, colors=color).log(level, message)
def _format_uvicorn_error_log(self, message: str) -> str:
websocket_pattern = r'(\d+\.\d+\.\d+\.\d+:\d+)\s*-\s*"WebSocket\s+([^"]+)"\s+([\w\[\]]+)'
@@ -110,7 +112,6 @@ class InterceptHandler(logging.Handler):
def get_caller_class_name(module_prefix: str = "", just_last_part: bool = True) -> str | None:
- """获取调用类名/模块名,仅对指定模块前缀生效"""
stack = inspect.stack()
for frame_info in stack[2:]:
module = frame_info.frame.f_globals.get("__name__", "")
@@ -157,29 +158,49 @@ def system_logger(name: str) -> Logger:
return logger.bind(system=name)
-def dynamic_format(record):
- prefix = ""
+def uvicorn_logger() -> Logger:
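+    """Return a logger pre-bound with the fixed "Uvicorn" display name for intercepted uvicorn records."""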
+ return logger.bind(uvicorn="Uvicorn")
- fetcher = record["extra"].get("fetcher")
- if not fetcher:
- fetcher = get_caller_class_name("app.fetcher")
- if fetcher:
- prefix = f"[{fetcher}] "
+
+def log(name: str) -> Logger:
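+    """Return a logger bound with an explicit display name, rendered by dynamic_format below."""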
+ return logger.bind(real_name=name)
+
+
+def dynamic_format(record):
+ name = ""
+
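+    # Each truthy lookup below overwrites `name`, so the effective priority is
+    # system > task > fetcher > service > uvicorn, before the real_name fallback.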
+ uvicorn = record["extra"].get("uvicorn")
+ if uvicorn:
+ name = f"{uvicorn}"
service = record["extra"].get("service")
if not service:
service = get_caller_class_name("app.service")
if service:
- prefix = f"[{service}] "
+ name = f"{service}"
+
+ fetcher = record["extra"].get("fetcher")
+ if not fetcher:
+ fetcher = get_caller_class_name("app.fetcher")
+ if fetcher:
+ name = f"{fetcher}"
task = record["extra"].get("task")
if not task:
task = get_caller_class_name("app.tasks")
if task:
task = snake_to_pascal(task)
- prefix = f"[{task}] "
+ name = f"{task}"
- return f"{{time:YYYY-MM-DD HH:mm:ss}} [{{level}}] | {prefix}{{message}}\n"
+ system = record["extra"].get("system")
+ if system:
+ name = f"{system}"
+
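+    # Fall back to an explicitly bound real_name, then to the record's own module name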
+ if name == "":
+ real_name = record["extra"].get("real_name", "") or record["name"]
+ name = f"{real_name}"
+
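+    # e.g. "2025-01-01 12:00:00 [INFO] | Auth | Created new token for user 42, client 7"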
+ return f"{{time:YYYY-MM-DD HH:mm:ss}} [{{level}}] | {name} | {{message}}\n"
logger.remove()
@@ -195,7 +216,7 @@ logger.add(
rotation="00:00",
retention="30 days",
colorize=False,
- format="{time:YYYY-MM-DD HH:mm:ss} {level} | {message}",
+ format=dynamic_format,
level=settings.log_level,
diagnose=settings.debug,
encoding="utf8",
@@ -210,9 +231,9 @@ uvicorn_loggers = [
]
for logger_name in uvicorn_loggers:
- uvicorn_logger = logging.getLogger(logger_name)
- uvicorn_logger.handlers = [InterceptHandler()]
- uvicorn_logger.propagate = False
+ _uvicorn_logger = logging.getLogger(logger_name)
+ _uvicorn_logger.handlers = [InterceptHandler()]
+ _uvicorn_logger.propagate = False
logging.getLogger("httpx").setLevel("WARNING")
logging.getLogger("apscheduler").setLevel("WARNING")
diff --git a/app/middleware/verify_session.py b/app/middleware/verify_session.py
index 76277fc..2a9c911 100644
--- a/app/middleware/verify_session.py
+++ b/app/middleware/verify_session.py
@@ -14,7 +14,7 @@ from app.const import SUPPORT_TOTP_VERIFICATION_VER
from app.database.user import User
from app.database.verification import LoginSession
from app.dependencies.database import get_redis, with_db
-from app.log import logger
+from app.log import log
from app.service.verification_service import LoginSessionService
from app.utils import extract_user_agent
@@ -25,6 +25,8 @@ from sqlmodel import select
from sqlmodel.ext.asyncio.session import AsyncSession
from starlette.middleware.base import BaseHTTPMiddleware
+logger = log("Middleware")
+
class VerifySessionMiddleware(BaseHTTPMiddleware):
"""会话验证中间件
@@ -84,7 +86,7 @@ class VerifySessionMiddleware(BaseHTTPMiddleware):
return await self._initiate_verification(request, session_state)
except Exception as e:
- logger.error(f"[Verify Session Middleware] Error: {e}")
+ logger.error(f"Error: {e}")
             # On errors, allow the request to continue to avoid blocking
return await call_next(request)
@@ -145,7 +147,7 @@ class VerifySessionMiddleware(BaseHTTPMiddleware):
return user
except Exception as e:
- logger.debug(f"[Verify Session Middleware] Error getting user: {e}")
+ logger.debug(f"Error getting user: {e}")
return None
async def _get_session_state(self, request: Request, user: User) -> SessionState | None:
@@ -178,7 +180,7 @@ class VerifySessionMiddleware(BaseHTTPMiddleware):
return SessionState(session, user, redis, db, api_version)
except Exception as e:
- logger.error(f"[Verify Session Middleware] Error getting session state: {e}")
+ logger.error(f"Error getting session state: {e}")
return None
async def _initiate_verification(self, request: Request, state: SessionState) -> Response:
@@ -195,7 +197,7 @@ class VerifySessionMiddleware(BaseHTTPMiddleware):
)
except Exception as e:
- logger.error(f"[Verify Session Middleware] Error initiating verification: {e}")
+ logger.error(f"Error initiating verification: {e}")
return JSONResponse(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, content={"error": "Verification initiation failed"}
)
@@ -261,7 +263,7 @@ class SessionState:
self.session.web_uuid,
)
except Exception as e:
- logger.error(f"[Session State] Error marking verified: {e}")
+ logger.error(f"Error marking verified: {e}")
async def issue_mail_if_needed(self) -> None:
"""如果需要,发送验证邮件"""
@@ -274,7 +276,7 @@ class SessionState:
self.db, self.redis, self.user.id, self.user.username, self.user.email, None, None
)
except Exception as e:
- logger.error(f"[Session State] Error issuing mail: {e}")
+ logger.error(f"Error issuing mail: {e}")
def get_key(self) -> str:
"""获取会话密钥"""
diff --git a/app/models/mods.py b/app/models/mods.py
index 88a375d..a928cab 100644
--- a/app/models/mods.py
+++ b/app/models/mods.py
@@ -5,7 +5,7 @@ import json
from typing import Any, Literal, NotRequired, TypedDict
from app.config import settings as app_settings
-from app.log import logger
+from app.log import log
from app.path import CONFIG_DIR, STATIC_DIR
from pydantic import ConfigDict, Field, create_model
@@ -268,7 +268,7 @@ def generate_ranked_mod_settings(enable_all: bool = False):
for mod_acronym in ruleset_mods:
result[ruleset_id][mod_acronym] = {}
if not enable_all:
- logger.info("ENABLE_ALL_MODS_PP is deprecated, transformed to config/ranked_mods.json")
+ log("Mod").info("ENABLE_ALL_MODS_PP is deprecated, transformed to config/ranked_mods.json")
result["$mods_checksum"] = checksum # pyright: ignore[reportArgumentType]
ranked_mods_file.write_text(json.dumps(result, indent=4))
diff --git a/app/models/tags.py b/app/models/tags.py
index e1415ca..abe728f 100644
--- a/app/models/tags.py
+++ b/app/models/tags.py
@@ -2,7 +2,7 @@ from __future__ import annotations
import json
-from app.log import logger
+from app.log import log
from app.path import STATIC_DIR
from pydantic import BaseModel
@@ -16,6 +16,7 @@ class BeatmapTags(BaseModel):
ALL_TAGS: dict[int, BeatmapTags] = {}
+logger = log("BeatmapTag")
def load_tags() -> None:
diff --git a/app/router/auth.py b/app/router/auth.py
index f2e7336..ff163b3 100644
--- a/app/router/auth.py
+++ b/app/router/auth.py
@@ -22,7 +22,7 @@ from app.database.statistics import UserStatistics
from app.dependencies.database import Database, Redis
from app.dependencies.geoip import GeoIPService, IPAddress
from app.dependencies.user_agent import UserAgentInfo
-from app.log import logger
+from app.log import log
from app.models.extended_auth import ExtendedTokenResponse
from app.models.oauth import (
OAuthErrorResponse,
@@ -44,6 +44,8 @@ from fastapi.responses import JSONResponse
from sqlalchemy import text
from sqlmodel import exists, select
+logger = log("Auth")
+
def create_oauth_error_response(error: str, description: str, hint: str, status_code: int = 400):
"""创建标准的 OAuth 错误响应"""
@@ -360,9 +362,7 @@ async def oauth_token(
await LoginSessionService.mark_session_verified(
db, redis, user_id, token_id, ip_address, user_agent, web_uuid
)
- logger.debug(
- f"[Auth] New location login detected but email verification disabled, auto-verifying user {user_id}"
- )
+ logger.debug(f"New location login detected but email verification disabled, auto-verifying user {user_id}")
else:
             # Not a new-device login; record a normal login
await LoginLogService.record_login(
@@ -505,7 +505,7 @@ async def oauth_token(
)
     # Log the JWT
- logger.info(f"[Auth] Generated JWT for user {user_id}: {access_token}")
+ logger.info(f"Generated JWT for user {user_id}: {access_token}")
return TokenResponse(
access_token=access_token,
diff --git a/app/router/lio.py b/app/router/lio.py
index 2cabebb..28d8f35 100644
--- a/app/router/lio.py
+++ b/app/router/lio.py
@@ -14,7 +14,7 @@ from app.database.user import User
from app.dependencies.database import Database, Redis
from app.dependencies.fetcher import Fetcher
from app.dependencies.storage import StorageService
-from app.log import logger
+from app.log import log
from app.models.multiplayer_hub import PlaylistItem as HubPlaylistItem
from app.models.room import MatchType, QueueMode, RoomCategory, RoomStatus
from app.utils import utcnow
@@ -27,6 +27,7 @@ from sqlalchemy import update
from sqlmodel import col, select
router = APIRouter(prefix="/_lio", include_in_schema=False)
+logger = log("LegacyIO")
async def _ensure_room_chat_channel(
diff --git a/app/router/notification/message.py b/app/router/notification/message.py
index 41ac452..8dec1cb 100644
--- a/app/router/notification/message.py
+++ b/app/router/notification/message.py
@@ -16,7 +16,7 @@ from app.database.user import User
from app.dependencies.database import Database, Redis
from app.dependencies.param import BodyOrForm
from app.dependencies.user import get_current_user
-from app.log import logger
+from app.log import log
from app.models.notification import ChannelMessage, ChannelMessageTeam
from app.router.v2 import api_v2_router as router
from app.service.redis_message_system import redis_message_system
@@ -33,6 +33,9 @@ class KeepAliveResp(BaseModel):
silences: list[UserSilenceResp] = Field(default_factory=list)
+logger = log("Chat")
+
+
@router.post(
"/chat/ack",
name="保持连接",
diff --git a/app/router/notification/server.py b/app/router/notification/server.py
index 29fc663..9940801 100644
--- a/app/router/notification/server.py
+++ b/app/router/notification/server.py
@@ -14,7 +14,7 @@ from app.dependencies.database import (
with_db,
)
from app.dependencies.user import get_current_user_and_token
-from app.log import logger
+from app.log import log
from app.models.chat import ChatEvent
from app.models.notification import NotificationDetail
from app.service.subscribers.chat import ChatSubscriber
@@ -26,6 +26,8 @@ from fastapi.websockets import WebSocketState
from sqlmodel import select
from sqlmodel.ext.asyncio.session import AsyncSession
+logger = log("NotificationServer")
+
class ChatServer:
def __init__(self):
@@ -285,10 +287,10 @@ async def _listen_stop(ws: WebSocket, user_id: int, factory: DBFactory):
await ws.close(code=1000)
break
except WebSocketDisconnect as e:
- logger.info(f"[NotificationServer] Client {user_id} disconnected: {e.code}, {e.reason}")
+ logger.info(f"Client {user_id} disconnected: {e.code}, {e.reason}")
except RuntimeError as e:
if "disconnect message" in str(e):
- logger.info(f"[NotificationServer] Client {user_id} closed the connection.")
+ logger.info(f"Client {user_id} closed the connection.")
else:
logger.exception(f"RuntimeError in client {user_id}: {e}")
except Exception:
diff --git a/app/router/v2/score.py b/app/router/v2/score.py
index 0da6de0..2b47c78 100644
--- a/app/router/v2/score.py
+++ b/app/router/v2/score.py
@@ -39,7 +39,7 @@ from app.dependencies.database import Database, Redis, get_redis, with_db
from app.dependencies.fetcher import Fetcher, get_fetcher
from app.dependencies.storage import StorageService
from app.dependencies.user import ClientUser, get_current_user
-from app.log import logger
+from app.log import log
from app.models.beatmap import BeatmapRankStatus
from app.models.room import RoomCategory
from app.models.score import (
@@ -73,6 +73,7 @@ from sqlmodel import col, exists, func, select
from sqlmodel.ext.asyncio.session import AsyncSession
READ_SCORE_TIMEOUT = 10
+logger = log("Score")
async def _process_user_achievement(score_id: int):
diff --git a/app/router/v2/session_verify.py b/app/router/v2/session_verify.py
index 079a0b3..424b988 100644
--- a/app/router/v2/session_verify.py
+++ b/app/router/v2/session_verify.py
@@ -15,7 +15,7 @@ from app.dependencies.database import Database, Redis, get_redis
from app.dependencies.geoip import IPAddress
from app.dependencies.user import UserAndToken, get_client_user_and_token
from app.dependencies.user_agent import UserAgentInfo
-from app.log import logger
+from app.log import log
from app.service.login_log_service import LoginLogService
from app.service.verification_service import (
EmailVerificationService,
@@ -254,7 +254,7 @@ async def fallback_email(
user_agent,
)
if not success:
- logger.error(
- f"[Email Fallback] Failed to send fallback email to user {current_user.id} (token: {token_id}): {message}"
+ log("Verification").error(
+ f"Failed to send fallback email to user {current_user.id} (token: {token_id}): {message}"
)
return VerifyMethod()
diff --git a/app/router/v2/user.py b/app/router/v2/user.py
index 7928029..98a0b93 100644
--- a/app/router/v2/user.py
+++ b/app/router/v2/user.py
@@ -19,7 +19,7 @@ from app.database.user import SEARCH_INCLUDED
from app.dependencies.api_version import APIVersion
from app.dependencies.database import Database, get_redis
from app.dependencies.user import get_current_user
-from app.log import logger
+from app.log import log
from app.models.score import GameMode
from app.models.user import BeatmapsetType
from app.service.asset_proxy_helper import process_response_assets
@@ -336,7 +336,7 @@ async def get_user_beatmapsets(
try:
await cache_service.cache_user_beatmapsets(user_id, type.value, resp, limit, offset)
except Exception as e:
- logger.error(f"Error caching user beatmapsets for user {user_id}, type {type.value}: {e}")
+ log("Beatmapset").error(f"Error caching user beatmapsets for user {user_id}, type {type.value}: {e}")
background_task.add_task(cache_beatmapsets)
diff --git a/app/service/beatmapset_update_service.py b/app/service/beatmapset_update_service.py
index a03f91b..8852146 100644
--- a/app/service/beatmapset_update_service.py
+++ b/app/service/beatmapset_update_service.py
@@ -166,7 +166,7 @@ class BeatmapsetUpdateService:
except Exception as e:
logger.error(f"failed to add missing beatmapset {missing}: {e}")
if total > 0:
- logger.info(f"added {total} missing beatmapset")
+        logger.opt(colors=True).info(f"added {total} missing beatmapsets")
self._adding_missing = False
async def add(self, beatmapset: BeatmapsetResp):
@@ -206,12 +206,14 @@ class BeatmapsetUpdateService:
processing = ProcessingBeatmapset(beatmapset, sync_record)
next_time_delta = processing.calculate_next_sync_time()
if not next_time_delta:
- logger.info(f"[{beatmapset.id}] beatmapset has transformed to ranked or loved, removing from sync list")
+ logger.opt(colors=True).info(
+ f"[{beatmapset.id}] beatmapset has transformed to ranked or loved, removing from sync list"
+ )
await session.delete(sync_record)
await session.commit()
return
sync_record.next_sync_time = utcnow() + next_time_delta
- logger.info(f"[{beatmapset.id}] next sync at {sync_record.next_sync_time}")
+ logger.opt(colors=True).info(f"[{beatmapset.id}] next sync at {sync_record.next_sync_time}")
await session.commit()
async def _update_beatmaps(self):
@@ -224,17 +226,19 @@ class BeatmapsetUpdateService:
.order_by(col(BeatmapSync.next_sync_time).desc())
)
for record in records:
- logger.info(f"[{record.beatmapset_id}] syncing...")
+ logger.opt(colors=True).info(f"[{record.beatmapset_id}] syncing...")
try:
beatmapset = await self.fetcher.get_beatmapset(record.beatmapset_id)
except Exception as e:
if isinstance(e, HTTPError):
- logger.warning(
- f"[{record.beatmapset_id}] "
+ logger.opt(colors=True).warning(
+ f"[{record.beatmapset_id}] "
f"failed to fetch beatmapset: [{e.__class__.__name__}] {e}, retrying later"
)
else:
- logger.exception(f"[{record.beatmapset_id}] unexpected error: {e}, retrying later")
+ logger.opt(colors=True).exception(
+ f"[{record.beatmapset_id}] unexpected error: {e}, retrying later"
+ )
record.next_sync_time = utcnow() + timedelta(seconds=MIN_DELTA)
continue
processing = ProcessingBeatmapset(beatmapset, record)
@@ -266,15 +270,16 @@ class BeatmapsetUpdateService:
next_time_delta = processing.calculate_next_sync_time()
if not next_time_delta:
- logger.info(
- f"[{record.beatmapset_id}] beatmapset "
- "has transformed to ranked or loved,"
- " removing from sync list"
+ logger.opt(colors=True).info(
+ f"[{beatmapset.id}] beatmapset has transformed to ranked or loved,"
+ f" removing from sync list"
)
await session.delete(record)
else:
record.next_sync_time = utcnow() + next_time_delta
- logger.info(f"[{record.beatmapset_id}] next sync at {record.next_sync_time}")
+ logger.opt(colors=True).info(
+ f"[{record.beatmapset_id}] next sync at {record.next_sync_time}"
+ )
await session.commit()
async def _process_changed_beatmapset(self, beatmapset: BeatmapsetResp):
@@ -304,7 +309,7 @@ class BeatmapsetUpdateService:
await score.ranked_score.delete(session)
total += 1
if total > 0:
- logger.info(f"[beatmap: {beatmap_id}] processed {total} old scores")
+ logger.opt(colors=True).info(f"[beatmap: {beatmap_id}] processed {total} old scores")
await session.commit()
for change in changed:
@@ -312,17 +317,24 @@ class BeatmapsetUpdateService:
try:
beatmap = await self.fetcher.get_beatmap(change.beatmap_id)
except Exception as e:
- logger.error(f"[beatmap: {change.beatmap_id}] failed to fetch added beatmap: {e}, skipping")
+ logger.opt(colors=True).error(
+ f"[beatmap: {change.beatmap_id}] failed to fetch added beatmap: {e}, skipping"
+ )
continue
- logger.info(f"[{beatmap.beatmapset_id}] adding beatmap {beatmap.id}")
+ logger.opt(colors=True).info(f"[{beatmap.beatmapset_id}] adding beatmap {beatmap.id}")
await Beatmap.from_resp_no_save(session, beatmap)
else:
try:
beatmap = await self.fetcher.get_beatmap(change.beatmap_id)
except Exception as e:
- logger.error(f"[beatmap: {change.beatmap_id}] failed to fetch changed beatmap: {e}, skipping")
+ logger.opt(colors=True).error(
+ f"[beatmap: {change.beatmap_id}] failed to fetch changed beatmap: {e}, skipping"
+ )
continue
- logger.info(f"[{beatmap.beatmapset_id}] processing beatmap {beatmap.id} change {change.type}")
+ logger.opt(colors=True).info(
+ f"[{beatmap.beatmapset_id}] processing beatmap {beatmap.id} "
+ f"change {change.type}"
+ )
new_db_beatmap = await Beatmap.from_resp_no_save(session, beatmap)
existing_beatmap = await session.get(Beatmap, change.beatmap_id)
if existing_beatmap:
diff --git a/app/service/database_cleanup_service.py b/app/service/database_cleanup_service.py
index c8ffd0f..6d80819 100644
--- a/app/service/database_cleanup_service.py
+++ b/app/service/database_cleanup_service.py
@@ -46,13 +46,13 @@ class DatabaseCleanupService:
await db.commit()
if deleted_count > 0:
- logger.debug(f"[Cleanup Service] Cleaned up {deleted_count} expired email verification codes")
+ logger.debug(f"Cleaned up {deleted_count} expired email verification codes")
return deleted_count
except Exception as e:
await db.rollback()
- logger.error(f"[Cleanup Service] Error cleaning expired verification codes: {e!s}")
+ logger.error(f"Error cleaning expired verification codes: {e!s}")
return 0
@staticmethod
@@ -85,13 +85,13 @@ class DatabaseCleanupService:
await db.commit()
if deleted_count > 0:
- logger.debug(f"[Cleanup Service] Cleaned up {deleted_count} expired login sessions")
+ logger.debug(f"Cleaned up {deleted_count} expired login sessions")
return deleted_count
except Exception as e:
await db.rollback()
- logger.error(f"[Cleanup Service] Error cleaning expired login sessions: {e!s}")
+ logger.error(f"Error cleaning expired login sessions: {e!s}")
return 0
@staticmethod
@@ -126,15 +126,13 @@ class DatabaseCleanupService:
await db.commit()
if deleted_count > 0:
- logger.debug(
- f"[Cleanup Service] Cleaned up {deleted_count} used verification codes older than {days_old} days"
- )
+ logger.debug(f"Cleaned up {deleted_count} used verification codes older than {days_old} days")
return deleted_count
except Exception as e:
await db.rollback()
- logger.error(f"[Cleanup Service] Error cleaning old used verification codes: {e!s}")
+ logger.error(f"Error cleaning old used verification codes: {e!s}")
return 0
@staticmethod
@@ -169,16 +167,13 @@ class DatabaseCleanupService:
await db.commit()
if deleted_count > 0:
- logger.debug(
- f"[Cleanup Service] Cleaned up {deleted_count} unverified "
- f"login sessions older than {hours_old} hour(s)"
- )
+ logger.debug(f"Cleaned up {deleted_count} unverified login sessions older than {hours_old} hour(s)")
return deleted_count
except Exception as e:
await db.rollback()
- logger.error(f"[Cleanup Service] Error cleaning unverified login sessions: {e!s}")
+ logger.error(f"Error cleaning unverified login sessions: {e!s}")
return 0
@staticmethod
@@ -206,13 +201,13 @@ class DatabaseCleanupService:
await db.commit()
if deleted_count > 0:
- logger.debug(f"[Cleanup Service] Cleaned up {deleted_count} outdated verified sessions")
+ logger.debug(f"Cleaned up {deleted_count} outdated verified sessions")
return deleted_count
except Exception as e:
await db.rollback()
- logger.error(f"[Cleanup Service] Error cleaning outdated verified sessions: {e!s}")
+ logger.error(f"Error cleaning outdated verified sessions: {e!s}")
return 0
@staticmethod
@@ -243,13 +238,13 @@ class DatabaseCleanupService:
await db.commit()
if deleted_count > 0:
- logger.debug(f"[Cleanup Service] Cleaned up {deleted_count} expired trusted devices")
+ logger.debug(f"Cleaned up {deleted_count} expired trusted devices")
return deleted_count
except Exception as e:
await db.rollback()
- logger.error(f"[Cleanup Service] Error cleaning expired trusted devices: {e!s}")
+ logger.error(f"Error cleaning expired trusted devices: {e!s}")
return 0
@staticmethod
@@ -278,13 +273,13 @@ class DatabaseCleanupService:
await db.commit()
if deleted_count > 0:
- logger.debug(f"[Cleanup Service] Cleaned up {deleted_count} expired OAuth tokens")
+ logger.debug(f"Cleaned up {deleted_count} expired OAuth tokens")
return deleted_count
except Exception as e:
await db.rollback()
- logger.error(f"[Cleanup Service] Error cleaning expired OAuth tokens: {e!s}")
+ logger.error(f"Error cleaning expired OAuth tokens: {e!s}")
return 0
@staticmethod
@@ -323,9 +318,7 @@ class DatabaseCleanupService:
total_cleaned = sum(results.values())
if total_cleaned > 0:
- logger.debug(
- f"[Cleanup Service] Full cleanup completed, total cleaned: {total_cleaned} records - {results}"
- )
+ logger.debug(f"Full cleanup completed, total cleaned: {total_cleaned} records - {results}")
return results
@@ -421,7 +414,7 @@ class DatabaseCleanupService:
}
except Exception as e:
- logger.error(f"[Cleanup Service] Error getting cleanup statistics: {e!s}")
+ logger.error(f"Error getting cleanup statistics: {e!s}")
return {
"expired_verification_codes": 0,
"expired_login_sessions": 0,
diff --git a/app/service/email_queue.py b/app/service/email_queue.py
index 7a07291..be946fa 100644
--- a/app/service/email_queue.py
+++ b/app/service/email_queue.py
@@ -221,11 +221,6 @@ class EmailQueue:
是否发送成功
"""
try:
- # 如果邮件发送功能被禁用,则只记录日志
- if not getattr(settings, "enable_email_sending", True):
- logger.info(f"[Mock Email] Would send to {email_data.get('to_email')}: {email_data.get('subject')}")
- return True
-
             # Build the email
msg = MIMEMultipart("alternative")
msg["From"] = f"{self.from_name} <{self.from_email}>"
diff --git a/app/service/email_service.py b/app/service/email_service.py
index 73c4b7c..8e8314f 100644
--- a/app/service/email_service.py
+++ b/app/service/email_service.py
@@ -147,11 +147,11 @@ class EmailService:
server.send_message(msg)
- logger.info(f"[Email Verification] Successfully sent verification code to {email}")
+ logger.info(f"Successfully sent verification code to {email}")
return True
except Exception as e:
- logger.error(f"[Email Verification] Failed to send email: {e}")
+ logger.error(f"Failed to send email: {e}")
return False
diff --git a/app/service/password_reset_service.py b/app/service/password_reset_service.py
index b822329..5d831d6 100644
--- a/app/service/password_reset_service.py
+++ b/app/service/password_reset_service.py
@@ -105,13 +105,13 @@ class PasswordResetService:
email_sent = await self.send_password_reset_email(email=email, code=reset_code, username=user.username)
if email_sent:
- logger.info(f"[Password Reset] Sent reset code to user {user.id} ({email})")
+ logger.info(f"Sent reset code to user {user.id} ({email})")
return True, "密码重置邮件已发送,请查收邮箱"
else:
             # Email send failed; clean up the Redis data
await redis.delete(reset_code_key)
await redis.delete(rate_limit_key)
- logger.warning(f"[Password Reset] Email sending failed, cleaned up Redis data for {email}")
+ logger.warning(f"Email sending failed, cleaned up Redis data for {email}")
return False, "邮件发送失败,请稍后重试"
except Exception:
@@ -121,7 +121,7 @@ class PasswordResetService:
await redis.delete(rate_limit_key)
except Exception:
pass
- logger.exception("[Password Reset] Redis operation failed")
+ logger.exception("Redis operation failed")
return False, "服务暂时不可用,请稍后重试"
async def send_password_reset_email(self, email: str, code: str, username: str) -> bool:
@@ -269,11 +269,11 @@ class PasswordResetService:
metadata=metadata,
)
- logger.info(f"[Password Reset] Enqueued reset code email to {email}")
+ logger.info(f"Enqueued reset code email to {email}")
return True
except Exception as e:
- logger.error(f"[Password Reset] Failed to enqueue email: {e}")
+ logger.error(f"Failed to enqueue email: {e}")
return False
async def reset_password(
@@ -366,7 +366,7 @@ class PasswordResetService:
         await redis.setex(reset_code_key, 300, json.dumps(reset_data))  # keep for 5 minutes for logging
logger.info(
- f"[Password Reset] User {user_id} ({email}) successfully reset password from IP {ip_address},"
+ f"User {user_id} ({email}) successfully reset password from IP {ip_address},"
f" invalidated {tokens_deleted} tokens"
)
return True, "密码重置成功,所有设备已被登出"
@@ -374,7 +374,7 @@ class PasswordResetService:
except Exception as e:
             # Do not access user.id in the exception handler; it may trigger database operations
user_id = reset_data.get("user_id", "未知")
- logger.error(f"[Password Reset] Failed to reset password for user {user_id}: {e}")
+ logger.error(f"Failed to reset password for user {user_id}: {e}")
await session.rollback()
             # After a database rollback, restore the verification-code state in Redis
@@ -401,14 +401,14 @@ class PasswordResetService:
remaining_ttl,
json.dumps(original_reset_data),
)
- logger.info(f"[Password Reset] Restored Redis state after database rollback for {email}")
+ logger.info(f"Restored Redis state after database rollback for {email}")
else:
                 # If it has already expired, delete it outright
await redis.delete(reset_code_key)
- logger.info(f"[Password Reset] Removed expired reset code after database rollback for {email}")
+ logger.info(f"Removed expired reset code after database rollback for {email}")
except Exception as redis_error:
- logger.error(f"[Password Reset] Failed to restore Redis state after rollback: {redis_error}")
+ logger.error(f"Failed to restore Redis state after rollback: {redis_error}")
return False, "密码重置失败,请稍后重试"
@@ -428,7 +428,7 @@ class PasswordResetService:
ttl = await redis.ttl(rate_limit_key)
return 1 if ttl > 0 else 0
except Exception as e:
- logger.error(f"[Password Reset] Failed to get attempts count: {e}")
+ logger.error(f"Failed to get attempts count: {e}")
return 0
diff --git a/app/service/subscribers/chat.py b/app/service/subscribers/chat.py
index 060ac3f..9512f9c 100644
--- a/app/service/subscribers/chat.py
+++ b/app/service/subscribers/chat.py
@@ -2,7 +2,7 @@ from __future__ import annotations
from typing import TYPE_CHECKING
-from app.log import logger
+from app.log import log
from app.models.notification import NotificationDetails
from .base import RedisSubscriber
@@ -17,6 +17,8 @@ JOIN_CHANNEL = "chat:room:joined"
EXIT_CHANNEL = "chat:room:left"
ON_NOTIFICATION = "chat:notification"
+logger = log("Chat")
+
class ChatSubscriber(RedisSubscriber):
def __init__(self):
@@ -49,7 +51,7 @@ class ChatSubscriber(RedisSubscriber):
try:
detail = TypeAdapter(NotificationDetails).validate_json(s)
except ValueError:
- logger.exception("")
+ logger.exception("Failed to parse notification detail")
return
except Exception:
logger.exception("Failed to parse notification detail")
diff --git a/app/service/verification_service.py b/app/service/verification_service.py
index 778dc02..d1f2ee4 100644
--- a/app/service/verification_service.py
+++ b/app/service/verification_service.py
@@ -180,7 +180,7 @@ This email was sent automatically, please do not reply.
return True
except Exception as e:
- logger.error(f"[Email Verification] Failed to enqueue email: {e}")
+ logger.error(f"Failed to enqueue email: {e}")
return False
@staticmethod
@@ -237,7 +237,7 @@ This email was sent automatically, please do not reply.
str(verification.id) if verification.id else "0",
)
- logger.info(f"[Email Verification] Created verification code for user {user_id}: {code}")
+ logger.info(f"Created verification code for user {user_id}: {code}")
return verification, code
@staticmethod
@@ -254,11 +254,11 @@ This email was sent automatically, please do not reply.
try:
             # Check whether email verification is enabled
if not settings.enable_email_verification:
- logger.debug(f"[Email Verification] Email verification is disabled, skipping for user {user_id}")
+ logger.debug(f"Email verification is disabled, skipping for user {user_id}")
                 return True  # report success but skip the verification flow
             # Detect client information
- logger.info(f"[Email Verification] Detected client for user {user_id}: {user_agent}")
+ logger.info(f"Detected client for user {user_id}: {user_agent}")
             # Create the verification record
(
@@ -272,16 +272,14 @@ This email was sent automatically, please do not reply.
success = await EmailVerificationService.send_verification_email_via_queue(email, code, username, user_id)
if success:
- logger.info(
- f"[Email Verification] Successfully enqueued verification email to {email} (user: {username})"
- )
+ logger.info(f"Successfully enqueued verification email to {email} (user: {username})")
return True
else:
- logger.error(f"[Email Verification] Failed to enqueue verification email: {email} (user: {username})")
+ logger.error(f"Failed to enqueue verification email: {email} (user: {username})")
return False
except Exception as e:
- logger.error(f"[Email Verification] Exception during sending verification email: {e}")
+ logger.error(f"Exception during sending verification email: {e}")
return False
@staticmethod
@@ -299,7 +297,7 @@ This email was sent automatically, please do not reply.
try:
             # Check whether email verification is enabled
if not settings.enable_email_verification:
- logger.debug(f"[Email Verification] Email verification is disabled, auto-approving for user {user_id}")
+ logger.debug(f"Email verification is disabled, auto-approving for user {user_id}")
return True, "验证成功(邮件验证功能已禁用)"
             # Check Redis first
@@ -331,11 +329,11 @@ This email was sent automatically, please do not reply.
             # Delete the Redis record
await redis.delete(f"email_verification:{user_id}:{code}")
- logger.info(f"[Email Verification] User {user_id} verification code verified successfully")
+ logger.info(f"User {user_id} verification code verified successfully")
return True, "验证成功"
except Exception as e:
- logger.error(f"[Email Verification] Exception during verification code validation: {e}")
+ logger.error(f"Exception during verification code validation: {e}")
return False, "验证过程中发生错误"
@staticmethod
@@ -354,7 +352,7 @@ This email was sent automatically, please do not reply.
_ = user_agent
         # Check whether email verification is enabled
if not settings.enable_email_verification:
- logger.debug(f"[Email Verification] Email verification is disabled, skipping resend for user {user_id}")
+ logger.debug(f"Email verification is disabled, skipping resend for user {user_id}")
return True, "验证码已发送(邮件验证功能已禁用)"
         # Rate-limit resends (at most once per 60 seconds)
@@ -376,7 +374,7 @@ This email was sent automatically, please do not reply.
return False, "重新发送失败,请稍后再试"
except Exception as e:
- logger.error(f"[Email Verification] Exception during resending verification code: {e}")
+ logger.error(f"Exception during resending verification code: {e}")
return False, "重新发送过程中发生错误"
diff --git a/app/tasks/beatmapset_update.py b/app/tasks/beatmapset_update.py
index 4e1492e..e04e860 100644
--- a/app/tasks/beatmapset_update.py
+++ b/app/tasks/beatmapset_update.py
@@ -3,7 +3,7 @@ from __future__ import annotations
from datetime import datetime, timedelta
from app.dependencies.scheduler import get_scheduler
-from app.service.beatmapset_update_service import service
+from app.service.beatmapset_update_service import get_beatmapset_update_service
from app.utils import bg_tasks
SCHEDULER_INTERVAL_MINUTES = 2
@@ -16,6 +16,6 @@ SCHEDULER_INTERVAL_MINUTES = 2
next_run_time=datetime.now() + timedelta(minutes=1),
)
async def beatmapset_update_job():
- if service is not None:
- bg_tasks.add_task(service.add_missing_beatmapsets)
- await service._update_beatmaps()
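+    # Resolve the service lazily at job run time instead of relying on a module-level singleton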
+ service = get_beatmapset_update_service()
+ bg_tasks.add_task(service.add_missing_beatmapsets)
+ await service._update_beatmaps()
diff --git a/app/tasks/daily_challenge.py b/app/tasks/daily_challenge.py
index e8a7fa3..3057fcb 100644
--- a/app/tasks/daily_challenge.py
+++ b/app/tasks/daily_challenge.py
@@ -80,9 +80,7 @@ async def daily_challenge_job():
allowed_mods = await redis.hget(key, "allowed_mods") # pyright: ignore[reportGeneralTypeIssues]
if beatmap is None or ruleset_id is None:
- logger.warning(
- f"[DailyChallenge] Missing required data for daily challenge {now}. Will try again in 5 minutes."
- )
+ logger.warning(f"Missing required data for daily challenge {now}. Will try again in 5 minutes.")
get_scheduler().add_job(
daily_challenge_job,
"date",
@@ -111,12 +109,12 @@ async def daily_challenge_job():
duration=int((next_day - now - timedelta(minutes=2)).total_seconds() / 60),
)
await MetadataHubs.broadcast_call("DailyChallengeUpdated", DailyChallengeInfo(room_id=room.id))
- logger.success(f"[DailyChallenge] Added today's daily challenge: {beatmap=}, {ruleset_id=}, {required_mods=}")
+ logger.success(f"Added today's daily challenge: {beatmap=}, {ruleset_id=}, {required_mods=}")
return
except (ValueError, json.JSONDecodeError) as e:
- logger.warning(f"[DailyChallenge] Error processing daily challenge data: {e} Will try again in 5 minutes.")
+ logger.warning(f"Error processing daily challenge data: {e} Will try again in 5 minutes.")
except Exception as e:
- logger.exception(f"[DailyChallenge] Unexpected error in daily challenge job: {e} Will try again in 5 minutes.")
+ logger.exception(f"Unexpected error in daily challenge job: {e} Will try again in 5 minutes.")
get_scheduler().add_job(
daily_challenge_job,
"date",
diff --git a/main.py b/main.py
index d74acfa..ee8dcb4 100644
--- a/main.py
+++ b/main.py
@@ -8,7 +8,7 @@ from app.database import User
from app.dependencies.database import Database, engine, get_redis, redis_client
from app.dependencies.fetcher import get_fetcher
from app.dependencies.scheduler import start_scheduler, stop_scheduler
-from app.log import logger
+from app.log import logger, system_logger
from app.middleware.verify_session import VerifySessionMiddleware
from app.models.mods import init_mods, init_ranked_mods
from app.router import (
@@ -136,13 +136,9 @@ if newrelic_config_path.exists():
environment = settings.new_relic_environment or ("production" if not settings.debug else "development")
newrelic.agent.initialize(newrelic_config_path, environment)
- logger.info(f"[NewRelic] Enabled, environment: {environment}")
- except ImportError:
- logger.warning("[NewRelic] Config file found but 'newrelic' package is not installed")
+ system_logger("NewRelic").info(f"Enabled, environment: {environment}")
except Exception as e:
- logger.error(f"[NewRelic] Initialization failed: {e}")
-else:
- logger.info("[NewRelic] No newrelic.ini config file found, skipping initialization")
+ system_logger("NewRelic").error(f"Initialization failed: {e}")
if settings.sentry_dsn is not None:
sentry_sdk.init(
@@ -245,10 +241,14 @@ async def http_exception_handler(requst: Request, exc: HTTPException):
if settings.secret_key == "your_jwt_secret_here":
- logger.warning("jwt_secret_key is unset. Your server is unsafe. Use this command to generate: openssl rand -hex 32")
+ system_logger("Security").opt(colors=True).warning(
+ "jwt_secret_key is unset. Your server is unsafe. "
+ "Use this command to generate: openssl rand -hex 32."
+ )
if settings.osu_web_client_secret == "your_osu_web_client_secret_here":
- logger.warning(
- "osu_web_client_secret is unset. Your server is unsafe. Use this command to generate: openssl rand -hex 40"
+ system_logger("Security").opt(colors=True).warning(
+ "osu_web_client_secret is unset. Your server is unsafe. "
+ "Use this command to generate: openssl rand -hex 40."
)
if __name__ == "__main__":