refactor(log): refactor the whole project

format: {time:YYYY-MM-DD HH:mm:ss} [{level}] | {name} | {message}
{name} is:
- Uvicorn: log from uvicorn server (#228B22)
- Service: log from class of `app.service` (blue)
- Fetcher: log from fetchers (magenta)
- Task: log from `app.tasks` (#FFD700)
- System: log from `system_logger` (red)
- Normal: log from `log(name)` (#FFC1C1)
- Default: the module name of caller

If you are writing services or tasks, you can simply call `logger.*` directly; it will automatically be tagged with the name `Service` or `Task`.
If you want to emit fetcher logs, system-related logs, or plain named logs, use `logger = (fetcher_logger / system_logger / log)(name)`.
This commit is contained in:
MingxuanGame
2025-10-03 11:44:47 +00:00
parent fce88272b5
commit d23f32f08d
27 changed files with 210 additions and 174 deletions

View File

@@ -13,7 +13,7 @@ from app.database import (
User, User,
) )
from app.database.auth import TotpKeys from app.database.auth import TotpKeys
from app.log import logger from app.log import log
from app.models.totp import FinishStatus, StartCreateTotpKeyResp from app.models.totp import FinishStatus, StartCreateTotpKeyResp
from app.utils import utcnow from app.utils import utcnow
@@ -31,6 +31,8 @@ pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto")
# bcrypt 缓存(模拟应用状态缓存) # bcrypt 缓存(模拟应用状态缓存)
bcrypt_cache = {} bcrypt_cache = {}
logger = log("Auth")
def validate_username(username: str) -> list[str]: def validate_username(username: str) -> list[str]:
"""验证用户名""" """验证用户名"""
@@ -253,7 +255,7 @@ async def store_token(
tokens_to_delete = active_tokens[max_tokens_per_client - 1 :] tokens_to_delete = active_tokens[max_tokens_per_client - 1 :]
for token in tokens_to_delete: for token in tokens_to_delete:
await db.delete(token) await db.delete(token)
logger.info(f"[Auth] Cleaned up {len(tokens_to_delete)} old tokens for user {user_id}") logger.info(f"Cleaned up {len(tokens_to_delete)} old tokens for user {user_id}")
# 检查是否有重复的 access_token # 检查是否有重复的 access_token
duplicate_token = (await db.exec(select(OAuthToken).where(OAuthToken.access_token == access_token))).first() duplicate_token = (await db.exec(select(OAuthToken).where(OAuthToken.access_token == access_token))).first()
@@ -274,9 +276,7 @@ async def store_token(
await db.commit() await db.commit()
await db.refresh(token_record) await db.refresh(token_record)
logger.info( logger.info(f"Created new token for user {user_id}, client {client_id} (multi-device: {allow_multiple_devices})")
f"[Auth] Created new token for user {user_id}, client {client_id} (multi-device: {allow_multiple_devices})"
)
return token_record return token_record

View File

@@ -7,7 +7,7 @@ import math
from typing import TYPE_CHECKING from typing import TYPE_CHECKING
from app.config import settings from app.config import settings
from app.log import logger from app.log import log
from app.models.beatmap import BeatmapAttributes from app.models.beatmap import BeatmapAttributes
from app.models.mods import APIMod, parse_enum_to_str from app.models.mods import APIMod, parse_enum_to_str
from app.models.score import GameMode from app.models.score import GameMode
@@ -18,6 +18,8 @@ from redis.asyncio import Redis
from sqlmodel import col, exists, select from sqlmodel import col, exists, select
from sqlmodel.ext.asyncio.session import AsyncSession from sqlmodel.ext.asyncio.session import AsyncSession
logger = log("Calculator")
try: try:
import rosu_pp_py as rosu import rosu_pp_py as rosu
except ImportError: except ImportError:

View File

@@ -16,7 +16,7 @@ from app.calculator import (
from app.config import settings from app.config import settings
from app.database.team import TeamMember from app.database.team import TeamMember
from app.dependencies.database import get_redis from app.dependencies.database import get_redis
from app.log import logger from app.log import log
from app.models.beatmap import BeatmapRankStatus from app.models.beatmap import BeatmapRankStatus
from app.models.model import ( from app.models.model import (
CurrentUserAttributes, CurrentUserAttributes,
@@ -74,6 +74,8 @@ from sqlmodel.ext.asyncio.session import AsyncSession
if TYPE_CHECKING: if TYPE_CHECKING:
from app.fetcher import Fetcher from app.fetcher import Fetcher
logger = log("Score")
class ScoreBase(AsyncAttrs, SQLModel, UTCBaseModel): class ScoreBase(AsyncAttrs, SQLModel, UTCBaseModel):
# 基本字段 # 基本字段
@@ -854,8 +856,7 @@ async def process_score(
) -> Score: ) -> Score:
gamemode = GameMode.from_int(info.ruleset_id).to_special_mode(info.mods) gamemode = GameMode.from_int(info.ruleset_id).to_special_mode(info.mods)
logger.info( logger.info(
"[Score] Creating score for user {user_id} | beatmap={beatmap_id} " "Creating score for user {user_id} | beatmap={beatmap_id} ruleset={ruleset} passed={passed} total={total}",
"ruleset={ruleset} passed={passed} total={total}",
user_id=user.id, user_id=user.id,
beatmap_id=beatmap_id, beatmap_id=beatmap_id,
ruleset=gamemode, ruleset=gamemode,
@@ -897,7 +898,7 @@ async def process_score(
) )
session.add(score) session.add(score)
logger.debug( logger.debug(
"[Score] Score staged for commit | token={token} mods={mods} total_hits={hits}", "Score staged for commit | token={token} mods={mods} total_hits={hits}",
token=score_token.id, token=score_token.id,
mods=info.mods, mods=info.mods,
hits=sum(info.statistics.values()) if info.statistics else 0, hits=sum(info.statistics.values()) if info.statistics else 0,
@@ -910,7 +911,7 @@ async def process_score(
async def _process_score_pp(score: Score, session: AsyncSession, redis: Redis, fetcher: "Fetcher"): async def _process_score_pp(score: Score, session: AsyncSession, redis: Redis, fetcher: "Fetcher"):
if score.pp != 0: if score.pp != 0:
logger.debug( logger.debug(
"[Score] Skipping PP calculation for score {score_id} | already set {pp:.2f}", "Skipping PP calculation for score {score_id} | already set {pp:.2f}",
score_id=score.id, score_id=score.id,
pp=score.pp, pp=score.pp,
) )
@@ -918,7 +919,7 @@ async def _process_score_pp(score: Score, session: AsyncSession, redis: Redis, f
can_get_pp = score.passed and score.ranked and mods_can_get_pp(int(score.gamemode), score.mods) can_get_pp = score.passed and score.ranked and mods_can_get_pp(int(score.gamemode), score.mods)
if not can_get_pp: if not can_get_pp:
logger.debug( logger.debug(
"[Score] Skipping PP calculation for score {score_id} | passed={passed} ranked={ranked} mods={mods}", "Skipping PP calculation for score {score_id} | passed={passed} ranked={ranked} mods={mods}",
score_id=score.id, score_id=score.id,
passed=score.passed, passed=score.passed,
ranked=score.ranked, ranked=score.ranked,
@@ -928,10 +929,10 @@ async def _process_score_pp(score: Score, session: AsyncSession, redis: Redis, f
pp, successed = await pre_fetch_and_calculate_pp(score, session, redis, fetcher) pp, successed = await pre_fetch_and_calculate_pp(score, session, redis, fetcher)
if not successed: if not successed:
await redis.rpush("score:need_recalculate", score.id) # pyright: ignore[reportGeneralTypeIssues] await redis.rpush("score:need_recalculate", score.id) # pyright: ignore[reportGeneralTypeIssues]
logger.warning("[Score] Queued score {score_id} for PP recalculation", score_id=score.id) logger.warning("Queued score {score_id} for PP recalculation", score_id=score.id)
return return
score.pp = pp score.pp = pp
logger.info("[Score] Calculated PP for score {score_id} | pp={pp:.2f}", score_id=score.id, pp=pp) logger.info("Calculated PP for score {score_id} | pp={pp:.2f}", score_id=score.id, pp=pp)
user_id = score.user_id user_id = score.user_id
beatmap_id = score.beatmap_id beatmap_id = score.beatmap_id
previous_pp_best = await get_user_best_pp_in_beatmap(session, beatmap_id, user_id, score.gamemode) previous_pp_best = await get_user_best_pp_in_beatmap(session, beatmap_id, user_id, score.gamemode)
@@ -947,7 +948,7 @@ async def _process_score_pp(score: Score, session: AsyncSession, redis: Redis, f
session.add(best_score) session.add(best_score)
await session.delete(previous_pp_best) if previous_pp_best else None await session.delete(previous_pp_best) if previous_pp_best else None
logger.info( logger.info(
"[Score] Updated PP best for user {user_id} | score_id={score_id} pp={pp:.2f}", "Updated PP best for user {user_id} | score_id={score_id} pp={pp:.2f}",
user_id=user_id, user_id=user_id,
score_id=score.id, score_id=score.id,
pp=score.pp, pp=score.pp,
@@ -966,15 +967,14 @@ async def _process_score_events(score: Score, session: AsyncSession):
if rank_global == 0 or total_users == 0: if rank_global == 0 or total_users == 0:
logger.debug( logger.debug(
"[Score] Skipping event creation for score {score_id} | " "Skipping event creation for score {score_id} | rank_global={rank_global} total_users={total_users}",
"rank_global={rank_global} total_users={total_users}",
score_id=score.id, score_id=score.id,
rank_global=rank_global, rank_global=rank_global,
total_users=total_users, total_users=total_users,
) )
return return
logger.debug( logger.debug(
"[Score] Processing events for score {score_id} | rank_global={rank_global} total_users={total_users}", "Processing events for score {score_id} | rank_global={rank_global} total_users={total_users}",
score_id=score.id, score_id=score.id,
rank_global=rank_global, rank_global=rank_global,
total_users=total_users, total_users=total_users,
@@ -1003,7 +1003,7 @@ async def _process_score_events(score: Score, session: AsyncSession):
} }
session.add(rank_event) session.add(rank_event)
logger.info( logger.info(
"[Score] Registered rank event for user {user_id} | score_id={score_id} rank={rank}", "Registered rank event for user {user_id} | score_id={score_id} rank={rank}",
user_id=score.user_id, user_id=score.user_id,
score_id=score.id, score_id=score.id,
rank=rank_global, rank=rank_global,
@@ -1045,12 +1045,12 @@ async def _process_score_events(score: Score, session: AsyncSession):
} }
session.add(rank_lost_event) session.add(rank_lost_event)
logger.info( logger.info(
"[Score] Registered rank lost event | displaced_user={user_id} new_score_id={score_id}", "Registered rank lost event | displaced_user={user_id} new_score_id={score_id}",
user_id=displaced_score.user_id, user_id=displaced_score.user_id,
score_id=score.id, score_id=score.id,
) )
logger.debug( logger.debug(
"[Score] Event processing committed for score {score_id}", "Event processing committed for score {score_id}",
score_id=score.id, score_id=score.id,
) )
@@ -1074,7 +1074,7 @@ async def _process_statistics(
session, score.beatmap_id, user.id, mod_for_save, score.gamemode session, score.beatmap_id, user.id, mod_for_save, score.gamemode
) )
logger.debug( logger.debug(
"[Score] Existing best scores for user {user_id} | global={global_id} mod={mod_id}", "Existing best scores for user {user_id} | global={global_id} mod={mod_id}",
user_id=user.id, user_id=user.id,
global_id=previous_score_best.score_id if previous_score_best else None, global_id=previous_score_best.score_id if previous_score_best else None,
mod_id=previous_score_best_mod.score_id if previous_score_best_mod else None, mod_id=previous_score_best_mod.score_id if previous_score_best_mod else None,
@@ -1104,7 +1104,7 @@ async def _process_statistics(
statistics.total_score += score.total_score statistics.total_score += score.total_score
difference = score.total_score - previous_score_best.total_score if previous_score_best else score.total_score difference = score.total_score - previous_score_best.total_score if previous_score_best else score.total_score
logger.debug( logger.debug(
"[Score] Score delta computed for {score_id}: {difference}", "Score delta computed for {score_id}: {difference}",
score_id=score.id, score_id=score.id,
difference=difference, difference=difference,
) )
@@ -1151,7 +1151,7 @@ async def _process_statistics(
) )
) )
logger.info( logger.info(
"[Score] Created new best score entry for user {user_id} | score_id={score_id} mods={mods}", "Created new best score entry for user {user_id} | score_id={score_id} mods={mods}",
user_id=user.id, user_id=user.id,
score_id=score.id, score_id=score.id,
mods=mod_for_save, mods=mod_for_save,
@@ -1163,7 +1163,7 @@ async def _process_statistics(
previous_score_best.rank = score.rank previous_score_best.rank = score.rank
previous_score_best.score_id = score.id previous_score_best.score_id = score.id
logger.info( logger.info(
"[Score] Updated existing best score for user {user_id} | score_id={score_id} total={total}", "Updated existing best score for user {user_id} | score_id={score_id} total={total}",
user_id=user.id, user_id=user.id,
score_id=score.id, score_id=score.id,
total=score.total_score, total=score.total_score,
@@ -1175,7 +1175,7 @@ async def _process_statistics(
if difference > 0: if difference > 0:
# 下方的 if 一定会触发。将高分设置为此分数,删除自己防止重复的 score_id # 下方的 if 一定会触发。将高分设置为此分数,删除自己防止重复的 score_id
logger.info( logger.info(
"[Score] Replacing global best score for user {user_id} | old_score_id={old_score_id}", "Replacing global best score for user {user_id} | old_score_id={old_score_id}",
user_id=user.id, user_id=user.id,
old_score_id=previous_score_best.score_id, old_score_id=previous_score_best.score_id,
) )
@@ -1188,7 +1188,7 @@ async def _process_statistics(
previous_score_best_mod.rank = score.rank previous_score_best_mod.rank = score.rank
previous_score_best_mod.score_id = score.id previous_score_best_mod.score_id = score.id
logger.info( logger.info(
"[Score] Replaced mod-specific best for user {user_id} | mods={mods} score_id={score_id}", "Replaced mod-specific best for user {user_id} | mods={mods} score_id={score_id}",
user_id=user.id, user_id=user.id,
mods=mod_for_save, mods=mod_for_save,
score_id=score.id, score_id=score.id,
@@ -1202,14 +1202,14 @@ async def _process_statistics(
mouthly_playcount.count += 1 mouthly_playcount.count += 1
statistics.play_time += playtime statistics.play_time += playtime
logger.debug( logger.debug(
"[Score] Recorded playtime {playtime}s for score {score_id} (user {user_id})", "Recorded playtime {playtime}s for score {score_id} (user {user_id})",
playtime=playtime, playtime=playtime,
score_id=score.id, score_id=score.id,
user_id=user.id, user_id=user.id,
) )
else: else:
logger.debug( logger.debug(
"[Score] Playtime {playtime}s for score {score_id} did not meet validity checks", "Playtime {playtime}s for score {score_id} did not meet validity checks",
playtime=playtime, playtime=playtime,
score_id=score.id, score_id=score.id,
) )
@@ -1242,7 +1242,7 @@ async def _process_statistics(
if add_to_db: if add_to_db:
session.add(mouthly_playcount) session.add(mouthly_playcount)
logger.debug( logger.debug(
"[Score] Created monthly playcount record for user {user_id} ({year}-{month})", "Created monthly playcount record for user {user_id} ({year}-{month})",
user_id=user.id, user_id=user.id,
year=mouthly_playcount.year, year=mouthly_playcount.year,
month=mouthly_playcount.month, month=mouthly_playcount.month,
@@ -1262,7 +1262,7 @@ async def process_user(
score_id = score.id score_id = score.id
user_id = user.id user_id = user.id
logger.info( logger.info(
"[Score] Processing score {score_id} for user {user_id} on beatmap {beatmap_id}", "Processing score {score_id} for user {user_id} on beatmap {beatmap_id}",
score_id=score_id, score_id=score_id,
user_id=user_id, user_id=user_id,
beatmap_id=score.beatmap_id, beatmap_id=score.beatmap_id,
@@ -1287,14 +1287,14 @@ async def process_user(
score_ = (await session.exec(select(Score).where(Score.id == score_id).options(joinedload(Score.beatmap)))).first() score_ = (await session.exec(select(Score).where(Score.id == score_id).options(joinedload(Score.beatmap)))).first()
if score_ is None: if score_ is None:
logger.warning( logger.warning(
"[Score] Score {score_id} disappeared after commit, skipping event processing", "Score {score_id} disappeared after commit, skipping event processing",
score_id=score_id, score_id=score_id,
) )
return return
await _process_score_events(score_, session) await _process_score_events(score_, session)
await session.commit() await session.commit()
logger.info( logger.info(
"[Score] Finished processing score {score_id} for user {user_id}", "Finished processing score {score_id} for user {user_id}",
score_id=score_id, score_id=score_id,
user_id=user_id, user_id=user_id,
) )

View File

@@ -5,7 +5,7 @@ from typing import Annotated
from app.config import settings from app.config import settings
from app.dependencies.database import get_redis from app.dependencies.database import get_redis
from app.fetcher import Fetcher as OriginFetcher from app.fetcher import Fetcher as OriginFetcher
from app.log import logger from app.log import fetcher_logger
from fastapi import Depends from fastapi import Depends
@@ -29,7 +29,9 @@ async def get_fetcher() -> OriginFetcher:
if refresh_token: if refresh_token:
fetcher.refresh_token = str(refresh_token) fetcher.refresh_token = str(refresh_token)
if not fetcher.access_token or not fetcher.refresh_token: if not fetcher.access_token or not fetcher.refresh_token:
logger.opt(colors=True).info(f"Login to initialize fetcher: <y>{fetcher.authorize_url}</y>") fetcher_logger("Fetcher").opt(colors=True).info(
f"Login to initialize fetcher: <y>{fetcher.authorize_url}</y>"
)
return fetcher return fetcher

View File

@@ -5,7 +5,7 @@ import time
from urllib.parse import quote from urllib.parse import quote
from app.dependencies.database import get_redis from app.dependencies.database import get_redis
from app.log import logger from app.log import fetcher_logger
from httpx import AsyncClient from httpx import AsyncClient
@@ -16,6 +16,9 @@ class TokenAuthError(Exception):
pass pass
logger = fetcher_logger("Fetcher")
class BaseFetcher: class BaseFetcher:
def __init__( def __init__(
self, self,

View File

@@ -55,7 +55,7 @@ class GeoIPHelper:
- 临时目录退出后自动清理 - 临时目录退出后自动清理
""" """
if not self.license_key: if not self.license_key:
raise ValueError("缺少 MaxMind License Key,请传入或设置环境变量 MAXMIND_LICENSE_KEY") raise ValueError("MaxMind License Key is missing. Please configure it via env MAXMIND_LICENSE_KEY.")
url = f"{BASE_URL}?edition_id={edition_id}&license_key={self.license_key}&suffix=tar.gz" url = f"{BASE_URL}?edition_id={edition_id}&license_key={self.license_key}&suffix=tar.gz"

View File

@@ -39,16 +39,18 @@ class InterceptHandler(logging.Handler):
depth += 1 depth += 1
message = record.getMessage() message = record.getMessage()
_logger = logger
if record.name == "uvicorn.access": if record.name == "uvicorn.access":
message = self._format_uvicorn_access_log(message) message = self._format_uvicorn_access_log(message)
color = True color = True
_logger = uvicorn_logger()
elif record.name == "uvicorn.error": elif record.name == "uvicorn.error":
message = self._format_uvicorn_error_log(message) message = self._format_uvicorn_error_log(message)
_logger = uvicorn_logger()
color = True color = True
else: else:
color = False color = False
logger.opt(depth=depth, exception=record.exc_info, colors=color).log(level, message) _logger.opt(depth=depth, exception=record.exc_info, colors=color).log(level, message)
def _format_uvicorn_error_log(self, message: str) -> str: def _format_uvicorn_error_log(self, message: str) -> str:
websocket_pattern = r'(\d+\.\d+\.\d+\.\d+:\d+)\s*-\s*"WebSocket\s+([^"]+)"\s+([\w\[\]]+)' websocket_pattern = r'(\d+\.\d+\.\d+\.\d+:\d+)\s*-\s*"WebSocket\s+([^"]+)"\s+([\w\[\]]+)'
@@ -110,7 +112,6 @@ class InterceptHandler(logging.Handler):
def get_caller_class_name(module_prefix: str = "", just_last_part: bool = True) -> str | None: def get_caller_class_name(module_prefix: str = "", just_last_part: bool = True) -> str | None:
"""获取调用类名/模块名,仅对指定模块前缀生效"""
stack = inspect.stack() stack = inspect.stack()
for frame_info in stack[2:]: for frame_info in stack[2:]:
module = frame_info.frame.f_globals.get("__name__", "") module = frame_info.frame.f_globals.get("__name__", "")
@@ -157,29 +158,49 @@ def system_logger(name: str) -> Logger:
return logger.bind(system=name) return logger.bind(system=name)
def dynamic_format(record): def uvicorn_logger() -> Logger:
prefix = "" return logger.bind(uvicorn="Uvicorn")
fetcher = record["extra"].get("fetcher")
if not fetcher: def log(name: str) -> Logger:
fetcher = get_caller_class_name("app.fetcher") return logger.bind(real_name=name)
if fetcher:
prefix = f"<magenta>[{fetcher}]</magenta> "
def dynamic_format(record):
name = ""
uvicorn = record["extra"].get("uvicorn")
if uvicorn:
name = f"<fg #228B22>{uvicorn}</fg #228B22>"
service = record["extra"].get("service") service = record["extra"].get("service")
if not service: if not service:
service = get_caller_class_name("app.service") service = get_caller_class_name("app.service")
if service: if service:
prefix = f"<blue>[{service}]</blue> " name = f"<blue>{service}</blue>"
fetcher = record["extra"].get("fetcher")
if not fetcher:
fetcher = get_caller_class_name("app.fetcher")
if fetcher:
name = f"<magenta>{fetcher}</magenta>"
task = record["extra"].get("task") task = record["extra"].get("task")
if not task: if not task:
task = get_caller_class_name("app.tasks") task = get_caller_class_name("app.tasks")
if task: if task:
task = snake_to_pascal(task) task = snake_to_pascal(task)
prefix = f"<green>[{task}]</green> " name = f"<fg #FFD700>{task}</fg #FFD700>"
return f"<green>{{time:YYYY-MM-DD HH:mm:ss}}</green> [<level>{{level}}</level>] | {prefix}{{message}}\n" system = record["extra"].get("system")
if system:
name = f"<red>{system}</red>"
if name == "":
real_name = record["extra"].get("real_name", "") or record["name"]
name = f"<fg #FFC1C1>{real_name}</fg #FFC1C1>"
return f"<green>{{time:YYYY-MM-DD HH:mm:ss}}</green> [<level>{{level}}</level>] | {name} | {{message}}\n"
logger.remove() logger.remove()
@@ -195,7 +216,7 @@ logger.add(
rotation="00:00", rotation="00:00",
retention="30 days", retention="30 days",
colorize=False, colorize=False,
format="{time:YYYY-MM-DD HH:mm:ss} {level} | {message}", format=dynamic_format,
level=settings.log_level, level=settings.log_level,
diagnose=settings.debug, diagnose=settings.debug,
encoding="utf8", encoding="utf8",
@@ -210,9 +231,9 @@ uvicorn_loggers = [
] ]
for logger_name in uvicorn_loggers: for logger_name in uvicorn_loggers:
uvicorn_logger = logging.getLogger(logger_name) _uvicorn_logger = logging.getLogger(logger_name)
uvicorn_logger.handlers = [InterceptHandler()] _uvicorn_logger.handlers = [InterceptHandler()]
uvicorn_logger.propagate = False _uvicorn_logger.propagate = False
logging.getLogger("httpx").setLevel("WARNING") logging.getLogger("httpx").setLevel("WARNING")
logging.getLogger("apscheduler").setLevel("WARNING") logging.getLogger("apscheduler").setLevel("WARNING")

View File

@@ -14,7 +14,7 @@ from app.const import SUPPORT_TOTP_VERIFICATION_VER
from app.database.user import User from app.database.user import User
from app.database.verification import LoginSession from app.database.verification import LoginSession
from app.dependencies.database import get_redis, with_db from app.dependencies.database import get_redis, with_db
from app.log import logger from app.log import log
from app.service.verification_service import LoginSessionService from app.service.verification_service import LoginSessionService
from app.utils import extract_user_agent from app.utils import extract_user_agent
@@ -25,6 +25,8 @@ from sqlmodel import select
from sqlmodel.ext.asyncio.session import AsyncSession from sqlmodel.ext.asyncio.session import AsyncSession
from starlette.middleware.base import BaseHTTPMiddleware from starlette.middleware.base import BaseHTTPMiddleware
logger = log("Middleware")
class VerifySessionMiddleware(BaseHTTPMiddleware): class VerifySessionMiddleware(BaseHTTPMiddleware):
"""会话验证中间件 """会话验证中间件
@@ -84,7 +86,7 @@ class VerifySessionMiddleware(BaseHTTPMiddleware):
return await self._initiate_verification(request, session_state) return await self._initiate_verification(request, session_state)
except Exception as e: except Exception as e:
logger.error(f"[Verify Session Middleware] Error: {e}") logger.error(f"Error: {e}")
# 出错时允许请求继续,避免阻塞 # 出错时允许请求继续,避免阻塞
return await call_next(request) return await call_next(request)
@@ -145,7 +147,7 @@ class VerifySessionMiddleware(BaseHTTPMiddleware):
return user return user
except Exception as e: except Exception as e:
logger.debug(f"[Verify Session Middleware] Error getting user: {e}") logger.debug(f"Error getting user: {e}")
return None return None
async def _get_session_state(self, request: Request, user: User) -> SessionState | None: async def _get_session_state(self, request: Request, user: User) -> SessionState | None:
@@ -178,7 +180,7 @@ class VerifySessionMiddleware(BaseHTTPMiddleware):
return SessionState(session, user, redis, db, api_version) return SessionState(session, user, redis, db, api_version)
except Exception as e: except Exception as e:
logger.error(f"[Verify Session Middleware] Error getting session state: {e}") logger.error(f"Error getting session state: {e}")
return None return None
async def _initiate_verification(self, request: Request, state: SessionState) -> Response: async def _initiate_verification(self, request: Request, state: SessionState) -> Response:
@@ -195,7 +197,7 @@ class VerifySessionMiddleware(BaseHTTPMiddleware):
) )
except Exception as e: except Exception as e:
logger.error(f"[Verify Session Middleware] Error initiating verification: {e}") logger.error(f"Error initiating verification: {e}")
return JSONResponse( return JSONResponse(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, content={"error": "Verification initiation failed"} status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, content={"error": "Verification initiation failed"}
) )
@@ -261,7 +263,7 @@ class SessionState:
self.session.web_uuid, self.session.web_uuid,
) )
except Exception as e: except Exception as e:
logger.error(f"[Session State] Error marking verified: {e}") logger.error(f"Error marking verified: {e}")
async def issue_mail_if_needed(self) -> None: async def issue_mail_if_needed(self) -> None:
"""如果需要,发送验证邮件""" """如果需要,发送验证邮件"""
@@ -274,7 +276,7 @@ class SessionState:
self.db, self.redis, self.user.id, self.user.username, self.user.email, None, None self.db, self.redis, self.user.id, self.user.username, self.user.email, None, None
) )
except Exception as e: except Exception as e:
logger.error(f"[Session State] Error issuing mail: {e}") logger.error(f"Error issuing mail: {e}")
def get_key(self) -> str: def get_key(self) -> str:
"""获取会话密钥""" """获取会话密钥"""

View File

@@ -5,7 +5,7 @@ import json
from typing import Any, Literal, NotRequired, TypedDict from typing import Any, Literal, NotRequired, TypedDict
from app.config import settings as app_settings from app.config import settings as app_settings
from app.log import logger from app.log import log
from app.path import CONFIG_DIR, STATIC_DIR from app.path import CONFIG_DIR, STATIC_DIR
from pydantic import ConfigDict, Field, create_model from pydantic import ConfigDict, Field, create_model
@@ -268,7 +268,7 @@ def generate_ranked_mod_settings(enable_all: bool = False):
for mod_acronym in ruleset_mods: for mod_acronym in ruleset_mods:
result[ruleset_id][mod_acronym] = {} result[ruleset_id][mod_acronym] = {}
if not enable_all: if not enable_all:
logger.info("ENABLE_ALL_MODS_PP is deprecated, transformed to config/ranked_mods.json") log("Mod").info("ENABLE_ALL_MODS_PP is deprecated, transformed to config/ranked_mods.json")
result["$mods_checksum"] = checksum # pyright: ignore[reportArgumentType] result["$mods_checksum"] = checksum # pyright: ignore[reportArgumentType]
ranked_mods_file.write_text(json.dumps(result, indent=4)) ranked_mods_file.write_text(json.dumps(result, indent=4))

View File

@@ -2,7 +2,7 @@ from __future__ import annotations
import json import json
from app.log import logger from app.log import log
from app.path import STATIC_DIR from app.path import STATIC_DIR
from pydantic import BaseModel from pydantic import BaseModel
@@ -16,6 +16,7 @@ class BeatmapTags(BaseModel):
ALL_TAGS: dict[int, BeatmapTags] = {} ALL_TAGS: dict[int, BeatmapTags] = {}
logger = log("BeatmapTag")
def load_tags() -> None: def load_tags() -> None:

View File

@@ -22,7 +22,7 @@ from app.database.statistics import UserStatistics
from app.dependencies.database import Database, Redis from app.dependencies.database import Database, Redis
from app.dependencies.geoip import GeoIPService, IPAddress from app.dependencies.geoip import GeoIPService, IPAddress
from app.dependencies.user_agent import UserAgentInfo from app.dependencies.user_agent import UserAgentInfo
from app.log import logger from app.log import log
from app.models.extended_auth import ExtendedTokenResponse from app.models.extended_auth import ExtendedTokenResponse
from app.models.oauth import ( from app.models.oauth import (
OAuthErrorResponse, OAuthErrorResponse,
@@ -44,6 +44,8 @@ from fastapi.responses import JSONResponse
from sqlalchemy import text from sqlalchemy import text
from sqlmodel import exists, select from sqlmodel import exists, select
logger = log("Auth")
def create_oauth_error_response(error: str, description: str, hint: str, status_code: int = 400): def create_oauth_error_response(error: str, description: str, hint: str, status_code: int = 400):
"""创建标准的 OAuth 错误响应""" """创建标准的 OAuth 错误响应"""
@@ -360,9 +362,7 @@ async def oauth_token(
await LoginSessionService.mark_session_verified( await LoginSessionService.mark_session_verified(
db, redis, user_id, token_id, ip_address, user_agent, web_uuid db, redis, user_id, token_id, ip_address, user_agent, web_uuid
) )
logger.debug( logger.debug(f"New location login detected but email verification disabled, auto-verifying user {user_id}")
f"[Auth] New location login detected but email verification disabled, auto-verifying user {user_id}"
)
else: else:
# 不是新设备登录,正常登录 # 不是新设备登录,正常登录
await LoginLogService.record_login( await LoginLogService.record_login(
@@ -505,7 +505,7 @@ async def oauth_token(
) )
# 打印jwt # 打印jwt
logger.info(f"[Auth] Generated JWT for user {user_id}: {access_token}") logger.info(f"Generated JWT for user {user_id}: {access_token}")
return TokenResponse( return TokenResponse(
access_token=access_token, access_token=access_token,

View File

@@ -14,7 +14,7 @@ from app.database.user import User
from app.dependencies.database import Database, Redis from app.dependencies.database import Database, Redis
from app.dependencies.fetcher import Fetcher from app.dependencies.fetcher import Fetcher
from app.dependencies.storage import StorageService from app.dependencies.storage import StorageService
from app.log import logger from app.log import log
from app.models.multiplayer_hub import PlaylistItem as HubPlaylistItem from app.models.multiplayer_hub import PlaylistItem as HubPlaylistItem
from app.models.room import MatchType, QueueMode, RoomCategory, RoomStatus from app.models.room import MatchType, QueueMode, RoomCategory, RoomStatus
from app.utils import utcnow from app.utils import utcnow
@@ -27,6 +27,7 @@ from sqlalchemy import update
from sqlmodel import col, select from sqlmodel import col, select
router = APIRouter(prefix="/_lio", include_in_schema=False) router = APIRouter(prefix="/_lio", include_in_schema=False)
logger = log("LegacyIO")
async def _ensure_room_chat_channel( async def _ensure_room_chat_channel(

View File

@@ -16,7 +16,7 @@ from app.database.user import User
from app.dependencies.database import Database, Redis from app.dependencies.database import Database, Redis
from app.dependencies.param import BodyOrForm from app.dependencies.param import BodyOrForm
from app.dependencies.user import get_current_user from app.dependencies.user import get_current_user
from app.log import logger from app.log import log
from app.models.notification import ChannelMessage, ChannelMessageTeam from app.models.notification import ChannelMessage, ChannelMessageTeam
from app.router.v2 import api_v2_router as router from app.router.v2 import api_v2_router as router
from app.service.redis_message_system import redis_message_system from app.service.redis_message_system import redis_message_system
@@ -33,6 +33,9 @@ class KeepAliveResp(BaseModel):
silences: list[UserSilenceResp] = Field(default_factory=list) silences: list[UserSilenceResp] = Field(default_factory=list)
logger = log("Chat")
@router.post( @router.post(
"/chat/ack", "/chat/ack",
name="保持连接", name="保持连接",

View File

@@ -14,7 +14,7 @@ from app.dependencies.database import (
with_db, with_db,
) )
from app.dependencies.user import get_current_user_and_token from app.dependencies.user import get_current_user_and_token
from app.log import logger from app.log import log
from app.models.chat import ChatEvent from app.models.chat import ChatEvent
from app.models.notification import NotificationDetail from app.models.notification import NotificationDetail
from app.service.subscribers.chat import ChatSubscriber from app.service.subscribers.chat import ChatSubscriber
@@ -26,6 +26,8 @@ from fastapi.websockets import WebSocketState
from sqlmodel import select from sqlmodel import select
from sqlmodel.ext.asyncio.session import AsyncSession from sqlmodel.ext.asyncio.session import AsyncSession
logger = log("NotificationServer")
class ChatServer: class ChatServer:
def __init__(self): def __init__(self):
@@ -285,10 +287,10 @@ async def _listen_stop(ws: WebSocket, user_id: int, factory: DBFactory):
await ws.close(code=1000) await ws.close(code=1000)
break break
except WebSocketDisconnect as e: except WebSocketDisconnect as e:
logger.info(f"[NotificationServer] Client {user_id} disconnected: {e.code}, {e.reason}") logger.info(f"Client {user_id} disconnected: {e.code}, {e.reason}")
except RuntimeError as e: except RuntimeError as e:
if "disconnect message" in str(e): if "disconnect message" in str(e):
logger.info(f"[NotificationServer] Client {user_id} closed the connection.") logger.info(f"Client {user_id} closed the connection.")
else: else:
logger.exception(f"RuntimeError in client {user_id}: {e}") logger.exception(f"RuntimeError in client {user_id}: {e}")
except Exception: except Exception:

View File

@@ -39,7 +39,7 @@ from app.dependencies.database import Database, Redis, get_redis, with_db
from app.dependencies.fetcher import Fetcher, get_fetcher from app.dependencies.fetcher import Fetcher, get_fetcher
from app.dependencies.storage import StorageService from app.dependencies.storage import StorageService
from app.dependencies.user import ClientUser, get_current_user from app.dependencies.user import ClientUser, get_current_user
from app.log import logger from app.log import log
from app.models.beatmap import BeatmapRankStatus from app.models.beatmap import BeatmapRankStatus
from app.models.room import RoomCategory from app.models.room import RoomCategory
from app.models.score import ( from app.models.score import (
@@ -73,6 +73,7 @@ from sqlmodel import col, exists, func, select
from sqlmodel.ext.asyncio.session import AsyncSession from sqlmodel.ext.asyncio.session import AsyncSession
READ_SCORE_TIMEOUT = 10 READ_SCORE_TIMEOUT = 10
logger = log("Score")
async def _process_user_achievement(score_id: int): async def _process_user_achievement(score_id: int):

View File

@@ -15,7 +15,7 @@ from app.dependencies.database import Database, Redis, get_redis
from app.dependencies.geoip import IPAddress from app.dependencies.geoip import IPAddress
from app.dependencies.user import UserAndToken, get_client_user_and_token from app.dependencies.user import UserAndToken, get_client_user_and_token
from app.dependencies.user_agent import UserAgentInfo from app.dependencies.user_agent import UserAgentInfo
from app.log import logger from app.log import log
from app.service.login_log_service import LoginLogService from app.service.login_log_service import LoginLogService
from app.service.verification_service import ( from app.service.verification_service import (
EmailVerificationService, EmailVerificationService,
@@ -254,7 +254,7 @@ async def fallback_email(
user_agent, user_agent,
) )
if not success: if not success:
logger.error( log("Verification").error(
f"[Email Fallback] Failed to send fallback email to user {current_user.id} (token: {token_id}): {message}" f"Failed to send fallback email to user {current_user.id} (token: {token_id}): {message}"
) )
return VerifyMethod() return VerifyMethod()

View File

@@ -19,7 +19,7 @@ from app.database.user import SEARCH_INCLUDED
from app.dependencies.api_version import APIVersion from app.dependencies.api_version import APIVersion
from app.dependencies.database import Database, get_redis from app.dependencies.database import Database, get_redis
from app.dependencies.user import get_current_user from app.dependencies.user import get_current_user
from app.log import logger from app.log import log
from app.models.score import GameMode from app.models.score import GameMode
from app.models.user import BeatmapsetType from app.models.user import BeatmapsetType
from app.service.asset_proxy_helper import process_response_assets from app.service.asset_proxy_helper import process_response_assets
@@ -336,7 +336,7 @@ async def get_user_beatmapsets(
try: try:
await cache_service.cache_user_beatmapsets(user_id, type.value, resp, limit, offset) await cache_service.cache_user_beatmapsets(user_id, type.value, resp, limit, offset)
except Exception as e: except Exception as e:
logger.error(f"Error caching user beatmapsets for user {user_id}, type {type.value}: {e}") log("Beatmapset").error(f"Error caching user beatmapsets for user {user_id}, type {type.value}: {e}")
background_task.add_task(cache_beatmapsets) background_task.add_task(cache_beatmapsets)

View File

@@ -166,7 +166,7 @@ class BeatmapsetUpdateService:
except Exception as e: except Exception as e:
logger.error(f"failed to add missing beatmapset {missing}: {e}") logger.error(f"failed to add missing beatmapset {missing}: {e}")
if total > 0: if total > 0:
logger.info(f"added {total} missing beatmapset") logger.opt(colors=True).info(f"added {total} missing beatmapset")
self._adding_missing = False self._adding_missing = False
async def add(self, beatmapset: BeatmapsetResp): async def add(self, beatmapset: BeatmapsetResp):
@@ -206,12 +206,14 @@ class BeatmapsetUpdateService:
processing = ProcessingBeatmapset(beatmapset, sync_record) processing = ProcessingBeatmapset(beatmapset, sync_record)
next_time_delta = processing.calculate_next_sync_time() next_time_delta = processing.calculate_next_sync_time()
if not next_time_delta: if not next_time_delta:
logger.info(f"[{beatmapset.id}] beatmapset has transformed to ranked or loved, removing from sync list") logger.opt(colors=True).info(
f"<g>[{beatmapset.id}]</g> beatmapset has transformed to ranked or loved, removing from sync list"
)
await session.delete(sync_record) await session.delete(sync_record)
await session.commit() await session.commit()
return return
sync_record.next_sync_time = utcnow() + next_time_delta sync_record.next_sync_time = utcnow() + next_time_delta
logger.info(f"[{beatmapset.id}] next sync at {sync_record.next_sync_time}") logger.opt(colors=True).info(f"<g>[{beatmapset.id}]</g> next sync at {sync_record.next_sync_time}")
await session.commit() await session.commit()
async def _update_beatmaps(self): async def _update_beatmaps(self):
@@ -224,17 +226,19 @@ class BeatmapsetUpdateService:
.order_by(col(BeatmapSync.next_sync_time).desc()) .order_by(col(BeatmapSync.next_sync_time).desc())
) )
for record in records: for record in records:
logger.info(f"[{record.beatmapset_id}] syncing...") logger.opt(colors=True).info(f"<g>[{record.beatmapset_id}]</g> syncing...")
try: try:
beatmapset = await self.fetcher.get_beatmapset(record.beatmapset_id) beatmapset = await self.fetcher.get_beatmapset(record.beatmapset_id)
except Exception as e: except Exception as e:
if isinstance(e, HTTPError): if isinstance(e, HTTPError):
logger.warning( logger.opt(colors=True).warning(
f"[{record.beatmapset_id}] " f"<g>[{record.beatmapset_id}]</g> "
f"failed to fetch beatmapset: [{e.__class__.__name__}] {e}, retrying later" f"failed to fetch beatmapset: [{e.__class__.__name__}] {e}, retrying later"
) )
else: else:
logger.exception(f"[{record.beatmapset_id}] unexpected error: {e}, retrying later") logger.opt(colors=True).exception(
f"<g>[{record.beatmapset_id}]</g> unexpected error: {e}, retrying later"
)
record.next_sync_time = utcnow() + timedelta(seconds=MIN_DELTA) record.next_sync_time = utcnow() + timedelta(seconds=MIN_DELTA)
continue continue
processing = ProcessingBeatmapset(beatmapset, record) processing = ProcessingBeatmapset(beatmapset, record)
@@ -266,15 +270,16 @@ class BeatmapsetUpdateService:
next_time_delta = processing.calculate_next_sync_time() next_time_delta = processing.calculate_next_sync_time()
if not next_time_delta: if not next_time_delta:
logger.info( logger.opt(colors=True).info(
f"[{record.beatmapset_id}] beatmapset " f"<yellow>[{beatmapset.id}]</yellow> beatmapset has transformed to ranked or loved,"
"has transformed to ranked or loved," f" removing from sync list"
" removing from sync list"
) )
await session.delete(record) await session.delete(record)
else: else:
record.next_sync_time = utcnow() + next_time_delta record.next_sync_time = utcnow() + next_time_delta
logger.info(f"[{record.beatmapset_id}] next sync at {record.next_sync_time}") logger.opt(colors=True).info(
f"<g>[{record.beatmapset_id}]</g> next sync at {record.next_sync_time}"
)
await session.commit() await session.commit()
async def _process_changed_beatmapset(self, beatmapset: BeatmapsetResp): async def _process_changed_beatmapset(self, beatmapset: BeatmapsetResp):
@@ -304,7 +309,7 @@ class BeatmapsetUpdateService:
await score.ranked_score.delete(session) await score.ranked_score.delete(session)
total += 1 total += 1
if total > 0: if total > 0:
logger.info(f"[beatmap: {beatmap_id}] processed {total} old scores") logger.opt(colors=True).info(f"<g>[beatmap: {beatmap_id}]</g> processed {total} old scores")
await session.commit() await session.commit()
for change in changed: for change in changed:
@@ -312,17 +317,24 @@ class BeatmapsetUpdateService:
try: try:
beatmap = await self.fetcher.get_beatmap(change.beatmap_id) beatmap = await self.fetcher.get_beatmap(change.beatmap_id)
except Exception as e: except Exception as e:
logger.error(f"[beatmap: {change.beatmap_id}] failed to fetch added beatmap: {e}, skipping") logger.opt(colors=True).error(
f"<g>[beatmap: {change.beatmap_id}]</g> failed to fetch added beatmap: {e}, skipping"
)
continue continue
logger.info(f"[{beatmap.beatmapset_id}] adding beatmap {beatmap.id}") logger.opt(colors=True).info(f"[{beatmap.beatmapset_id}] adding beatmap {beatmap.id}")
await Beatmap.from_resp_no_save(session, beatmap) await Beatmap.from_resp_no_save(session, beatmap)
else: else:
try: try:
beatmap = await self.fetcher.get_beatmap(change.beatmap_id) beatmap = await self.fetcher.get_beatmap(change.beatmap_id)
except Exception as e: except Exception as e:
logger.error(f"[beatmap: {change.beatmap_id}] failed to fetch changed beatmap: {e}, skipping") logger.opt(colors=True).error(
f"<g>[beatmap: {change.beatmap_id}]</g> failed to fetch changed beatmap: {e}, skipping"
)
continue continue
logger.info(f"[{beatmap.beatmapset_id}] processing beatmap {beatmap.id} change {change.type}") logger.opt(colors=True).info(
f"<g>[{beatmap.beatmapset_id}]</g> processing beatmap <blue>{beatmap.id}</blue> "
f"change <cyan>{change.type}</cyan>"
)
new_db_beatmap = await Beatmap.from_resp_no_save(session, beatmap) new_db_beatmap = await Beatmap.from_resp_no_save(session, beatmap)
existing_beatmap = await session.get(Beatmap, change.beatmap_id) existing_beatmap = await session.get(Beatmap, change.beatmap_id)
if existing_beatmap: if existing_beatmap:

View File

@@ -46,13 +46,13 @@ class DatabaseCleanupService:
await db.commit() await db.commit()
if deleted_count > 0: if deleted_count > 0:
logger.debug(f"[Cleanup Service] Cleaned up {deleted_count} expired email verification codes") logger.debug(f"Cleaned up {deleted_count} expired email verification codes")
return deleted_count return deleted_count
except Exception as e: except Exception as e:
await db.rollback() await db.rollback()
logger.error(f"[Cleanup Service] Error cleaning expired verification codes: {e!s}") logger.error(f"Error cleaning expired verification codes: {e!s}")
return 0 return 0
@staticmethod @staticmethod
@@ -85,13 +85,13 @@ class DatabaseCleanupService:
await db.commit() await db.commit()
if deleted_count > 0: if deleted_count > 0:
logger.debug(f"[Cleanup Service] Cleaned up {deleted_count} expired login sessions") logger.debug(f"Cleaned up {deleted_count} expired login sessions")
return deleted_count return deleted_count
except Exception as e: except Exception as e:
await db.rollback() await db.rollback()
logger.error(f"[Cleanup Service] Error cleaning expired login sessions: {e!s}") logger.error(f"Error cleaning expired login sessions: {e!s}")
return 0 return 0
@staticmethod @staticmethod
@@ -126,15 +126,13 @@ class DatabaseCleanupService:
await db.commit() await db.commit()
if deleted_count > 0: if deleted_count > 0:
logger.debug( logger.debug(f"Cleaned up {deleted_count} used verification codes older than {days_old} days")
f"[Cleanup Service] Cleaned up {deleted_count} used verification codes older than {days_old} days"
)
return deleted_count return deleted_count
except Exception as e: except Exception as e:
await db.rollback() await db.rollback()
logger.error(f"[Cleanup Service] Error cleaning old used verification codes: {e!s}") logger.error(f"Error cleaning old used verification codes: {e!s}")
return 0 return 0
@staticmethod @staticmethod
@@ -169,16 +167,13 @@ class DatabaseCleanupService:
await db.commit() await db.commit()
if deleted_count > 0: if deleted_count > 0:
logger.debug( logger.debug(f"Cleaned up {deleted_count} unverified login sessions older than {hours_old} hour(s)")
f"[Cleanup Service] Cleaned up {deleted_count} unverified "
f"login sessions older than {hours_old} hour(s)"
)
return deleted_count return deleted_count
except Exception as e: except Exception as e:
await db.rollback() await db.rollback()
logger.error(f"[Cleanup Service] Error cleaning unverified login sessions: {e!s}") logger.error(f"Error cleaning unverified login sessions: {e!s}")
return 0 return 0
@staticmethod @staticmethod
@@ -206,13 +201,13 @@ class DatabaseCleanupService:
await db.commit() await db.commit()
if deleted_count > 0: if deleted_count > 0:
logger.debug(f"[Cleanup Service] Cleaned up {deleted_count} outdated verified sessions") logger.debug(f"Cleaned up {deleted_count} outdated verified sessions")
return deleted_count return deleted_count
except Exception as e: except Exception as e:
await db.rollback() await db.rollback()
logger.error(f"[Cleanup Service] Error cleaning outdated verified sessions: {e!s}") logger.error(f"Error cleaning outdated verified sessions: {e!s}")
return 0 return 0
@staticmethod @staticmethod
@@ -243,13 +238,13 @@ class DatabaseCleanupService:
await db.commit() await db.commit()
if deleted_count > 0: if deleted_count > 0:
logger.debug(f"[Cleanup Service] Cleaned up {deleted_count} expired trusted devices") logger.debug(f"Cleaned up {deleted_count} expired trusted devices")
return deleted_count return deleted_count
except Exception as e: except Exception as e:
await db.rollback() await db.rollback()
logger.error(f"[Cleanup Service] Error cleaning expired trusted devices: {e!s}") logger.error(f"Error cleaning expired trusted devices: {e!s}")
return 0 return 0
@staticmethod @staticmethod
@@ -278,13 +273,13 @@ class DatabaseCleanupService:
await db.commit() await db.commit()
if deleted_count > 0: if deleted_count > 0:
logger.debug(f"[Cleanup Service] Cleaned up {deleted_count} expired OAuth tokens") logger.debug(f"Cleaned up {deleted_count} expired OAuth tokens")
return deleted_count return deleted_count
except Exception as e: except Exception as e:
await db.rollback() await db.rollback()
logger.error(f"[Cleanup Service] Error cleaning expired OAuth tokens: {e!s}") logger.error(f"Error cleaning expired OAuth tokens: {e!s}")
return 0 return 0
@staticmethod @staticmethod
@@ -323,9 +318,7 @@ class DatabaseCleanupService:
total_cleaned = sum(results.values()) total_cleaned = sum(results.values())
if total_cleaned > 0: if total_cleaned > 0:
logger.debug( logger.debug(f"Full cleanup completed, total cleaned: {total_cleaned} records - {results}")
f"[Cleanup Service] Full cleanup completed, total cleaned: {total_cleaned} records - {results}"
)
return results return results
@@ -421,7 +414,7 @@ class DatabaseCleanupService:
} }
except Exception as e: except Exception as e:
logger.error(f"[Cleanup Service] Error getting cleanup statistics: {e!s}") logger.error(f"Error getting cleanup statistics: {e!s}")
return { return {
"expired_verification_codes": 0, "expired_verification_codes": 0,
"expired_login_sessions": 0, "expired_login_sessions": 0,

View File

@@ -221,11 +221,6 @@ class EmailQueue:
是否发送成功 是否发送成功
""" """
try: try:
# 如果邮件发送功能被禁用,则只记录日志
if not getattr(settings, "enable_email_sending", True):
logger.info(f"[Mock Email] Would send to {email_data.get('to_email')}: {email_data.get('subject')}")
return True
# 创建邮件 # 创建邮件
msg = MIMEMultipart("alternative") msg = MIMEMultipart("alternative")
msg["From"] = f"{self.from_name} <{self.from_email}>" msg["From"] = f"{self.from_name} <{self.from_email}>"

View File

@@ -147,11 +147,11 @@ class EmailService:
server.send_message(msg) server.send_message(msg)
logger.info(f"[Email Verification] Successfully sent verification code to {email}") logger.info(f"Successfully sent verification code to {email}")
return True return True
except Exception as e: except Exception as e:
logger.error(f"[Email Verification] Failed to send email: {e}") logger.error(f"Failed to send email: {e}")
return False return False

View File

@@ -105,13 +105,13 @@ class PasswordResetService:
email_sent = await self.send_password_reset_email(email=email, code=reset_code, username=user.username) email_sent = await self.send_password_reset_email(email=email, code=reset_code, username=user.username)
if email_sent: if email_sent:
logger.info(f"[Password Reset] Sent reset code to user {user.id} ({email})") logger.info(f"Sent reset code to user {user.id} ({email})")
return True, "密码重置邮件已发送,请查收邮箱" return True, "密码重置邮件已发送,请查收邮箱"
else: else:
# 邮件发送失败清理Redis中的数据 # 邮件发送失败清理Redis中的数据
await redis.delete(reset_code_key) await redis.delete(reset_code_key)
await redis.delete(rate_limit_key) await redis.delete(rate_limit_key)
logger.warning(f"[Password Reset] Email sending failed, cleaned up Redis data for {email}") logger.warning(f"Email sending failed, cleaned up Redis data for {email}")
return False, "邮件发送失败,请稍后重试" return False, "邮件发送失败,请稍后重试"
except Exception: except Exception:
@@ -121,7 +121,7 @@ class PasswordResetService:
await redis.delete(rate_limit_key) await redis.delete(rate_limit_key)
except Exception: except Exception:
pass pass
logger.exception("[Password Reset] Redis operation failed") logger.exception("Redis operation failed")
return False, "服务暂时不可用,请稍后重试" return False, "服务暂时不可用,请稍后重试"
async def send_password_reset_email(self, email: str, code: str, username: str) -> bool: async def send_password_reset_email(self, email: str, code: str, username: str) -> bool:
@@ -269,11 +269,11 @@ class PasswordResetService:
metadata=metadata, metadata=metadata,
) )
logger.info(f"[Password Reset] Enqueued reset code email to {email}") logger.info(f"Enqueued reset code email to {email}")
return True return True
except Exception as e: except Exception as e:
logger.error(f"[Password Reset] Failed to enqueue email: {e}") logger.error(f"Failed to enqueue email: {e}")
return False return False
async def reset_password( async def reset_password(
@@ -366,7 +366,7 @@ class PasswordResetService:
await redis.setex(reset_code_key, 300, json.dumps(reset_data)) # 保留5分钟用于日志记录 await redis.setex(reset_code_key, 300, json.dumps(reset_data)) # 保留5分钟用于日志记录
logger.info( logger.info(
f"[Password Reset] User {user_id} ({email}) successfully reset password from IP {ip_address}," f"User {user_id} ({email}) successfully reset password from IP {ip_address},"
f" invalidated {tokens_deleted} tokens" f" invalidated {tokens_deleted} tokens"
) )
return True, "密码重置成功,所有设备已被登出" return True, "密码重置成功,所有设备已被登出"
@@ -374,7 +374,7 @@ class PasswordResetService:
except Exception as e: except Exception as e:
# 不要在异常处理中访问user.id可能触发数据库操作 # 不要在异常处理中访问user.id可能触发数据库操作
user_id = reset_data.get("user_id", "未知") user_id = reset_data.get("user_id", "未知")
logger.error(f"[Password Reset] Failed to reset password for user {user_id}: {e}") logger.error(f"Failed to reset password for user {user_id}: {e}")
await session.rollback() await session.rollback()
# 数据库回滚时需要恢复Redis中的验证码状态 # 数据库回滚时需要恢复Redis中的验证码状态
@@ -401,14 +401,14 @@ class PasswordResetService:
remaining_ttl, remaining_ttl,
json.dumps(original_reset_data), json.dumps(original_reset_data),
) )
logger.info(f"[Password Reset] Restored Redis state after database rollback for {email}") logger.info(f"Restored Redis state after database rollback for {email}")
else: else:
# 如果已经过期,直接删除 # 如果已经过期,直接删除
await redis.delete(reset_code_key) await redis.delete(reset_code_key)
logger.info(f"[Password Reset] Removed expired reset code after database rollback for {email}") logger.info(f"Removed expired reset code after database rollback for {email}")
except Exception as redis_error: except Exception as redis_error:
logger.error(f"[Password Reset] Failed to restore Redis state after rollback: {redis_error}") logger.error(f"Failed to restore Redis state after rollback: {redis_error}")
return False, "密码重置失败,请稍后重试" return False, "密码重置失败,请稍后重试"
@@ -428,7 +428,7 @@ class PasswordResetService:
ttl = await redis.ttl(rate_limit_key) ttl = await redis.ttl(rate_limit_key)
return 1 if ttl > 0 else 0 return 1 if ttl > 0 else 0
except Exception as e: except Exception as e:
logger.error(f"[Password Reset] Failed to get attempts count: {e}") logger.error(f"Failed to get attempts count: {e}")
return 0 return 0

View File

@@ -2,7 +2,7 @@ from __future__ import annotations
from typing import TYPE_CHECKING from typing import TYPE_CHECKING
from app.log import logger from app.log import log
from app.models.notification import NotificationDetails from app.models.notification import NotificationDetails
from .base import RedisSubscriber from .base import RedisSubscriber
@@ -17,6 +17,8 @@ JOIN_CHANNEL = "chat:room:joined"
EXIT_CHANNEL = "chat:room:left" EXIT_CHANNEL = "chat:room:left"
ON_NOTIFICATION = "chat:notification" ON_NOTIFICATION = "chat:notification"
logger = log("Chat")
class ChatSubscriber(RedisSubscriber): class ChatSubscriber(RedisSubscriber):
def __init__(self): def __init__(self):
@@ -49,7 +51,7 @@ class ChatSubscriber(RedisSubscriber):
try: try:
detail = TypeAdapter(NotificationDetails).validate_json(s) detail = TypeAdapter(NotificationDetails).validate_json(s)
except ValueError: except ValueError:
logger.exception("") logger.exception("Failed to parse notification detail")
return return
except Exception: except Exception:
logger.exception("Failed to parse notification detail") logger.exception("Failed to parse notification detail")

View File

@@ -180,7 +180,7 @@ This email was sent automatically, please do not reply.
return True return True
except Exception as e: except Exception as e:
logger.error(f"[Email Verification] Failed to enqueue email: {e}") logger.error(f"Failed to enqueue email: {e}")
return False return False
@staticmethod @staticmethod
@@ -237,7 +237,7 @@ This email was sent automatically, please do not reply.
str(verification.id) if verification.id else "0", str(verification.id) if verification.id else "0",
) )
logger.info(f"[Email Verification] Created verification code for user {user_id}: {code}") logger.info(f"Created verification code for user {user_id}: {code}")
return verification, code return verification, code
@staticmethod @staticmethod
@@ -254,11 +254,11 @@ This email was sent automatically, please do not reply.
try: try:
# 检查是否启用邮件验证功能 # 检查是否启用邮件验证功能
if not settings.enable_email_verification: if not settings.enable_email_verification:
logger.debug(f"[Email Verification] Email verification is disabled, skipping for user {user_id}") logger.debug(f"Email verification is disabled, skipping for user {user_id}")
return True # 返回成功,但不执行验证流程 return True # 返回成功,但不执行验证流程
# 检测客户端信息 # 检测客户端信息
logger.info(f"[Email Verification] Detected client for user {user_id}: {user_agent}") logger.info(f"Detected client for user {user_id}: {user_agent}")
# 创建验证记录 # 创建验证记录
( (
@@ -272,16 +272,14 @@ This email was sent automatically, please do not reply.
success = await EmailVerificationService.send_verification_email_via_queue(email, code, username, user_id) success = await EmailVerificationService.send_verification_email_via_queue(email, code, username, user_id)
if success: if success:
logger.info( logger.info(f"Successfully enqueued verification email to {email} (user: {username})")
f"[Email Verification] Successfully enqueued verification email to {email} (user: {username})"
)
return True return True
else: else:
logger.error(f"[Email Verification] Failed to enqueue verification email: {email} (user: {username})") logger.error(f"Failed to enqueue verification email: {email} (user: {username})")
return False return False
except Exception as e: except Exception as e:
logger.error(f"[Email Verification] Exception during sending verification email: {e}") logger.error(f"Exception during sending verification email: {e}")
return False return False
@staticmethod @staticmethod
@@ -299,7 +297,7 @@ This email was sent automatically, please do not reply.
try: try:
# 检查是否启用邮件验证功能 # 检查是否启用邮件验证功能
if not settings.enable_email_verification: if not settings.enable_email_verification:
logger.debug(f"[Email Verification] Email verification is disabled, auto-approving for user {user_id}") logger.debug(f"Email verification is disabled, auto-approving for user {user_id}")
return True, "验证成功(邮件验证功能已禁用)" return True, "验证成功(邮件验证功能已禁用)"
# 先从 Redis 检查 # 先从 Redis 检查
@@ -331,11 +329,11 @@ This email was sent automatically, please do not reply.
# 删除 Redis 记录 # 删除 Redis 记录
await redis.delete(f"email_verification:{user_id}:{code}") await redis.delete(f"email_verification:{user_id}:{code}")
logger.info(f"[Email Verification] User {user_id} verification code verified successfully") logger.info(f"User {user_id} verification code verified successfully")
return True, "验证成功" return True, "验证成功"
except Exception as e: except Exception as e:
logger.error(f"[Email Verification] Exception during verification code validation: {e}") logger.error(f"Exception during verification code validation: {e}")
return False, "验证过程中发生错误" return False, "验证过程中发生错误"
@staticmethod @staticmethod
@@ -354,7 +352,7 @@ This email was sent automatically, please do not reply.
_ = user_agent _ = user_agent
# 检查是否启用邮件验证功能 # 检查是否启用邮件验证功能
if not settings.enable_email_verification: if not settings.enable_email_verification:
logger.debug(f"[Email Verification] Email verification is disabled, skipping resend for user {user_id}") logger.debug(f"Email verification is disabled, skipping resend for user {user_id}")
return True, "验证码已发送(邮件验证功能已禁用)" return True, "验证码已发送(邮件验证功能已禁用)"
# 检查重发频率限制60秒内只能发送一次 # 检查重发频率限制60秒内只能发送一次
@@ -376,7 +374,7 @@ This email was sent automatically, please do not reply.
return False, "重新发送失败,请稍后再试" return False, "重新发送失败,请稍后再试"
except Exception as e: except Exception as e:
logger.error(f"[Email Verification] Exception during resending verification code: {e}") logger.error(f"Exception during resending verification code: {e}")
return False, "重新发送过程中发生错误" return False, "重新发送过程中发生错误"

View File

@@ -3,7 +3,7 @@ from __future__ import annotations
from datetime import datetime, timedelta from datetime import datetime, timedelta
from app.dependencies.scheduler import get_scheduler from app.dependencies.scheduler import get_scheduler
from app.service.beatmapset_update_service import service from app.service.beatmapset_update_service import get_beatmapset_update_service
from app.utils import bg_tasks from app.utils import bg_tasks
SCHEDULER_INTERVAL_MINUTES = 2 SCHEDULER_INTERVAL_MINUTES = 2
@@ -16,6 +16,6 @@ SCHEDULER_INTERVAL_MINUTES = 2
next_run_time=datetime.now() + timedelta(minutes=1), next_run_time=datetime.now() + timedelta(minutes=1),
) )
async def beatmapset_update_job(): async def beatmapset_update_job():
if service is not None: service = get_beatmapset_update_service()
bg_tasks.add_task(service.add_missing_beatmapsets) bg_tasks.add_task(service.add_missing_beatmapsets)
await service._update_beatmaps() await service._update_beatmaps()

View File

@@ -80,9 +80,7 @@ async def daily_challenge_job():
allowed_mods = await redis.hget(key, "allowed_mods") # pyright: ignore[reportGeneralTypeIssues] allowed_mods = await redis.hget(key, "allowed_mods") # pyright: ignore[reportGeneralTypeIssues]
if beatmap is None or ruleset_id is None: if beatmap is None or ruleset_id is None:
logger.warning( logger.warning(f"Missing required data for daily challenge {now}. Will try again in 5 minutes.")
f"[DailyChallenge] Missing required data for daily challenge {now}. Will try again in 5 minutes."
)
get_scheduler().add_job( get_scheduler().add_job(
daily_challenge_job, daily_challenge_job,
"date", "date",
@@ -111,12 +109,12 @@ async def daily_challenge_job():
duration=int((next_day - now - timedelta(minutes=2)).total_seconds() / 60), duration=int((next_day - now - timedelta(minutes=2)).total_seconds() / 60),
) )
await MetadataHubs.broadcast_call("DailyChallengeUpdated", DailyChallengeInfo(room_id=room.id)) await MetadataHubs.broadcast_call("DailyChallengeUpdated", DailyChallengeInfo(room_id=room.id))
logger.success(f"[DailyChallenge] Added today's daily challenge: {beatmap=}, {ruleset_id=}, {required_mods=}") logger.success(f"Added today's daily challenge: {beatmap=}, {ruleset_id=}, {required_mods=}")
return return
except (ValueError, json.JSONDecodeError) as e: except (ValueError, json.JSONDecodeError) as e:
logger.warning(f"[DailyChallenge] Error processing daily challenge data: {e} Will try again in 5 minutes.") logger.warning(f"Error processing daily challenge data: {e} Will try again in 5 minutes.")
except Exception as e: except Exception as e:
logger.exception(f"[DailyChallenge] Unexpected error in daily challenge job: {e} Will try again in 5 minutes.") logger.exception(f"Unexpected error in daily challenge job: {e} Will try again in 5 minutes.")
get_scheduler().add_job( get_scheduler().add_job(
daily_challenge_job, daily_challenge_job,
"date", "date",

20
main.py
View File

@@ -8,7 +8,7 @@ from app.database import User
from app.dependencies.database import Database, engine, get_redis, redis_client from app.dependencies.database import Database, engine, get_redis, redis_client
from app.dependencies.fetcher import get_fetcher from app.dependencies.fetcher import get_fetcher
from app.dependencies.scheduler import start_scheduler, stop_scheduler from app.dependencies.scheduler import start_scheduler, stop_scheduler
from app.log import logger from app.log import logger, system_logger
from app.middleware.verify_session import VerifySessionMiddleware from app.middleware.verify_session import VerifySessionMiddleware
from app.models.mods import init_mods, init_ranked_mods from app.models.mods import init_mods, init_ranked_mods
from app.router import ( from app.router import (
@@ -136,13 +136,9 @@ if newrelic_config_path.exists():
environment = settings.new_relic_environment or ("production" if not settings.debug else "development") environment = settings.new_relic_environment or ("production" if not settings.debug else "development")
newrelic.agent.initialize(newrelic_config_path, environment) newrelic.agent.initialize(newrelic_config_path, environment)
logger.info(f"[NewRelic] Enabled, environment: {environment}") system_logger("NewRelic").info(f"Enabled, environment: {environment}")
except ImportError:
logger.warning("[NewRelic] Config file found but 'newrelic' package is not installed")
except Exception as e: except Exception as e:
logger.error(f"[NewRelic] Initialization failed: {e}") system_logger("NewRelic").error(f"Initialization failed: {e}")
else:
logger.info("[NewRelic] No newrelic.ini config file found, skipping initialization")
if settings.sentry_dsn is not None: if settings.sentry_dsn is not None:
sentry_sdk.init( sentry_sdk.init(
@@ -245,10 +241,14 @@ async def http_exception_handler(requst: Request, exc: HTTPException):
if settings.secret_key == "your_jwt_secret_here": if settings.secret_key == "your_jwt_secret_here":
logger.warning("jwt_secret_key is unset. Your server is unsafe. Use this command to generate: openssl rand -hex 32") system_logger("Security").opt(colors=True).warning(
"<y>jwt_secret_key</y> is unset. Your server is unsafe. "
"Use this command to generate: <blue>openssl rand -hex 32</blue>."
)
if settings.osu_web_client_secret == "your_osu_web_client_secret_here": if settings.osu_web_client_secret == "your_osu_web_client_secret_here":
logger.warning( system_logger("Security").opt(colors=True).warning(
"osu_web_client_secret is unset. Your server is unsafe. Use this command to generate: openssl rand -hex 40" "<y>osu_web_client_secret</y> is unset. Your server is unsafe. "
"Use this command to generate: <blue>openssl rand -hex 40</blue>."
) )
if __name__ == "__main__": if __name__ == "__main__":