From 37b4eadf79766a4b508ef1b2d8b1f46c00e50e0b Mon Sep 17 00:00:00 2001 From: MingxuanGame Date: Fri, 3 Oct 2025 03:33:47 +0000 Subject: [PATCH 01/26] refactor(database): rename filename to find the models by table name easily --- app/database/__init__.py | 22 +++++------ app/database/achievement.py | 2 +- app/database/auth.py | 2 +- app/database/beatmap.py | 2 +- app/database/beatmap_playcounts.py | 2 +- app/database/beatmapset.py | 2 +- app/database/beatmapset_ratings.py | 2 +- .../{pp_best_score.py => best_scores.py} | 2 +- app/database/chat.py | 2 +- app/database/counts.py | 2 +- app/database/daily_challenge.py | 2 +- app/database/events.py | 2 +- app/database/favourite_beatmapset.py | 2 +- app/database/field_utils.py | 39 ------------------- ...ist_attempts.py => item_attempts_count.py} | 2 +- app/database/playlist_best_score.py | 2 +- app/database/rank_history.py | 2 +- app/database/relationship.py | 2 +- app/database/room.py | 4 +- app/database/room_participated_user.py | 2 +- app/database/score.py | 6 +-- app/database/score_token.py | 2 +- app/database/statistics.py | 6 +-- app/database/team.py | 2 +- ...st_score.py => total_score_best_scores.py} | 2 +- app/database/{lazer_user.py => user.py} | 4 +- app/middleware/verify_session.py | 2 +- app/router/lio.py | 2 +- app/router/notification/__init__.py | 2 +- app/router/notification/banchobot.py | 2 +- app/router/notification/channel.py | 2 +- app/router/notification/message.py | 2 +- app/router/notification/server.py | 2 +- app/router/private/avatar.py | 2 +- app/router/private/beatmapset.py | 2 +- app/router/private/cover.py | 2 +- app/router/private/oauth.py | 2 +- app/router/private/score.py | 2 +- app/router/private/team.py | 2 +- app/router/private/totp.py | 2 +- app/router/private/username.py | 2 +- app/router/v1/public_user.py | 2 +- app/router/v1/score.py | 2 +- app/router/v1/user.py | 2 +- app/router/v2/relationship.py | 2 +- app/router/v2/room.py | 4 +- app/router/v2/score.py | 2 +- 
app/router/v2/tags.py | 2 +- app/router/v2/user.py | 4 +- app/service/create_banchobot.py | 2 +- app/service/daily_challenge.py | 2 +- app/service/optimized_message.py | 4 +- app/service/osu_rx_statistics.py | 2 +- app/service/recalculate_banned_beatmap.py | 2 +- app/service/redis_message_system.py | 2 +- app/service/user_cache_service.py | 2 +- app/signalr/hub/multiplayer.py | 2 +- tools/recalculate.py | 2 +- 58 files changed, 76 insertions(+), 115 deletions(-) rename app/database/{pp_best_score.py => best_scores.py} (98%) delete mode 100644 app/database/field_utils.py rename app/database/{playlist_attempts.py => item_attempts_count.py} (99%) rename app/database/{best_score.py => total_score_best_scores.py} (98%) rename app/database/{lazer_user.py => user.py} (99%) diff --git a/app/database/__init__.py b/app/database/__init__.py index 328710a..7140ea1 100644 --- a/app/database/__init__.py +++ b/app/database/__init__.py @@ -12,7 +12,7 @@ from .beatmapset import ( BeatmapsetResp, ) from .beatmapset_ratings import BeatmapRating -from .best_score import BestScore +from .best_scores import PPBestScore from .chat import ( ChannelType, ChatChannel, @@ -28,22 +28,16 @@ from .counts import ( from .daily_challenge import DailyChallengeStats, DailyChallengeStatsResp from .events import Event from .favourite_beatmapset import FavouriteBeatmapset -from .lazer_user import ( - MeResp, - User, - UserResp, -) -from .multiplayer_event import MultiplayerEvent, MultiplayerEventResp -from .notification import Notification, UserNotification -from .password_reset import PasswordReset -from .playlist_attempts import ( +from .item_attempts_count import ( ItemAttemptsCount, ItemAttemptsResp, PlaylistAggregateScore, ) +from .multiplayer_event import MultiplayerEvent, MultiplayerEventResp +from .notification import Notification, UserNotification +from .password_reset import PasswordReset from .playlist_best_score import PlaylistBestScore from .playlists import Playlist, PlaylistResp -from 
.pp_best_score import PPBestScore from .rank_history import RankHistory, RankHistoryResp, RankTop from .relationship import Relationship, RelationshipResp, RelationshipType from .room import APIUploadedRoom, Room, RoomResp @@ -62,6 +56,12 @@ from .statistics import ( UserStatisticsResp, ) from .team import Team, TeamMember, TeamRequest +from .total_score_best_scores import BestScore +from .user import ( + MeResp, + User, + UserResp, +) from .user_account_history import ( UserAccountHistory, UserAccountHistoryResp, diff --git a/app/database/achievement.py b/app/database/achievement.py index fcaacf4..ca5580c 100644 --- a/app/database/achievement.py +++ b/app/database/achievement.py @@ -24,7 +24,7 @@ from sqlmodel import ( from sqlmodel.ext.asyncio.session import AsyncSession if TYPE_CHECKING: - from .lazer_user import User + from .user import User class UserAchievementBase(SQLModel, UTCBaseModel): diff --git a/app/database/auth.py b/app/database/auth.py index 11995d8..443ba8c 100644 --- a/app/database/auth.py +++ b/app/database/auth.py @@ -19,7 +19,7 @@ from sqlmodel import ( ) if TYPE_CHECKING: - from .lazer_user import User + from .user import User class OAuthToken(UTCBaseModel, SQLModel, table=True): diff --git a/app/database/beatmap.py b/app/database/beatmap.py index ab849e6..ff920b1 100644 --- a/app/database/beatmap.py +++ b/app/database/beatmap.py @@ -23,7 +23,7 @@ from sqlmodel.ext.asyncio.session import AsyncSession if TYPE_CHECKING: from app.fetcher import Fetcher - from .lazer_user import User + from .user import User class BeatmapOwner(SQLModel): diff --git a/app/database/beatmap_playcounts.py b/app/database/beatmap_playcounts.py index af99f29..2635d6a 100644 --- a/app/database/beatmap_playcounts.py +++ b/app/database/beatmap_playcounts.py @@ -20,7 +20,7 @@ from sqlmodel.ext.asyncio.session import AsyncSession if TYPE_CHECKING: from .beatmap import Beatmap, BeatmapResp from .beatmapset import BeatmapsetResp - from .lazer_user import User + from .user 
import User class BeatmapPlaycounts(AsyncAttrs, SQLModel, table=True): diff --git a/app/database/beatmapset.py b/app/database/beatmapset.py index 8754a02..fa9abf4 100644 --- a/app/database/beatmapset.py +++ b/app/database/beatmapset.py @@ -5,7 +5,7 @@ from app.config import settings from app.models.beatmap import BeatmapRankStatus, Genre, Language from app.models.score import GameMode -from .lazer_user import BASE_INCLUDES, User, UserResp +from .user import BASE_INCLUDES, User, UserResp from pydantic import BaseModel, field_validator, model_validator from sqlalchemy import JSON, Boolean, Column, DateTime, Text diff --git a/app/database/beatmapset_ratings.py b/app/database/beatmapset_ratings.py index 07627b4..48ab0c1 100644 --- a/app/database/beatmapset_ratings.py +++ b/app/database/beatmapset_ratings.py @@ -1,7 +1,7 @@ from __future__ import annotations from app.database.beatmapset import Beatmapset -from app.database.lazer_user import User +from app.database.user import User from sqlmodel import BigInteger, Column, Field, ForeignKey, Relationship, SQLModel diff --git a/app/database/pp_best_score.py b/app/database/best_scores.py similarity index 98% rename from app/database/pp_best_score.py rename to app/database/best_scores.py index ad82114..ac3b44b 100644 --- a/app/database/pp_best_score.py +++ b/app/database/best_scores.py @@ -3,7 +3,7 @@ from typing import TYPE_CHECKING from app.database.statistics import UserStatistics from app.models.score import GameMode -from .lazer_user import User +from .user import User from sqlmodel import ( BigInteger, diff --git a/app/database/chat.py b/app/database/chat.py index a56c6e4..f0d1c47 100644 --- a/app/database/chat.py +++ b/app/database/chat.py @@ -2,7 +2,7 @@ from datetime import datetime from enum import Enum from typing import Self -from app.database.lazer_user import RANKING_INCLUDES, User, UserResp +from app.database.user import RANKING_INCLUDES, User, UserResp from app.models.model import UTCBaseModel from app.utils 
import utcnow diff --git a/app/database/counts.py b/app/database/counts.py index 57e2b46..2390c2d 100644 --- a/app/database/counts.py +++ b/app/database/counts.py @@ -11,7 +11,7 @@ from sqlmodel import ( ) if TYPE_CHECKING: - from .lazer_user import User + from .user import User class CountBase(SQLModel): diff --git a/app/database/daily_challenge.py b/app/database/daily_challenge.py index 5c9e520..98626c3 100644 --- a/app/database/daily_challenge.py +++ b/app/database/daily_challenge.py @@ -17,7 +17,7 @@ from sqlmodel import ( from sqlmodel.ext.asyncio.session import AsyncSession if TYPE_CHECKING: - from .lazer_user import User + from .user import User class DailyChallengeStatsBase(SQLModel, UTCBaseModel): diff --git a/app/database/events.py b/app/database/events.py index 2df37b4..617a08e 100644 --- a/app/database/events.py +++ b/app/database/events.py @@ -18,7 +18,7 @@ from sqlmodel import ( ) if TYPE_CHECKING: - from .lazer_user import User + from .user import User class EventType(str, Enum): diff --git a/app/database/favourite_beatmapset.py b/app/database/favourite_beatmapset.py index c59521c..308bc30 100644 --- a/app/database/favourite_beatmapset.py +++ b/app/database/favourite_beatmapset.py @@ -1,7 +1,7 @@ import datetime from app.database.beatmapset import Beatmapset -from app.database.lazer_user import User +from app.database.user import User from sqlalchemy.ext.asyncio import AsyncAttrs from sqlmodel import ( diff --git a/app/database/field_utils.py b/app/database/field_utils.py deleted file mode 100644 index 5f18134..0000000 --- a/app/database/field_utils.py +++ /dev/null @@ -1,39 +0,0 @@ -""" -数据库字段类型工具 -提供处理数据库和 Pydantic 之间类型转换的工具 -""" - -from typing import Any - -from pydantic import field_validator -from sqlalchemy import Boolean - - -def bool_field_validator(field_name: str): - """为特定布尔字段创建验证器,处理数据库中的 0/1 整数""" - - @field_validator(field_name, mode="before") - @classmethod - def validate_bool_field(cls, v: Any) -> bool: - """将整数 0/1 转换为布尔值""" - if 
isinstance(v, int): - return bool(v) - return v - - return validate_bool_field - - -def create_bool_field(**kwargs): - """创建一个带有正确 SQLAlchemy 列定义的布尔字段""" - from sqlmodel import Column, Field - - # 如果没有指定 sa_column,则使用 Boolean 类型 - if "sa_column" not in kwargs: - # 处理 index 参数 - index = kwargs.pop("index", False) - if index: - kwargs["sa_column"] = Column(Boolean, index=True) - else: - kwargs["sa_column"] = Column(Boolean) - - return Field(**kwargs) diff --git a/app/database/playlist_attempts.py b/app/database/item_attempts_count.py similarity index 99% rename from app/database/playlist_attempts.py rename to app/database/item_attempts_count.py index ccb7704..a4487c2 100644 --- a/app/database/playlist_attempts.py +++ b/app/database/item_attempts_count.py @@ -1,5 +1,5 @@ -from .lazer_user import User, UserResp from .playlist_best_score import PlaylistBestScore +from .user import User, UserResp from pydantic import BaseModel from sqlalchemy.ext.asyncio import AsyncAttrs diff --git a/app/database/playlist_best_score.py b/app/database/playlist_best_score.py index 411797d..43d8c88 100644 --- a/app/database/playlist_best_score.py +++ b/app/database/playlist_best_score.py @@ -1,6 +1,6 @@ from typing import TYPE_CHECKING -from .lazer_user import User +from .user import User from redis.asyncio import Redis from sqlmodel import ( diff --git a/app/database/rank_history.py b/app/database/rank_history.py index 8124501..6ecd6a9 100644 --- a/app/database/rank_history.py +++ b/app/database/rank_history.py @@ -21,7 +21,7 @@ from sqlmodel import ( from sqlmodel.ext.asyncio.session import AsyncSession if TYPE_CHECKING: - from .lazer_user import User + from .user import User class RankHistory(SQLModel, table=True): diff --git a/app/database/relationship.py b/app/database/relationship.py index a6d109e..f792f31 100644 --- a/app/database/relationship.py +++ b/app/database/relationship.py @@ -1,6 +1,6 @@ from enum import Enum -from .lazer_user import User, UserResp +from .user import User, 
UserResp from pydantic import BaseModel from sqlmodel import ( diff --git a/app/database/room.py b/app/database/room.py index f4844b1..647b7ca 100644 --- a/app/database/room.py +++ b/app/database/room.py @@ -1,6 +1,6 @@ from datetime import datetime -from app.database.playlist_attempts import PlaylistAggregateScore +from app.database.item_attempts_count import PlaylistAggregateScore from app.database.room_participated_user import RoomParticipatedUser from app.models.model import UTCBaseModel from app.models.multiplayer_hub import ServerMultiplayerRoom @@ -14,8 +14,8 @@ from app.models.room import ( ) from app.utils import utcnow -from .lazer_user import User, UserResp from .playlists import Playlist, PlaylistResp +from .user import User, UserResp from sqlalchemy.ext.asyncio import AsyncAttrs from sqlmodel import ( diff --git a/app/database/room_participated_user.py b/app/database/room_participated_user.py index 4ef6526..b57e72b 100644 --- a/app/database/room_participated_user.py +++ b/app/database/room_participated_user.py @@ -15,8 +15,8 @@ from sqlmodel import ( ) if TYPE_CHECKING: - from .lazer_user import User from .room import Room + from .user import User class RoomParticipatedUser(AsyncAttrs, SQLModel, table=True): diff --git a/app/database/score.py b/app/database/score.py index fb1065f..1aaf8b8 100644 --- a/app/database/score.py +++ b/app/database/score.py @@ -38,17 +38,17 @@ from app.utils import utcnow from .beatmap import Beatmap, BeatmapResp from .beatmapset import BeatmapsetResp -from .best_score import BestScore +from .best_scores import PPBestScore from .counts import MonthlyPlaycounts from .events import Event, EventType -from .lazer_user import User, UserResp from .playlist_best_score import PlaylistBestScore -from .pp_best_score import PPBestScore from .relationship import ( Relationship as DBRelationship, RelationshipType, ) from .score_token import ScoreToken +from .total_score_best_scores import BestScore +from .user import User, UserResp from 
pydantic import BaseModel, field_serializer, field_validator from redis.asyncio import Redis diff --git a/app/database/score_token.py b/app/database/score_token.py index 32eba89..f6762bb 100644 --- a/app/database/score_token.py +++ b/app/database/score_token.py @@ -5,7 +5,7 @@ from app.models.score import GameMode from app.utils import utcnow from .beatmap import Beatmap -from .lazer_user import User +from .user import User from sqlalchemy import Column, DateTime, Index from sqlalchemy.orm import Mapped diff --git a/app/database/statistics.py b/app/database/statistics.py index 55a6200..2529b7f 100644 --- a/app/database/statistics.py +++ b/app/database/statistics.py @@ -23,7 +23,7 @@ from sqlmodel import ( from sqlmodel.ext.asyncio.session import AsyncSession if TYPE_CHECKING: - from .lazer_user import User, UserResp + from .user import User, UserResp class UserStatisticsBase(SQLModel): @@ -122,7 +122,7 @@ class UserStatisticsResp(UserStatisticsBase): "progress": int(math.fmod(obj.level_current, 1) * 100), } if "user" in include: - from .lazer_user import RANKING_INCLUDES, UserResp + from .user import RANKING_INCLUDES, UserResp user = await UserResp.from_db(await obj.awaitable_attrs.user, session, include=RANKING_INCLUDES) s.user = user @@ -149,7 +149,7 @@ class UserStatisticsResp(UserStatisticsBase): async def get_rank(session: AsyncSession, statistics: UserStatistics, country: str | None = None) -> int | None: - from .lazer_user import User + from .user import User query = select( UserStatistics.user_id, diff --git a/app/database/team.py b/app/database/team.py index afdc42c..7788f99 100644 --- a/app/database/team.py +++ b/app/database/team.py @@ -8,7 +8,7 @@ from sqlalchemy import Column, DateTime from sqlmodel import BigInteger, Field, ForeignKey, Relationship, SQLModel if TYPE_CHECKING: - from .lazer_user import User + from .user import User class Team(SQLModel, UTCBaseModel, table=True): diff --git a/app/database/best_score.py 
b/app/database/total_score_best_scores.py similarity index 98% rename from app/database/best_score.py rename to app/database/total_score_best_scores.py index eaaa5d8..963678c 100644 --- a/app/database/best_score.py +++ b/app/database/total_score_best_scores.py @@ -4,7 +4,7 @@ from app.calculator import calculate_score_to_level from app.database.statistics import UserStatistics from app.models.score import GameMode, Rank -from .lazer_user import User +from .user import User from sqlmodel import ( JSON, diff --git a/app/database/lazer_user.py b/app/database/user.py similarity index 99% rename from app/database/lazer_user.py rename to app/database/user.py index 8d34778..053c1b0 100644 --- a/app/database/lazer_user.py +++ b/app/database/user.py @@ -261,11 +261,11 @@ class UserResp(UserBase): ) -> "UserResp": from app.dependencies.database import get_redis - from .best_score import BestScore + from .best_scores import PPBestScore from .favourite_beatmapset import FavouriteBeatmapset - from .pp_best_score import PPBestScore from .relationship import Relationship, RelationshipResp, RelationshipType from .score import Score, get_user_first_score_count + from .total_score_best_scores import BestScore ruleset = ruleset or obj.playmode diff --git a/app/middleware/verify_session.py b/app/middleware/verify_session.py index 2124ab5..76277fc 100644 --- a/app/middleware/verify_session.py +++ b/app/middleware/verify_session.py @@ -11,7 +11,7 @@ from typing import ClassVar from app.auth import get_token_by_access_token from app.const import SUPPORT_TOTP_VERIFICATION_VER -from app.database.lazer_user import User +from app.database.user import User from app.database.verification import LoginSession from app.dependencies.database import get_redis, with_db from app.log import logger diff --git a/app/router/lio.py b/app/router/lio.py index 1fbc048..8d3d960 100644 --- a/app/router/lio.py +++ b/app/router/lio.py @@ -7,10 +7,10 @@ import json from typing import Any from app.database.chat 
import ChannelType, ChatChannel # ChatChannel 模型 & 枚举 -from app.database.lazer_user import User from app.database.playlists import Playlist as DBPlaylist from app.database.room import Room from app.database.room_participated_user import RoomParticipatedUser +from app.database.user import User from app.dependencies.database import Database, get_redis from app.dependencies.fetcher import get_fetcher from app.dependencies.storage import get_storage_service diff --git a/app/router/notification/__init__.py b/app/router/notification/__init__.py index d4c0b8b..176fbdb 100644 --- a/app/router/notification/__init__.py +++ b/app/router/notification/__init__.py @@ -1,8 +1,8 @@ from __future__ import annotations from app.config import settings -from app.database.lazer_user import User from app.database.notification import Notification, UserNotification +from app.database.user import User from app.dependencies.database import Database from app.dependencies.user import get_client_user from app.models.chat import ChatEvent diff --git a/app/router/notification/banchobot.py b/app/router/notification/banchobot.py index 5b1a7ed..c140650 100644 --- a/app/router/notification/banchobot.py +++ b/app/router/notification/banchobot.py @@ -12,9 +12,9 @@ from app.const import BANCHOBOT_ID from app.database import ChatMessageResp from app.database.beatmap import Beatmap from app.database.chat import ChannelType, ChatChannel, ChatMessage, MessageType -from app.database.lazer_user import User from app.database.score import Score, get_best_id from app.database.statistics import UserStatistics, get_rank +from app.database.user import User from app.dependencies.fetcher import get_fetcher from app.exception import InvokeException from app.models.mods import APIMod, get_available_mods, mod_to_save diff --git a/app/router/notification/channel.py b/app/router/notification/channel.py index 9f76d7e..bc251ca 100644 --- a/app/router/notification/channel.py +++ b/app/router/notification/channel.py @@ -10,7 
+10,7 @@ from app.database.chat import ( SilenceUser, UserSilenceResp, ) -from app.database.lazer_user import User, UserResp +from app.database.user import User, UserResp from app.dependencies.database import Database, get_redis from app.dependencies.param import BodyOrForm from app.dependencies.user import get_current_user diff --git a/app/router/notification/message.py b/app/router/notification/message.py index 2a58af0..6470c36 100644 --- a/app/router/notification/message.py +++ b/app/router/notification/message.py @@ -10,7 +10,7 @@ from app.database.chat import ( SilenceUser, UserSilenceResp, ) -from app.database.lazer_user import User +from app.database.user import User from app.dependencies.database import Database, get_redis from app.dependencies.param import BodyOrForm from app.dependencies.user import get_current_user diff --git a/app/router/notification/server.py b/app/router/notification/server.py index 7fe8859..778338c 100644 --- a/app/router/notification/server.py +++ b/app/router/notification/server.py @@ -4,8 +4,8 @@ import asyncio from typing import overload from app.database.chat import ChannelType, ChatChannel, ChatChannelResp, ChatMessageResp -from app.database.lazer_user import User from app.database.notification import UserNotification, insert_notification +from app.database.user import User from app.dependencies.database import ( DBFactory, get_db_factory, diff --git a/app/router/private/avatar.py b/app/router/private/avatar.py index 3530c4e..e37596f 100644 --- a/app/router/private/avatar.py +++ b/app/router/private/avatar.py @@ -2,7 +2,7 @@ from __future__ import annotations import hashlib -from app.database.lazer_user import User +from app.database.user import User from app.dependencies.database import Database from app.dependencies.storage import get_storage_service from app.dependencies.user import get_client_user diff --git a/app/router/private/beatmapset.py b/app/router/private/beatmapset.py index 3c78028..ba3c4ee 100644 --- 
a/app/router/private/beatmapset.py +++ b/app/router/private/beatmapset.py @@ -3,8 +3,8 @@ from __future__ import annotations from app.database.beatmap import Beatmap from app.database.beatmapset import Beatmapset from app.database.beatmapset_ratings import BeatmapRating -from app.database.lazer_user import User from app.database.score import Score +from app.database.user import User from app.dependencies.database import Database from app.dependencies.user import get_client_user from app.service.beatmapset_update_service import get_beatmapset_update_service diff --git a/app/router/private/cover.py b/app/router/private/cover.py index adc44c5..04f8d1b 100644 --- a/app/router/private/cover.py +++ b/app/router/private/cover.py @@ -2,7 +2,7 @@ from __future__ import annotations import hashlib -from app.database.lazer_user import User, UserProfileCover +from app.database.user import User, UserProfileCover from app.dependencies.database import Database from app.dependencies.storage import get_storage_service from app.dependencies.user import get_client_user diff --git a/app/router/private/oauth.py b/app/router/private/oauth.py index d3561dc..f4f5d78 100644 --- a/app/router/private/oauth.py +++ b/app/router/private/oauth.py @@ -3,7 +3,7 @@ from __future__ import annotations import secrets from app.database.auth import OAuthClient, OAuthToken -from app.database.lazer_user import User +from app.database.user import User from app.dependencies.database import Database, get_redis from app.dependencies.user import get_client_user diff --git a/app/router/private/score.py b/app/router/private/score.py index 44fdc2a..75225bd 100644 --- a/app/router/private/score.py +++ b/app/router/private/score.py @@ -1,7 +1,7 @@ from __future__ import annotations -from app.database.lazer_user import User from app.database.score import Score +from app.database.user import User from app.dependencies.database import Database, get_redis from app.dependencies.storage import get_storage_service from 
app.dependencies.user import get_client_user diff --git a/app/router/private/team.py b/app/router/private/team.py index 26ac527..681cd13 100644 --- a/app/router/private/team.py +++ b/app/router/private/team.py @@ -2,8 +2,8 @@ from __future__ import annotations import hashlib -from app.database.lazer_user import BASE_INCLUDES, User, UserResp from app.database.team import Team, TeamMember, TeamRequest +from app.database.user import BASE_INCLUDES, User, UserResp from app.dependencies.database import Database, get_redis from app.dependencies.storage import get_storage_service from app.dependencies.user import get_client_user diff --git a/app/router/private/totp.py b/app/router/private/totp.py index 3fa3ef3..2435567 100644 --- a/app/router/private/totp.py +++ b/app/router/private/totp.py @@ -9,7 +9,7 @@ from app.auth import ( ) from app.const import BACKUP_CODE_LENGTH from app.database.auth import TotpKeys -from app.database.lazer_user import User +from app.database.user import User from app.dependencies.database import Database, get_redis from app.dependencies.user import get_client_user from app.models.totp import FinishStatus, StartCreateTotpKeyResp diff --git a/app/router/private/username.py b/app/router/private/username.py index 8f2f6e3..571cd40 100644 --- a/app/router/private/username.py +++ b/app/router/private/username.py @@ -3,7 +3,7 @@ from __future__ import annotations from app.auth import validate_username from app.config import settings from app.database.events import Event, EventType -from app.database.lazer_user import User +from app.database.user import User from app.dependencies.database import Database from app.dependencies.user import get_client_user from app.utils import utcnow diff --git a/app/router/v1/public_user.py b/app/router/v1/public_user.py index a29d830..1f5df71 100644 --- a/app/router/v1/public_user.py +++ b/app/router/v1/public_user.py @@ -2,8 +2,8 @@ from __future__ import annotations from typing import Literal -from 
app.database.lazer_user import User from app.database.statistics import UserStatistics +from app.database.user import User from app.dependencies.database import Database, get_redis from app.log import logger from app.models.score import GameMode diff --git a/app/router/v1/score.py b/app/router/v1/score.py index 27e8240..ccbcf4f 100644 --- a/app/router/v1/score.py +++ b/app/router/v1/score.py @@ -3,7 +3,7 @@ from __future__ import annotations from datetime import datetime, timedelta from typing import Literal -from app.database.pp_best_score import PPBestScore +from app.database.best_scores import PPBestScore from app.database.score import Score, get_leaderboard from app.dependencies.database import Database from app.models.mods import int_to_mods, mod_to_save, mods_to_int diff --git a/app/router/v1/user.py b/app/router/v1/user.py index fdb28b2..77e0369 100644 --- a/app/router/v1/user.py +++ b/app/router/v1/user.py @@ -3,8 +3,8 @@ from __future__ import annotations from datetime import datetime from typing import Literal -from app.database.lazer_user import User from app.database.statistics import UserStatistics, UserStatisticsResp +from app.database.user import User from app.dependencies.database import Database, get_redis from app.log import logger from app.models.score import GameMode diff --git a/app/router/v2/relationship.py b/app/router/v2/relationship.py index 431098e..b028951 100644 --- a/app/router/v2/relationship.py +++ b/app/router/v2/relationship.py @@ -1,7 +1,7 @@ from __future__ import annotations from app.database import Relationship, RelationshipResp, RelationshipType, User -from app.database.lazer_user import UserResp +from app.database.user import UserResp from app.dependencies.api_version import APIVersion from app.dependencies.database import Database from app.dependencies.user import get_client_user, get_current_user diff --git a/app/router/v2/room.py b/app/router/v2/room.py index 7b20413..6a42dbc 100644 --- a/app/router/v2/room.py +++ 
b/app/router/v2/room.py @@ -5,13 +5,13 @@ from typing import Literal from app.database.beatmap import Beatmap, BeatmapResp from app.database.beatmapset import BeatmapsetResp -from app.database.lazer_user import User, UserResp +from app.database.item_attempts_count import ItemAttemptsCount, ItemAttemptsResp from app.database.multiplayer_event import MultiplayerEvent, MultiplayerEventResp -from app.database.playlist_attempts import ItemAttemptsCount, ItemAttemptsResp from app.database.playlists import Playlist, PlaylistResp from app.database.room import APIUploadedRoom, Room, RoomResp from app.database.room_participated_user import RoomParticipatedUser from app.database.score import Score +from app.database.user import User, UserResp from app.dependencies.database import Database, get_redis from app.dependencies.user import get_client_user, get_current_user from app.models.room import RoomCategory, RoomStatus diff --git a/app/router/v2/score.py b/app/router/v2/score.py index 7472fbb..ff5765f 100644 --- a/app/router/v2/score.py +++ b/app/router/v2/score.py @@ -18,7 +18,7 @@ from app.database import ( from app.database.achievement import process_achievements from app.database.counts import ReplayWatchedCount from app.database.daily_challenge import process_daily_challenge_score -from app.database.playlist_attempts import ItemAttemptsCount +from app.database.item_attempts_count import ItemAttemptsCount from app.database.playlist_best_score import ( PlaylistBestScore, get_position, diff --git a/app/router/v2/tags.py b/app/router/v2/tags.py index af124ef..99fccb2 100644 --- a/app/router/v2/tags.py +++ b/app/router/v2/tags.py @@ -2,8 +2,8 @@ from __future__ import annotations from app.database.beatmap import Beatmap from app.database.beatmap_tags import BeatmapTagVote -from app.database.lazer_user import User from app.database.score import Score +from app.database.user import User from app.dependencies.database import get_db from app.dependencies.user import 
get_client_user from app.models.score import Rank diff --git a/app/router/v2/user.py b/app/router/v2/user.py index ed3a076..7ff444a 100644 --- a/app/router/v2/user.py +++ b/app/router/v2/user.py @@ -12,10 +12,10 @@ from app.database import ( User, UserResp, ) +from app.database.best_scores import PPBestScore from app.database.events import Event -from app.database.lazer_user import SEARCH_INCLUDED -from app.database.pp_best_score import PPBestScore from app.database.score import LegacyScoreResp, Score, ScoreResp, get_user_first_scores +from app.database.user import SEARCH_INCLUDED from app.dependencies.api_version import APIVersion from app.dependencies.database import Database, get_redis from app.dependencies.user import get_current_user diff --git a/app/service/create_banchobot.py b/app/service/create_banchobot.py index 0393396..16605c5 100644 --- a/app/service/create_banchobot.py +++ b/app/service/create_banchobot.py @@ -1,8 +1,8 @@ from __future__ import annotations from app.const import BANCHOBOT_ID -from app.database.lazer_user import User from app.database.statistics import UserStatistics +from app.database.user import User from app.dependencies.database import with_db from app.models.score import GameMode diff --git a/app/service/daily_challenge.py b/app/service/daily_challenge.py index 1d421c2..c0f5aac 100644 --- a/app/service/daily_challenge.py +++ b/app/service/daily_challenge.py @@ -6,11 +6,11 @@ from math import ceil from app.const import BANCHOBOT_ID from app.database.daily_challenge import DailyChallengeStats -from app.database.lazer_user import User from app.database.playlist_best_score import PlaylistBestScore from app.database.playlists import Playlist from app.database.room import Room from app.database.score import Score +from app.database.user import User from app.dependencies.database import get_redis, with_db from app.dependencies.scheduler import get_scheduler from app.log import logger diff --git a/app/service/optimized_message.py 
b/app/service/optimized_message.py index 06a5d99..c68f5c1 100644 --- a/app/service/optimized_message.py +++ b/app/service/optimized_message.py @@ -10,7 +10,7 @@ from app.database.chat import ( ChatMessageResp, MessageType, ) -from app.database.lazer_user import User +from app.database.user import User from app.log import logger from app.service.message_queue import message_queue @@ -71,7 +71,7 @@ class OptimizedMessageService: # 创建临时响应对象(简化版本,用于立即响应) from datetime import datetime - from app.database.lazer_user import UserResp + from app.database.user import UserResp # 创建基本的用户响应对象 user_resp = UserResp( diff --git a/app/service/osu_rx_statistics.py b/app/service/osu_rx_statistics.py index b53082c..ed82189 100644 --- a/app/service/osu_rx_statistics.py +++ b/app/service/osu_rx_statistics.py @@ -2,8 +2,8 @@ from __future__ import annotations from app.config import settings from app.const import BANCHOBOT_ID -from app.database.lazer_user import User from app.database.statistics import UserStatistics +from app.database.user import User from app.dependencies.database import with_db from app.models.score import GameMode diff --git a/app/service/recalculate_banned_beatmap.py b/app/service/recalculate_banned_beatmap.py index 6010e2b..0ed4d78 100644 --- a/app/service/recalculate_banned_beatmap.py +++ b/app/service/recalculate_banned_beatmap.py @@ -6,7 +6,7 @@ import json from app.calculator import calculate_pp from app.config import settings from app.database.beatmap import BannedBeatmaps, Beatmap -from app.database.pp_best_score import PPBestScore +from app.database.best_scores import PPBestScore from app.database.score import Score, calculate_user_pp from app.database.statistics import UserStatistics from app.dependencies.database import get_redis, with_db diff --git a/app/service/redis_message_system.py b/app/service/redis_message_system.py index 1f15a59..2ca6d99 100644 --- a/app/service/redis_message_system.py +++ b/app/service/redis_message_system.py @@ -15,7 +15,7 @@ 
import time from typing import Any from app.database.chat import ChatMessage, ChatMessageResp, MessageType -from app.database.lazer_user import RANKING_INCLUDES, User, UserResp +from app.database.user import RANKING_INCLUDES, User, UserResp from app.dependencies.database import get_redis_message, with_db from app.log import logger from app.utils import bg_tasks diff --git a/app/service/user_cache_service.py b/app/service/user_cache_service.py index f95bcfc..ae0022e 100644 --- a/app/service/user_cache_service.py +++ b/app/service/user_cache_service.py @@ -12,8 +12,8 @@ from typing import TYPE_CHECKING, Any from app.config import settings from app.const import BANCHOBOT_ID from app.database import User, UserResp -from app.database.lazer_user import SEARCH_INCLUDED from app.database.score import LegacyScoreResp, ScoreResp +from app.database.user import SEARCH_INCLUDED from app.dependencies.database import with_db from app.log import logger from app.models.score import GameMode diff --git a/app/signalr/hub/multiplayer.py b/app/signalr/hub/multiplayer.py index e294984..345de1d 100644 --- a/app/signalr/hub/multiplayer.py +++ b/app/signalr/hub/multiplayer.py @@ -7,11 +7,11 @@ from typing import override from app.database import Room from app.database.beatmap import Beatmap from app.database.chat import ChannelType, ChatChannel -from app.database.lazer_user import User from app.database.multiplayer_event import MultiplayerEvent from app.database.playlists import Playlist from app.database.relationship import Relationship, RelationshipType from app.database.room_participated_user import RoomParticipatedUser +from app.database.user import User from app.dependencies.database import get_redis, with_db from app.dependencies.fetcher import get_fetcher from app.exception import InvokeException diff --git a/tools/recalculate.py b/tools/recalculate.py index 791488d..97f8dfb 100644 --- a/tools/recalculate.py +++ b/tools/recalculate.py @@ -14,7 +14,7 @@ from app.config import 
settings from app.const import BANCHOBOT_ID from app.database import BestScore, UserStatistics from app.database.beatmap import Beatmap -from app.database.pp_best_score import PPBestScore +from app.database.best_scores import PPBestScore from app.database.score import Score, calculate_playtime, calculate_user_pp from app.dependencies.database import engine, get_redis from app.dependencies.fetcher import get_fetcher From 346c2557cfec9012a4af7da7fb8b6b30fdfeda1a Mon Sep 17 00:00:00 2001 From: MingxuanGame Date: Fri, 3 Oct 2025 05:41:31 +0000 Subject: [PATCH 02/26] refactor(api): use Annotated-style dependency injection --- app/database/playlists.py | 9 +- app/database/room.py | 7 +- app/dependencies/__init__.py | 3 - app/dependencies/api_version.py | 2 +- app/dependencies/beatmap_download.py | 9 +- app/dependencies/beatmapset_cache.py | 19 ++-- app/dependencies/database.py | 3 + app/dependencies/fetcher.py | 15 +++- app/dependencies/geoip.py | 9 +- app/dependencies/storage.py | 11 ++- app/dependencies/user.py | 10 ++- app/router/auth.py | 73 +++++++--------- app/router/fetcher.py | 7 +- app/router/file.py | 8 +- app/router/lio.py | 17 ++-- app/router/notification/channel.py | 46 +++++----- app/router/notification/message.py | 37 ++++---- app/router/notification/server.py | 12 +-- app/router/private/admin.py | 19 ++-- app/router/private/avatar.py | 15 ++-- app/router/private/beatmapset.py | 15 ++-- app/router/private/cover.py | 16 ++-- app/router/private/oauth.py | 41 +++++---- app/router/private/relationship.py | 12 +-- app/router/private/score.py | 17 ++-- app/router/private/team.py | 67 +++++++------- app/router/private/totp.py | 28 +++--- app/router/private/username.py | 10 ++- app/router/v1/beatmap.py | 34 ++++---- app/router/v1/public_user.py | 8 +- app/router/v1/replay.py | 31 +++---- app/router/v1/score.py | 30 +++---- app/router/v1/user.py | 10 +-- app/router/v2/beatmap.py | 57 ++++++------ app/router/v2/beatmapset.py | 62 +++++++------ app/router/v2/cache.py 
| 13 ++- app/router/v2/me.py | 15 ++-- app/router/v2/ranking.py | 68 ++++++++------- app/router/v2/relationship.py | 14 +-- app/router/v2/room.py | 87 +++++++++--------- app/router/v2/score.py | 126 +++++++++++++-------------- app/router/v2/session_verify.py | 27 +++--- app/router/v2/tags.py | 20 +++-- app/router/v2/user.py | 58 ++++++------ app/signalr/router.py | 3 +- 45 files changed, 623 insertions(+), 577 deletions(-) diff --git a/app/database/playlists.py b/app/database/playlists.py index e36441e..b589e96 100644 --- a/app/database/playlists.py +++ b/app/database/playlists.py @@ -3,7 +3,6 @@ from typing import TYPE_CHECKING from app.models.model import UTCBaseModel from app.models.mods import APIMod -from app.models.multiplayer_hub import PlaylistItem from .beatmap import Beatmap, BeatmapResp @@ -22,6 +21,8 @@ from sqlmodel import ( from sqlmodel.ext.asyncio.session import AsyncSession if TYPE_CHECKING: + from app.models.multiplayer_hub import PlaylistItem + from .room import Room @@ -72,7 +73,7 @@ class Playlist(PlaylistBase, table=True): return result.one() @classmethod - async def from_hub(cls, playlist: PlaylistItem, room_id: int, session: AsyncSession) -> "Playlist": + async def from_hub(cls, playlist: "PlaylistItem", room_id: int, session: AsyncSession) -> "Playlist": next_id = await cls.get_next_id_for_room(room_id, session=session) return cls( id=next_id, @@ -89,7 +90,7 @@ class Playlist(PlaylistBase, table=True): ) @classmethod - async def update(cls, playlist: PlaylistItem, room_id: int, session: AsyncSession): + async def update(cls, playlist: "PlaylistItem", room_id: int, session: AsyncSession): db_playlist = await session.exec(select(cls).where(cls.id == playlist.id, cls.room_id == room_id)) db_playlist = db_playlist.first() if db_playlist is None: @@ -106,7 +107,7 @@ class Playlist(PlaylistBase, table=True): await session.commit() @classmethod - async def add_to_db(cls, playlist: PlaylistItem, room_id: int, session: AsyncSession): + async def 
add_to_db(cls, playlist: "PlaylistItem", room_id: int, session: AsyncSession): db_playlist = await cls.from_hub(playlist, room_id, session) session.add(db_playlist) await session.commit() diff --git a/app/database/room.py b/app/database/room.py index 647b7ca..2729e37 100644 --- a/app/database/room.py +++ b/app/database/room.py @@ -1,9 +1,9 @@ from datetime import datetime +from typing import TYPE_CHECKING from app.database.item_attempts_count import PlaylistAggregateScore from app.database.room_participated_user import RoomParticipatedUser from app.models.model import UTCBaseModel -from app.models.multiplayer_hub import ServerMultiplayerRoom from app.models.room import ( MatchType, QueueMode, @@ -32,6 +32,9 @@ from sqlmodel import ( ) from sqlmodel.ext.asyncio.session import AsyncSession +if TYPE_CHECKING: + from app.models.multiplayer_hub import ServerMultiplayerRoom + class RoomBase(SQLModel, UTCBaseModel): name: str = Field(index=True) @@ -161,7 +164,7 @@ class RoomResp(RoomBase): return resp @classmethod - async def from_hub(cls, server_room: ServerMultiplayerRoom) -> "RoomResp": + async def from_hub(cls, server_room: "ServerMultiplayerRoom") -> "RoomResp": room = server_room.room resp = cls( id=room.room_id, diff --git a/app/dependencies/__init__.py b/app/dependencies/__init__.py index cdcce5a..8b13789 100644 --- a/app/dependencies/__init__.py +++ b/app/dependencies/__init__.py @@ -1,4 +1 @@ -from __future__ import annotations -from .database import get_db as get_db -from .user import get_current_user as get_current_user diff --git a/app/dependencies/api_version.py b/app/dependencies/api_version.py index 7cfa1c7..af8489a 100644 --- a/app/dependencies/api_version.py +++ b/app/dependencies/api_version.py @@ -5,7 +5,7 @@ from typing import Annotated from fastapi import Depends, Header -def get_api_version(version: int | None = Header(None, alias="x-api-version")) -> int: +def get_api_version(version: int | None = Header(None, alias="x-api-version", 
include_in_schema=False)) -> int: if version is None: return 0 if version < 1: diff --git a/app/dependencies/beatmap_download.py b/app/dependencies/beatmap_download.py index ffed3a0..818dc7e 100644 --- a/app/dependencies/beatmap_download.py +++ b/app/dependencies/beatmap_download.py @@ -1,8 +1,15 @@ from __future__ import annotations -from app.service.beatmap_download_service import download_service +from typing import Annotated + +from app.service.beatmap_download_service import BeatmapDownloadService, download_service + +from fastapi import Depends def get_beatmap_download_service(): """获取谱面下载服务实例""" return download_service + + +DownloadService = Annotated[BeatmapDownloadService, Depends(get_beatmap_download_service)] diff --git a/app/dependencies/beatmapset_cache.py b/app/dependencies/beatmapset_cache.py index f5ac96b..df177e2 100644 --- a/app/dependencies/beatmapset_cache.py +++ b/app/dependencies/beatmapset_cache.py @@ -1,16 +1,19 @@ -""" -Beatmapset缓存服务依赖注入 -""" - from __future__ import annotations -from app.dependencies.database import get_redis -from app.service.beatmapset_cache_service import BeatmapsetCacheService, get_beatmapset_cache_service +from typing import Annotated + +from app.dependencies.database import Redis +from app.service.beatmapset_cache_service import ( + BeatmapsetCacheService as OriginBeatmapsetCacheService, + get_beatmapset_cache_service, +) from fastapi import Depends -from redis.asyncio import Redis -def get_beatmapset_cache_dependency(redis: Redis = Depends(get_redis)) -> BeatmapsetCacheService: +def get_beatmapset_cache_dependency(redis: Redis) -> OriginBeatmapsetCacheService: """获取beatmapset缓存服务依赖""" return get_beatmapset_cache_service(redis) + + +BeatmapsetCacheService = Annotated[OriginBeatmapsetCacheService, Depends(get_beatmapset_cache_dependency)] diff --git a/app/dependencies/database.py b/app/dependencies/database.py index 2fc11fb..1e0a29a 100644 --- a/app/dependencies/database.py +++ b/app/dependencies/database.py @@ -91,6 
+91,9 @@ def get_redis(): return redis_client +Redis = Annotated[redis.Redis, Depends(get_redis)] + + def get_redis_binary(): """获取二进制数据专用的 Redis 客户端 (不自动解码响应)""" return redis_binary_client diff --git a/app/dependencies/fetcher.py b/app/dependencies/fetcher.py index b4db26c..ccc3f06 100644 --- a/app/dependencies/fetcher.py +++ b/app/dependencies/fetcher.py @@ -1,17 +1,21 @@ from __future__ import annotations +from typing import Annotated + from app.config import settings from app.dependencies.database import get_redis -from app.fetcher import Fetcher +from app.fetcher import Fetcher as OriginFetcher from app.log import logger -fetcher: Fetcher | None = None +from fastapi import Depends + +fetcher: OriginFetcher | None = None -async def get_fetcher() -> Fetcher: +async def get_fetcher() -> OriginFetcher: global fetcher if fetcher is None: - fetcher = Fetcher( + fetcher = OriginFetcher( settings.fetcher_client_id, settings.fetcher_client_secret, settings.fetcher_scopes, @@ -27,3 +31,6 @@ async def get_fetcher() -> Fetcher: if not fetcher.access_token or not fetcher.refresh_token: logger.opt(colors=True).info(f"Login to initialize fetcher: {fetcher.authorize_url}") return fetcher + + +Fetcher = Annotated[OriginFetcher, Depends(get_fetcher)] diff --git a/app/dependencies/geoip.py b/app/dependencies/geoip.py index 9aafafd..089b90c 100644 --- a/app/dependencies/geoip.py +++ b/app/dependencies/geoip.py @@ -6,10 +6,13 @@ from __future__ import annotations from functools import lru_cache import ipaddress +from typing import Annotated from app.config import settings from app.helpers.geoip_helper import GeoIPHelper +from fastapi import Depends, Request + @lru_cache def get_geoip_helper() -> GeoIPHelper: @@ -26,7 +29,7 @@ def get_geoip_helper() -> GeoIPHelper: ) -def get_client_ip(request) -> str: +def get_client_ip(request: Request) -> str: """ 获取客户端真实 IP 地址 支持 IPv4 和 IPv6,考虑代理、负载均衡器等情况 @@ -66,6 +69,10 @@ def get_client_ip(request) -> str: return client_ip if 
is_valid_ip(client_ip) else "127.0.0.1" +IPAddress = Annotated[str, Depends(get_client_ip)] +GeoIPService = Annotated[GeoIPHelper, Depends(get_geoip_helper)] + + def is_valid_ip(ip_str: str) -> bool: """ 验证 IP 地址是否有效(支持 IPv4 和 IPv6) diff --git a/app/dependencies/storage.py b/app/dependencies/storage.py index 22906e0..413e5b0 100644 --- a/app/dependencies/storage.py +++ b/app/dependencies/storage.py @@ -1,6 +1,6 @@ from __future__ import annotations -from typing import cast +from typing import Annotated, cast from app.config import ( AWSS3StorageSettings, @@ -9,11 +9,13 @@ from app.config import ( StorageServiceType, settings, ) -from app.storage import StorageService +from app.storage import StorageService as OriginStorageService from app.storage.cloudflare_r2 import AWSS3StorageService, CloudflareR2StorageService from app.storage.local import LocalStorageService -storage: StorageService | None = None +from fastapi import Depends + +storage: OriginStorageService | None = None def init_storage_service(): @@ -50,3 +52,6 @@ def get_storage_service(): if storage is None: return init_storage_service() return storage + + +StorageService = Annotated[OriginStorageService, Depends(get_storage_service)] diff --git a/app/dependencies/user.py b/app/dependencies/user.py index 449b550..ff3ff53 100644 --- a/app/dependencies/user.py +++ b/app/dependencies/user.py @@ -4,6 +4,7 @@ from typing import Annotated from app.auth import get_token_by_access_token from app.config import settings +from app.const import SUPPORT_TOTP_VERIFICATION_VER from app.database import User from app.database.auth import OAuthToken, V1APIKeys from app.models.oauth import OAuth2ClientCredentialsBearer @@ -11,7 +12,7 @@ from app.models.oauth import OAuth2ClientCredentialsBearer from .api_version import APIVersion from .database import Database, get_redis -from fastapi import Depends, HTTPException +from fastapi import Depends, HTTPException, Security from fastapi.security import ( APIKeyQuery, HTTPBearer, @@ 
-112,13 +113,13 @@ async def get_client_user( if await LoginSessionService.check_is_need_verification(db, user.id, token.id): # 获取当前验证方式 verify_method = None - if api_version >= 20250913: + if api_version >= SUPPORT_TOTP_VERIFICATION_VER: verify_method = await LoginSessionService.get_login_method(user.id, token.id, redis) if verify_method is None: # 智能选择验证方式(有TOTP优先TOTP) totp_key = await user.awaitable_attrs.totp_key - if totp_key is not None and api_version >= 20240101: + if totp_key is not None and api_version >= SUPPORT_TOTP_VERIFICATION_VER: verify_method = "totp" else: verify_method = "mail" @@ -169,3 +170,6 @@ async def get_current_user( user_and_token: UserAndToken = Depends(get_current_user_and_token), ) -> User: return user_and_token[0] + + +ClientUser = Annotated[User, Security(get_client_user, scopes=["*"])] diff --git a/app/router/auth.py b/app/router/auth.py index 544ce50..f2e7336 100644 --- a/app/router/auth.py +++ b/app/router/auth.py @@ -2,7 +2,7 @@ from __future__ import annotations from datetime import timedelta import re -from typing import Literal +from typing import Annotated, Literal from app.auth import ( authenticate_user, @@ -19,10 +19,9 @@ from app.const import BANCHOBOT_ID from app.database import DailyChallengeStats, OAuthClient, User from app.database.auth import TotpKeys from app.database.statistics import UserStatistics -from app.dependencies.database import Database, get_redis -from app.dependencies.geoip import get_client_ip, get_geoip_helper +from app.dependencies.database import Database, Redis +from app.dependencies.geoip import GeoIPService, IPAddress from app.dependencies.user_agent import UserAgentInfo -from app.helpers.geoip_helper import GeoIPHelper from app.log import logger from app.models.extended_auth import ExtendedTokenResponse from app.models.oauth import ( @@ -40,9 +39,8 @@ from app.service.verification_service import ( ) from app.utils import utcnow -from fastapi import APIRouter, Depends, Form, Header, Request 
+from fastapi import APIRouter, Form, Header, Request from fastapi.responses import JSONResponse -from redis.asyncio import Redis from sqlalchemy import text from sqlmodel import exists, select @@ -93,11 +91,11 @@ router = APIRouter(tags=["osu! OAuth 认证"]) ) async def register_user( db: Database, - request: Request, - user_username: str = Form(..., alias="user[username]", description="用户名"), - user_email: str = Form(..., alias="user[user_email]", description="电子邮箱"), - user_password: str = Form(..., alias="user[password]", description="密码"), - geoip: GeoIPHelper = Depends(get_geoip_helper), + user_username: Annotated[str, Form(..., alias="user[username]", description="用户名")], + user_email: Annotated[str, Form(..., alias="user[user_email]", description="电子邮箱")], + user_password: Annotated[str, Form(..., alias="user[password]", description="密码")], + geoip: GeoIPService, + client_ip: IPAddress, ): username_errors = validate_username(user_username) email_errors = validate_email(user_email) @@ -126,7 +124,6 @@ async def register_user( try: # 获取客户端 IP 并查询地理位置 - client_ip = get_client_ip(request) country_code = "CN" # 默认国家代码 try: @@ -201,19 +198,21 @@ async def oauth_token( db: Database, request: Request, user_agent: UserAgentInfo, - grant_type: Literal["authorization_code", "refresh_token", "password", "client_credentials"] = Form( - ..., description="授权类型:密码/刷新令牌/授权码/客户端凭证" - ), - client_id: int = Form(..., description="客户端 ID"), - client_secret: str = Form(..., description="客户端密钥"), - code: str | None = Form(None, description="授权码(仅授权码模式需要)"), - scope: str = Form("*", description="权限范围(空格分隔,默认为 '*')"), - username: str | None = Form(None, description="用户名(仅密码模式需要)"), - password: str | None = Form(None, description="密码(仅密码模式需要)"), - refresh_token: str | None = Form(None, description="刷新令牌(仅刷新令牌模式需要)"), - redis: Redis = Depends(get_redis), - geoip: GeoIPHelper = Depends(get_geoip_helper), - web_uuid: str | None = Header(None, include_in_schema=False, alias="X-UUID"), + 
ip_address: IPAddress, + grant_type: Annotated[ + Literal["authorization_code", "refresh_token", "password", "client_credentials"], + Form(..., description="授权类型:密码、刷新令牌和授权码三种授权方式。"), + ], + client_id: Annotated[int, Form(..., description="客户端 ID")], + client_secret: Annotated[str, Form(..., description="客户端密钥")], + redis: Redis, + geoip: GeoIPService, + code: Annotated[str | None, Form(description="授权码(仅授权码模式需要)")] = None, + scope: Annotated[str, Form(description="权限范围(空格分隔,默认为 '*')")] = "*", + username: Annotated[str | None, Form(description="用户名(仅密码模式需要)")] = None, + password: Annotated[str | None, Form(description="密码(仅密码模式需要)")] = None, + refresh_token: Annotated[str | None, Form(description="刷新令牌(仅刷新令牌模式需要)")] = None, + web_uuid: Annotated[str | None, Header(include_in_schema=False, alias="X-UUID")] = None, ): scopes = scope.split(" ") @@ -311,8 +310,6 @@ async def oauth_token( ) token_id = token.id - ip_address = get_client_ip(request) - # 获取国家代码 geo_info = geoip.lookup(ip_address) country_code = geo_info.get("country_iso", "XX") @@ -571,16 +568,14 @@ async def oauth_token( ) async def request_password_reset( request: Request, - email: str = Form(..., description="邮箱地址"), - redis: Redis = Depends(get_redis), + email: Annotated[str, Form(..., description="邮箱地址")], + redis: Redis, + ip_address: IPAddress, ): """ 请求密码重置 """ - from app.dependencies.geoip import get_client_ip - # 获取客户端信息 - ip_address = get_client_ip(request) user_agent = request.headers.get("User-Agent", "") # 请求密码重置 @@ -599,20 +594,16 @@ async def request_password_reset( @router.post("/password-reset/reset", name="重置密码", description="使用验证码重置密码") async def reset_password( - request: Request, - email: str = Form(..., description="邮箱地址"), - reset_code: str = Form(..., description="重置验证码"), - new_password: str = Form(..., description="新密码"), - redis: Redis = Depends(get_redis), + email: Annotated[str, Form(..., description="邮箱地址")], + reset_code: Annotated[str, Form(..., description="重置验证码")], + 
new_password: Annotated[str, Form(..., description="新密码")], + redis: Redis, + ip_address: IPAddress, ): """ 重置密码 """ - from app.dependencies.geoip import get_client_ip - # 获取客户端信息 - ip_address = get_client_ip(request) - # 重置密码 success, message = await password_reset_service.reset_password( email=email.lower().strip(), diff --git a/app/router/fetcher.py b/app/router/fetcher.py index f936ed6..887eabf 100644 --- a/app/router/fetcher.py +++ b/app/router/fetcher.py @@ -1,14 +1,13 @@ from __future__ import annotations -from app.dependencies.fetcher import get_fetcher -from app.fetcher import Fetcher +from app.dependencies.fetcher import Fetcher -from fastapi import APIRouter, Depends +from fastapi import APIRouter fetcher_router = APIRouter(prefix="/fetcher", include_in_schema=False) @fetcher_router.get("/callback") -async def callback(code: str, fetcher: Fetcher = Depends(get_fetcher)): +async def callback(code: str, fetcher: Fetcher): await fetcher.grant_access_token(code) return {"message": "Login successful"} diff --git a/app/router/file.py b/app/router/file.py index bd35a7e..14263f9 100644 --- a/app/router/file.py +++ b/app/router/file.py @@ -1,16 +1,16 @@ from __future__ import annotations -from app.dependencies.storage import get_storage_service -from app.storage import LocalStorageService, StorageService +from app.dependencies.storage import StorageService as StorageServiceDep +from app.storage import LocalStorageService -from fastapi import APIRouter, Depends, HTTPException +from fastapi import APIRouter, HTTPException from fastapi.responses import FileResponse file_router = APIRouter(prefix="/file", include_in_schema=False) @file_router.get("/{path:path}") -async def get_file(path: str, storage: StorageService = Depends(get_storage_service)): +async def get_file(path: str, storage: StorageServiceDep): if not isinstance(storage, LocalStorageService): raise HTTPException(404, "Not Found") if not await storage.is_exists(path): diff --git a/app/router/lio.py 
b/app/router/lio.py index 8d3d960..2cabebb 100644 --- a/app/router/lio.py +++ b/app/router/lio.py @@ -11,21 +11,18 @@ from app.database.playlists import Playlist as DBPlaylist from app.database.room import Room from app.database.room_participated_user import RoomParticipatedUser from app.database.user import User -from app.dependencies.database import Database, get_redis -from app.dependencies.fetcher import get_fetcher -from app.dependencies.storage import get_storage_service -from app.fetcher import Fetcher +from app.dependencies.database import Database, Redis +from app.dependencies.fetcher import Fetcher +from app.dependencies.storage import StorageService from app.log import logger from app.models.multiplayer_hub import PlaylistItem as HubPlaylistItem from app.models.room import MatchType, QueueMode, RoomCategory, RoomStatus -from app.storage.base import StorageService from app.utils import utcnow from .notification.server import server -from fastapi import APIRouter, Depends, HTTPException, Request, status +from fastapi import APIRouter, HTTPException, Request, status from pydantic import BaseModel -from redis.asyncio import Redis from sqlalchemy import update from sqlmodel import col, select @@ -637,8 +634,8 @@ async def add_user_to_room( async def ensure_beatmap_present( beatmap_data: BeatmapEnsureRequest, db: Database, - redis: Redis = Depends(get_redis), - fetcher: Fetcher = Depends(get_fetcher), + redis: Redis, + fetcher: Fetcher, ) -> dict[str, Any]: """ 确保谱面在服务器中存在(包括元数据和原始文件缓存)。 @@ -677,7 +674,7 @@ class ReplayDataRequest(BaseModel): @router.post("/scores/replay") async def save_replay( req: ReplayDataRequest, - storage_service: StorageService = Depends(get_storage_service), + storage_service: StorageService, ): replay_data = req.mreplay replay_path = f"replays/{req.score_id}_{req.beatmap_id}_{req.user_id}_lazer_replay.osr" diff --git a/app/router/notification/channel.py b/app/router/notification/channel.py index bc251ca..62861ec 100644 --- 
a/app/router/notification/channel.py +++ b/app/router/notification/channel.py @@ -1,6 +1,6 @@ from __future__ import annotations -from typing import Any, Literal, Self +from typing import Annotated, Any, Literal, Self from app.database.chat import ( ChannelType, @@ -11,7 +11,7 @@ from app.database.chat import ( UserSilenceResp, ) from app.database.user import User, UserResp -from app.dependencies.database import Database, get_redis +from app.dependencies.database import Database, Redis from app.dependencies.param import BodyOrForm from app.dependencies.user import get_current_user from app.router.v2 import api_v2_router as router @@ -20,7 +20,6 @@ from .server import server from fastapi import Depends, HTTPException, Path, Query, Security from pydantic import BaseModel, Field, model_validator -from redis.asyncio import Redis from sqlmodel import col, select @@ -38,11 +37,14 @@ class UpdateResponse(BaseModel): ) async def get_update( session: Database, - history_since: int | None = Query(None, description="获取自此禁言 ID 之后的禁言记录"), - since: int | None = Query(None, description="获取自此消息 ID 之后的禁言记录"), - includes: list[str] = Query(["presence", "silences"], alias="includes[]", description="要包含的更新类型"), - current_user: User = Security(get_current_user, scopes=["chat.read"]), - redis: Redis = Depends(get_redis), + current_user: Annotated[User, Security(get_current_user, scopes=["chat.read"])], + redis: Redis, + history_since: Annotated[int | None, Query(description="获取自此禁言 ID 之后的禁言记录")] = None, + since: Annotated[int | None, Query(description="获取自此消息 ID 之后的禁言记录")] = None, + includes: Annotated[ + list[str], + Query(alias="includes[]", description="要包含的更新类型"), + ] = ["presence", "silences"], ): resp = UpdateResponse() if "presence" in includes: @@ -86,9 +88,9 @@ async def get_update( ) async def join_channel( session: Database, - channel: str = Path(..., description="频道 ID/名称"), - user: str = Path(..., description="用户 ID"), - current_user: User = Security(get_current_user, 
scopes=["chat.write_manage"]), + channel: Annotated[str, Path(..., description="频道 ID/名称")], + user: Annotated[str, Path(..., description="用户 ID")], + current_user: Annotated[User, Security(get_current_user, scopes=["chat.write_manage"])], ): # 使用明确的查询避免延迟加载 if channel.isdigit(): @@ -110,9 +112,9 @@ async def join_channel( ) async def leave_channel( session: Database, - channel: str = Path(..., description="频道 ID/名称"), - user: str = Path(..., description="用户 ID"), - current_user: User = Security(get_current_user, scopes=["chat.write_manage"]), + channel: Annotated[str, Path(..., description="频道 ID/名称")], + user: Annotated[str, Path(..., description="用户 ID")], + current_user: Annotated[User, Security(get_current_user, scopes=["chat.write_manage"])], ): # 使用明确的查询避免延迟加载 if channel.isdigit(): @@ -135,8 +137,8 @@ async def leave_channel( ) async def get_channel_list( session: Database, - current_user: User = Security(get_current_user, scopes=["chat.read"]), - redis: Redis = Depends(get_redis), + current_user: Annotated[User, Security(get_current_user, scopes=["chat.read"])], + redis: Redis, ): channels = (await session.exec(select(ChatChannel).where(ChatChannel.type == ChannelType.PUBLIC))).all() results = [] @@ -171,9 +173,9 @@ class GetChannelResp(BaseModel): ) async def get_channel( session: Database, - channel: str = Path(..., description="频道 ID/名称"), - current_user: User = Security(get_current_user, scopes=["chat.read"]), - redis: Redis = Depends(get_redis), + channel: Annotated[str, Path(..., description="频道 ID/名称")], + current_user: Annotated[User, Security(get_current_user, scopes=["chat.read"])], + redis: Redis, ): # 使用明确的查询避免延迟加载 if channel.isdigit(): @@ -245,9 +247,9 @@ class CreateChannelReq(BaseModel): ) async def create_channel( session: Database, - req: CreateChannelReq = Depends(BodyOrForm(CreateChannelReq)), - current_user: User = Security(get_current_user, scopes=["chat.write_manage"]), - redis: Redis = Depends(get_redis), + req: 
Annotated[CreateChannelReq, Depends(BodyOrForm(CreateChannelReq))], + current_user: Annotated[User, Security(get_current_user, scopes=["chat.write_manage"])], + redis: Redis, ): if req.type == "PM": target = await session.get(User, req.target_id) diff --git a/app/router/notification/message.py b/app/router/notification/message.py index 6470c36..41ac452 100644 --- a/app/router/notification/message.py +++ b/app/router/notification/message.py @@ -1,5 +1,7 @@ from __future__ import annotations +from typing import Annotated + from app.database import ChatMessageResp from app.database.chat import ( ChannelType, @@ -11,7 +13,7 @@ from app.database.chat import ( UserSilenceResp, ) from app.database.user import User -from app.dependencies.database import Database, get_redis +from app.dependencies.database import Database, Redis from app.dependencies.param import BodyOrForm from app.dependencies.user import get_current_user from app.log import logger @@ -24,7 +26,6 @@ from .server import server from fastapi import Depends, HTTPException, Path, Query, Security from pydantic import BaseModel, Field -from redis.asyncio import Redis from sqlmodel import col, select @@ -41,9 +42,9 @@ class KeepAliveResp(BaseModel): ) async def keep_alive( session: Database, - history_since: int | None = Query(None, description="获取自此禁言 ID 之后的禁言记录"), - since: int | None = Query(None, description="获取自此消息 ID 之后的禁言记录"), - current_user: User = Security(get_current_user, scopes=["chat.read"]), + current_user: Annotated[User, Security(get_current_user, scopes=["chat.read"])], + history_since: Annotated[int | None, Query(description="获取自此禁言 ID 之后的禁言记录")] = None, + since: Annotated[int | None, Query(description="获取自此消息 ID 之后的禁言记录")] = None, ): resp = KeepAliveResp() if history_since: @@ -73,9 +74,9 @@ class MessageReq(BaseModel): ) async def send_message( session: Database, - channel: str = Path(..., description="频道 ID/名称"), - req: MessageReq = Depends(BodyOrForm(MessageReq)), - current_user: User = 
Security(get_current_user, scopes=["chat.write"]), + channel: Annotated[str, Path(..., description="频道 ID/名称")], + req: Annotated[MessageReq, Depends(BodyOrForm(MessageReq))], + current_user: Annotated[User, Security(get_current_user, scopes=["chat.write"])], ): # 使用明确的查询来获取 channel,避免延迟加载 if channel.isdigit(): @@ -156,10 +157,10 @@ async def send_message( async def get_message( session: Database, channel: str, - limit: int = Query(50, ge=1, le=50, description="获取消息的数量"), - since: int = Query(0, ge=0, description="获取自此消息 ID 之后的消息(向前加载新消息)"), - until: int | None = Query(None, description="获取自此消息 ID 之前的消息(向后翻历史)"), - current_user: User = Security(get_current_user, scopes=["chat.read"]), + current_user: Annotated[User, Security(get_current_user, scopes=["chat.read"])], + limit: Annotated[int, Query(ge=1, le=50, description="获取消息的数量")] = 50, + since: Annotated[int, Query(ge=0, description="获取自此消息 ID 之后的消息(向前加载新消息)")] = 0, + until: Annotated[int | None, Query(description="获取自此消息 ID 之前的消息(向后翻历史)")] = None, ): # 1) 查频道 if channel.isdigit(): @@ -220,9 +221,9 @@ async def get_message( ) async def mark_as_read( session: Database, - channel: str = Path(..., description="频道 ID/名称"), - message: int = Path(..., description="消息 ID"), - current_user: User = Security(get_current_user, scopes=["chat.read"]), + channel: Annotated[str, Path(..., description="频道 ID/名称")], + message: Annotated[int, Path(..., description="消息 ID")], + current_user: Annotated[User, Security(get_current_user, scopes=["chat.read"])], ): # 使用明确的查询获取 channel,避免延迟加载 if channel.isdigit(): @@ -259,9 +260,9 @@ class NewPMResp(BaseModel): ) async def create_new_pm( session: Database, - req: PMReq = Depends(BodyOrForm(PMReq)), - current_user: User = Security(get_current_user, scopes=["chat.write"]), - redis: Redis = Depends(get_redis), + req: Annotated[PMReq, Depends(BodyOrForm(PMReq))], + current_user: Annotated[User, Security(get_current_user, scopes=["chat.write"])], + redis: Redis, ): user_id = current_user.id 
target = await session.get(User, req.target_id) diff --git a/app/router/notification/server.py b/app/router/notification/server.py index 778338c..29fc663 100644 --- a/app/router/notification/server.py +++ b/app/router/notification/server.py @@ -1,13 +1,14 @@ from __future__ import annotations import asyncio -from typing import overload +from typing import Annotated, overload from app.database.chat import ChannelType, ChatChannel, ChatChannelResp, ChatMessageResp from app.database.notification import UserNotification, insert_notification from app.database.user import User from app.dependencies.database import ( DBFactory, + Redis, get_db_factory, get_redis, with_db, @@ -22,7 +23,6 @@ from app.utils import bg_tasks from fastapi import APIRouter, Depends, Header, Query, WebSocket, WebSocketDisconnect from fastapi.security import SecurityScopes from fastapi.websockets import WebSocketState -from redis.asyncio import Redis from sqlmodel import select from sqlmodel.ext.asyncio.session import AsyncSession @@ -298,10 +298,10 @@ async def _listen_stop(ws: WebSocket, user_id: int, factory: DBFactory): @chat_router.websocket("/notification-server") async def chat_websocket( websocket: WebSocket, - token: str | None = Query(None, description="认证令牌,支持通过URL参数传递"), - access_token: str | None = Query(None, description="访问令牌,支持通过URL参数传递"), - authorization: str | None = Header(None, description="Bearer认证头"), - factory: DBFactory = Depends(get_db_factory), + factory: Annotated[DBFactory, Depends(get_db_factory)], + token: Annotated[str | None, Query(description="认证令牌,支持通过URL参数传递")] = None, + access_token: Annotated[str | None, Query(description="访问令牌,支持通过URL参数传递")] = None, + authorization: Annotated[str | None, Header(description="Bearer认证头")] = None, ): if not server._subscribed: server._subscribed = True diff --git a/app/router/private/admin.py b/app/router/private/admin.py index e29a264..57dcf98 100644 --- a/app/router/private/admin.py +++ b/app/router/private/admin.py @@ -1,15 
+1,16 @@ from __future__ import annotations +from typing import Annotated + from app.database.auth import OAuthToken from app.database.verification import LoginSession, LoginSessionResp, TrustedDevice, TrustedDeviceResp from app.dependencies.database import Database -from app.dependencies.geoip import get_geoip_helper +from app.dependencies.geoip import GeoIPService from app.dependencies.user import UserAndToken, get_client_user_and_token -from app.helpers.geoip_helper import GeoIPHelper from .router import router -from fastapi import Depends, HTTPException, Security +from fastapi import HTTPException, Security from pydantic import BaseModel from sqlmodel import col, select @@ -28,8 +29,8 @@ class SessionsResp(BaseModel): ) async def get_sessions( session: Database, - user_and_token: UserAndToken = Security(get_client_user_and_token), - geoip: GeoIPHelper = Depends(get_geoip_helper), + user_and_token: Annotated[UserAndToken, Security(get_client_user_and_token)], + geoip: GeoIPService, ): current_user, token = user_and_token sessions = ( @@ -57,7 +58,7 @@ async def get_sessions( async def delete_session( session: Database, session_id: int, - user_and_token: UserAndToken = Security(get_client_user_and_token), + user_and_token: Annotated[UserAndToken, Security(get_client_user_and_token)], ): current_user, token = user_and_token if session_id == token.id: @@ -91,8 +92,8 @@ class TrustedDevicesResp(BaseModel): ) async def get_trusted_devices( session: Database, - user_and_token: UserAndToken = Security(get_client_user_and_token), - geoip: GeoIPHelper = Depends(get_geoip_helper), + user_and_token: Annotated[UserAndToken, Security(get_client_user_and_token)], + geoip: GeoIPService, ): current_user, token = user_and_token devices = ( @@ -131,7 +132,7 @@ async def get_trusted_devices( async def delete_trusted_device( session: Database, device_id: int, - user_and_token: UserAndToken = Security(get_client_user_and_token), + user_and_token: Annotated[UserAndToken, 
Security(get_client_user_and_token)], ): current_user, token = user_and_token device = await session.get(TrustedDevice, device_id) diff --git a/app/router/private/avatar.py b/app/router/private/avatar.py index e37596f..0af8694 100644 --- a/app/router/private/avatar.py +++ b/app/router/private/avatar.py @@ -1,25 +1,24 @@ from __future__ import annotations import hashlib +from typing import Annotated -from app.database.user import User from app.dependencies.database import Database -from app.dependencies.storage import get_storage_service -from app.dependencies.user import get_client_user -from app.storage.base import StorageService +from app.dependencies.storage import StorageService +from app.dependencies.user import ClientUser from app.utils import check_image from .router import router -from fastapi import Depends, File, Security +from fastapi import File @router.post("/avatar/upload", name="上传头像", tags=["用户", "g0v0 API"]) async def upload_avatar( session: Database, - content: bytes = File(...), - current_user: User = Security(get_client_user), - storage: StorageService = Depends(get_storage_service), + content: Annotated[bytes, File(...)], + current_user: ClientUser, + storage: StorageService, ): """上传用户头像 diff --git a/app/router/private/beatmapset.py b/app/router/private/beatmapset.py index ba3c4ee..5b80841 100644 --- a/app/router/private/beatmapset.py +++ b/app/router/private/beatmapset.py @@ -1,17 +1,18 @@ from __future__ import annotations +from typing import Annotated + from app.database.beatmap import Beatmap from app.database.beatmapset import Beatmapset from app.database.beatmapset_ratings import BeatmapRating from app.database.score import Score -from app.database.user import User from app.dependencies.database import Database -from app.dependencies.user import get_client_user +from app.dependencies.user import ClientUser from app.service.beatmapset_update_service import get_beatmapset_update_service from .router import router -from fastapi import Body, 
Depends, HTTPException, Security +from fastapi import Body, Depends, HTTPException from fastapi_limiter.depends import RateLimiter from sqlmodel import col, exists, select @@ -25,7 +26,7 @@ from sqlmodel import col, exists, select async def can_rate_beatmapset( beatmapset_id: int, session: Database, - current_user: User = Security(get_client_user), + current_user: ClientUser, ): """检查用户是否可以评价谱面集 @@ -57,8 +58,8 @@ async def can_rate_beatmapset( async def rate_beatmaps( beatmapset_id: int, session: Database, - rating: int = Body(..., ge=0, le=10), - current_user: User = Security(get_client_user), + rating: Annotated[int, Body(..., ge=0, le=10)], + current_user: ClientUser, ): """为谱面集评分 @@ -96,7 +97,7 @@ async def rate_beatmaps( async def sync_beatmapset( beatmapset_id: int, session: Database, - current_user: User = Security(get_client_user), + current_user: ClientUser, ): """请求同步谱面集 diff --git a/app/router/private/cover.py b/app/router/private/cover.py index 04f8d1b..71992e0 100644 --- a/app/router/private/cover.py +++ b/app/router/private/cover.py @@ -1,25 +1,25 @@ from __future__ import annotations import hashlib +from typing import Annotated -from app.database.user import User, UserProfileCover +from app.database.user import UserProfileCover from app.dependencies.database import Database -from app.dependencies.storage import get_storage_service -from app.dependencies.user import get_client_user -from app.storage.base import StorageService +from app.dependencies.storage import StorageService +from app.dependencies.user import ClientUser from app.utils import check_image from .router import router -from fastapi import Depends, File, Security +from fastapi import File @router.post("/cover/upload", name="上传头图", tags=["用户", "g0v0 API"]) async def upload_cover( session: Database, - content: bytes = File(...), - current_user: User = Security(get_client_user), - storage: StorageService = Depends(get_storage_service), + content: Annotated[bytes, File(...)], + current_user: 
ClientUser, + storage: StorageService, ): """上传用户头图 diff --git a/app/router/private/oauth.py b/app/router/private/oauth.py index f4f5d78..2af00dc 100644 --- a/app/router/private/oauth.py +++ b/app/router/private/oauth.py @@ -1,16 +1,15 @@ from __future__ import annotations import secrets +from typing import Annotated from app.database.auth import OAuthClient, OAuthToken -from app.database.user import User -from app.dependencies.database import Database, get_redis -from app.dependencies.user import get_client_user +from app.dependencies.database import Database, Redis +from app.dependencies.user import ClientUser from .router import router -from fastapi import Body, Depends, HTTPException, Security -from redis.asyncio import Redis +from fastapi import Body, HTTPException from sqlmodel import select, text @@ -22,10 +21,10 @@ from sqlmodel import select, text ) async def create_oauth_app( session: Database, - name: str = Body(..., max_length=100, description="应用程序名称"), - description: str = Body("", description="应用程序描述"), - redirect_uris: list[str] = Body(..., description="允许的重定向 URI 列表"), - current_user: User = Security(get_client_user), + name: Annotated[str, Body(..., max_length=100, description="应用程序名称")], + redirect_uris: Annotated[list[str], Body(..., description="允许的重定向 URI 列表")], + current_user: ClientUser, + description: Annotated[str, Body(description="应用程序描述")] = "", ): result = await session.execute( text( @@ -64,7 +63,7 @@ async def create_oauth_app( async def get_oauth_app( session: Database, client_id: int, - current_user: User = Security(get_client_user), + current_user: ClientUser, ): oauth_app = await session.get(OAuthClient, client_id) if not oauth_app: @@ -85,7 +84,7 @@ async def get_oauth_app( ) async def get_user_oauth_apps( session: Database, - current_user: User = Security(get_client_user), + current_user: ClientUser, ): oauth_apps = await session.exec(select(OAuthClient).where(OAuthClient.owner_id == current_user.id)) return [ @@ -109,7 +108,7 
@@ async def get_user_oauth_apps( async def delete_oauth_app( session: Database, client_id: int, - current_user: User = Security(get_client_user), + current_user: ClientUser, ): oauth_client = await session.get(OAuthClient, client_id) if not oauth_client: @@ -134,10 +133,10 @@ async def delete_oauth_app( async def update_oauth_app( session: Database, client_id: int, - name: str = Body(..., max_length=100, description="应用程序新名称"), - description: str = Body("", description="应用程序新描述"), - redirect_uris: list[str] = Body(..., description="新的重定向 URI 列表"), - current_user: User = Security(get_client_user), + name: Annotated[str, Body(..., max_length=100, description="应用程序新名称")], + redirect_uris: Annotated[list[str], Body(..., description="新的重定向 URI 列表")], + current_user: ClientUser, + description: Annotated[str, Body(description="应用程序新描述")] = "", ): oauth_client = await session.get(OAuthClient, client_id) if not oauth_client: @@ -168,7 +167,7 @@ async def update_oauth_app( async def refresh_secret( session: Database, client_id: int, - current_user: User = Security(get_client_user), + current_user: ClientUser, ): oauth_client = await session.get(OAuthClient, client_id) if not oauth_client: @@ -200,10 +199,10 @@ async def refresh_secret( async def generate_oauth_code( session: Database, client_id: int, - current_user: User = Security(get_client_user), - redirect_uri: str = Body(..., description="授权后重定向的 URI"), - scopes: list[str] = Body(..., description="请求的权限范围列表"), - redis: Redis = Depends(get_redis), + current_user: ClientUser, + redirect_uri: Annotated[str, Body(..., description="授权后重定向的 URI")], + scopes: Annotated[list[str], Body(..., description="请求的权限范围列表")], + redis: Redis, ): client = await session.get(OAuthClient, client_id) if not client: diff --git a/app/router/private/relationship.py b/app/router/private/relationship.py index 4698350..1f882cb 100644 --- a/app/router/private/relationship.py +++ b/app/router/private/relationship.py @@ -1,13 +1,15 @@ from __future__ 
import annotations -from app.database import Relationship, User +from typing import Annotated + +from app.database import Relationship from app.database.relationship import RelationshipType from app.dependencies.database import Database -from app.dependencies.user import get_client_user +from app.dependencies.user import ClientUser from .router import router -from fastapi import HTTPException, Path, Security +from fastapi import HTTPException, Path from pydantic import BaseModel, Field from sqlmodel import select @@ -27,8 +29,8 @@ class CheckResponse(BaseModel): ) async def check_user_relationship( db: Database, - user_id: int = Path(..., description="目标用户的 ID"), - current_user: User = Security(get_client_user), + user_id: Annotated[int, Path(..., description="目标用户的 ID")], + current_user: ClientUser, ): if user_id == current_user.id: raise HTTPException(422, "Cannot check relationship with yourself") diff --git a/app/router/private/score.py b/app/router/private/score.py index 75225bd..e640121 100644 --- a/app/router/private/score.py +++ b/app/router/private/score.py @@ -1,17 +1,14 @@ from __future__ import annotations from app.database.score import Score -from app.database.user import User -from app.dependencies.database import Database, get_redis -from app.dependencies.storage import get_storage_service -from app.dependencies.user import get_client_user +from app.dependencies.database import Database, Redis +from app.dependencies.storage import StorageService +from app.dependencies.user import ClientUser from app.service.user_cache_service import refresh_user_cache_background -from app.storage.base import StorageService from .router import router -from fastapi import BackgroundTasks, Depends, HTTPException, Security -from redis.asyncio import Redis +from fastapi import BackgroundTasks, HTTPException @router.delete( @@ -24,9 +21,9 @@ async def delete_score( session: Database, background_task: BackgroundTasks, score_id: int, - redis: Redis = Depends(get_redis), - 
current_user: User = Security(get_client_user), - storage_service: StorageService = Depends(get_storage_service), + redis: Redis, + current_user: ClientUser, + storage_service: StorageService, ): """删除成绩 diff --git a/app/router/private/team.py b/app/router/private/team.py index 681cd13..b60461c 100644 --- a/app/router/private/team.py +++ b/app/router/private/team.py @@ -1,12 +1,13 @@ from __future__ import annotations import hashlib +from typing import Annotated from app.database.team import Team, TeamMember, TeamRequest from app.database.user import BASE_INCLUDES, User, UserResp -from app.dependencies.database import Database, get_redis -from app.dependencies.storage import get_storage_service -from app.dependencies.user import get_client_user +from app.dependencies.database import Database, Redis +from app.dependencies.storage import StorageService +from app.dependencies.user import ClientUser from app.models.notification import ( TeamApplicationAccept, TeamApplicationReject, @@ -14,27 +15,25 @@ from app.models.notification import ( ) from app.router.notification import server from app.service.ranking_cache_service import get_ranking_cache_service -from app.storage.base import StorageService from app.utils import check_image, utcnow from .router import router -from fastapi import Depends, File, Form, HTTPException, Path, Request, Security +from fastapi import File, Form, HTTPException, Path, Request from pydantic import BaseModel -from redis.asyncio import Redis from sqlmodel import exists, select @router.post("/team", name="创建战队", response_model=Team, tags=["战队", "g0v0 API"]) async def create_team( session: Database, - storage: StorageService = Depends(get_storage_service), - current_user: User = Security(get_client_user), - flag: bytes = File(..., description="战队图标文件"), - cover: bytes = File(..., description="战队头图文件"), - name: str = Form(max_length=100, description="战队名称"), - short_name: str = Form(max_length=10, description="战队缩写"), - redis: Redis = 
Depends(get_redis), + storage: StorageService, + current_user: ClientUser, + flag: Annotated[bytes, File(..., description="战队图标文件")], + cover: Annotated[bytes, File(..., description="战队头图文件")], + name: Annotated[str, Form(max_length=100, description="战队名称")], + short_name: Annotated[str, Form(max_length=10, description="战队缩写")], + redis: Redis, ): """创建战队。 @@ -88,13 +87,13 @@ async def create_team( async def update_team( team_id: int, session: Database, - storage: StorageService = Depends(get_storage_service), - current_user: User = Security(get_client_user), - flag: bytes | None = File(default=None, description="战队图标文件"), - cover: bytes | None = File(default=None, description="战队头图文件"), - name: str | None = Form(default=None, max_length=100, description="战队名称"), - short_name: str | None = Form(default=None, max_length=10, description="战队缩写"), - leader_id: int | None = Form(default=None, description="战队队长 ID"), + storage: StorageService, + current_user: ClientUser, + flag: Annotated[bytes | None, File(description="战队图标文件")] = None, + cover: Annotated[bytes | None, File(description="战队头图文件")] = None, + name: Annotated[str | None, Form(max_length=100, description="战队名称")] = None, + short_name: Annotated[str | None, Form(max_length=10, description="战队缩写")] = None, + leader_id: Annotated[int | None, Form(description="战队队长 ID")] = None, ): """修改战队。 @@ -161,9 +160,9 @@ async def update_team( @router.delete("/team/{team_id}", name="删除战队", status_code=204, tags=["战队", "g0v0 API"]) async def delete_team( session: Database, - team_id: int = Path(..., description="战队 ID"), - current_user: User = Security(get_client_user), - redis: Redis = Depends(get_redis), + team_id: Annotated[int, Path(..., description="战队 ID")], + current_user: ClientUser, + redis: Redis, ): team = await session.get(Team, team_id) if not team: @@ -191,7 +190,7 @@ class TeamQueryResp(BaseModel): @router.get("/team/{team_id}", name="查询战队", response_model=TeamQueryResp, tags=["战队", "g0v0 API"]) async def 
get_team( session: Database, - team_id: int = Path(..., description="战队 ID"), + team_id: Annotated[int, Path(..., description="战队 ID")], ): members = (await session.exec(select(TeamMember).where(TeamMember.team_id == team_id))).all() return TeamQueryResp( @@ -203,8 +202,8 @@ async def get_team( @router.post("/team/{team_id}/request", name="请求加入战队", status_code=204, tags=["战队", "g0v0 API"]) async def request_join_team( session: Database, - team_id: int = Path(..., description="战队 ID"), - current_user: User = Security(get_client_user), + team_id: Annotated[int, Path(..., description="战队 ID")], + current_user: ClientUser, ): team = await session.get(Team, team_id) if not team: @@ -231,10 +230,10 @@ async def request_join_team( async def handle_request( req: Request, session: Database, - team_id: int = Path(..., description="战队 ID"), - user_id: int = Path(..., description="用户 ID"), - current_user: User = Security(get_client_user), - redis: Redis = Depends(get_redis), + team_id: Annotated[int, Path(..., description="战队 ID")], + user_id: Annotated[int, Path(..., description="用户 ID")], + current_user: ClientUser, + redis: Redis, ): team = await session.get(Team, team_id) if not team: @@ -272,10 +271,10 @@ async def handle_request( @router.delete("/team/{team_id}/{user_id}", name="踢出成员 / 退出战队", status_code=204, tags=["战队", "g0v0 API"]) async def kick_member( session: Database, - team_id: int = Path(..., description="战队 ID"), - user_id: int = Path(..., description="用户 ID"), - current_user: User = Security(get_client_user), - redis: Redis = Depends(get_redis), + team_id: Annotated[int, Path(..., description="战队 ID")], + user_id: Annotated[int, Path(..., description="用户 ID")], + current_user: ClientUser, + redis: Redis, ): team = await session.get(Team, team_id) if not team: diff --git a/app/router/private/totp.py b/app/router/private/totp.py index 2435567..06406aa 100644 --- a/app/router/private/totp.py +++ b/app/router/private/totp.py @@ -1,5 +1,7 @@ from __future__ import 
annotations +from typing import Annotated + from app.auth import ( check_totp_backup_code, finish_create_totp_key, @@ -9,17 +11,15 @@ from app.auth import ( ) from app.const import BACKUP_CODE_LENGTH from app.database.auth import TotpKeys -from app.database.user import User -from app.dependencies.database import Database, get_redis -from app.dependencies.user import get_client_user +from app.dependencies.database import Database, Redis +from app.dependencies.user import ClientUser from app.models.totp import FinishStatus, StartCreateTotpKeyResp from .router import router -from fastapi import Body, Depends, HTTPException, Security +from fastapi import Body, HTTPException from pydantic import BaseModel import pyotp -from redis.asyncio import Redis class TotpStatusResp(BaseModel): @@ -37,7 +37,7 @@ class TotpStatusResp(BaseModel): response_model=TotpStatusResp, ) async def get_totp_status( - current_user: User = Security(get_client_user), + current_user: ClientUser, ): """检查用户是否已创建TOTP""" totp_key = await current_user.awaitable_attrs.totp_key @@ -62,8 +62,8 @@ async def get_totp_status( status_code=201, ) async def start_create_totp( - redis: Redis = Depends(get_redis), - current_user: User = Security(get_client_user), + redis: Redis, + current_user: ClientUser, ): if await current_user.awaitable_attrs.totp_key: raise HTTPException(status_code=400, detail="TOTP is already enabled for this user") @@ -98,9 +98,9 @@ async def start_create_totp( ) async def finish_create_totp( session: Database, - code: str = Body(..., embed=True, description="用户提供的 TOTP 代码"), - redis: Redis = Depends(get_redis), - current_user: User = Security(get_client_user), + code: Annotated[str, Body(..., embed=True, description="用户提供的 TOTP 代码")], + redis: Redis, + current_user: ClientUser, ): status, backup_codes = await finish_create_totp_key(current_user, code, redis, session) if status == FinishStatus.SUCCESS: @@ -122,9 +122,9 @@ async def finish_create_totp( ) async def disable_totp( session: 
Database, - code: str = Body(..., embed=True, description="用户提供的 TOTP 代码或备份码"), - redis: Redis = Depends(get_redis), - current_user: User = Security(get_client_user), + code: Annotated[str, Body(..., embed=True, description="用户提供的 TOTP 代码或备份码")], + redis: Redis, + current_user: ClientUser, ): totp = await session.get(TotpKeys, current_user.id) if not totp: diff --git a/app/router/private/username.py b/app/router/private/username.py index 571cd40..18eb219 100644 --- a/app/router/private/username.py +++ b/app/router/private/username.py @@ -1,24 +1,26 @@ from __future__ import annotations +from typing import Annotated + from app.auth import validate_username from app.config import settings from app.database.events import Event, EventType from app.database.user import User from app.dependencies.database import Database -from app.dependencies.user import get_client_user +from app.dependencies.user import ClientUser from app.utils import utcnow from .router import router -from fastapi import Body, HTTPException, Security +from fastapi import Body, HTTPException from sqlmodel import exists, select @router.post("/rename", name="修改用户名", tags=["用户", "g0v0 API"]) async def user_rename( session: Database, - new_name: str = Body(..., description="新的用户名"), - current_user: User = Security(get_client_user), + new_name: Annotated[str, Body(..., description="新的用户名")], + current_user: ClientUser, ): """修改用户名 diff --git a/app/router/v1/beatmap.py b/app/router/v1/beatmap.py index 3301fd2..b723713 100644 --- a/app/router/v1/beatmap.py +++ b/app/router/v1/beatmap.py @@ -1,24 +1,22 @@ from __future__ import annotations from datetime import datetime -from typing import Literal +from typing import Annotated, Literal from app.database.beatmap import Beatmap, calculate_beatmap_attributes from app.database.beatmap_playcounts import BeatmapPlaycounts from app.database.beatmapset import Beatmapset from app.database.favourite_beatmapset import FavouriteBeatmapset from app.database.score import 
Score -from app.dependencies.database import Database, get_redis -from app.dependencies.fetcher import get_fetcher -from app.fetcher import Fetcher +from app.dependencies.database import Database, Redis +from app.dependencies.fetcher import Fetcher from app.models.beatmap import BeatmapRankStatus, Genre, Language from app.models.mods import int_to_mods from app.models.score import GameMode from .router import AllStrModel, router -from fastapi import Depends, Query -from redis.asyncio import Redis +from fastapi import Query from sqlmodel import col, func, select from sqlmodel.ext.asyncio.session import AsyncSession @@ -148,18 +146,18 @@ class V1Beatmap(AllStrModel): ) async def get_beatmaps( session: Database, - since: datetime | None = Query(None, description="自指定时间后拥有排行榜的谱面"), - beatmapset_id: int | None = Query(None, alias="s", description="谱面集 ID"), - beatmap_id: int | None = Query(None, alias="b", description="谱面 ID"), - user: str | None = Query(None, alias="u", description="谱师"), - type: Literal["string", "id"] | None = Query(None, description="用户类型:string 用户名称 / id 用户 ID"), - ruleset_id: int | None = Query(None, alias="m", description="Ruleset ID", ge=0, le=3), # TODO - convert: bool = Query(False, alias="a", description="转谱"), # TODO - checksum: str | None = Query(None, alias="h", description="谱面文件 MD5"), - limit: int = Query(500, ge=1, le=500, description="返回结果数量限制"), - mods: int = Query(0, description="应用到谱面属性的 MOD"), - redis: Redis = Depends(get_redis), - fetcher: Fetcher = Depends(get_fetcher), + redis: Redis, + fetcher: Fetcher, + since: Annotated[datetime | None, Query(description="自指定时间后拥有排行榜的谱面")] = None, + beatmapset_id: Annotated[int | None, Query(alias="s", description="谱面集 ID")] = None, + beatmap_id: Annotated[int | None, Query(alias="b", description="谱面 ID")] = None, + user: Annotated[str | None, Query(alias="u", description="谱师")] = None, + type: Annotated[Literal["string", "id"] | None, Query(description="用户类型:string 用户名称 / id 用户 ID")] = None, 
+ ruleset_id: Annotated[int | None, Query(alias="m", description="Ruleset ID", ge=0, le=3)] = None, # TODO + convert: Annotated[bool, Query(alias="a", description="转谱")] = False, # TODO + checksum: Annotated[str | None, Query(alias="h", description="谱面文件 MD5")] = None, + limit: Annotated[int, Query(ge=1, le=500, description="返回结果数量限制")] = 500, + mods: Annotated[int, Query(description="应用到谱面属性的 MOD")] = 0, ): beatmaps: list[Beatmap] = [] results = [] diff --git a/app/router/v1/public_user.py b/app/router/v1/public_user.py index 1f5df71..dadbfb9 100644 --- a/app/router/v1/public_user.py +++ b/app/router/v1/public_user.py @@ -1,6 +1,6 @@ from __future__ import annotations -from typing import Literal +from typing import Annotated, Literal from app.database.statistics import UserStatistics from app.database.user import User @@ -181,9 +181,9 @@ async def _count_online_users_optimized(redis): ) async def api_get_player_info( session: Database, - scope: Literal["stats", "events", "info", "all"] = Query(..., description="信息范围"), - id: int | None = Query(None, ge=3, le=2147483647, description="用户 ID"), - name: str | None = Query(None, regex=r"^[\w \[\]-]{2,32}$", description="用户名"), + scope: Annotated[Literal["stats", "events", "info", "all"], Query(..., description="信息范围")], + id: Annotated[int | None, Query(ge=3, le=2147483647, description="用户 ID")] = None, + name: Annotated[str | None, Query(regex=r"^[\w \[\]-]{2,32}$", description="用户名")] = None, ): """ 获取指定玩家的信息 diff --git a/app/router/v1/replay.py b/app/router/v1/replay.py index e37057e..b0ffc99 100644 --- a/app/router/v1/replay.py +++ b/app/router/v1/replay.py @@ -2,15 +2,14 @@ from __future__ import annotations import base64 from datetime import date -from typing import Literal +from typing import Annotated, Literal from app.database.counts import ReplayWatchedCount from app.database.score import Score from app.dependencies.database import Database -from app.dependencies.storage import get_storage_service +from 
app.dependencies.storage import StorageService from app.models.mods import int_to_mods from app.models.score import GameMode -from app.storage import StorageService from .router import router @@ -34,18 +33,20 @@ class ReplayModel(BaseModel): ) async def download_replay( session: Database, - beatmap: int = Query(..., alias="b", description="谱面 ID"), - user: str = Query(..., alias="u", description="用户"), - ruleset_id: int | None = Query( - None, - alias="m", - description="Ruleset ID", - ge=0, - ), - score_id: int | None = Query(None, alias="s", description="成绩 ID"), - type: Literal["string", "id"] | None = Query(None, description="用户类型:string 用户名称 / id 用户 ID"), - mods: int = Query(0, description="成绩的 MOD"), - storage_service: StorageService = Depends(get_storage_service), + beatmap: Annotated[int, Query(..., alias="b", description="谱面 ID")], + user: Annotated[str, Query(..., alias="u", description="用户")], + storage_service: StorageService, + ruleset_id: Annotated[ + int | None, + Query( + alias="m", + description="Ruleset ID", + ge=0, + ), + ] = None, + score_id: Annotated[int | None, Query(alias="s", description="成绩 ID")] = None, + type: Annotated[Literal["string", "id"] | None, Query(description="用户类型:string 用户名称 / id 用户 ID")] = None, + mods: Annotated[int, Query(description="成绩的 MOD")] = 0, ): mods_ = int_to_mods(mods) if score_id is not None: diff --git a/app/router/v1/score.py b/app/router/v1/score.py index ccbcf4f..4ac9b42 100644 --- a/app/router/v1/score.py +++ b/app/router/v1/score.py @@ -1,7 +1,7 @@ from __future__ import annotations from datetime import datetime, timedelta -from typing import Literal +from typing import Annotated, Literal from app.database.best_scores import PPBestScore from app.database.score import Score, get_leaderboard @@ -69,10 +69,10 @@ class V1Score(AllStrModel): ) async def get_user_best( session: Database, - user: str = Query(..., alias="u", description="用户"), - ruleset_id: int = Query(0, alias="m", description="Ruleset ID", 
ge=0), - type: Literal["string", "id"] | None = Query(None, description="用户类型:string 用户名称 / id 用户 ID"), - limit: int = Query(10, ge=1, le=100, description="返回的成绩数量"), + user: Annotated[str, Query(..., alias="u", description="用户")], + ruleset_id: Annotated[int, Query(alias="m", description="Ruleset ID", ge=0)] = 0, + type: Annotated[Literal["string", "id"] | None, Query(description="用户类型:string 用户名称 / id 用户 ID")] = None, + limit: Annotated[int, Query(ge=1, le=100, description="返回的成绩数量")] = 10, ): try: scores = ( @@ -101,10 +101,10 @@ async def get_user_best( ) async def get_user_recent( session: Database, - user: str = Query(..., alias="u", description="用户"), - ruleset_id: int = Query(0, alias="m", description="Ruleset ID", ge=0), - type: Literal["string", "id"] | None = Query(None, description="用户类型:string 用户名称 / id 用户 ID"), - limit: int = Query(10, ge=1, le=100, description="返回的成绩数量"), + user: Annotated[str, Query(..., alias="u", description="用户")], + ruleset_id: Annotated[int, Query(alias="m", description="Ruleset ID", ge=0)] = 0, + type: Annotated[Literal["string", "id"] | None, Query(description="用户类型:string 用户名称 / id 用户 ID")] = None, + limit: Annotated[int, Query(ge=1, le=100, description="返回的成绩数量")] = 10, ): try: scores = ( @@ -133,12 +133,12 @@ async def get_user_recent( ) async def get_scores( session: Database, - user: str | None = Query(None, alias="u", description="用户"), - beatmap_id: int = Query(alias="b", description="谱面 ID"), - ruleset_id: int = Query(0, alias="m", description="Ruleset ID", ge=0), - type: Literal["string", "id"] | None = Query(None, description="用户类型:string 用户名称 / id 用户 ID"), - limit: int = Query(10, ge=1, le=100, description="返回的成绩数量"), - mods: int = Query(0, description="成绩的 MOD"), + beatmap_id: Annotated[int, Query(alias="b", description="谱面 ID")], + user: Annotated[str | None, Query(alias="u", description="用户")] = None, + ruleset_id: Annotated[int, Query(alias="m", description="Ruleset ID", ge=0)] = 0, + type: 
Annotated[Literal["string", "id"] | None, Query(description="用户类型:string 用户名称 / id 用户 ID")] = None, + limit: Annotated[int, Query(ge=1, le=100, description="返回的成绩数量")] = 10, + mods: Annotated[int, Query(description="成绩的 MOD")] = 0, ): try: if user is not None: diff --git a/app/router/v1/user.py b/app/router/v1/user.py index 77e0369..52ee19a 100644 --- a/app/router/v1/user.py +++ b/app/router/v1/user.py @@ -1,7 +1,7 @@ from __future__ import annotations from datetime import datetime -from typing import Literal +from typing import Annotated, Literal from app.database.statistics import UserStatistics, UserStatisticsResp from app.database.user import User @@ -104,10 +104,10 @@ class V1User(AllStrModel): async def get_user( session: Database, background_tasks: BackgroundTasks, - user: str = Query(..., alias="u", description="用户"), - ruleset_id: int | None = Query(None, alias="m", description="Ruleset ID", ge=0), - type: Literal["string", "id"] | None = Query(None, description="用户类型:string 用户名称 / id 用户 ID"), - event_days: int = Query(default=1, ge=1, le=31, description="从现在起所有事件的最大天数"), + user: Annotated[str, Query(..., alias="u", description="用户")], + ruleset_id: Annotated[int | None, Query(alias="m", description="Ruleset ID", ge=0)] = None, + type: Annotated[Literal["string", "id"] | None, Query(description="用户类型:string 用户名称 / id 用户 ID")] = None, + event_days: Annotated[int, Query(ge=1, le=31, description="从现在起所有事件的最大天数")] = 1, ): redis = get_redis() cache_service = get_user_cache_service(redis) diff --git a/app/router/v2/beatmap.py b/app/router/v2/beatmap.py index 506f9f3..024a542 100644 --- a/app/router/v2/beatmap.py +++ b/app/router/v2/beatmap.py @@ -3,13 +3,13 @@ from __future__ import annotations import asyncio import hashlib import json +from typing import Annotated from app.database import Beatmap, BeatmapResp, User from app.database.beatmap import calculate_beatmap_attributes -from app.dependencies.database import Database, get_redis -from 
app.dependencies.fetcher import get_fetcher +from app.dependencies.database import Database, Redis +from app.dependencies.fetcher import Fetcher from app.dependencies.user import get_current_user -from app.fetcher import Fetcher from app.models.beatmap import BeatmapAttributes from app.models.mods import APIMod, int_to_mods from app.models.score import ( @@ -18,10 +18,9 @@ from app.models.score import ( from .router import router -from fastapi import Depends, HTTPException, Path, Query, Security +from fastapi import HTTPException, Path, Query, Security from httpx import HTTPError, HTTPStatusError from pydantic import BaseModel -from redis.asyncio import Redis import rosu_pp_py as rosu from sqlmodel import col, select @@ -44,11 +43,11 @@ class BatchGetResp(BaseModel): ) async def lookup_beatmap( db: Database, - id: int | None = Query(default=None, alias="id", description="谱面 ID"), - md5: str | None = Query(default=None, alias="checksum", description="谱面文件 MD5"), - filename: str | None = Query(default=None, alias="filename", description="谱面文件名"), - current_user: User = Security(get_current_user, scopes=["public"]), - fetcher: Fetcher = Depends(get_fetcher), + current_user: Annotated[User, Security(get_current_user, scopes=["public"])], + fetcher: Fetcher, + id: Annotated[int | None, Query(alias="id", description="谱面 ID")] = None, + md5: Annotated[str | None, Query(alias="checksum", description="谱面文件 MD5")] = None, + filename: Annotated[str | None, Query(alias="filename", description="谱面文件名")] = None, ): if id is None and md5 is None and filename is None: raise HTTPException( @@ -75,9 +74,9 @@ async def lookup_beatmap( ) async def get_beatmap( db: Database, - beatmap_id: int = Path(..., description="谱面 ID"), - current_user: User = Security(get_current_user, scopes=["public"]), - fetcher: Fetcher = Depends(get_fetcher), + beatmap_id: Annotated[int, Path(..., description="谱面 ID")], + current_user: Annotated[User, Security(get_current_user, scopes=["public"])], + 
fetcher: Fetcher, ): try: beatmap = await Beatmap.get_or_fetch(db, fetcher, beatmap_id) @@ -95,9 +94,12 @@ async def get_beatmap( ) async def batch_get_beatmaps( db: Database, - beatmap_ids: list[int] = Query(alias="ids[]", default_factory=list, description="谱面 ID 列表 (最多 50 个)"), - current_user: User = Security(get_current_user, scopes=["public"]), - fetcher: Fetcher = Depends(get_fetcher), + beatmap_ids: Annotated[ + list[int], + Query(alias="ids[]", default_factory=list, description="谱面 ID 列表 (最多 50 个)"), + ], + current_user: Annotated[User, Security(get_current_user, scopes=["public"])], + fetcher: Fetcher, ): if not beatmap_ids: beatmaps = (await db.exec(select(Beatmap).order_by(col(Beatmap.last_updated).desc()).limit(50))).all() @@ -127,16 +129,19 @@ async def batch_get_beatmaps( ) async def get_beatmap_attributes( db: Database, - beatmap_id: int = Path(..., description="谱面 ID"), - current_user: User = Security(get_current_user, scopes=["public"]), - mods: list[str] = Query( - default_factory=list, - description="Mods 列表;可为整型位掩码(单元素)或 JSON/简称", - ), - ruleset: GameMode | None = Query(default=None, description="指定 ruleset;为空则使用谱面自身模式"), - ruleset_id: int | None = Query(default=None, description="以数字指定 ruleset (与 ruleset 二选一)", ge=0, le=3), - redis: Redis = Depends(get_redis), - fetcher: Fetcher = Depends(get_fetcher), + beatmap_id: Annotated[int, Path(..., description="谱面 ID")], + current_user: Annotated[User, Security(get_current_user, scopes=["public"])], + mods: Annotated[ + list[str], + Query( + default_factory=list, + description="Mods 列表;可为整型位掩码(单元素)或 JSON/简称", + ), + ], + redis: Redis, + fetcher: Fetcher, + ruleset: Annotated[GameMode | None, Query(description="指定 ruleset;为空则使用谱面自身模式")] = None, + ruleset_id: Annotated[int | None, Query(description="以数字指定 ruleset (与 ruleset 二选一)", ge=0, le=3)] = None, ): mods_ = [] if mods and mods[0].isdigit(): diff --git a/app/router/v2/beatmapset.py b/app/router/v2/beatmapset.py index 01b5658..c4f2561 100644 --- 
a/app/router/v2/beatmapset.py +++ b/app/router/v2/beatmapset.py @@ -6,23 +6,20 @@ from urllib.parse import parse_qs from app.database import Beatmap, Beatmapset, BeatmapsetResp, FavouriteBeatmapset, User from app.database.beatmapset import SearchBeatmapsetsResp -from app.dependencies.beatmap_download import get_beatmap_download_service -from app.dependencies.beatmapset_cache import get_beatmapset_cache_dependency -from app.dependencies.database import Database, get_redis, with_db -from app.dependencies.fetcher import get_fetcher -from app.dependencies.geoip import get_client_ip, get_geoip_helper -from app.dependencies.user import get_client_user, get_current_user -from app.fetcher import Fetcher +from app.dependencies.beatmap_download import DownloadService +from app.dependencies.beatmapset_cache import BeatmapsetCacheService +from app.dependencies.database import Database, Redis, with_db +from app.dependencies.fetcher import Fetcher +from app.dependencies.geoip import IPAddress, get_geoip_helper +from app.dependencies.user import ClientUser, get_current_user from app.models.beatmap import SearchQueryModel from app.service.asset_proxy_helper import process_response_assets -from app.service.beatmap_download_service import BeatmapDownloadService -from app.service.beatmapset_cache_service import BeatmapsetCacheService, generate_hash +from app.service.beatmapset_cache_service import generate_hash from .router import router from fastapi import ( BackgroundTasks, - Depends, Form, HTTPException, Path, @@ -53,10 +50,10 @@ async def search_beatmapset( query: Annotated[SearchQueryModel, Query(...)], request: Request, background_tasks: BackgroundTasks, - current_user: User = Security(get_current_user, scopes=["public"]), - fetcher: Fetcher = Depends(get_fetcher), - redis=Depends(get_redis), - cache_service: BeatmapsetCacheService = Depends(get_beatmapset_cache_dependency), + current_user: Annotated[User, Security(get_current_user, scopes=["public"])], + fetcher: Fetcher, + 
redis: Redis, + cache_service: BeatmapsetCacheService, ): params = parse_qs(qs=request.url.query, keep_blank_values=True) cursor = {} @@ -134,10 +131,10 @@ async def search_beatmapset( async def lookup_beatmapset( db: Database, request: Request, - beatmap_id: int = Query(description="谱面 ID"), - current_user: User = Security(get_current_user, scopes=["public"]), - fetcher: Fetcher = Depends(get_fetcher), - cache_service: BeatmapsetCacheService = Depends(get_beatmapset_cache_dependency), + beatmap_id: Annotated[int, Query(description="谱面 ID")], + current_user: Annotated[User, Security(get_current_user, scopes=["public"])], + fetcher: Fetcher, + cache_service: BeatmapsetCacheService, ): # 先尝试从缓存获取 cached_resp = await cache_service.get_beatmap_lookup_from_cache(beatmap_id) @@ -170,10 +167,10 @@ async def lookup_beatmapset( async def get_beatmapset( db: Database, request: Request, - beatmapset_id: int = Path(..., description="谱面集 ID"), - current_user: User = Security(get_current_user, scopes=["public"]), - fetcher: Fetcher = Depends(get_fetcher), - cache_service: BeatmapsetCacheService = Depends(get_beatmapset_cache_dependency), + beatmapset_id: Annotated[int, Path(..., description="谱面集 ID")], + current_user: Annotated[User, Security(get_current_user, scopes=["public"])], + fetcher: Fetcher, + cache_service: BeatmapsetCacheService, ): # 先尝试从缓存获取 cached_resp = await cache_service.get_beatmapset_from_cache(beatmapset_id) @@ -203,14 +200,12 @@ async def get_beatmapset( description="\n下载谱面集文件。基于请求IP地理位置智能分流,支持负载均衡和自动故障转移。中国IP使用Sayobot镜像,其他地区使用Nerinyan和OsuDirect镜像。", ) async def download_beatmapset( - request: Request, - beatmapset_id: int = Path(..., description="谱面集 ID"), - no_video: bool = Query(True, alias="noVideo", description="是否下载无视频版本"), - current_user: User = Security(get_client_user), - download_service: BeatmapDownloadService = Depends(get_beatmap_download_service), + client_ip: IPAddress, + beatmapset_id: Annotated[int, Path(..., description="谱面集 ID")], + 
current_user: ClientUser, + download_service: DownloadService, + no_video: Annotated[bool, Query(alias="noVideo", description="是否下载无视频版本")] = True, ): - client_ip = get_client_ip(request) - geoip_helper = get_geoip_helper() geo_info = geoip_helper.lookup(client_ip) country_code = geo_info.get("country_iso", "") @@ -242,9 +237,12 @@ async def download_beatmapset( ) async def favourite_beatmapset( db: Database, - beatmapset_id: int = Path(..., description="谱面集 ID"), - action: Literal["favourite", "unfavourite"] = Form(description="操作类型:favourite 收藏 / unfavourite 取消收藏"), - current_user: User = Security(get_client_user), + beatmapset_id: Annotated[int, Path(..., description="谱面集 ID")], + action: Annotated[ + Literal["favourite", "unfavourite"], + Form(description="操作类型:favourite 收藏 / unfavourite 取消收藏"), + ], + current_user: ClientUser, ): existing_favourite = ( await db.exec( diff --git a/app/router/v2/cache.py b/app/router/v2/cache.py index 08a0b27..fe610a6 100644 --- a/app/router/v2/cache.py +++ b/app/router/v2/cache.py @@ -5,14 +5,13 @@ from __future__ import annotations -from app.dependencies.database import get_redis +from app.dependencies.database import Redis from app.service.user_cache_service import get_user_cache_service from .router import router -from fastapi import Depends, HTTPException +from fastapi import HTTPException from pydantic import BaseModel -from redis.asyncio import Redis class CacheStatsResponse(BaseModel): @@ -28,7 +27,7 @@ class CacheStatsResponse(BaseModel): tags=["缓存管理"], ) async def get_cache_stats( - redis: Redis = Depends(get_redis), + redis: Redis, # current_user: User = Security(get_current_user, scopes=["admin"]), # 暂时注释,可根据需要启用 ): try: @@ -68,7 +67,7 @@ async def get_cache_stats( ) async def invalidate_user_cache( user_id: int, - redis: Redis = Depends(get_redis), + redis: Redis, # current_user: User = Security(get_current_user, scopes=["admin"]), # 暂时注释 ): try: @@ -87,7 +86,7 @@ async def invalidate_user_cache( tags=["缓存管理"], ) 
async def clear_all_user_cache( - redis: Redis = Depends(get_redis), + redis: Redis, # current_user: User = Security(get_current_user, scopes=["admin"]), # 暂时注释 ): try: @@ -119,7 +118,7 @@ class CacheWarmupRequest(BaseModel): ) async def warmup_cache( request: CacheWarmupRequest, - redis: Redis = Depends(get_redis), + redis: Redis, # current_user: User = Security(get_current_user, scopes=["admin"]), # 暂时注释 ): try: diff --git a/app/router/v2/me.py b/app/router/v2/me.py index fe1e797..5304e9d 100644 --- a/app/router/v2/me.py +++ b/app/router/v2/me.py @@ -1,9 +1,10 @@ from __future__ import annotations +from typing import Annotated + from app.database import MeResp, User -from app.dependencies import get_current_user from app.dependencies.database import Database -from app.dependencies.user import UserAndToken, get_current_user_and_token +from app.dependencies.user import UserAndToken, get_current_user, get_current_user_and_token from app.exceptions.userpage import UserpageError from app.models.score import GameMode from app.models.user import Page @@ -29,8 +30,8 @@ from fastapi import HTTPException, Path, Security ) async def get_user_info_with_ruleset( session: Database, - ruleset: GameMode = Path(description="指定 ruleset"), - user_and_token: UserAndToken = Security(get_current_user_and_token, scopes=["identify"]), + ruleset: Annotated[GameMode, Path(description="指定 ruleset")], + user_and_token: Annotated[UserAndToken, Security(get_current_user_and_token, scopes=["identify"])], ): user_resp = await MeResp.from_db(user_and_token[0], session, ruleset, token_id=user_and_token[1].id) return user_resp @@ -45,7 +46,7 @@ async def get_user_info_with_ruleset( ) async def get_user_info_default( session: Database, - user_and_token: UserAndToken = Security(get_current_user_and_token, scopes=["identify"]), + user_and_token: Annotated[UserAndToken, Security(get_current_user_and_token, scopes=["identify"])], ): user_resp = await MeResp.from_db(user_and_token[0], session, None, 
token_id=user_and_token[1].id) return user_resp @@ -85,8 +86,8 @@ async def get_user_info_default( async def update_userpage( request: UpdateUserpageRequest, session: Database, - user_id: int = Path(description="用户ID"), - current_user: User = Security(get_current_user, scopes=["edit"]), + user_id: Annotated[int, Path(description="用户ID")], + current_user: Annotated[User, Security(get_current_user, scopes=["edit"])], ): """更新用户页面内容(匹配官方osu-web实现)""" # 检查权限:只能编辑自己的页面(除非是管理员) diff --git a/app/router/v2/ranking.py b/app/router/v2/ranking.py index b6893f5..f2c6236 100644 --- a/app/router/v2/ranking.py +++ b/app/router/v2/ranking.py @@ -1,11 +1,11 @@ from __future__ import annotations -from typing import Literal +from typing import Annotated, Literal from app.config import settings from app.database import Team, TeamMember, User, UserStatistics, UserStatisticsResp -from app.dependencies import get_current_user from app.dependencies.database import Database, get_redis +from app.dependencies.user import get_current_user from app.models.score import GameMode from app.service.ranking_cache_service import get_ranking_cache_service @@ -45,11 +45,11 @@ SortType = Literal["performance", "score"] async def get_team_ranking_pp( session: Database, background_tasks: BackgroundTasks, - ruleset: GameMode = Path(..., description="指定 ruleset"), - page: int = Query(1, ge=1, description="页码"), - current_user: User = Security(get_current_user, scopes=["public"]), + ruleset: Annotated[GameMode, Path(..., description="指定 ruleset")], + current_user: Annotated[User, Security(get_current_user, scopes=["public"])], + page: Annotated[int, Query(ge=1, description="页码")] = 1, ): - return await get_team_ranking(session, background_tasks, "performance", ruleset, page, current_user) + return await get_team_ranking(session, background_tasks, "performance", ruleset, current_user, page) @router.get( @@ -62,14 +62,17 @@ async def get_team_ranking_pp( async def get_team_ranking( session: Database, 
background_tasks: BackgroundTasks, - sort: SortType = Path( - ..., - description="排名类型:performance 表现分 / score 计分成绩总分 " - "**这个参数是本服务器额外添加的,不属于 v2 API 的一部分**", - ), - ruleset: GameMode = Path(..., description="指定 ruleset"), - page: int = Query(1, ge=1, description="页码"), - current_user: User = Security(get_current_user, scopes=["public"]), + sort: Annotated[ + SortType, + Path( + ..., + description="排名类型:performance 表现分 / score 计分成绩总分 " + "**这个参数是本服务器额外添加的,不属于 v2 API 的一部分**", + ), + ], + ruleset: Annotated[GameMode, Path(..., description="指定 ruleset")], + current_user: Annotated[User, Security(get_current_user, scopes=["public"])], + page: Annotated[int, Query(ge=1, description="页码")] = 1, ): # 获取 Redis 连接和缓存服务 redis = get_redis() @@ -193,11 +196,11 @@ class CountryResponse(BaseModel): async def get_country_ranking_pp( session: Database, background_tasks: BackgroundTasks, - ruleset: GameMode = Path(..., description="指定 ruleset"), - page: int = Query(1, ge=1, description="页码"), - current_user: User = Security(get_current_user, scopes=["public"]), + ruleset: Annotated[GameMode, Path(..., description="指定 ruleset")], + current_user: Annotated[User, Security(get_current_user, scopes=["public"])], + page: Annotated[int, Query(ge=1, description="页码")] = 1, ): - return await get_country_ranking(session, background_tasks, ruleset, page, "performance", current_user) + return await get_country_ranking(session, background_tasks, ruleset, "performance", current_user, page) @router.get( @@ -210,14 +213,17 @@ async def get_country_ranking_pp( async def get_country_ranking( session: Database, background_tasks: BackgroundTasks, - ruleset: GameMode = Path(..., description="指定 ruleset"), - page: int = Query(1, ge=1, description="页码"), - sort: SortType = Path( - ..., - description="排名类型:performance 表现分 / score 计分成绩总分 " - "**这个参数是本服务器额外添加的,不属于 v2 API 的一部分**", - ), - current_user: User = Security(get_current_user, scopes=["public"]), + ruleset: Annotated[GameMode, Path(..., 
description="指定 ruleset")], + sort: Annotated[ + SortType, + Path( + ..., + description="排名类型:performance 表现分 / score 计分成绩总分 " + "**这个参数是本服务器额外添加的,不属于 v2 API 的一部分**", + ), + ], + current_user: Annotated[User, Security(get_current_user, scopes=["public"])], + page: Annotated[int, Query(ge=1, description="页码")] = 1, ): # 获取 Redis 连接和缓存服务 redis = get_redis() @@ -317,11 +323,11 @@ class TopUsersResponse(BaseModel): async def get_user_ranking( session: Database, background_tasks: BackgroundTasks, - ruleset: GameMode = Path(..., description="指定 ruleset"), - sort: SortType = Path(..., description="排名类型:performance 表现分 / score 计分成绩总分"), - country: str | None = Query(None, description="国家代码"), - page: int = Query(1, ge=1, description="页码"), - current_user: User = Security(get_current_user, scopes=["public"]), + ruleset: Annotated[GameMode, Path(..., description="指定 ruleset")], + sort: Annotated[SortType, Path(..., description="排名类型:performance 表现分 / score 计分成绩总分")], + current_user: Annotated[User, Security(get_current_user, scopes=["public"])], + country: Annotated[str | None, Query(description="国家代码")] = None, + page: Annotated[int, Query(ge=1, description="页码")] = 1, ): # 获取 Redis 连接和缓存服务 redis = get_redis() diff --git a/app/router/v2/relationship.py b/app/router/v2/relationship.py index b028951..4851e2a 100644 --- a/app/router/v2/relationship.py +++ b/app/router/v2/relationship.py @@ -1,10 +1,12 @@ from __future__ import annotations +from typing import Annotated + from app.database import Relationship, RelationshipResp, RelationshipType, User from app.database.user import UserResp from app.dependencies.api_version import APIVersion from app.dependencies.database import Database -from app.dependencies.user import get_client_user, get_current_user +from app.dependencies.user import ClientUser, get_current_user from .router import router @@ -56,7 +58,7 @@ async def get_relationship( db: Database, request: Request, api_version: APIVersion, - current_user: User = 
Security(get_current_user, scopes=["friends.read"]), + current_user: Annotated[User, Security(get_current_user, scopes=["friends.read"])], ): relationship_type = RelationshipType.FOLLOW if request.url.path.endswith("/friends") else RelationshipType.BLOCK relationships = await db.exec( @@ -107,8 +109,8 @@ class AddFriendResp(BaseModel): async def add_relationship( db: Database, request: Request, - target: int = Query(description="目标用户 ID"), - current_user: User = Security(get_client_user), + target: Annotated[int, Query(description="目标用户 ID")], + current_user: ClientUser, ): if not (await db.exec(select(exists()).where(User.id == target))).first(): raise HTTPException(404, "Target user not found") @@ -176,8 +178,8 @@ async def add_relationship( async def delete_relationship( db: Database, request: Request, - target: int = Path(..., description="目标用户 ID"), - current_user: User = Security(get_client_user), + target: Annotated[int, Path(..., description="目标用户 ID")], + current_user: ClientUser, ): if not (await db.exec(select(exists()).where(User.id == target))).first(): raise HTTPException(404, "Target user not found") diff --git a/app/router/v2/room.py b/app/router/v2/room.py index 6a42dbc..21ea109 100644 --- a/app/router/v2/room.py +++ b/app/router/v2/room.py @@ -1,7 +1,7 @@ from __future__ import annotations from datetime import UTC -from typing import Literal +from typing import Annotated, Literal from app.database.beatmap import Beatmap, BeatmapResp from app.database.beatmapset import BeatmapsetResp @@ -12,8 +12,8 @@ from app.database.room import APIUploadedRoom, Room, RoomResp from app.database.room_participated_user import RoomParticipatedUser from app.database.score import Score from app.database.user import User, UserResp -from app.dependencies.database import Database, get_redis -from app.dependencies.user import get_client_user, get_current_user +from app.dependencies.database import Database, Redis +from app.dependencies.user import ClientUser, 
get_current_user from app.models.room import RoomCategory, RoomStatus from app.service.room import create_playlist_room_from_api from app.signalr.hub import MultiplayerHubs @@ -21,9 +21,8 @@ from app.utils import utcnow from .router import router -from fastapi import Depends, HTTPException, Path, Query, Security +from fastapi import HTTPException, Path, Query, Security from pydantic import BaseModel, Field -from redis.asyncio import Redis from sqlalchemy.sql.elements import ColumnElement from sqlmodel import col, exists, select from sqlmodel.ext.asyncio.session import AsyncSession @@ -38,16 +37,20 @@ from sqlmodel.ext.asyncio.session import AsyncSession ) async def get_all_rooms( db: Database, - mode: Literal["open", "ended", "participated", "owned"] | None = Query( - default="open", - description=("房间模式:open 当前开放 / ended 已经结束 / participated 参与过 / owned 自己创建的房间"), - ), - category: RoomCategory = Query( - RoomCategory.NORMAL, - description=("房间分类:NORMAL 普通歌单模式房间 / REALTIME 多人游戏房间 / DAILY_CHALLENGE 每日挑战"), - ), - status: RoomStatus | None = Query(None, description="房间状态(可选)"), - current_user: User = Security(get_current_user, scopes=["public"]), + current_user: Annotated[User, Security(get_current_user, scopes=["public"])], + mode: Annotated[ + Literal["open", "ended", "participated", "owned"] | None, + Query( + description=("房间模式:open 当前开放 / ended 已经结束 / participated 参与过 / owned 自己创建的房间"), + ), + ] = "open", + category: Annotated[ + RoomCategory, + Query( + description=("房间分类:NORMAL 普通歌单模式房间 / REALTIME 多人游戏房间 / DAILY_CHALLENGE 每日挑战"), + ), + ] = RoomCategory.NORMAL, + status: Annotated[RoomStatus | None, Query(description="房间状态(可选)")] = None, ): resp_list: list[RoomResp] = [] where_clauses: list[ColumnElement[bool]] = [col(Room.category) == category] @@ -140,8 +143,8 @@ async def _participate_room(room_id: int, user_id: int, db_room: Room, session: async def create_room( db: Database, room: APIUploadedRoom, - current_user: User = Security(get_client_user), - redis: 
Redis = Depends(get_redis), + current_user: ClientUser, + redis: Redis, ): user_id = current_user.id db_room = await create_playlist_room_from_api(db, room, user_id) @@ -162,13 +165,15 @@ async def create_room( ) async def get_room( db: Database, - room_id: int = Path(..., description="房间 ID"), - category: str = Query( - default="", - description=("房间分类:NORMAL 普通歌单模式房间 / REALTIME 多人游戏房间 / DAILY_CHALLENGE 每日挑战 (可选)"), - ), - current_user: User = Security(get_current_user, scopes=["public"]), - redis: Redis = Depends(get_redis), + room_id: Annotated[int, Path(..., description="房间 ID")], + current_user: Annotated[User, Security(get_current_user, scopes=["public"])], + redis: Redis, + category: Annotated[ + str, + Query( + description=("房间分类:NORMAL 普通歌单模式房间 / REALTIME 多人游戏房间 / DAILY_CHALLENGE 每日挑战 (可选)"), + ), + ] = "", ): db_room = (await db.exec(select(Room).where(Room.id == room_id))).first() if db_room is None: @@ -185,8 +190,8 @@ async def get_room( ) async def delete_room( db: Database, - room_id: int = Path(..., description="房间 ID"), - current_user: User = Security(get_client_user), + room_id: Annotated[int, Path(..., description="房间 ID")], + current_user: ClientUser, ): db_room = (await db.exec(select(Room).where(Room.id == room_id))).first() if db_room is None: @@ -205,10 +210,10 @@ async def delete_room( ) async def add_user_to_room( db: Database, - room_id: int = Path(..., description="房间 ID"), - user_id: int = Path(..., description="用户 ID"), - redis: Redis = Depends(get_redis), - current_user: User = Security(get_client_user), + room_id: Annotated[int, Path(..., description="房间 ID")], + user_id: Annotated[int, Path(..., description="用户 ID")], + redis: Redis, + current_user: ClientUser, ): db_room = (await db.exec(select(Room).where(Room.id == room_id))).first() if db_room is not None: @@ -229,10 +234,10 @@ async def add_user_to_room( ) async def remove_user_from_room( db: Database, - room_id: int = Path(..., description="房间 ID"), - user_id: int = Path(..., 
description="用户 ID"), - current_user: User = Security(get_client_user), - redis: Redis = Depends(get_redis), + room_id: Annotated[int, Path(..., description="房间 ID")], + user_id: Annotated[int, Path(..., description="用户 ID")], + current_user: ClientUser, + redis: Redis, ): db_room = (await db.exec(select(Room).where(Room.id == room_id))).first() if db_room is not None: @@ -273,8 +278,8 @@ class APILeaderboard(BaseModel): ) async def get_room_leaderboard( db: Database, - room_id: int = Path(..., description="房间 ID"), - current_user: User = Security(get_current_user, scopes=["public"]), + room_id: Annotated[int, Path(..., description="房间 ID")], + current_user: Annotated[User, Security(get_current_user, scopes=["public"])], ): db_room = (await db.exec(select(Room).where(Room.id == room_id))).first() if db_room is None: @@ -329,11 +334,11 @@ class RoomEvents(BaseModel): ) async def get_room_events( db: Database, - room_id: int = Path(..., description="房间 ID"), - current_user: User = Security(get_current_user, scopes=["public"]), - limit: int = Query(100, ge=1, le=1000, description="返回条数 (1-1000)"), - after: int | None = Query(None, ge=0, description="仅包含大于该事件 ID 的事件"), - before: int | None = Query(None, ge=0, description="仅包含小于该事件 ID 的事件"), + room_id: Annotated[int, Path(..., description="房间 ID")], + current_user: Annotated[User, Security(get_current_user, scopes=["public"])], + limit: Annotated[int, Query(ge=1, le=1000, description="返回条数 (1-1000)")] = 100, + after: Annotated[int | None, Query(ge=0, description="仅包含大于该事件 ID 的事件")] = None, + before: Annotated[int | None, Query(ge=0, description="仅包含小于该事件 ID 的事件")] = None, ): events = ( await db.exec( diff --git a/app/router/v2/score.py b/app/router/v2/score.py index ff5765f..0da6de0 100644 --- a/app/router/v2/score.py +++ b/app/router/v2/score.py @@ -2,6 +2,7 @@ from __future__ import annotations from datetime import UTC, date import time +from typing import Annotated from app.calculator import clamp from app.config 
import settings @@ -34,11 +35,10 @@ from app.database.score import ( process_user, ) from app.dependencies.api_version import APIVersion -from app.dependencies.database import Database, get_redis, with_db -from app.dependencies.fetcher import get_fetcher -from app.dependencies.storage import get_storage_service -from app.dependencies.user import get_client_user, get_current_user -from app.fetcher import Fetcher +from app.dependencies.database import Database, Redis, get_redis, with_db +from app.dependencies.fetcher import Fetcher, get_fetcher +from app.dependencies.storage import StorageService +from app.dependencies.user import ClientUser, get_current_user from app.log import logger from app.models.beatmap import BeatmapRankStatus from app.models.room import RoomCategory @@ -50,7 +50,6 @@ from app.models.score import ( ) from app.service.beatmap_cache_service import get_beatmap_cache_service from app.service.user_cache_service import refresh_user_cache_background -from app.storage.base import StorageService from app.utils import utcnow from .router import router @@ -69,7 +68,6 @@ from fastapi.responses import RedirectResponse from fastapi_limiter.depends import RateLimiter from httpx import HTTPError from pydantic import BaseModel -from redis.asyncio import Redis from sqlalchemy.orm import joinedload from sqlmodel import col, exists, func, select from sqlmodel.ext.asyncio.session import AsyncSession @@ -245,16 +243,18 @@ class BeatmapScores[T: ScoreResp | LegacyScoreResp](BaseModel): async def get_beatmap_scores( db: Database, api_version: APIVersion, - beatmap_id: int = Path(description="谱面 ID"), - mode: GameMode = Query(description="指定 auleset"), - legacy_only: bool = Query(None, description="是否只查询 Stable 分数"), - mods: list[str] = Query(default_factory=set, alias="mods[]", description="筛选使用的 Mods (可选,多值)"), - type: LeaderboardType = Query( - LeaderboardType.GLOBAL, - description=("排行榜类型:GLOBAL 全局 / COUNTRY 国家 / FRIENDS 好友 / TEAM 战队"), - ), - current_user: User = 
Security(get_current_user, scopes=["public"]), - limit: int = Query(50, ge=1, le=200, description="返回条数 (1-200)"), + beatmap_id: Annotated[int, Path(description="谱面 ID")], + mode: Annotated[GameMode, Query(description="指定 auleset")], + mods: Annotated[list[str], Query(default_factory=set, alias="mods[]", description="筛选使用的 Mods (可选,多值)")], + current_user: Annotated[User, Security(get_current_user, scopes=["public"])], + legacy_only: Annotated[bool | None, Query(description="是否只查询 Stable 分数")] = None, + type: Annotated[ + LeaderboardType, + Query( + description=("排行榜类型:GLOBAL 全局 / COUNTRY 国家 / FRIENDS 好友 / TEAM 战队"), + ), + ] = LeaderboardType.GLOBAL, + limit: Annotated[int, Query(ge=1, le=200, description="返回条数 (1-200)")] = 50, ): if legacy_only: raise HTTPException(status_code=404, detail="this server only contains lazer scores") @@ -294,12 +294,12 @@ async def get_beatmap_scores( async def get_user_beatmap_score( db: Database, api_version: APIVersion, - beatmap_id: int = Path(description="谱面 ID"), - user_id: int = Path(description="用户 ID"), - legacy_only: bool = Query(None, description="是否只查询 Stable 分数"), - mode: GameMode | None = Query(None, description="指定 ruleset (可选)"), - mods: str = Query(None, description="筛选使用的 Mods (暂未实现)"), - current_user: User = Security(get_current_user, scopes=["public"]), + beatmap_id: Annotated[int, Path(description="谱面 ID")], + user_id: Annotated[int, Path(description="用户 ID")], + current_user: Annotated[User, Security(get_current_user, scopes=["public"])], + legacy_only: Annotated[bool | None, Query(description="是否只查询 Stable 分数")] = None, + mode: Annotated[GameMode | None, Query(description="指定 ruleset (可选)")] = None, + mods: Annotated[str | None, Query(description="筛选使用的 Mods (暂未实现)")] = None, ): user_score = ( await db.exec( @@ -342,11 +342,11 @@ async def get_user_beatmap_score( async def get_user_all_beatmap_scores( db: Database, api_version: APIVersion, - beatmap_id: int = Path(description="谱面 ID"), - user_id: int = 
Path(description="用户 ID"), - legacy_only: bool = Query(None, description="是否只查询 Stable 分数"), - ruleset: GameMode | None = Query(None, description="指定 ruleset (可选)"), - current_user: User = Security(get_current_user, scopes=["public"]), + beatmap_id: Annotated[int, Path(description="谱面 ID")], + user_id: Annotated[int, Path(description="用户 ID")], + current_user: Annotated[User, Security(get_current_user, scopes=["public"])], + legacy_only: Annotated[bool | None, Query(description="是否只查询 Stable 分数")] = None, + ruleset: Annotated[GameMode | None, Query(description="指定 ruleset (可选)")] = None, ): all_user_scores = ( await db.exec( @@ -374,11 +374,11 @@ async def get_user_all_beatmap_scores( async def create_solo_score( background_task: BackgroundTasks, db: Database, - beatmap_id: int = Path(description="谱面 ID"), - version_hash: str = Form("", description="游戏版本哈希"), - beatmap_hash: str = Form(description="谱面文件哈希"), - ruleset_id: int = Form(..., ge=0, le=3, description="ruleset 数字 ID (0-3)"), - current_user: User = Security(get_client_user), + beatmap_id: Annotated[int, Path(description="谱面 ID")], + beatmap_hash: Annotated[str, Form(description="谱面文件哈希")], + ruleset_id: Annotated[int, Form(..., ge=0, le=3, description="ruleset 数字 ID (0-3)")], + current_user: ClientUser, + version_hash: Annotated[str, Form(description="游戏版本哈希")] = "", ): # 立即获取用户ID,避免懒加载问题 user_id = current_user.id @@ -406,12 +406,12 @@ async def create_solo_score( async def submit_solo_score( background_task: BackgroundTasks, db: Database, - beatmap_id: int = Path(description="谱面 ID"), - token: int = Path(description="成绩令牌 ID"), - info: SoloScoreSubmissionInfo = Body(description="成绩提交信息"), - current_user: User = Security(get_client_user), - redis: Redis = Depends(get_redis), - fetcher=Depends(get_fetcher), + beatmap_id: Annotated[int, Path(description="谱面 ID")], + token: Annotated[int, Path(description="成绩令牌 ID")], + info: Annotated[SoloScoreSubmissionInfo, Body(description="成绩提交信息")], + current_user: 
ClientUser, + redis: Redis, + fetcher: Fetcher, ): return await submit_score(background_task, info, beatmap_id, token, current_user, db, redis, fetcher) @@ -428,11 +428,11 @@ async def create_playlist_score( background_task: BackgroundTasks, room_id: int, playlist_id: int, - beatmap_id: int = Form(description="谱面 ID"), - beatmap_hash: str = Form(description="游戏版本哈希"), - ruleset_id: int = Form(..., ge=0, le=3, description="ruleset 数字 ID (0-3)"), - version_hash: str = Form("", description="谱面版本哈希"), - current_user: User = Security(get_client_user), + beatmap_id: Annotated[int, Form(description="谱面 ID")], + beatmap_hash: Annotated[str, Form(description="游戏版本哈希")], + ruleset_id: Annotated[int, Form(..., ge=0, le=3, description="ruleset 数字 ID (0-3)")], + current_user: ClientUser, + version_hash: Annotated[str, Form(description="谱面版本哈希")] = "", ): # 立即获取用户ID,避免懒加载问题 user_id = current_user.id @@ -496,9 +496,9 @@ async def submit_playlist_score( playlist_id: int, token: int, info: SoloScoreSubmissionInfo, - current_user: User = Security(get_client_user), - redis: Redis = Depends(get_redis), - fetcher: Fetcher = Depends(get_fetcher), + current_user: ClientUser, + redis: Redis, + fetcher: Fetcher, ): # 立即获取用户ID,避免懒加载问题 user_id = current_user.id @@ -555,9 +555,9 @@ async def index_playlist_scores( session: Database, room_id: int, playlist_id: int, - limit: int = Query(50, ge=1, le=50, description="返回条数 (1-50)"), - cursor: int = Query(2000000, alias="cursor[total_score]", description="分页游标(上一页最低分)"), - current_user: User = Security(get_current_user, scopes=["public"]), + current_user: Annotated[User, Security(get_current_user, scopes=["public"])], + limit: Annotated[int, Query(ge=1, le=50, description="返回条数 (1-50)")] = 50, + cursor: Annotated[int, Query(alias="cursor[total_score]", description="分页游标(上一页最低分)")] = 2000000, ): # 立即获取用户ID,避免懒加载问题 user_id = current_user.id @@ -623,8 +623,8 @@ async def show_playlist_score( room_id: int, playlist_id: int, score_id: int, - 
current_user: User = Security(get_client_user), - redis: Redis = Depends(get_redis), + current_user: ClientUser, + redis: Redis, ): room = await session.get(Room, room_id) if not room: @@ -692,7 +692,7 @@ async def get_user_playlist_score( room_id: int, playlist_id: int, user_id: int, - current_user: User = Security(get_client_user), + current_user: ClientUser, ): score_record = None start_time = time.time() @@ -725,8 +725,8 @@ async def get_user_playlist_score( ) async def pin_score( db: Database, - score_id: int = Path(description="成绩 ID"), - current_user: User = Security(get_client_user), + score_id: Annotated[int, Path(description="成绩 ID")], + current_user: ClientUser, ): # 立即获取用户ID,避免懒加载问题 user_id = current_user.id @@ -770,8 +770,8 @@ async def pin_score( ) async def unpin_score( db: Database, - score_id: int = Path(description="成绩 ID"), - current_user: User = Security(get_client_user), + score_id: Annotated[int, Path(description="成绩 ID")], + current_user: ClientUser, ): # 立即获取用户ID,避免懒加载问题 user_id = current_user.id @@ -805,10 +805,10 @@ async def unpin_score( ) async def reorder_score_pin( db: Database, - score_id: int = Path(description="成绩 ID"), - after_score_id: int | None = Body(default=None, description="放在该成绩之后"), - before_score_id: int | None = Body(default=None, description="放在该成绩之前"), - current_user: User = Security(get_client_user), + score_id: Annotated[int, Path(description="成绩 ID")], + current_user: ClientUser, + after_score_id: Annotated[int | None, Body(description="放在该成绩之后")] = None, + before_score_id: Annotated[int | None, Body(description="放在该成绩之前")] = None, ): # 立即获取用户ID,避免懒加载问题 user_id = current_user.id @@ -893,8 +893,8 @@ async def reorder_score_pin( async def download_score_replay( score_id: int, db: Database, - current_user: User = Security(get_current_user, scopes=["public"]), - storage_service: StorageService = Depends(get_storage_service), + current_user: Annotated[User, Security(get_current_user, scopes=["public"])], + 
storage_service: StorageService, ): # 立即获取用户ID,避免懒加载问题 user_id = current_user.id diff --git a/app/router/v2/session_verify.py b/app/router/v2/session_verify.py index 81abb5d..079a0b3 100644 --- a/app/router/v2/session_verify.py +++ b/app/router/v2/session_verify.py @@ -11,8 +11,8 @@ from app.config import settings from app.const import BACKUP_CODE_LENGTH, SUPPORT_TOTP_VERIFICATION_VER from app.database.auth import TotpKeys from app.dependencies.api_version import APIVersion -from app.dependencies.database import Database, get_redis -from app.dependencies.geoip import get_client_ip +from app.dependencies.database import Database, Redis, get_redis +from app.dependencies.geoip import IPAddress from app.dependencies.user import UserAndToken, get_client_user_and_token from app.dependencies.user_agent import UserAgentInfo from app.log import logger @@ -27,7 +27,6 @@ from .router import router from fastapi import Depends, Form, Header, HTTPException, Request, Security, status from fastapi.responses import JSONResponse, Response from pydantic import BaseModel -from redis.asyncio import Redis class VerifyMethod(BaseModel): @@ -64,10 +63,14 @@ async def verify_session( db: Database, api_version: APIVersion, user_agent: UserAgentInfo, + ip_address: IPAddress, redis: Annotated[Redis, Depends(get_redis)], - verification_key: str = Form(..., description="8 位邮件验证码或者 6 位 TOTP 代码或 10 位备份码 (g0v0 扩展支持)"), - user_and_token: UserAndToken = Security(get_client_user_and_token), - web_uuid: str | None = Header(None, include_in_schema=False, alias="X-UUID"), + verification_key: Annotated[ + str, + Form(..., description="8 位邮件验证码或者 6 位 TOTP 代码或 10 位备份码 (g0v0 扩展支持)"), + ], + user_and_token: Annotated[UserAndToken, Security(get_client_user_and_token)], + web_uuid: Annotated[str | None, Header(include_in_schema=False, alias="X-UUID")] = None, ) -> Response: current_user = user_and_token[0] token_id = user_and_token[1].id @@ -82,7 +85,6 @@ async def verify_session( else await 
LoginSessionService.get_login_method(user_id, token_id, redis) ) - ip_address = get_client_ip(request) login_method = "password" try: @@ -182,12 +184,12 @@ async def verify_session( tags=["验证"], ) async def reissue_verification_code( - request: Request, db: Database, user_agent: UserAgentInfo, api_version: APIVersion, + ip_address: IPAddress, redis: Annotated[Redis, Depends(get_redis)], - user_and_token: UserAndToken = Security(get_client_user_and_token), + user_and_token: Annotated[UserAndToken, Security(get_client_user_and_token)], ) -> SessionReissueResponse: current_user = user_and_token[0] token_id = user_and_token[1].id @@ -203,7 +205,6 @@ async def reissue_verification_code( return SessionReissueResponse(success=False, message="当前会话不支持重新发送验证码") try: - ip_address = get_client_ip(request) user_id = current_user.id success, message = await EmailVerificationService.resend_verification_code( db, @@ -233,17 +234,15 @@ async def reissue_verification_code( async def fallback_email( db: Database, user_agent: UserAgentInfo, - request: Request, + ip_address: IPAddress, redis: Annotated[Redis, Depends(get_redis)], - user_and_token: UserAndToken = Security(get_client_user_and_token), + user_and_token: Annotated[UserAndToken, Security(get_client_user_and_token)], ) -> VerifyMethod: current_user = user_and_token[0] token_id = user_and_token[1].id if not await LoginSessionService.get_login_method(current_user.id, token_id, redis): raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="当前会话不需要回退") - ip_address = get_client_ip(request) - await LoginSessionService.set_login_method(current_user.id, token_id, "mail", redis) success, message = await EmailVerificationService.resend_verification_code( db, diff --git a/app/router/v2/tags.py b/app/router/v2/tags.py index 99fccb2..644cd77 100644 --- a/app/router/v2/tags.py +++ b/app/router/v2/tags.py @@ -1,10 +1,12 @@ from __future__ import annotations +from typing import Annotated + from app.database.beatmap import 
Beatmap from app.database.beatmap_tags import BeatmapTagVote from app.database.score import Score from app.database.user import User -from app.dependencies.database import get_db +from app.dependencies.database import Database from app.dependencies.user import get_client_user from app.models.score import Rank from app.models.tags import BeatmapTags, get_all_tags, get_tag_by_id @@ -55,10 +57,10 @@ async def check_user_can_vote(user: User, beatmap_id: int, session: AsyncSession description="为指定谱面添加标签投票。", ) async def vote_beatmap_tags( - beatmap_id: int = Path(..., description="谱面 ID"), - tag_id: int = Path(..., description="标签 ID"), - session: AsyncSession = Depends(get_db), - current_user: User = Depends(get_client_user), + beatmap_id: Annotated[int, Path(..., description="谱面 ID")], + tag_id: Annotated[int, Path(..., description="标签 ID")], + session: Database, + current_user: Annotated[User, Depends(get_client_user)], ): try: get_tag_by_id(tag_id) @@ -90,10 +92,10 @@ async def vote_beatmap_tags( description="取消对指定谱面标签的投票。", ) async def devote_beatmap_tags( - beatmap_id: int = Path(..., description="谱面 ID"), - tag_id: int = Path(..., description="标签 ID"), - session: AsyncSession = Depends(get_db), - current_user: User = Depends(get_client_user), + beatmap_id: Annotated[int, Path(..., description="谱面 ID")], + tag_id: Annotated[int, Path(..., description="标签 ID")], + session: Database, + current_user: Annotated[User, Depends(get_client_user)], ): """ 取消对谱面指定标签的投票。 diff --git a/app/router/v2/user.py b/app/router/v2/user.py index 7ff444a..7928029 100644 --- a/app/router/v2/user.py +++ b/app/router/v2/user.py @@ -1,7 +1,7 @@ from __future__ import annotations from datetime import timedelta -from typing import Literal +from typing import Annotated, Literal from app.config import settings from app.const import BANCHOBOT_ID @@ -51,9 +51,12 @@ async def get_users( session: Database, request: Request, background_task: BackgroundTasks, - user_ids: list[int] = 
Query(default_factory=list, alias="ids[]", description="要查询的用户 ID 列表"), + user_ids: Annotated[list[int], Query(default_factory=list, alias="ids[]", description="要查询的用户 ID 列表")], # current_user: User = Security(get_current_user, scopes=["public"]), - include_variant_statistics: bool = Query(default=False, description="是否包含各模式的统计信息"), # TODO: future use + include_variant_statistics: Annotated[ + bool, + Query(description="是否包含各模式的统计信息"), + ] = False, # TODO: future use ): redis = get_redis() cache_service = get_user_cache_service(redis) @@ -119,9 +122,9 @@ async def get_users( ) async def get_user_events( session: Database, - user_id: int = Path(description="用户 ID"), - limit: int | None = Query(None, description="限制返回的活动数量"), - offset: int | None = Query(None, description="活动日志的偏移量"), + user_id: Annotated[int, Path(description="用户 ID")], + limit: Annotated[int | None, Query(description="限制返回的活动数量")] = None, + offset: Annotated[int | None, Query(description="活动日志的偏移量")] = None, ): db_user = await session.get(User, user_id) if db_user is None or db_user.id == BANCHOBOT_ID: @@ -147,9 +150,9 @@ async def get_user_events( ) async def get_user_kudosu( session: Database, - user_id: int = Path(description="用户 ID"), - offset: int = Query(default=0, description="偏移量"), - limit: int = Query(default=6, description="返回记录数量限制"), + user_id: Annotated[int, Path(description="用户 ID")], + offset: Annotated[int, Query(description="偏移量")] = 0, + limit: Annotated[int, Query(description="返回记录数量限制")] = 6, ): """ 获取用户的 kudosu 记录 @@ -176,8 +179,8 @@ async def get_user_kudosu( async def get_user_info_ruleset( session: Database, background_task: BackgroundTasks, - user_id: str = Path(description="用户 ID 或用户名"), - ruleset: GameMode | None = Path(description="指定 ruleset"), + user_id: Annotated[str, Path(description="用户 ID 或用户名")], + ruleset: Annotated[GameMode | None, Path(description="指定 ruleset")], # current_user: User = Security(get_current_user, scopes=["public"]), ): redis = get_redis() @@ 
-225,7 +228,7 @@ async def get_user_info( background_task: BackgroundTasks, session: Database, request: Request, - user_id: str = Path(description="用户 ID 或用户名"), + user_id: Annotated[str, Path(description="用户 ID 或用户名")], # current_user: User = Security(get_current_user, scopes=["public"]), ): redis = get_redis() @@ -274,11 +277,11 @@ async def get_user_info( async def get_user_beatmapsets( session: Database, background_task: BackgroundTasks, - user_id: int = Path(description="用户 ID"), - type: BeatmapsetType = Path(description="谱面集类型"), - current_user: User = Security(get_current_user, scopes=["public"]), - limit: int = Query(100, ge=1, le=1000, description="返回条数 (1-1000)"), - offset: int = Query(0, ge=0, description="偏移量"), + user_id: Annotated[int, Path(description="用户 ID")], + type: Annotated[BeatmapsetType, Path(description="谱面集类型")], + current_user: Annotated[User, Security(get_current_user, scopes=["public"])], + limit: Annotated[int, Query(ge=1, le=1000, description="返回条数 (1-1000)")] = 100, + offset: Annotated[int, Query(ge=0, description="偏移量")] = 0, ): redis = get_redis() cache_service = get_user_cache_service(redis) @@ -356,16 +359,17 @@ async def get_user_scores( session: Database, api_version: APIVersion, background_task: BackgroundTasks, - user_id: int = Path(description="用户 ID"), - type: Literal["best", "recent", "firsts", "pinned"] = Path( - description=("成绩类型: best 最好成绩 / recent 最近 24h 游玩成绩 / firsts 第一名成绩 / pinned 置顶成绩") - ), - legacy_only: bool = Query(False, description="是否只查询 Stable 成绩"), - include_fails: bool = Query(False, description="是否包含失败的成绩"), - mode: GameMode | None = Query(None, description="指定 ruleset (可选,默认为用户主模式)"), - limit: int = Query(100, ge=1, le=1000, description="返回条数 (1-1000)"), - offset: int = Query(0, ge=0, description="偏移量"), - current_user: User = Security(get_current_user, scopes=["public"]), + user_id: Annotated[int, Path(description="用户 ID")], + type: Annotated[ + Literal["best", "recent", "firsts", "pinned"], + 
Path(description=("成绩类型: best 最好成绩 / recent 最近 24h 游玩成绩 / firsts 第一名成绩 / pinned 置顶成绩")), + ], + current_user: Annotated[User, Security(get_current_user, scopes=["public"])], + legacy_only: Annotated[bool, Query(description="是否只查询 Stable 成绩")] = False, + include_fails: Annotated[bool, Query(description="是否包含失败的成绩")] = False, + mode: Annotated[GameMode | None, Query(description="指定 ruleset (可选,默认为用户主模式)")] = None, + limit: Annotated[int, Query(ge=1, le=1000, description="返回条数 (1-1000)")] = 100, + offset: Annotated[int, Query(ge=0, description="偏移量")] = 0, ): is_legacy_api = api_version < 20220705 redis = get_redis() diff --git a/app/signalr/router.py b/app/signalr/router.py index cf4bf97..753bea3 100644 --- a/app/signalr/router.py +++ b/app/signalr/router.py @@ -7,9 +7,8 @@ from typing import Literal import uuid from app.database import User as DBUser -from app.dependencies import get_current_user from app.dependencies.database import DBFactory, get_db_factory -from app.dependencies.user import get_current_user_and_token +from app.dependencies.user import get_current_user, get_current_user_and_token from app.log import logger from app.models.signalr import NegotiateResponse, Transport From afd5018bcd35359e13d6af9bc25f1683eb4e1e34 Mon Sep 17 00:00:00 2001 From: MingxuanGame Date: Fri, 3 Oct 2025 08:22:41 +0000 Subject: [PATCH 03/26] refactor(log): add prefix for fetcher and services --- app/fetcher/beatmap.py | 6 ++- app/fetcher/beatmap_raw.py | 7 ++- app/fetcher/beatmapset.py | 62 ++++++++-------------- app/log.py | 59 ++++++++++++++++++++- app/service/audio_proxy_service.py | 3 +- app/service/beatmapset_update_service.py | 67 +++++++----------------- app/service/verification_service.py | 6 +-- 7 files changed, 112 insertions(+), 98 deletions(-) diff --git a/app/fetcher/beatmap.py b/app/fetcher/beatmap.py index fa49cf4..3909572 100644 --- a/app/fetcher/beatmap.py +++ b/app/fetcher/beatmap.py @@ -1,10 +1,12 @@ from __future__ import annotations from 
app.database.beatmap import BeatmapResp -from app.log import logger +from app.log import fetcher_logger from ._base import BaseFetcher +logger = fetcher_logger("BeatmapFetcher") + class BeatmapFetcher(BaseFetcher): async def get_beatmap(self, beatmap_id: int | None = None, beatmap_checksum: str | None = None) -> BeatmapResp: @@ -14,7 +16,7 @@ class BeatmapFetcher(BaseFetcher): params = {"checksum": beatmap_checksum} else: raise ValueError("Either beatmap_id or beatmap_checksum must be provided.") - logger.opt(colors=True).debug(f"[BeatmapFetcher] get_beatmap: {params}") + logger.opt(colors=True).debug(f"get_beatmap: {params}") return BeatmapResp.model_validate( await self.request_api( diff --git a/app/fetcher/beatmap_raw.py b/app/fetcher/beatmap_raw.py index ccb19b8..bdf1f90 100644 --- a/app/fetcher/beatmap_raw.py +++ b/app/fetcher/beatmap_raw.py @@ -1,10 +1,11 @@ from __future__ import annotations +from app.log import fetcher_logger + from ._base import BaseFetcher from httpx import AsyncClient, HTTPError from httpx._models import Response -from loguru import logger import redis.asyncio as redis urls = [ @@ -13,12 +14,14 @@ urls = [ "https://catboy.best/osu/{beatmap_id}", ] +logger = fetcher_logger("BeatmapRawFetcher") + class BeatmapRawFetcher(BaseFetcher): async def get_beatmap_raw(self, beatmap_id: int) -> str: for url in urls: req_url = url.format(beatmap_id=beatmap_id) - logger.opt(colors=True).debug(f"[BeatmapRawFetcher] get_beatmap_raw: {req_url}") + logger.opt(colors=True).debug(f"get_beatmap_raw: {req_url}") resp = await self._request(req_url) if resp.status_code >= 400: continue diff --git a/app/fetcher/beatmapset.py b/app/fetcher/beatmapset.py index 80deb82..8801fef 100644 --- a/app/fetcher/beatmapset.py +++ b/app/fetcher/beatmapset.py @@ -7,7 +7,7 @@ import json from app.database.beatmapset import BeatmapsetResp, SearchBeatmapsetsResp from app.helpers.rate_limiter import osu_api_rate_limiter -from app.log import logger +from app.log import 
fetcher_logger from app.models.beatmap import SearchQueryModel from app.models.model import Cursor from app.utils import bg_tasks @@ -24,6 +24,9 @@ class RateLimitError(Exception): pass +logger = fetcher_logger("BeatmapsetFetcher") + + class BeatmapsetFetcher(BaseFetcher): @staticmethod def _get_homepage_queries() -> list[tuple[SearchQueryModel, Cursor]]: @@ -135,7 +138,7 @@ class BeatmapsetFetcher(BaseFetcher): return {} async def get_beatmapset(self, beatmap_set_id: int) -> BeatmapsetResp: - logger.opt(colors=True).debug(f"[BeatmapsetFetcher] get_beatmapset: {beatmap_set_id}") + logger.opt(colors=True).debug(f"get_beatmapset: {beatmap_set_id}") return BeatmapsetResp.model_validate( await self.request_api(f"https://osu.ppy.sh/api/v2/beatmapsets/{beatmap_set_id}") @@ -144,7 +147,7 @@ class BeatmapsetFetcher(BaseFetcher): async def search_beatmapset( self, query: SearchQueryModel, cursor: Cursor, redis_client: redis.Redis ) -> SearchBeatmapsetsResp: - logger.opt(colors=True).debug(f"[BeatmapsetFetcher] search_beatmapset: {query}") + logger.opt(colors=True).debug(f"search_beatmapset: {query}") # 生成缓存键 cache_key = self._generate_cache_key(query, cursor) @@ -152,17 +155,15 @@ class BeatmapsetFetcher(BaseFetcher): # 尝试从缓存获取结果 cached_result = await redis_client.get(cache_key) if cached_result: - logger.opt(colors=True).debug(f"[BeatmapsetFetcher] Cache hit for key: {cache_key}") + logger.opt(colors=True).debug(f"Cache hit for key: {cache_key}") try: cached_data = json.loads(cached_result) return SearchBeatmapsetsResp.model_validate(cached_data) except Exception as e: - logger.opt(colors=True).warning( - f"[BeatmapsetFetcher] Cache data invalid, fetching from API: {e}" - ) + logger.opt(colors=True).warning(f"Cache data invalid, fetching from API: {e}") # 缓存未命中,从 API 获取数据 - logger.opt(colors=True).debug("[BeatmapsetFetcher] Cache miss, fetching from API") + logger.opt(colors=True).debug("Cache miss, fetching from API") params = query.model_dump(exclude_none=True, 
exclude_unset=True, exclude_defaults=True) @@ -186,9 +187,7 @@ class BeatmapsetFetcher(BaseFetcher): cache_ttl = 15 * 60 # 15 分钟 await redis_client.set(cache_key, json.dumps(api_response, separators=(",", ":")), ex=cache_ttl) - logger.opt(colors=True).debug( - f"[BeatmapsetFetcher] Cached result for key: {cache_key} (TTL: {cache_ttl}s)" - ) + logger.opt(colors=True).debug(f"Cached result for key: {cache_key} (TTL: {cache_ttl}s)") resp = SearchBeatmapsetsResp.model_validate(api_response) @@ -204,9 +203,7 @@ class BeatmapsetFetcher(BaseFetcher): try: await self.prefetch_next_pages(query, api_response["cursor"], redis_client, pages=1) except RateLimitError: - logger.opt(colors=True).info( - "[BeatmapsetFetcher] Prefetch skipped due to rate limit" - ) + logger.opt(colors=True).info("Prefetch skipped due to rate limit") bg_tasks.add_task(delayed_prefetch) @@ -230,14 +227,14 @@ class BeatmapsetFetcher(BaseFetcher): # 使用当前 cursor 请求下一页 next_query = query.model_copy() - logger.opt(colors=True).debug(f"[BeatmapsetFetcher] Prefetching page {page + 1}") + logger.opt(colors=True).debug(f"Prefetching page {page + 1}") # 生成下一页的缓存键 next_cache_key = self._generate_cache_key(next_query, cursor) # 检查是否已经缓存 if await redis_client.exists(next_cache_key): - logger.opt(colors=True).debug(f"[BeatmapsetFetcher] Page {page + 1} already cached") + logger.opt(colors=True).debug(f"Page {page + 1} already cached") # 尝试从缓存获取cursor继续预取 cached_data = await redis_client.get(next_cache_key) if cached_data: @@ -282,22 +279,18 @@ class BeatmapsetFetcher(BaseFetcher): ex=prefetch_ttl, ) - logger.opt(colors=True).debug( - f"[BeatmapsetFetcher] Prefetched page {page + 1} (TTL: {prefetch_ttl}s)" - ) + logger.opt(colors=True).debug(f"Prefetched page {page + 1} (TTL: {prefetch_ttl}s)") except RateLimitError: - logger.opt(colors=True).info("[BeatmapsetFetcher] Prefetch stopped due to rate limit") + logger.opt(colors=True).info("Prefetch stopped due to rate limit") except Exception as e: - 
logger.opt(colors=True).warning(f"[BeatmapsetFetcher] Prefetch failed: {e}") + logger.opt(colors=True).warning(f"Prefetch failed: {e}") async def warmup_homepage_cache(self, redis_client: redis.Redis) -> None: """预热主页缓存""" homepage_queries = self._get_homepage_queries() - logger.opt(colors=True).info( - f"[BeatmapsetFetcher] Starting homepage cache warmup ({len(homepage_queries)} queries)" - ) + logger.opt(colors=True).info(f"Starting homepage cache warmup ({len(homepage_queries)} queries)") for i, (query, cursor) in enumerate(homepage_queries): try: @@ -309,9 +302,7 @@ class BeatmapsetFetcher(BaseFetcher): # 检查是否已经缓存 if await redis_client.exists(cache_key): - logger.opt(colors=True).debug( - f"[BeatmapsetFetcher] Query {query.sort} already cached" - ) + logger.opt(colors=True).debug(f"Query {query.sort} already cached") continue # 请求并缓存 @@ -334,24 +325,15 @@ class BeatmapsetFetcher(BaseFetcher): ex=cache_ttl, ) - logger.opt(colors=True).info( - f"[BeatmapsetFetcher] Warmed up cache for {query.sort} (TTL: {cache_ttl}s)" - ) + logger.opt(colors=True).info(f"Warmed up cache for {query.sort} (TTL: {cache_ttl}s)") if api_response.get("cursor"): try: await self.prefetch_next_pages(query, api_response["cursor"], redis_client, pages=2) except RateLimitError: - logger.opt(colors=True).info( - f"[BeatmapsetFetcher] Warmup prefetch " - f"skipped for {query.sort} due to rate limit" - ) + logger.opt(colors=True).info(f"Warmup prefetch skipped for {query.sort} due to rate limit") except RateLimitError: - logger.opt(colors=True).warning( - f"[BeatmapsetFetcher] Warmup skipped for {query.sort} due to rate limit" - ) + logger.opt(colors=True).warning(f"Warmup skipped for {query.sort} due to rate limit") except Exception as e: - logger.opt(colors=True).error( - f"[BeatmapsetFetcher] Failed to warmup cache for {query.sort}: {e}" - ) + logger.opt(colors=True).error(f"Failed to warmup cache for {query.sort}: {e}") diff --git a/app/log.py b/app/log.py index 9186f49..57b8eb0 100644 --- 
a/app/log.py +++ b/app/log.py @@ -5,6 +5,7 @@ import inspect import logging import re from sys import stdout +from types import FunctionType from typing import TYPE_CHECKING from app.config import settings @@ -107,11 +108,67 @@ class InterceptHandler(logging.Handler): return message +def get_caller_class_name(module_prefix: str = ""): + """获取调用类名/模块名,仅对指定模块前缀生效""" + stack = inspect.stack() + for frame_info in stack[2:]: + module = frame_info.frame.f_globals.get("__name__", "") + if module_prefix and not module.startswith(module_prefix): + continue + + local_vars = frame_info.frame.f_locals + # 实例方法 + if "self" in local_vars: + return local_vars["self"].__class__.__name__ + # 类方法 + if "cls" in local_vars: + return local_vars["cls"].__name__ + + # 静态方法 / 普通函数 -> 尝试通过函数名匹配类 + func_name = frame_info.function + for obj_name, obj in frame_info.frame.f_globals.items(): + if isinstance(obj, type): # 遍历模块内类 + cls = obj + attr = getattr(cls, func_name, None) + if isinstance(attr, (staticmethod, classmethod, FunctionType)): + return cls.__name__ + + # 如果没找到类,返回模块名 + return module + return None + + +def service_logger(name: str) -> Logger: + return logger.bind(service=name) + + +def fetcher_logger(name: str) -> Logger: + return logger.bind(fetcher=name) + + +def dynamic_format(record): + prefix = "" + + fetcher = record["extra"].get("fetcher") + if not fetcher: + fetcher = get_caller_class_name("app.fetcher") + if fetcher: + prefix = f"[{fetcher}] " + + service = record["extra"].get("service") + if not service: + service = get_caller_class_name("app.service") + if service: + prefix = f"[{service}] " + + return f"{{time:YYYY-MM-DD HH:mm:ss}} [{{level}}] | {prefix}{{message}}\n" + + logger.remove() logger.add( stdout, colorize=True, - format=("{time:YYYY-MM-DD HH:mm:ss} [{level}] | {message}"), + format=dynamic_format, level=settings.log_level, diagnose=settings.debug, ) diff --git a/app/service/audio_proxy_service.py b/app/service/audio_proxy_service.py index 0209997..5ff77d9 
100644 --- a/app/service/audio_proxy_service.py +++ b/app/service/audio_proxy_service.py @@ -5,9 +5,10 @@ from __future__ import annotations +from app.log import logger + from fastapi import HTTPException import httpx -from loguru import logger import redis.asyncio as redis diff --git a/app/service/beatmapset_update_service.py b/app/service/beatmapset_update_service.py index 5d80947..16199d5 100644 --- a/app/service/beatmapset_update_service.py +++ b/app/service/beatmapset_update_service.py @@ -136,9 +136,7 @@ class BeatmapsetUpdateService: status = BeatmapRankStatus(beatmapset.ranked) if status.has_pp() or status == BeatmapRankStatus.LOVED: return False - logger.opt(colors=True).debug( - f"[BeatmapsetUpdateService] added missing beatmapset {beatmapset_id} " - ) + logger.debug(f"added missing beatmapset {beatmapset_id} ") return True async def add_missing_beatmapsets(self): @@ -167,11 +165,9 @@ class BeatmapsetUpdateService: if await self.add_missing_beatmapset(missing): total += 1 except Exception as e: - logger.opt(colors=True).error( - f"[BeatmapsetUpdateService] failed to add missing beatmapset {missing}: {e}" - ) + logger.error(f"failed to add missing beatmapset {missing}: {e}") if total > 0: - logger.opt(colors=True).info(f"[BeatmapsetUpdateService] added {total} missing beatmapset") + logger.info(f"added {total} missing beatmapset") self._adding_missing = False async def add(self, beatmapset: BeatmapsetResp): @@ -211,23 +207,17 @@ class BeatmapsetUpdateService: processing = ProcessingBeatmapset(beatmapset, sync_record) next_time_delta = processing.calculate_next_sync_time() if not next_time_delta: - logger.opt(colors=True).info( - f"[BeatmapsetUpdateService] [{beatmapset.id}] " - "beatmapset has transformed to ranked or loved," - " removing from sync list" - ) + logger.info(f"[{beatmapset.id}] beatmapset has transformed to ranked or loved, removing from sync list") await session.delete(sync_record) await session.commit() return sync_record.next_sync_time = 
utcnow() + next_time_delta - logger.opt(colors=True).info( - f"[BeatmapsetUpdateService] [{beatmapset.id}] next sync at {sync_record.next_sync_time}" - ) + logger.info(f"[{beatmapset.id}] next sync at {sync_record.next_sync_time}") await session.commit() async def _update_beatmaps(self): async with with_db() as session: - logger.opt(colors=True).info("[BeatmapsetUpdateService] checking for beatmapset updates...") + logger.info("checking for beatmapset updates...") now = utcnow() records = await session.exec( select(BeatmapSync) @@ -235,22 +225,17 @@ class BeatmapsetUpdateService: .order_by(col(BeatmapSync.next_sync_time).desc()) ) for record in records: - logger.opt(colors=True).info( - f"[BeatmapsetUpdateService] [{record.beatmapset_id}] syncing..." - ) + logger.info(f"[{record.beatmapset_id}] syncing...") try: beatmapset = await self.fetcher.get_beatmapset(record.beatmapset_id) except Exception as e: if isinstance(e, HTTPError): - logger.opt(colors=True).warning( - f"[BeatmapsetUpdateService] [{record.beatmapset_id}] " + logger.warning( + f"[{record.beatmapset_id}] " f"failed to fetch beatmapset: [{e.__class__.__name__}] {e}, retrying later" ) else: - logger.opt(colors=True).exception( - f"[BeatmapsetUpdateService] [{record.beatmapset_id}] " - f"unexpected error: {e}, retrying later" - ) + logger.exception(f"[{record.beatmapset_id}] unexpected error: {e}, retrying later") record.next_sync_time = utcnow() + timedelta(seconds=MIN_DELTA) continue processing = ProcessingBeatmapset(beatmapset, record) @@ -282,18 +267,15 @@ class BeatmapsetUpdateService: next_time_delta = processing.calculate_next_sync_time() if not next_time_delta: - logger.opt(colors=True).info( - f"[BeatmapsetUpdateService] [{record.beatmapset_id}] beatmapset " + logger.info( + f"[{record.beatmapset_id}] beatmapset " "has transformed to ranked or loved," " removing from sync list" ) await session.delete(record) else: record.next_sync_time = utcnow() + next_time_delta - logger.opt(colors=True).info( 
- f"[BeatmapsetUpdateService] [{record.beatmapset_id}] " - f"next sync at {record.next_sync_time}" - ) + logger.info(f"[{record.beatmapset_id}] next sync at {record.next_sync_time}") await session.commit() async def _process_changed_beatmapset(self, beatmapset: BeatmapsetResp): @@ -323,9 +305,7 @@ class BeatmapsetUpdateService: await score.ranked_score.delete(session) total += 1 if total > 0: - logger.opt(colors=True).info( - f"[BeatmapsetUpdateService] [beatmap: {beatmap_id}] processed {total} old scores" - ) + logger.info(f"[beatmap: {beatmap_id}] processed {total} old scores") await session.commit() for change in changed: @@ -333,28 +313,17 @@ class BeatmapsetUpdateService: try: beatmap = await self.fetcher.get_beatmap(change.beatmap_id) except Exception as e: - logger.opt(colors=True).error( - f"[BeatmapsetUpdateService] [beatmap: {change.beatmap_id}] " - f"failed to fetch added beatmap: {e}, skipping" - ) + logger.error(f"[beatmap: {change.beatmap_id}] failed to fetch added beatmap: {e}, skipping") continue - logger.opt(colors=True).info( - f"[BeatmapsetUpdateService] [{beatmap.beatmapset_id}] adding beatmap {beatmap.id}" - ) + logger.info(f"[{beatmap.beatmapset_id}] adding beatmap {beatmap.id}") await Beatmap.from_resp_no_save(session, beatmap) else: try: beatmap = await self.fetcher.get_beatmap(change.beatmap_id) except Exception as e: - logger.opt(colors=True).error( - f"[BeatmapsetUpdateService] [beatmap: {change.beatmap_id}] " - f"failed to fetch changed beatmap: {e}, skipping" - ) + logger.error(f"[beatmap: {change.beatmap_id}] failed to fetch changed beatmap: {e}, skipping") continue - logger.opt(colors=True).info( - f"[BeatmapsetUpdateService] [{beatmap.beatmapset_id}] processing beatmap " - f"{beatmap.id} change {change.type}" - ) + logger.info(f"[{beatmap.beatmapset_id}] processing beatmap {beatmap.id} change {change.type}") new_db_beatmap = await Beatmap.from_resp_no_save(session, beatmap) existing_beatmap = await session.get(Beatmap, 
change.beatmap_id) if existing_beatmap: diff --git a/app/service/verification_service.py b/app/service/verification_service.py index 053486a..778dc02 100644 --- a/app/service/verification_service.py +++ b/app/service/verification_service.py @@ -430,7 +430,7 @@ class LoginSessionService: await db.commit() await db.refresh(session) - logger.info(f"[Login Session] Created session for user {user_id} (new device: {is_new_device})") + logger.info(f"Created session for user {user_id} (new device: {is_new_device})") return session @classmethod @@ -562,7 +562,7 @@ class LoginSessionService: session.device_id = device_info.id if sessions: - logger.info(f"[Login Session] Marked {len(sessions)} session(s) as verified for user {user_id}") + logger.info(f"Marked {len(sessions)} session(s) as verified for user {user_id}") await LoginSessionService.clear_login_method(user_id, token_id, redis) await db.commit() @@ -570,7 +570,7 @@ class LoginSessionService: return len(sessions) > 0 except Exception as e: - logger.error(f"[Login Session] Exception during marking sessions as verified: {e}") + logger.error(f"Exception during marking sessions as verified: {e}") return False @staticmethod From fce88272b5e80be2ac6c3a740032acacbb66a689 Mon Sep 17 00:00:00 2001 From: MingxuanGame Date: Fri, 3 Oct 2025 10:15:22 +0000 Subject: [PATCH 04/26] refactor(task): move schedulers and startup/shutdown task into `tasks` directory --- app/database/beatmapset.py | 17 +- app/helpers/geoip_helper.py | 14 +- app/log.py | 21 +- app/router/v2/cache.py | 4 +- app/scheduler/__init__.py | 7 - app/scheduler/cache_scheduler.py | 190 ------------- app/scheduler/user_cache_scheduler.py | 124 --------- app/service/__init__.py | 6 - app/service/beatmapset_update_service.py | 15 +- app/service/database_cleanup_service.py | 17 -- app/service/email_queue.py | 2 +- app/service/geoip_scheduler.py | 55 ---- app/service/init_geoip.py | 30 --- app/tasks/__init__.py | 28 ++ app/tasks/beatmapset_update.py | 21 ++ 
app/tasks/cache.py | 254 ++++++++++++++++++ .../calculate_all_user_rank.py | 15 ++ app/{service => tasks}/create_banchobot.py | 2 + app/{service => tasks}/daily_challenge.py | 3 +- app/tasks/database_cleanup.py | 21 ++ app/tasks/geoip.py | 57 ++++ app/{service => tasks}/load_achievements.py | 0 app/{service => tasks}/osu_rx_statistics.py | 13 + .../recalculate_banned_beatmap.py | 0 .../recalculate_failed_score.py | 0 main.py | 28 +- 26 files changed, 464 insertions(+), 480 deletions(-) delete mode 100644 app/scheduler/__init__.py delete mode 100644 app/scheduler/cache_scheduler.py delete mode 100644 app/scheduler/user_cache_scheduler.py delete mode 100644 app/service/geoip_scheduler.py delete mode 100644 app/service/init_geoip.py create mode 100644 app/tasks/__init__.py create mode 100644 app/tasks/beatmapset_update.py create mode 100644 app/tasks/cache.py rename app/{service => tasks}/calculate_all_user_rank.py (82%) rename app/{service => tasks}/create_banchobot.py (92%) rename app/{service => tasks}/daily_challenge.py (99%) create mode 100644 app/tasks/database_cleanup.py create mode 100644 app/tasks/geoip.py rename app/{service => tasks}/load_achievements.py (100%) rename app/{service => tasks}/osu_rx_statistics.py (78%) rename app/{service => tasks}/recalculate_banned_beatmap.py (100%) rename app/{service => tasks}/recalculate_failed_score.py (100%) diff --git a/app/database/beatmapset.py b/app/database/beatmapset.py index fa9abf4..d2df25e 100644 --- a/app/database/beatmapset.py +++ b/app/database/beatmapset.py @@ -132,21 +132,20 @@ class Beatmapset(AsyncAttrs, BeatmapsetBase, table=True): @classmethod async def from_resp_no_save(cls, session: AsyncSession, resp: "BeatmapsetResp", from_: int = 0) -> "Beatmapset": d = resp.model_dump() - update = {} if resp.nominations: - update["nominations_required"] = resp.nominations.required - update["nominations_current"] = resp.nominations.current + d["nominations_required"] = resp.nominations.required + 
d["nominations_current"] = resp.nominations.current if resp.hype: - update["hype_current"] = resp.hype.current - update["hype_required"] = resp.hype.required + d["hype_current"] = resp.hype.current + d["hype_required"] = resp.hype.required if resp.genre_id: - update["beatmap_genre"] = Genre(resp.genre_id) + d["beatmap_genre"] = Genre(resp.genre_id) elif resp.genre: - update["beatmap_genre"] = Genre(resp.genre.id) + d["beatmap_genre"] = Genre(resp.genre.id) if resp.language_id: - update["beatmap_language"] = Language(resp.language_id) + d["beatmap_language"] = Language(resp.language_id) elif resp.language: - update["beatmap_language"] = Language(resp.language.id) + d["beatmap_language"] = Language(resp.language.id) beatmapset = Beatmapset.model_validate( { **d, diff --git a/app/helpers/geoip_helper.py b/app/helpers/geoip_helper.py index 5be89c6..c0b822e 100644 --- a/app/helpers/geoip_helper.py +++ b/app/helpers/geoip_helper.py @@ -116,22 +116,20 @@ class GeoIPHelper: if age_days >= self.max_age_days: need = True logger.info( - f"[GeoIP] {eid} database is {age_days:.1f} days old " + f"{eid} database is {age_days:.1f} days old " f"(max: {self.max_age_days}), will download new version" ) else: - logger.info( - f"[GeoIP] {eid} database is {age_days:.1f} days old, still fresh (max: {self.max_age_days})" - ) + logger.info(f"{eid} database is {age_days:.1f} days old, still fresh (max: {self.max_age_days})") else: - logger.info(f"[GeoIP] {eid} database not found, will download") + logger.info(f"{eid} database not found, will download") if need: - logger.info(f"[GeoIP] Downloading {eid} database...") + logger.info(f"Downloading {eid} database...") path = self._download_and_extract(eid) - logger.info(f"[GeoIP] {eid} database downloaded successfully") + logger.info(f"{eid} database downloaded successfully") else: - logger.info(f"[GeoIP] Using existing {eid} database") + logger.info(f"Using existing {eid} database") old = self._readers.get(ed) if old: diff --git a/app/log.py 
b/app/log.py index 57b8eb0..6a8c478 100644 --- a/app/log.py +++ b/app/log.py @@ -9,6 +9,7 @@ from types import FunctionType from typing import TYPE_CHECKING from app.config import settings +from app.utils import snake_to_pascal import loguru @@ -108,7 +109,7 @@ class InterceptHandler(logging.Handler): return message -def get_caller_class_name(module_prefix: str = ""): +def get_caller_class_name(module_prefix: str = "", just_last_part: bool = True) -> str | None: """获取调用类名/模块名,仅对指定模块前缀生效""" stack = inspect.stack() for frame_info in stack[2:]: @@ -134,6 +135,8 @@ def get_caller_class_name(module_prefix: str = ""): return cls.__name__ # 如果没找到类,返回模块名 + if just_last_part: + return module.rsplit(".", 1)[-1] return module return None @@ -146,6 +149,14 @@ def fetcher_logger(name: str) -> Logger: return logger.bind(fetcher=name) +def task_logger(name: str) -> Logger: + return logger.bind(task=name) + + +def system_logger(name: str) -> Logger: + return logger.bind(system=name) + + def dynamic_format(record): prefix = "" @@ -161,6 +172,13 @@ def dynamic_format(record): if service: prefix = f"[{service}] " + task = record["extra"].get("task") + if not task: + task = get_caller_class_name("app.tasks") + if task: + task = snake_to_pascal(task) + prefix = f"[{task}] " + return f"{{time:YYYY-MM-DD HH:mm:ss}} [{{level}}] | {prefix}{{message}}\n" @@ -197,3 +215,4 @@ for logger_name in uvicorn_loggers: uvicorn_logger.propagate = False logging.getLogger("httpx").setLevel("WARNING") +logging.getLogger("apscheduler").setLevel("WARNING") diff --git a/app/router/v2/cache.py b/app/router/v2/cache.py index fe610a6..0b1a396 100644 --- a/app/router/v2/cache.py +++ b/app/router/v2/cache.py @@ -133,9 +133,7 @@ async def warmup_cache( return {"message": f"Warmed up cache for {len(request.user_ids)} users"} else: # 预热活跃用户 - from app.scheduler.user_cache_scheduler import ( - schedule_user_cache_preload_task, - ) + from app.tasks.cache import schedule_user_cache_preload_task await 
schedule_user_cache_preload_task() return {"message": f"Warmed up cache for top {request.limit} active users"} diff --git a/app/scheduler/__init__.py b/app/scheduler/__init__.py deleted file mode 100644 index d6e4f7c..0000000 --- a/app/scheduler/__init__.py +++ /dev/null @@ -1,7 +0,0 @@ -"""缓存调度器模块""" - -from __future__ import annotations - -from .cache_scheduler import start_cache_scheduler, stop_cache_scheduler - -__all__ = ["start_cache_scheduler", "stop_cache_scheduler"] diff --git a/app/scheduler/cache_scheduler.py b/app/scheduler/cache_scheduler.py deleted file mode 100644 index 9a36ddb..0000000 --- a/app/scheduler/cache_scheduler.py +++ /dev/null @@ -1,190 +0,0 @@ -from __future__ import annotations - -import asyncio - -from app.config import settings -from app.dependencies.database import get_redis -from app.dependencies.fetcher import get_fetcher -from app.log import logger -from app.scheduler.user_cache_scheduler import ( - schedule_user_cache_cleanup_task, - schedule_user_cache_preload_task, - schedule_user_cache_warmup_task, -) - - -class CacheScheduler: - """缓存调度器 - 统一管理各种缓存任务""" - - def __init__(self): - self.running = False - self.task = None - - async def start(self): - """启动调度器""" - if self.running: - return - - self.running = True - self.task = asyncio.create_task(self._run_scheduler()) - logger.info("CacheScheduler started") - - async def stop(self): - """停止调度器""" - self.running = False - if self.task: - self.task.cancel() - try: - await self.task - except asyncio.CancelledError: - pass - logger.info("CacheScheduler stopped") - - async def _run_scheduler(self): - """运行调度器主循环""" - # 启动时立即执行一次预热 - await self._warmup_cache() - - # 启动时执行一次排行榜缓存刷新 - await self._refresh_ranking_cache() - - # 启动时执行一次用户缓存预热 - await self._warmup_user_cache() - - beatmap_cache_counter = 0 - ranking_cache_counter = 0 - user_cache_counter = 0 - user_cleanup_counter = 0 - - # 从配置文件获取间隔设置 - check_interval = 5 * 60 # 5分钟检查间隔 - beatmap_cache_interval = 30 * 60 # 30分钟beatmap缓存间隔 
- ranking_cache_interval = settings.ranking_cache_refresh_interval_minutes * 60 # 从配置读取 - user_cache_interval = 15 * 60 # 15分钟用户缓存预加载间隔 - user_cleanup_interval = 60 * 60 # 60分钟用户缓存清理间隔 - - beatmap_cache_cycles = beatmap_cache_interval // check_interval - ranking_cache_cycles = ranking_cache_interval // check_interval - user_cache_cycles = user_cache_interval // check_interval - user_cleanup_cycles = user_cleanup_interval // check_interval - - while self.running: - try: - # 每5分钟检查一次 - await asyncio.sleep(check_interval) - - if not self.running: - break - - beatmap_cache_counter += 1 - ranking_cache_counter += 1 - user_cache_counter += 1 - user_cleanup_counter += 1 - - # beatmap缓存预热 - if beatmap_cache_counter >= beatmap_cache_cycles: - await self._warmup_cache() - beatmap_cache_counter = 0 - - # 排行榜缓存刷新 - if ranking_cache_counter >= ranking_cache_cycles: - await self._refresh_ranking_cache() - ranking_cache_counter = 0 - - # 用户缓存预加载 - if user_cache_counter >= user_cache_cycles: - await self._preload_user_cache() - user_cache_counter = 0 - - # 用户缓存清理 - if user_cleanup_counter >= user_cleanup_cycles: - await self._cleanup_user_cache() - user_cleanup_counter = 0 - - except asyncio.CancelledError: - break - except Exception as e: - logger.error(f"Cache scheduler error: {e}") - await asyncio.sleep(60) # 出错后等待1分钟再继续 - - async def _warmup_cache(self): - """执行缓存预热""" - try: - logger.info("Starting beatmap cache warmup...") - - fetcher = await get_fetcher() - redis = get_redis() - - # 预热主页缓存 - await fetcher.warmup_homepage_cache(redis) - - logger.info("Beatmap cache warmup completed successfully") - - except Exception as e: - logger.error(f"Beatmap cache warmup failed: {e}") - - async def _refresh_ranking_cache(self): - """刷新排行榜缓存""" - try: - logger.info("Starting ranking cache refresh...") - - redis = get_redis() - - # 导入排行榜缓存服务 - # 使用独立的数据库会话 - from app.dependencies.database import with_db - from app.service.ranking_cache_service import ( - schedule_ranking_refresh_task, - 
) - - async with with_db() as session: - await schedule_ranking_refresh_task(session, redis) - - logger.info("Ranking cache refresh completed successfully") - - except Exception as e: - logger.error(f"Ranking cache refresh failed: {e}") - - async def _warmup_user_cache(self): - """用户缓存预热""" - try: - await schedule_user_cache_warmup_task() - except Exception as e: - logger.error(f"User cache warmup failed: {e}") - - async def _preload_user_cache(self): - """用户缓存预加载""" - try: - await schedule_user_cache_preload_task() - except Exception as e: - logger.error(f"User cache preload failed: {e}") - - async def _cleanup_user_cache(self): - """用户缓存清理""" - try: - await schedule_user_cache_cleanup_task() - except Exception as e: - logger.error(f"User cache cleanup failed: {e}") - - -# Beatmap缓存调度器(保持向后兼容) -class BeatmapsetCacheScheduler(CacheScheduler): - """谱面集缓存调度器 - 为了向后兼容""" - - pass - - -# 全局调度器实例 -cache_scheduler = CacheScheduler() -# 保持向后兼容的别名 -beatmapset_cache_scheduler = BeatmapsetCacheScheduler() - - -async def start_cache_scheduler(): - """启动缓存调度器""" - await cache_scheduler.start() - - -async def stop_cache_scheduler(): - """停止缓存调度器""" - await cache_scheduler.stop() diff --git a/app/scheduler/user_cache_scheduler.py b/app/scheduler/user_cache_scheduler.py deleted file mode 100644 index 0589daf..0000000 --- a/app/scheduler/user_cache_scheduler.py +++ /dev/null @@ -1,124 +0,0 @@ -""" -用户缓存预热任务调度器 -""" - -from __future__ import annotations - -import asyncio -from datetime import timedelta - -from app.config import settings -from app.database.score import Score -from app.dependencies.database import get_redis -from app.log import logger -from app.service.user_cache_service import get_user_cache_service -from app.utils import utcnow - -from sqlmodel import col, func, select - - -async def schedule_user_cache_preload_task(): - """定时用户缓存预加载任务""" - # 默认启用用户缓存预加载,除非明确禁用 - enable_user_cache_preload = getattr(settings, "enable_user_cache_preload", True) - if not 
enable_user_cache_preload: - return - - try: - logger.info("Starting user cache preload task...") - - redis = get_redis() - cache_service = get_user_cache_service(redis) - - # 使用独立的数据库会话 - from app.dependencies.database import with_db - - async with with_db() as session: - # 获取最近24小时内活跃的用户(提交过成绩的用户) - recent_time = utcnow() - timedelta(hours=24) - - score_count = func.count().label("score_count") - active_user_ids = ( - await session.exec( - select(Score.user_id, score_count) - .where(col(Score.ended_at) >= recent_time) - .group_by(col(Score.user_id)) - .order_by(score_count.desc()) # 使用标签对象而不是字符串 - .limit(settings.user_cache_max_preload_users) # 使用配置中的限制 - ) - ).all() - - if active_user_ids: - user_ids = [row[0] for row in active_user_ids] - await cache_service.preload_user_cache(session, user_ids) - logger.info(f"Preloaded cache for {len(user_ids)} active users") - else: - logger.info("No active users found for cache preload") - - logger.info("User cache preload task completed successfully") - - except Exception as e: - logger.error(f"User cache preload task failed: {e}") - - -async def schedule_user_cache_warmup_task(): - """定时用户缓存预热任务 - 预加载排行榜前100用户""" - try: - logger.info("Starting user cache warmup task...") - - redis = get_redis() - cache_service = get_user_cache_service(redis) - - # 使用独立的数据库会话 - from app.dependencies.database import with_db - - async with with_db() as session: - # 获取全球排行榜前100的用户 - from app.database.statistics import UserStatistics - from app.models.score import GameMode - - for mode in GameMode: - try: - top_users = ( - await session.exec( - select(UserStatistics.user_id) - .where(UserStatistics.mode == mode) - .order_by(col(UserStatistics.pp).desc()) - .limit(100) - ) - ).all() - - if top_users: - user_ids = list(top_users) - await cache_service.preload_user_cache(session, user_ids) - logger.info(f"Warmed cache for top 100 users in {mode}") - - # 避免过载,稍微延迟 - await asyncio.sleep(1) - - except Exception as e: - logger.error(f"Failed to warm 
cache for {mode}: {e}") - continue - - logger.info("User cache warmup task completed successfully") - - except Exception as e: - logger.error(f"User cache warmup task failed: {e}") - - -async def schedule_user_cache_cleanup_task(): - """定时用户缓存清理任务""" - try: - logger.info("Starting user cache cleanup task...") - - redis = get_redis() - - # 清理过期的用户缓存(Redis会自动处理TTL,这里主要记录统计信息) - cache_service = get_user_cache_service(redis) - stats = await cache_service.get_cache_stats() - - logger.info(f"User cache stats: {stats}") - logger.info("User cache cleanup task completed successfully") - - except Exception as e: - logger.error(f"User cache cleanup task failed: {e}") diff --git a/app/service/__init__.py b/app/service/__init__.py index ced3b75..8ddddd9 100644 --- a/app/service/__init__.py +++ b/app/service/__init__.py @@ -1,14 +1,8 @@ from __future__ import annotations -from .daily_challenge import create_daily_challenge_room -from .recalculate_banned_beatmap import recalculate_banned_beatmap -from .recalculate_failed_score import recalculate_failed_score from .room import create_playlist_room, create_playlist_room_from_api __all__ = [ - "create_daily_challenge_room", "create_playlist_room", "create_playlist_room_from_api", - "recalculate_banned_beatmap", - "recalculate_failed_score", ] diff --git a/app/service/beatmapset_update_service.py b/app/service/beatmapset_update_service.py index 16199d5..a03f91b 100644 --- a/app/service/beatmapset_update_service.py +++ b/app/service/beatmapset_update_service.py @@ -1,6 +1,6 @@ from __future__ import annotations -from datetime import datetime, timedelta +from datetime import timedelta from enum import Enum import math import random @@ -12,7 +12,6 @@ from app.database.beatmap_sync import BeatmapSync, SavedBeatmapMeta from app.database.beatmapset import Beatmapset, BeatmapsetResp from app.database.score import Score from app.dependencies.database import with_db -from app.dependencies.scheduler import get_scheduler from 
app.dependencies.storage import get_storage_service from app.log import logger from app.models.beatmap import BeatmapRankStatus @@ -347,15 +346,3 @@ def init_beatmapset_update_service(fetcher: "Fetcher") -> BeatmapsetUpdateServic def get_beatmapset_update_service() -> BeatmapsetUpdateService: assert service is not None, "BeatmapsetUpdateService is not initialized" return service - - -@get_scheduler().scheduled_job( - "interval", - id="update_beatmaps", - minutes=SCHEDULER_INTERVAL_MINUTES, - next_run_time=datetime.now() + timedelta(minutes=1), -) -async def beatmapset_update_job(): - if service is not None: - bg_tasks.add_task(service.add_missing_beatmapsets) - await service._update_beatmaps() diff --git a/app/service/database_cleanup_service.py b/app/service/database_cleanup_service.py index 3acac4a..c8ffd0f 100644 --- a/app/service/database_cleanup_service.py +++ b/app/service/database_cleanup_service.py @@ -8,8 +8,6 @@ from datetime import timedelta from app.database.auth import OAuthToken from app.database.verification import EmailVerification, LoginSession, TrustedDevice -from app.dependencies.database import with_db -from app.dependencies.scheduler import get_scheduler from app.log import logger from app.utils import utcnow @@ -434,18 +432,3 @@ class DatabaseCleanupService: "outdated_trusted_devices": 0, "total_cleanable": 0, } - - -@get_scheduler().scheduled_job( - "interval", - id="cleanup_database", - hours=1, -) -async def scheduled_cleanup_job(): - async with with_db() as session: - logger.debug("Starting database cleanup...") - results = await DatabaseCleanupService.run_full_cleanup(session) - total = sum(results.values()) - if total > 0: - logger.debug(f"Cleanup completed, total records cleaned: {total}") - return results diff --git a/app/service/email_queue.py b/app/service/email_queue.py index 9c3ec62..7a07291 100644 --- a/app/service/email_queue.py +++ b/app/service/email_queue.py @@ -17,7 +17,7 @@ import uuid from app.config import settings from 
app.log import logger -from app.utils import bg_tasks # 添加同步Redis导入 +from app.utils import bg_tasks import redis as sync_redis diff --git a/app/service/geoip_scheduler.py b/app/service/geoip_scheduler.py deleted file mode 100644 index 1169496..0000000 --- a/app/service/geoip_scheduler.py +++ /dev/null @@ -1,55 +0,0 @@ -""" -[GeoIP] Scheduled Update Service -Periodically update the MaxMind GeoIP database -""" - -from __future__ import annotations - -import asyncio - -from app.config import settings -from app.dependencies.geoip import get_geoip_helper -from app.dependencies.scheduler import get_scheduler -from app.log import logger - - -async def update_geoip_database(): - """ - Asynchronous task to update the GeoIP database - """ - try: - logger.info("[GeoIP] Starting scheduled GeoIP database update...") - geoip = get_geoip_helper() - - # Run the synchronous update method in a background thread - loop = asyncio.get_event_loop() - await loop.run_in_executor(None, lambda: geoip.update(force=False)) - - logger.info("[GeoIP] Scheduled GeoIP database update completed successfully") - except Exception as e: - logger.error(f"[GeoIP] Scheduled GeoIP database update failed: {e}") - - -def schedule_geoip_updates(): - """ - Schedule the GeoIP database update task - """ - scheduler = get_scheduler() - - # Use settings to configure the update time: update once a week - scheduler.add_job( - update_geoip_database, - "cron", - day_of_week=settings.geoip_update_day, - hour=settings.geoip_update_hour, - minute=0, - id="geoip_weekly_update", - name="Weekly GeoIP database update", - replace_existing=True, - ) - - logger.info( - f"[GeoIP] Scheduled update task registered: " - f"every week on day {settings.geoip_update_day} " - f"at {settings.geoip_update_hour}:00" - ) diff --git a/app/service/init_geoip.py b/app/service/init_geoip.py deleted file mode 100644 index 95a2edf..0000000 --- a/app/service/init_geoip.py +++ /dev/null @@ -1,30 +0,0 @@ -""" -[GeoIP] Initialization Service 
-Initialize the GeoIP database when the application starts -""" - -from __future__ import annotations - -import asyncio - -from app.dependencies.geoip import get_geoip_helper -from app.log import logger - - -async def init_geoip(): - """ - Asynchronously initialize the GeoIP database - """ - try: - geoip = get_geoip_helper() - logger.info("[GeoIP] Initializing GeoIP database...") - - # Run the synchronous update method in a background thread - # force=False means only download if files don't exist or are expired - loop = asyncio.get_event_loop() - await loop.run_in_executor(None, lambda: geoip.update(force=False)) - - logger.info("[GeoIP] GeoIP database initialization completed") - except Exception as e: - logger.error(f"[GeoIP] GeoIP database initialization failed: {e}") - # Do not raise an exception to avoid blocking application startup diff --git a/app/tasks/__init__.py b/app/tasks/__init__.py new file mode 100644 index 0000000..6b3332a --- /dev/null +++ b/app/tasks/__init__.py @@ -0,0 +1,28 @@ +# ruff: noqa: F401 +from __future__ import annotations + +from . 
import ( + beatmapset_update, + database_cleanup, + recalculate_banned_beatmap, + recalculate_failed_score, +) +from .cache import start_cache_tasks, stop_cache_tasks +from .calculate_all_user_rank import calculate_user_rank +from .create_banchobot import create_banchobot +from .daily_challenge import daily_challenge_job, process_daily_challenge_top +from .geoip import init_geoip +from .load_achievements import load_achievements +from .osu_rx_statistics import create_rx_statistics + +__all__ = [ + "calculate_user_rank", + "create_banchobot", + "create_rx_statistics", + "daily_challenge_job", + "init_geoip", + "load_achievements", + "process_daily_challenge_top", + "start_cache_tasks", + "stop_cache_tasks", +] diff --git a/app/tasks/beatmapset_update.py b/app/tasks/beatmapset_update.py new file mode 100644 index 0000000..4e1492e --- /dev/null +++ b/app/tasks/beatmapset_update.py @@ -0,0 +1,21 @@ +from __future__ import annotations + +from datetime import datetime, timedelta + +from app.dependencies.scheduler import get_scheduler +from app.service.beatmapset_update_service import service +from app.utils import bg_tasks + +SCHEDULER_INTERVAL_MINUTES = 2 + + +@get_scheduler().scheduled_job( + "interval", + id="update_beatmaps", + minutes=SCHEDULER_INTERVAL_MINUTES, + next_run_time=datetime.now() + timedelta(minutes=1), +) +async def beatmapset_update_job(): + if service is not None: + bg_tasks.add_task(service.add_missing_beatmapsets) + await service._update_beatmaps() diff --git a/app/tasks/cache.py b/app/tasks/cache.py new file mode 100644 index 0000000..4a684f6 --- /dev/null +++ b/app/tasks/cache.py @@ -0,0 +1,254 @@ +"""缓存相关的 APScheduler 任务入口。""" + +from __future__ import annotations + +import asyncio +from datetime import UTC, timedelta +from typing import Final + +from app.config import settings +from app.database.score import Score +from app.dependencies.database import get_redis +from app.dependencies.fetcher import get_fetcher +from app.dependencies.scheduler 
import get_scheduler +from app.log import logger +from app.service.ranking_cache_service import schedule_ranking_refresh_task +from app.service.user_cache_service import get_user_cache_service +from app.utils import utcnow + +from apscheduler.jobstores.base import JobLookupError +from apscheduler.triggers.interval import IntervalTrigger +from sqlmodel import col, func, select + +CACHE_JOB_IDS: Final[dict[str, str]] = { + "beatmap_warmup": "cache:beatmap:warmup", + "ranking_refresh": "cache:ranking:refresh", + "user_preload": "cache:user:preload", + "user_cleanup": "cache:user:cleanup", +} + + +async def warmup_cache() -> None: + """执行缓存预热""" + try: + logger.info("Starting beatmap cache warmup...") + + fetcher = await get_fetcher() + redis = get_redis() + + await fetcher.warmup_homepage_cache(redis) + + logger.info("Beatmap cache warmup completed successfully") + + except Exception as e: + logger.error("Beatmap cache warmup failed: %s", e) + + +async def refresh_ranking_cache() -> None: + """刷新排行榜缓存""" + try: + logger.info("Starting ranking cache refresh...") + + redis = get_redis() + + from app.dependencies.database import with_db + + async with with_db() as session: + await schedule_ranking_refresh_task(session, redis) + + logger.info("Ranking cache refresh completed successfully") + + except Exception as e: + logger.error("Ranking cache refresh failed: %s", e) + + +async def schedule_user_cache_preload_task() -> None: + """定时用户缓存预加载任务""" + enable_user_cache_preload = getattr(settings, "enable_user_cache_preload", True) + if not enable_user_cache_preload: + return + + try: + logger.info("Starting user cache preload task...") + + redis = get_redis() + cache_service = get_user_cache_service(redis) + + from app.dependencies.database import with_db + + async with with_db() as session: + recent_time = utcnow() - timedelta(hours=24) + + score_count = func.count().label("score_count") + active_user_ids = ( + await session.exec( + select(Score.user_id, score_count) + 
.where(col(Score.ended_at) >= recent_time) + .group_by(col(Score.user_id)) + .order_by(score_count.desc()) + .limit(settings.user_cache_max_preload_users) + ) + ).all() + + if active_user_ids: + user_ids = [row[0] for row in active_user_ids] + await cache_service.preload_user_cache(session, user_ids) + logger.info("Preloaded cache for %s active users", len(user_ids)) + else: + logger.info("No active users found for cache preload") + + logger.info("User cache preload task completed successfully") + + except Exception as e: + logger.error("User cache preload task failed: %s", e) + + +async def schedule_user_cache_warmup_task() -> None: + """定时用户缓存预热任务 - 预加载排行榜前100用户""" + try: + logger.info("Starting user cache warmup task...") + + redis = get_redis() + cache_service = get_user_cache_service(redis) + + from app.dependencies.database import with_db + + async with with_db() as session: + from app.database.statistics import UserStatistics + from app.models.score import GameMode + + for mode in GameMode: + try: + top_users = ( + await session.exec( + select(UserStatistics.user_id) + .where(UserStatistics.mode == mode) + .order_by(col(UserStatistics.pp).desc()) + .limit(100) + ) + ).all() + + if top_users: + user_ids = list(top_users) + await cache_service.preload_user_cache(session, user_ids) + logger.info("Warmed cache for top 100 users in %s", mode) + + await asyncio.sleep(1) + + except Exception as e: + logger.error("Failed to warm cache for %s: %s", mode, e) + continue + + logger.info("User cache warmup task completed successfully") + + except Exception as e: + logger.error("User cache warmup task failed: %s", e) + + +async def schedule_user_cache_cleanup_task() -> None: + """定时用户缓存清理任务""" + try: + logger.info("Starting user cache cleanup task...") + + redis = get_redis() + + cache_service = get_user_cache_service(redis) + stats = await cache_service.get_cache_stats() + + logger.info("User cache stats: %s", stats) + logger.info("User cache cleanup task completed 
successfully") + + except Exception as e: + logger.error("User cache cleanup task failed: %s", e) + + +async def warmup_user_cache() -> None: + """用户缓存预热""" + try: + await schedule_user_cache_warmup_task() + except Exception as e: + logger.error("User cache warmup failed: %s", e) + + +async def preload_user_cache() -> None: + """用户缓存预加载""" + try: + await schedule_user_cache_preload_task() + except Exception as e: + logger.error("User cache preload failed: %s", e) + + +async def cleanup_user_cache() -> None: + """用户缓存清理""" + try: + await schedule_user_cache_cleanup_task() + except Exception as e: + logger.error("User cache cleanup failed: %s", e) + + +def register_cache_jobs() -> None: + """注册缓存相关 APScheduler 任务""" + scheduler = get_scheduler() + + scheduler.add_job( + warmup_cache, + trigger=IntervalTrigger(minutes=30, timezone=UTC), + id=CACHE_JOB_IDS["beatmap_warmup"], + replace_existing=True, + coalesce=True, + max_instances=1, + misfire_grace_time=300, + ) + + scheduler.add_job( + refresh_ranking_cache, + trigger=IntervalTrigger( + minutes=settings.ranking_cache_refresh_interval_minutes, + timezone=UTC, + ), + id=CACHE_JOB_IDS["ranking_refresh"], + replace_existing=True, + coalesce=True, + max_instances=1, + misfire_grace_time=300, + ) + + scheduler.add_job( + preload_user_cache, + trigger=IntervalTrigger(minutes=15, timezone=UTC), + id=CACHE_JOB_IDS["user_preload"], + replace_existing=True, + coalesce=True, + max_instances=1, + misfire_grace_time=300, + ) + + scheduler.add_job( + cleanup_user_cache, + trigger=IntervalTrigger(hours=1, timezone=UTC), + id=CACHE_JOB_IDS["user_cleanup"], + replace_existing=True, + coalesce=True, + max_instances=1, + misfire_grace_time=300, + ) + + logger.info("Registered cache APScheduler jobs") + + +async def start_cache_tasks() -> None: + """注册 APScheduler 任务并执行启动时任务""" + register_cache_jobs() + logger.info("Cache APScheduler jobs registered; running initial tasks") + + +async def stop_cache_tasks() -> None: + """移除 APScheduler 
任务""" + scheduler = get_scheduler() + for job_id in CACHE_JOB_IDS.values(): + try: + scheduler.remove_job(job_id) + except JobLookupError: + continue + + logger.info("Cache APScheduler jobs removed") diff --git a/app/service/calculate_all_user_rank.py b/app/tasks/calculate_all_user_rank.py similarity index 82% rename from app/service/calculate_all_user_rank.py rename to app/tasks/calculate_all_user_rank.py index 1d1395a..f742d25 100644 --- a/app/service/calculate_all_user_rank.py +++ b/app/tasks/calculate_all_user_rank.py @@ -6,6 +6,7 @@ from app.database import RankHistory, UserStatistics from app.database.rank_history import RankTop from app.dependencies.database import with_db from app.dependencies.scheduler import get_scheduler +from app.log import logger from app.models.score import GameMode from app.utils import utcnow @@ -16,8 +17,10 @@ from sqlmodel import col, exists, select, update async def calculate_user_rank(is_today: bool = False): today = utcnow().date() target_date = today if is_today else today - timedelta(days=1) + logger.info("Starting user rank calculation for {}", target_date) async with with_db() as session: for gamemode in GameMode: + logger.info("Calculating ranks for {} on {}", gamemode.name, target_date) users = await session.exec( select(UserStatistics) .where( @@ -31,6 +34,7 @@ async def calculate_user_rank(is_today: bool = False): ) ) rank = 1 + processed_users = 0 for user in users: is_exist = ( await session.exec( @@ -82,4 +86,15 @@ async def calculate_user_rank(is_today: bool = False): rank_top.date = today rank += 1 + processed_users += 1 await session.commit() + if processed_users > 0: + logger.info( + "Updated ranks for {} on {} ({} users)", + gamemode.name, + target_date, + processed_users, + ) + else: + logger.info("No users found for {} on {}", gamemode.name, target_date) + logger.success("User rank calculation completed for {}", target_date) diff --git a/app/service/create_banchobot.py b/app/tasks/create_banchobot.py 
similarity index 92% rename from app/service/create_banchobot.py rename to app/tasks/create_banchobot.py index 16605c5..6148ff1 100644 --- a/app/service/create_banchobot.py +++ b/app/tasks/create_banchobot.py @@ -4,6 +4,7 @@ from app.const import BANCHOBOT_ID from app.database.statistics import UserStatistics from app.database.user import User from app.dependencies.database import with_db +from app.log import logger from app.models.score import GameMode from sqlmodel import exists, select @@ -27,3 +28,4 @@ async def create_banchobot(): statistics = UserStatistics(user_id=BANCHOBOT_ID, mode=GameMode.OSU) session.add(statistics) await session.commit() + logger.success("BanchoBot user created") diff --git a/app/service/daily_challenge.py b/app/tasks/daily_challenge.py similarity index 99% rename from app/service/daily_challenge.py rename to app/tasks/daily_challenge.py index c0f5aac..e8a7fa3 100644 --- a/app/service/daily_challenge.py +++ b/app/tasks/daily_challenge.py @@ -17,10 +17,9 @@ from app.log import logger from app.models.metadata_hub import DailyChallengeInfo from app.models.mods import APIMod, get_available_mods from app.models.room import RoomCategory +from app.service.room import create_playlist_room from app.utils import are_same_weeks, utcnow -from .room import create_playlist_room - from sqlmodel import col, select diff --git a/app/tasks/database_cleanup.py b/app/tasks/database_cleanup.py new file mode 100644 index 0000000..264e5c1 --- /dev/null +++ b/app/tasks/database_cleanup.py @@ -0,0 +1,21 @@ +from __future__ import annotations + +from app.dependencies.database import with_db +from app.dependencies.scheduler import get_scheduler +from app.log import logger +from app.service.database_cleanup_service import DatabaseCleanupService + + +@get_scheduler().scheduled_job( + "interval", + id="cleanup_database", + hours=1, +) +async def scheduled_cleanup_job(): + async with with_db() as session: + logger.info("Starting database cleanup...") + results = await 
DatabaseCleanupService.run_full_cleanup(session) + total = sum(results.values()) + if total > 0: + logger.success(f"Cleanup completed, total records cleaned: {total}") + return results diff --git a/app/tasks/geoip.py b/app/tasks/geoip.py new file mode 100644 index 0000000..0d22ed8 --- /dev/null +++ b/app/tasks/geoip.py @@ -0,0 +1,57 @@ +""" +Scheduled Update Service +Periodically update the MaxMind GeoIP database +""" + +from __future__ import annotations + +import asyncio + +from app.config import settings +from app.dependencies.geoip import get_geoip_helper +from app.dependencies.scheduler import get_scheduler +from app.log import logger + + +@get_scheduler().scheduled_job( + "cron", + day_of_week=settings.geoip_update_day, + hour=settings.geoip_update_hour, + minute=0, + id="geoip_weekly_update", + name="Weekly GeoIP database update", +) +async def update_geoip_database(): + """ + Asynchronous task to update the GeoIP database + """ + try: + logger.info("Starting scheduled GeoIP database update...") + geoip = get_geoip_helper() + + # Run the synchronous update method in a background thread + loop = asyncio.get_event_loop() + await loop.run_in_executor(None, lambda: geoip.update(force=False)) + + logger.info("Scheduled GeoIP database update completed successfully") + except Exception as e: + logger.error(f"Scheduled GeoIP database update failed: {e}") + + +async def init_geoip(): + """ + Asynchronously initialize the GeoIP database + """ + try: + geoip = get_geoip_helper() + logger.info("Initializing GeoIP database...") + + # Run the synchronous update method in a background thread + # force=False means only download if files don't exist or are expired + loop = asyncio.get_event_loop() + await loop.run_in_executor(None, lambda: geoip.update(force=False)) + + logger.info("GeoIP database initialization completed") + except Exception as e: + logger.error(f"GeoIP database initialization failed: {e}") + # Do not raise an exception to avoid blocking application startup 
diff --git a/app/service/load_achievements.py b/app/tasks/load_achievements.py similarity index 100% rename from app/service/load_achievements.py rename to app/tasks/load_achievements.py diff --git a/app/service/osu_rx_statistics.py b/app/tasks/osu_rx_statistics.py similarity index 78% rename from app/service/osu_rx_statistics.py rename to app/tasks/osu_rx_statistics.py index ed82189..732d727 100644 --- a/app/service/osu_rx_statistics.py +++ b/app/tasks/osu_rx_statistics.py @@ -5,6 +5,7 @@ from app.const import BANCHOBOT_ID from app.database.statistics import UserStatistics from app.database.user import User from app.dependencies.database import with_db +from app.log import logger from app.models.score import GameMode from sqlalchemy import exists @@ -14,6 +15,10 @@ from sqlmodel import select async def create_rx_statistics(): async with with_db() as session: users = (await session.exec(select(User.id))).all() + total_users = len(users) + logger.info("Ensuring RX/AP statistics exist for %s users", total_users) + rx_created = 0 + ap_created = 0 for i in users: if i == BANCHOBOT_ID: continue @@ -35,6 +40,7 @@ async def create_rx_statistics(): if not is_exist: statistics_rx = UserStatistics(mode=mode, user_id=i) session.add(statistics_rx) + rx_created += 1 if settings.enable_ap: is_exist = ( await session.exec( @@ -47,4 +53,11 @@ async def create_rx_statistics(): if not is_exist: statistics_ap = UserStatistics(mode=GameMode.OSUAP, user_id=i) session.add(statistics_ap) + ap_created += 1 await session.commit() + if rx_created or ap_created: + logger.success( + "Created %s RX statistics rows and %s AP statistics rows during backfill", + rx_created, + ap_created, + ) diff --git a/app/service/recalculate_banned_beatmap.py b/app/tasks/recalculate_banned_beatmap.py similarity index 100% rename from app/service/recalculate_banned_beatmap.py rename to app/tasks/recalculate_banned_beatmap.py diff --git a/app/service/recalculate_failed_score.py 
b/app/tasks/recalculate_failed_score.py similarity index 100% rename from app/service/recalculate_failed_score.py rename to app/tasks/recalculate_failed_score.py diff --git a/main.py b/main.py index d5c7f29..d74acfa 100644 --- a/main.py +++ b/main.py @@ -24,18 +24,21 @@ from app.router import ( ) from app.router.redirect import redirect_router from app.router.v1 import api_v1_public_router -from app.scheduler.cache_scheduler import start_cache_scheduler, stop_cache_scheduler from app.service.beatmap_download_service import download_service from app.service.beatmapset_update_service import init_beatmapset_update_service -from app.service.calculate_all_user_rank import calculate_user_rank -from app.service.create_banchobot import create_banchobot -from app.service.daily_challenge import daily_challenge_job, process_daily_challenge_top from app.service.email_queue import start_email_processor, stop_email_processor -from app.service.geoip_scheduler import schedule_geoip_updates -from app.service.init_geoip import init_geoip -from app.service.load_achievements import load_achievements -from app.service.osu_rx_statistics import create_rx_statistics from app.service.redis_message_system import redis_message_system +from app.tasks import ( + calculate_user_rank, + create_banchobot, + create_rx_statistics, + daily_challenge_job, + init_geoip, + load_achievements, + process_daily_challenge_top, + start_cache_tasks, + stop_cache_tasks, +) from app.utils import bg_tasks, utcnow from fastapi import FastAPI, HTTPException, Request @@ -56,17 +59,16 @@ async def lifespan(app: FastAPI): await init_geoip() # 初始化 GeoIP 数据库 await create_rx_statistics() await calculate_user_rank(True) - start_scheduler() - schedule_geoip_updates() # 调度 GeoIP 定时更新任务 await daily_challenge_job() await process_daily_challenge_top() await create_banchobot() await start_email_processor() # 启动邮件队列处理器 await download_service.start_health_check() # 启动下载服务健康检查 - await start_cache_scheduler() # 启动缓存调度器 + await 
start_cache_tasks() # 启动缓存调度器 init_beatmapset_update_service(fetcher) # 初始化谱面集更新服务 redis_message_system.start() # 启动 Redis 消息系统 load_achievements() + start_scheduler() # 显示资源代理状态 if settings.enable_asset_proxy: @@ -75,9 +77,9 @@ async def lifespan(app: FastAPI): # on shutdown yield bg_tasks.stop() - stop_scheduler() redis_message_system.stop() # 停止 Redis 消息系统 - await stop_cache_scheduler() # 停止缓存调度器 + await stop_cache_tasks() # 停止缓存调度器 + stop_scheduler() await download_service.stop_health_check() # 停止下载服务健康检查 await stop_email_processor() # 停止邮件队列处理器 await engine.dispose() From d23f32f08dd0886f0b07600db76a8f6879cad30e Mon Sep 17 00:00:00 2001 From: MingxuanGame Date: Fri, 3 Oct 2025 11:44:47 +0000 Subject: [PATCH 05/26] refactor(log): refactor the whole project format: {time:YYYY-MM-DD HH:mm:ss} [{level}] | {name} | {message} {name} is: - Uvicorn: log from uvicorn server (#228B22) - Service: log from class of `app.service` (blue) - Fetcher: log from fetchers (magenta) - Task: log from `app.tasks` (#FFD700) - System: log from `system_logger` (red) - Normal: log from `log(name)` (#FFC1C1) - Default: the module name of caller if you are writing services or tasks, you can just call `logger.`, we will pack it with name `Service` or `Task` if you want to print fetcher logs, system-related logs, or normal logs, use `logger = (fetcher_logger / system_logger / log)(name)` --- app/auth.py | 10 ++--- app/calculator.py | 4 +- app/database/score.py | 54 +++++++++++------------ app/dependencies/fetcher.py | 6 ++- app/fetcher/_base.py | 5 ++- app/helpers/geoip_helper.py | 2 +- app/log.py | 55 ++++++++++++++++-------- app/middleware/verify_session.py | 16 ++++--- app/models/mods.py | 4 +- app/models/tags.py | 3 +- app/router/auth.py | 10 ++--- app/router/lio.py | 3 +- app/router/notification/message.py | 5 ++- app/router/notification/server.py | 8 ++-- app/router/v2/score.py | 3 +- app/router/v2/session_verify.py | 6 +-- app/router/v2/user.py | 4 +- 
app/service/beatmapset_update_service.py | 46 ++++++++++++-------- app/service/database_cleanup_service.py | 39 +++++++---------- app/service/email_queue.py | 5 --- app/service/email_service.py | 4 +- app/service/password_reset_service.py | 22 +++++----- app/service/subscribers/chat.py | 6 ++- app/service/verification_service.py | 26 ++++++----- app/tasks/beatmapset_update.py | 8 ++-- app/tasks/daily_challenge.py | 10 ++--- main.py | 20 ++++----- 27 files changed, 210 insertions(+), 174 deletions(-) diff --git a/app/auth.py b/app/auth.py index 8a8bf01..6e43f8b 100644 --- a/app/auth.py +++ b/app/auth.py @@ -13,7 +13,7 @@ from app.database import ( User, ) from app.database.auth import TotpKeys -from app.log import logger +from app.log import log from app.models.totp import FinishStatus, StartCreateTotpKeyResp from app.utils import utcnow @@ -31,6 +31,8 @@ pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto") # bcrypt 缓存(模拟应用状态缓存) bcrypt_cache = {} +logger = log("Auth") + def validate_username(username: str) -> list[str]: """验证用户名""" @@ -253,7 +255,7 @@ async def store_token( tokens_to_delete = active_tokens[max_tokens_per_client - 1 :] for token in tokens_to_delete: await db.delete(token) - logger.info(f"[Auth] Cleaned up {len(tokens_to_delete)} old tokens for user {user_id}") + logger.info(f"Cleaned up {len(tokens_to_delete)} old tokens for user {user_id}") # 检查是否有重复的 access_token duplicate_token = (await db.exec(select(OAuthToken).where(OAuthToken.access_token == access_token))).first() @@ -274,9 +276,7 @@ async def store_token( await db.commit() await db.refresh(token_record) - logger.info( - f"[Auth] Created new token for user {user_id}, client {client_id} (multi-device: {allow_multiple_devices})" - ) + logger.info(f"Created new token for user {user_id}, client {client_id} (multi-device: {allow_multiple_devices})") return token_record diff --git a/app/calculator.py b/app/calculator.py index 36b5f2b..a408fcf 100644 --- a/app/calculator.py +++ 
b/app/calculator.py @@ -7,7 +7,7 @@ import math from typing import TYPE_CHECKING from app.config import settings -from app.log import logger +from app.log import log from app.models.beatmap import BeatmapAttributes from app.models.mods import APIMod, parse_enum_to_str from app.models.score import GameMode @@ -18,6 +18,8 @@ from redis.asyncio import Redis from sqlmodel import col, exists, select from sqlmodel.ext.asyncio.session import AsyncSession +logger = log("Calculator") + try: import rosu_pp_py as rosu except ImportError: diff --git a/app/database/score.py b/app/database/score.py index 1aaf8b8..fd9668f 100644 --- a/app/database/score.py +++ b/app/database/score.py @@ -16,7 +16,7 @@ from app.calculator import ( from app.config import settings from app.database.team import TeamMember from app.dependencies.database import get_redis -from app.log import logger +from app.log import log from app.models.beatmap import BeatmapRankStatus from app.models.model import ( CurrentUserAttributes, @@ -74,6 +74,8 @@ from sqlmodel.ext.asyncio.session import AsyncSession if TYPE_CHECKING: from app.fetcher import Fetcher +logger = log("Score") + class ScoreBase(AsyncAttrs, SQLModel, UTCBaseModel): # 基本字段 @@ -854,8 +856,7 @@ async def process_score( ) -> Score: gamemode = GameMode.from_int(info.ruleset_id).to_special_mode(info.mods) logger.info( - "[Score] Creating score for user {user_id} | beatmap={beatmap_id} " - "ruleset={ruleset} passed={passed} total={total}", + "Creating score for user {user_id} | beatmap={beatmap_id} ruleset={ruleset} passed={passed} total={total}", user_id=user.id, beatmap_id=beatmap_id, ruleset=gamemode, @@ -897,7 +898,7 @@ async def process_score( ) session.add(score) logger.debug( - "[Score] Score staged for commit | token={token} mods={mods} total_hits={hits}", + "Score staged for commit | token={token} mods={mods} total_hits={hits}", token=score_token.id, mods=info.mods, hits=sum(info.statistics.values()) if info.statistics else 0, @@ -910,7 +911,7 
@@ async def process_score( async def _process_score_pp(score: Score, session: AsyncSession, redis: Redis, fetcher: "Fetcher"): if score.pp != 0: logger.debug( - "[Score] Skipping PP calculation for score {score_id} | already set {pp:.2f}", + "Skipping PP calculation for score {score_id} | already set {pp:.2f}", score_id=score.id, pp=score.pp, ) @@ -918,7 +919,7 @@ async def _process_score_pp(score: Score, session: AsyncSession, redis: Redis, f can_get_pp = score.passed and score.ranked and mods_can_get_pp(int(score.gamemode), score.mods) if not can_get_pp: logger.debug( - "[Score] Skipping PP calculation for score {score_id} | passed={passed} ranked={ranked} mods={mods}", + "Skipping PP calculation for score {score_id} | passed={passed} ranked={ranked} mods={mods}", score_id=score.id, passed=score.passed, ranked=score.ranked, @@ -928,10 +929,10 @@ async def _process_score_pp(score: Score, session: AsyncSession, redis: Redis, f pp, successed = await pre_fetch_and_calculate_pp(score, session, redis, fetcher) if not successed: await redis.rpush("score:need_recalculate", score.id) # pyright: ignore[reportGeneralTypeIssues] - logger.warning("[Score] Queued score {score_id} for PP recalculation", score_id=score.id) + logger.warning("Queued score {score_id} for PP recalculation", score_id=score.id) return score.pp = pp - logger.info("[Score] Calculated PP for score {score_id} | pp={pp:.2f}", score_id=score.id, pp=pp) + logger.info("Calculated PP for score {score_id} | pp={pp:.2f}", score_id=score.id, pp=pp) user_id = score.user_id beatmap_id = score.beatmap_id previous_pp_best = await get_user_best_pp_in_beatmap(session, beatmap_id, user_id, score.gamemode) @@ -947,7 +948,7 @@ async def _process_score_pp(score: Score, session: AsyncSession, redis: Redis, f session.add(best_score) await session.delete(previous_pp_best) if previous_pp_best else None logger.info( - "[Score] Updated PP best for user {user_id} | score_id={score_id} pp={pp:.2f}", + "Updated PP best for user 
{user_id} | score_id={score_id} pp={pp:.2f}", user_id=user_id, score_id=score.id, pp=score.pp, @@ -966,15 +967,14 @@ async def _process_score_events(score: Score, session: AsyncSession): if rank_global == 0 or total_users == 0: logger.debug( - "[Score] Skipping event creation for score {score_id} | " - "rank_global={rank_global} total_users={total_users}", + "Skipping event creation for score {score_id} | rank_global={rank_global} total_users={total_users}", score_id=score.id, rank_global=rank_global, total_users=total_users, ) return logger.debug( - "[Score] Processing events for score {score_id} | rank_global={rank_global} total_users={total_users}", + "Processing events for score {score_id} | rank_global={rank_global} total_users={total_users}", score_id=score.id, rank_global=rank_global, total_users=total_users, @@ -1003,7 +1003,7 @@ async def _process_score_events(score: Score, session: AsyncSession): } session.add(rank_event) logger.info( - "[Score] Registered rank event for user {user_id} | score_id={score_id} rank={rank}", + "Registered rank event for user {user_id} | score_id={score_id} rank={rank}", user_id=score.user_id, score_id=score.id, rank=rank_global, @@ -1045,12 +1045,12 @@ async def _process_score_events(score: Score, session: AsyncSession): } session.add(rank_lost_event) logger.info( - "[Score] Registered rank lost event | displaced_user={user_id} new_score_id={score_id}", + "Registered rank lost event | displaced_user={user_id} new_score_id={score_id}", user_id=displaced_score.user_id, score_id=score.id, ) logger.debug( - "[Score] Event processing committed for score {score_id}", + "Event processing committed for score {score_id}", score_id=score.id, ) @@ -1074,7 +1074,7 @@ async def _process_statistics( session, score.beatmap_id, user.id, mod_for_save, score.gamemode ) logger.debug( - "[Score] Existing best scores for user {user_id} | global={global_id} mod={mod_id}", + "Existing best scores for user {user_id} | global={global_id} 
mod={mod_id}", user_id=user.id, global_id=previous_score_best.score_id if previous_score_best else None, mod_id=previous_score_best_mod.score_id if previous_score_best_mod else None, @@ -1104,7 +1104,7 @@ async def _process_statistics( statistics.total_score += score.total_score difference = score.total_score - previous_score_best.total_score if previous_score_best else score.total_score logger.debug( - "[Score] Score delta computed for {score_id}: {difference}", + "Score delta computed for {score_id}: {difference}", score_id=score.id, difference=difference, ) @@ -1151,7 +1151,7 @@ async def _process_statistics( ) ) logger.info( - "[Score] Created new best score entry for user {user_id} | score_id={score_id} mods={mods}", + "Created new best score entry for user {user_id} | score_id={score_id} mods={mods}", user_id=user.id, score_id=score.id, mods=mod_for_save, @@ -1163,7 +1163,7 @@ async def _process_statistics( previous_score_best.rank = score.rank previous_score_best.score_id = score.id logger.info( - "[Score] Updated existing best score for user {user_id} | score_id={score_id} total={total}", + "Updated existing best score for user {user_id} | score_id={score_id} total={total}", user_id=user.id, score_id=score.id, total=score.total_score, @@ -1175,7 +1175,7 @@ async def _process_statistics( if difference > 0: # 下方的 if 一定会触发。将高分设置为此分数,删除自己防止重复的 score_id logger.info( - "[Score] Replacing global best score for user {user_id} | old_score_id={old_score_id}", + "Replacing global best score for user {user_id} | old_score_id={old_score_id}", user_id=user.id, old_score_id=previous_score_best.score_id, ) @@ -1188,7 +1188,7 @@ async def _process_statistics( previous_score_best_mod.rank = score.rank previous_score_best_mod.score_id = score.id logger.info( - "[Score] Replaced mod-specific best for user {user_id} | mods={mods} score_id={score_id}", + "Replaced mod-specific best for user {user_id} | mods={mods} score_id={score_id}", user_id=user.id, mods=mod_for_save, 
score_id=score.id, @@ -1202,14 +1202,14 @@ async def _process_statistics( mouthly_playcount.count += 1 statistics.play_time += playtime logger.debug( - "[Score] Recorded playtime {playtime}s for score {score_id} (user {user_id})", + "Recorded playtime {playtime}s for score {score_id} (user {user_id})", playtime=playtime, score_id=score.id, user_id=user.id, ) else: logger.debug( - "[Score] Playtime {playtime}s for score {score_id} did not meet validity checks", + "Playtime {playtime}s for score {score_id} did not meet validity checks", playtime=playtime, score_id=score.id, ) @@ -1242,7 +1242,7 @@ async def _process_statistics( if add_to_db: session.add(mouthly_playcount) logger.debug( - "[Score] Created monthly playcount record for user {user_id} ({year}-{month})", + "Created monthly playcount record for user {user_id} ({year}-{month})", user_id=user.id, year=mouthly_playcount.year, month=mouthly_playcount.month, @@ -1262,7 +1262,7 @@ async def process_user( score_id = score.id user_id = user.id logger.info( - "[Score] Processing score {score_id} for user {user_id} on beatmap {beatmap_id}", + "Processing score {score_id} for user {user_id} on beatmap {beatmap_id}", score_id=score_id, user_id=user_id, beatmap_id=score.beatmap_id, @@ -1287,14 +1287,14 @@ async def process_user( score_ = (await session.exec(select(Score).where(Score.id == score_id).options(joinedload(Score.beatmap)))).first() if score_ is None: logger.warning( - "[Score] Score {score_id} disappeared after commit, skipping event processing", + "Score {score_id} disappeared after commit, skipping event processing", score_id=score_id, ) return await _process_score_events(score_, session) await session.commit() logger.info( - "[Score] Finished processing score {score_id} for user {user_id}", + "Finished processing score {score_id} for user {user_id}", score_id=score_id, user_id=user_id, ) diff --git a/app/dependencies/fetcher.py b/app/dependencies/fetcher.py index ccc3f06..be75cc3 100644 --- 
a/app/dependencies/fetcher.py +++ b/app/dependencies/fetcher.py @@ -5,7 +5,7 @@ from typing import Annotated from app.config import settings from app.dependencies.database import get_redis from app.fetcher import Fetcher as OriginFetcher -from app.log import logger +from app.log import fetcher_logger from fastapi import Depends @@ -29,7 +29,9 @@ async def get_fetcher() -> OriginFetcher: if refresh_token: fetcher.refresh_token = str(refresh_token) if not fetcher.access_token or not fetcher.refresh_token: - logger.opt(colors=True).info(f"Login to initialize fetcher: {fetcher.authorize_url}") + fetcher_logger("Fetcher").opt(colors=True).info( + f"Login to initialize fetcher: {fetcher.authorize_url}" + ) return fetcher diff --git a/app/fetcher/_base.py b/app/fetcher/_base.py index 86fc646..cf41be0 100644 --- a/app/fetcher/_base.py +++ b/app/fetcher/_base.py @@ -5,7 +5,7 @@ import time from urllib.parse import quote from app.dependencies.database import get_redis -from app.log import logger +from app.log import fetcher_logger from httpx import AsyncClient @@ -16,6 +16,9 @@ class TokenAuthError(Exception): pass +logger = fetcher_logger("Fetcher") + + class BaseFetcher: def __init__( self, diff --git a/app/helpers/geoip_helper.py b/app/helpers/geoip_helper.py index c0b822e..3d65371 100644 --- a/app/helpers/geoip_helper.py +++ b/app/helpers/geoip_helper.py @@ -55,7 +55,7 @@ class GeoIPHelper: - 临时目录退出后自动清理 """ if not self.license_key: - raise ValueError("缺少 MaxMind License Key,请传入或设置环境变量 MAXMIND_LICENSE_KEY") + raise ValueError("MaxMind License Key is missing. 
Please configure it via env MAXMIND_LICENSE_KEY.") url = f"{BASE_URL}?edition_id={edition_id}&license_key={self.license_key}&suffix=tar.gz" diff --git a/app/log.py b/app/log.py index 6a8c478..e171a0d 100644 --- a/app/log.py +++ b/app/log.py @@ -39,16 +39,18 @@ class InterceptHandler(logging.Handler): depth += 1 message = record.getMessage() - + _logger = logger if record.name == "uvicorn.access": message = self._format_uvicorn_access_log(message) color = True + _logger = uvicorn_logger() elif record.name == "uvicorn.error": message = self._format_uvicorn_error_log(message) + _logger = uvicorn_logger() color = True else: color = False - logger.opt(depth=depth, exception=record.exc_info, colors=color).log(level, message) + _logger.opt(depth=depth, exception=record.exc_info, colors=color).log(level, message) def _format_uvicorn_error_log(self, message: str) -> str: websocket_pattern = r'(\d+\.\d+\.\d+\.\d+:\d+)\s*-\s*"WebSocket\s+([^"]+)"\s+([\w\[\]]+)' @@ -110,7 +112,6 @@ class InterceptHandler(logging.Handler): def get_caller_class_name(module_prefix: str = "", just_last_part: bool = True) -> str | None: - """获取调用类名/模块名,仅对指定模块前缀生效""" stack = inspect.stack() for frame_info in stack[2:]: module = frame_info.frame.f_globals.get("__name__", "") @@ -157,29 +158,49 @@ def system_logger(name: str) -> Logger: return logger.bind(system=name) -def dynamic_format(record): - prefix = "" +def uvicorn_logger() -> Logger: + return logger.bind(uvicorn="Uvicorn") - fetcher = record["extra"].get("fetcher") - if not fetcher: - fetcher = get_caller_class_name("app.fetcher") - if fetcher: - prefix = f"[{fetcher}] " + +def log(name: str) -> Logger: + return logger.bind(real_name=name) + + +def dynamic_format(record): + name = "" + + uvicorn = record["extra"].get("uvicorn") + if uvicorn: + name = f"{uvicorn}" service = record["extra"].get("service") if not service: service = get_caller_class_name("app.service") if service: - prefix = f"[{service}] " + name = f"{service}" + + fetcher = 
record["extra"].get("fetcher") + if not fetcher: + fetcher = get_caller_class_name("app.fetcher") + if fetcher: + name = f"{fetcher}" task = record["extra"].get("task") if not task: task = get_caller_class_name("app.tasks") if task: task = snake_to_pascal(task) - prefix = f"[{task}] " + name = f"{task}" - return f"{{time:YYYY-MM-DD HH:mm:ss}} [{{level}}] | {prefix}{{message}}\n" + system = record["extra"].get("system") + if system: + name = f"{system}" + + if name == "": + real_name = record["extra"].get("real_name", "") or record["name"] + name = f"{real_name}" + + return f"{{time:YYYY-MM-DD HH:mm:ss}} [{{level}}] | {name} | {{message}}\n" logger.remove() @@ -195,7 +216,7 @@ logger.add( rotation="00:00", retention="30 days", colorize=False, - format="{time:YYYY-MM-DD HH:mm:ss} {level} | {message}", + format=dynamic_format, level=settings.log_level, diagnose=settings.debug, encoding="utf8", @@ -210,9 +231,9 @@ uvicorn_loggers = [ ] for logger_name in uvicorn_loggers: - uvicorn_logger = logging.getLogger(logger_name) - uvicorn_logger.handlers = [InterceptHandler()] - uvicorn_logger.propagate = False + _uvicorn_logger = logging.getLogger(logger_name) + _uvicorn_logger.handlers = [InterceptHandler()] + _uvicorn_logger.propagate = False logging.getLogger("httpx").setLevel("WARNING") logging.getLogger("apscheduler").setLevel("WARNING") diff --git a/app/middleware/verify_session.py b/app/middleware/verify_session.py index 76277fc..2a9c911 100644 --- a/app/middleware/verify_session.py +++ b/app/middleware/verify_session.py @@ -14,7 +14,7 @@ from app.const import SUPPORT_TOTP_VERIFICATION_VER from app.database.user import User from app.database.verification import LoginSession from app.dependencies.database import get_redis, with_db -from app.log import logger +from app.log import log from app.service.verification_service import LoginSessionService from app.utils import extract_user_agent @@ -25,6 +25,8 @@ from sqlmodel import select from sqlmodel.ext.asyncio.session 
import AsyncSession from starlette.middleware.base import BaseHTTPMiddleware +logger = log("Middleware") + class VerifySessionMiddleware(BaseHTTPMiddleware): """会话验证中间件 @@ -84,7 +86,7 @@ class VerifySessionMiddleware(BaseHTTPMiddleware): return await self._initiate_verification(request, session_state) except Exception as e: - logger.error(f"[Verify Session Middleware] Error: {e}") + logger.error(f"Error: {e}") # 出错时允许请求继续,避免阻塞 return await call_next(request) @@ -145,7 +147,7 @@ class VerifySessionMiddleware(BaseHTTPMiddleware): return user except Exception as e: - logger.debug(f"[Verify Session Middleware] Error getting user: {e}") + logger.debug(f"Error getting user: {e}") return None async def _get_session_state(self, request: Request, user: User) -> SessionState | None: @@ -178,7 +180,7 @@ class VerifySessionMiddleware(BaseHTTPMiddleware): return SessionState(session, user, redis, db, api_version) except Exception as e: - logger.error(f"[Verify Session Middleware] Error getting session state: {e}") + logger.error(f"Error getting session state: {e}") return None async def _initiate_verification(self, request: Request, state: SessionState) -> Response: @@ -195,7 +197,7 @@ class VerifySessionMiddleware(BaseHTTPMiddleware): ) except Exception as e: - logger.error(f"[Verify Session Middleware] Error initiating verification: {e}") + logger.error(f"Error initiating verification: {e}") return JSONResponse( status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, content={"error": "Verification initiation failed"} ) @@ -261,7 +263,7 @@ class SessionState: self.session.web_uuid, ) except Exception as e: - logger.error(f"[Session State] Error marking verified: {e}") + logger.error(f"Error marking verified: {e}") async def issue_mail_if_needed(self) -> None: """如果需要,发送验证邮件""" @@ -274,7 +276,7 @@ class SessionState: self.db, self.redis, self.user.id, self.user.username, self.user.email, None, None ) except Exception as e: - logger.error(f"[Session State] Error issuing mail: {e}") 
+ logger.error(f"Error issuing mail: {e}") def get_key(self) -> str: """获取会话密钥""" diff --git a/app/models/mods.py b/app/models/mods.py index 88a375d..a928cab 100644 --- a/app/models/mods.py +++ b/app/models/mods.py @@ -5,7 +5,7 @@ import json from typing import Any, Literal, NotRequired, TypedDict from app.config import settings as app_settings -from app.log import logger +from app.log import log from app.path import CONFIG_DIR, STATIC_DIR from pydantic import ConfigDict, Field, create_model @@ -268,7 +268,7 @@ def generate_ranked_mod_settings(enable_all: bool = False): for mod_acronym in ruleset_mods: result[ruleset_id][mod_acronym] = {} if not enable_all: - logger.info("ENABLE_ALL_MODS_PP is deprecated, transformed to config/ranked_mods.json") + log("Mod").info("ENABLE_ALL_MODS_PP is deprecated, transformed to config/ranked_mods.json") result["$mods_checksum"] = checksum # pyright: ignore[reportArgumentType] ranked_mods_file.write_text(json.dumps(result, indent=4)) diff --git a/app/models/tags.py b/app/models/tags.py index e1415ca..abe728f 100644 --- a/app/models/tags.py +++ b/app/models/tags.py @@ -2,7 +2,7 @@ from __future__ import annotations import json -from app.log import logger +from app.log import log from app.path import STATIC_DIR from pydantic import BaseModel @@ -16,6 +16,7 @@ class BeatmapTags(BaseModel): ALL_TAGS: dict[int, BeatmapTags] = {} +logger = log("BeatmapTag") def load_tags() -> None: diff --git a/app/router/auth.py b/app/router/auth.py index f2e7336..ff163b3 100644 --- a/app/router/auth.py +++ b/app/router/auth.py @@ -22,7 +22,7 @@ from app.database.statistics import UserStatistics from app.dependencies.database import Database, Redis from app.dependencies.geoip import GeoIPService, IPAddress from app.dependencies.user_agent import UserAgentInfo -from app.log import logger +from app.log import log from app.models.extended_auth import ExtendedTokenResponse from app.models.oauth import ( OAuthErrorResponse, @@ -44,6 +44,8 @@ from 
fastapi.responses import JSONResponse from sqlalchemy import text from sqlmodel import exists, select +logger = log("Auth") + def create_oauth_error_response(error: str, description: str, hint: str, status_code: int = 400): """创建标准的 OAuth 错误响应""" @@ -360,9 +362,7 @@ async def oauth_token( await LoginSessionService.mark_session_verified( db, redis, user_id, token_id, ip_address, user_agent, web_uuid ) - logger.debug( - f"[Auth] New location login detected but email verification disabled, auto-verifying user {user_id}" - ) + logger.debug(f"New location login detected but email verification disabled, auto-verifying user {user_id}") else: # 不是新设备登录,正常登录 await LoginLogService.record_login( @@ -505,7 +505,7 @@ async def oauth_token( ) # 打印jwt - logger.info(f"[Auth] Generated JWT for user {user_id}: {access_token}") + logger.info(f"Generated JWT for user {user_id}: {access_token}") return TokenResponse( access_token=access_token, diff --git a/app/router/lio.py b/app/router/lio.py index 2cabebb..28d8f35 100644 --- a/app/router/lio.py +++ b/app/router/lio.py @@ -14,7 +14,7 @@ from app.database.user import User from app.dependencies.database import Database, Redis from app.dependencies.fetcher import Fetcher from app.dependencies.storage import StorageService -from app.log import logger +from app.log import log from app.models.multiplayer_hub import PlaylistItem as HubPlaylistItem from app.models.room import MatchType, QueueMode, RoomCategory, RoomStatus from app.utils import utcnow @@ -27,6 +27,7 @@ from sqlalchemy import update from sqlmodel import col, select router = APIRouter(prefix="/_lio", include_in_schema=False) +logger = log("LegacyIO") async def _ensure_room_chat_channel( diff --git a/app/router/notification/message.py b/app/router/notification/message.py index 41ac452..8dec1cb 100644 --- a/app/router/notification/message.py +++ b/app/router/notification/message.py @@ -16,7 +16,7 @@ from app.database.user import User from app.dependencies.database import Database, 
Redis from app.dependencies.param import BodyOrForm from app.dependencies.user import get_current_user -from app.log import logger +from app.log import log from app.models.notification import ChannelMessage, ChannelMessageTeam from app.router.v2 import api_v2_router as router from app.service.redis_message_system import redis_message_system @@ -33,6 +33,9 @@ class KeepAliveResp(BaseModel): silences: list[UserSilenceResp] = Field(default_factory=list) +logger = log("Chat") + + @router.post( "/chat/ack", name="保持连接", diff --git a/app/router/notification/server.py b/app/router/notification/server.py index 29fc663..9940801 100644 --- a/app/router/notification/server.py +++ b/app/router/notification/server.py @@ -14,7 +14,7 @@ from app.dependencies.database import ( with_db, ) from app.dependencies.user import get_current_user_and_token -from app.log import logger +from app.log import log from app.models.chat import ChatEvent from app.models.notification import NotificationDetail from app.service.subscribers.chat import ChatSubscriber @@ -26,6 +26,8 @@ from fastapi.websockets import WebSocketState from sqlmodel import select from sqlmodel.ext.asyncio.session import AsyncSession +logger = log("NotificationServer") + class ChatServer: def __init__(self): @@ -285,10 +287,10 @@ async def _listen_stop(ws: WebSocket, user_id: int, factory: DBFactory): await ws.close(code=1000) break except WebSocketDisconnect as e: - logger.info(f"[NotificationServer] Client {user_id} disconnected: {e.code}, {e.reason}") + logger.info(f"Client {user_id} disconnected: {e.code}, {e.reason}") except RuntimeError as e: if "disconnect message" in str(e): - logger.info(f"[NotificationServer] Client {user_id} closed the connection.") + logger.info(f"Client {user_id} closed the connection.") else: logger.exception(f"RuntimeError in client {user_id}: {e}") except Exception: diff --git a/app/router/v2/score.py b/app/router/v2/score.py index 0da6de0..2b47c78 100644 --- a/app/router/v2/score.py +++ 
b/app/router/v2/score.py @@ -39,7 +39,7 @@ from app.dependencies.database import Database, Redis, get_redis, with_db from app.dependencies.fetcher import Fetcher, get_fetcher from app.dependencies.storage import StorageService from app.dependencies.user import ClientUser, get_current_user -from app.log import logger +from app.log import log from app.models.beatmap import BeatmapRankStatus from app.models.room import RoomCategory from app.models.score import ( @@ -73,6 +73,7 @@ from sqlmodel import col, exists, func, select from sqlmodel.ext.asyncio.session import AsyncSession READ_SCORE_TIMEOUT = 10 +logger = log("Score") async def _process_user_achievement(score_id: int): diff --git a/app/router/v2/session_verify.py b/app/router/v2/session_verify.py index 079a0b3..424b988 100644 --- a/app/router/v2/session_verify.py +++ b/app/router/v2/session_verify.py @@ -15,7 +15,7 @@ from app.dependencies.database import Database, Redis, get_redis from app.dependencies.geoip import IPAddress from app.dependencies.user import UserAndToken, get_client_user_and_token from app.dependencies.user_agent import UserAgentInfo -from app.log import logger +from app.log import log from app.service.login_log_service import LoginLogService from app.service.verification_service import ( EmailVerificationService, @@ -254,7 +254,7 @@ async def fallback_email( user_agent, ) if not success: - logger.error( - f"[Email Fallback] Failed to send fallback email to user {current_user.id} (token: {token_id}): {message}" + log("Verification").error( + f"Failed to send fallback email to user {current_user.id} (token: {token_id}): {message}" ) return VerifyMethod() diff --git a/app/router/v2/user.py b/app/router/v2/user.py index 7928029..98a0b93 100644 --- a/app/router/v2/user.py +++ b/app/router/v2/user.py @@ -19,7 +19,7 @@ from app.database.user import SEARCH_INCLUDED from app.dependencies.api_version import APIVersion from app.dependencies.database import Database, get_redis from app.dependencies.user 
import get_current_user -from app.log import logger +from app.log import log from app.models.score import GameMode from app.models.user import BeatmapsetType from app.service.asset_proxy_helper import process_response_assets @@ -336,7 +336,7 @@ async def get_user_beatmapsets( try: await cache_service.cache_user_beatmapsets(user_id, type.value, resp, limit, offset) except Exception as e: - logger.error(f"Error caching user beatmapsets for user {user_id}, type {type.value}: {e}") + log("Beatmapset").error(f"Error caching user beatmapsets for user {user_id}, type {type.value}: {e}") background_task.add_task(cache_beatmapsets) diff --git a/app/service/beatmapset_update_service.py b/app/service/beatmapset_update_service.py index a03f91b..8852146 100644 --- a/app/service/beatmapset_update_service.py +++ b/app/service/beatmapset_update_service.py @@ -166,7 +166,7 @@ class BeatmapsetUpdateService: except Exception as e: logger.error(f"failed to add missing beatmapset {missing}: {e}") if total > 0: - logger.info(f"added {total} missing beatmapset") + logger.opt(colors=True).info(f"added {total} missing beatmapset") self._adding_missing = False async def add(self, beatmapset: BeatmapsetResp): @@ -206,12 +206,14 @@ class BeatmapsetUpdateService: processing = ProcessingBeatmapset(beatmapset, sync_record) next_time_delta = processing.calculate_next_sync_time() if not next_time_delta: - logger.info(f"[{beatmapset.id}] beatmapset has transformed to ranked or loved, removing from sync list") + logger.opt(colors=True).info( + f"[{beatmapset.id}] beatmapset has transformed to ranked or loved, removing from sync list" + ) await session.delete(sync_record) await session.commit() return sync_record.next_sync_time = utcnow() + next_time_delta - logger.info(f"[{beatmapset.id}] next sync at {sync_record.next_sync_time}") + logger.opt(colors=True).info(f"[{beatmapset.id}] next sync at {sync_record.next_sync_time}") await session.commit() async def _update_beatmaps(self): @@ -224,17 +226,19 
@@ class BeatmapsetUpdateService: .order_by(col(BeatmapSync.next_sync_time).desc()) ) for record in records: - logger.info(f"[{record.beatmapset_id}] syncing...") + logger.opt(colors=True).info(f"[{record.beatmapset_id}] syncing...") try: beatmapset = await self.fetcher.get_beatmapset(record.beatmapset_id) except Exception as e: if isinstance(e, HTTPError): - logger.warning( - f"[{record.beatmapset_id}] " + logger.opt(colors=True).warning( + f"[{record.beatmapset_id}] " f"failed to fetch beatmapset: [{e.__class__.__name__}] {e}, retrying later" ) else: - logger.exception(f"[{record.beatmapset_id}] unexpected error: {e}, retrying later") + logger.opt(colors=True).exception( + f"[{record.beatmapset_id}] unexpected error: {e}, retrying later" + ) record.next_sync_time = utcnow() + timedelta(seconds=MIN_DELTA) continue processing = ProcessingBeatmapset(beatmapset, record) @@ -266,15 +270,16 @@ class BeatmapsetUpdateService: next_time_delta = processing.calculate_next_sync_time() if not next_time_delta: - logger.info( - f"[{record.beatmapset_id}] beatmapset " - "has transformed to ranked or loved," - " removing from sync list" + logger.opt(colors=True).info( + f"[{beatmapset.id}] beatmapset has transformed to ranked or loved," + f" removing from sync list" ) await session.delete(record) else: record.next_sync_time = utcnow() + next_time_delta - logger.info(f"[{record.beatmapset_id}] next sync at {record.next_sync_time}") + logger.opt(colors=True).info( + f"[{record.beatmapset_id}] next sync at {record.next_sync_time}" + ) await session.commit() async def _process_changed_beatmapset(self, beatmapset: BeatmapsetResp): @@ -304,7 +309,7 @@ class BeatmapsetUpdateService: await score.ranked_score.delete(session) total += 1 if total > 0: - logger.info(f"[beatmap: {beatmap_id}] processed {total} old scores") + logger.opt(colors=True).info(f"[beatmap: {beatmap_id}] processed {total} old scores") await session.commit() for change in changed: @@ -312,17 +317,24 @@ class 
BeatmapsetUpdateService: try: beatmap = await self.fetcher.get_beatmap(change.beatmap_id) except Exception as e: - logger.error(f"[beatmap: {change.beatmap_id}] failed to fetch added beatmap: {e}, skipping") + logger.opt(colors=True).error( + f"[beatmap: {change.beatmap_id}] failed to fetch added beatmap: {e}, skipping" + ) continue - logger.info(f"[{beatmap.beatmapset_id}] adding beatmap {beatmap.id}") + logger.opt(colors=True).info(f"[{beatmap.beatmapset_id}] adding beatmap {beatmap.id}") await Beatmap.from_resp_no_save(session, beatmap) else: try: beatmap = await self.fetcher.get_beatmap(change.beatmap_id) except Exception as e: - logger.error(f"[beatmap: {change.beatmap_id}] failed to fetch changed beatmap: {e}, skipping") + logger.opt(colors=True).error( + f"[beatmap: {change.beatmap_id}] failed to fetch changed beatmap: {e}, skipping" + ) continue - logger.info(f"[{beatmap.beatmapset_id}] processing beatmap {beatmap.id} change {change.type}") + logger.opt(colors=True).info( + f"[{beatmap.beatmapset_id}] processing beatmap {beatmap.id} " + f"change {change.type}" + ) new_db_beatmap = await Beatmap.from_resp_no_save(session, beatmap) existing_beatmap = await session.get(Beatmap, change.beatmap_id) if existing_beatmap: diff --git a/app/service/database_cleanup_service.py b/app/service/database_cleanup_service.py index c8ffd0f..6d80819 100644 --- a/app/service/database_cleanup_service.py +++ b/app/service/database_cleanup_service.py @@ -46,13 +46,13 @@ class DatabaseCleanupService: await db.commit() if deleted_count > 0: - logger.debug(f"[Cleanup Service] Cleaned up {deleted_count} expired email verification codes") + logger.debug(f"Cleaned up {deleted_count} expired email verification codes") return deleted_count except Exception as e: await db.rollback() - logger.error(f"[Cleanup Service] Error cleaning expired verification codes: {e!s}") + logger.error(f"Error cleaning expired verification codes: {e!s}") return 0 @staticmethod @@ -85,13 +85,13 @@ class 
DatabaseCleanupService: await db.commit() if deleted_count > 0: - logger.debug(f"[Cleanup Service] Cleaned up {deleted_count} expired login sessions") + logger.debug(f"Cleaned up {deleted_count} expired login sessions") return deleted_count except Exception as e: await db.rollback() - logger.error(f"[Cleanup Service] Error cleaning expired login sessions: {e!s}") + logger.error(f"Error cleaning expired login sessions: {e!s}") return 0 @staticmethod @@ -126,15 +126,13 @@ class DatabaseCleanupService: await db.commit() if deleted_count > 0: - logger.debug( - f"[Cleanup Service] Cleaned up {deleted_count} used verification codes older than {days_old} days" - ) + logger.debug(f"Cleaned up {deleted_count} used verification codes older than {days_old} days") return deleted_count except Exception as e: await db.rollback() - logger.error(f"[Cleanup Service] Error cleaning old used verification codes: {e!s}") + logger.error(f"Error cleaning old used verification codes: {e!s}") return 0 @staticmethod @@ -169,16 +167,13 @@ class DatabaseCleanupService: await db.commit() if deleted_count > 0: - logger.debug( - f"[Cleanup Service] Cleaned up {deleted_count} unverified " - f"login sessions older than {hours_old} hour(s)" - ) + logger.debug(f"Cleaned up {deleted_count} unverified login sessions older than {hours_old} hour(s)") return deleted_count except Exception as e: await db.rollback() - logger.error(f"[Cleanup Service] Error cleaning unverified login sessions: {e!s}") + logger.error(f"Error cleaning unverified login sessions: {e!s}") return 0 @staticmethod @@ -206,13 +201,13 @@ class DatabaseCleanupService: await db.commit() if deleted_count > 0: - logger.debug(f"[Cleanup Service] Cleaned up {deleted_count} outdated verified sessions") + logger.debug(f"Cleaned up {deleted_count} outdated verified sessions") return deleted_count except Exception as e: await db.rollback() - logger.error(f"[Cleanup Service] Error cleaning outdated verified sessions: {e!s}") + 
logger.error(f"Error cleaning outdated verified sessions: {e!s}") return 0 @staticmethod @@ -243,13 +238,13 @@ class DatabaseCleanupService: await db.commit() if deleted_count > 0: - logger.debug(f"[Cleanup Service] Cleaned up {deleted_count} expired trusted devices") + logger.debug(f"Cleaned up {deleted_count} expired trusted devices") return deleted_count except Exception as e: await db.rollback() - logger.error(f"[Cleanup Service] Error cleaning expired trusted devices: {e!s}") + logger.error(f"Error cleaning expired trusted devices: {e!s}") return 0 @staticmethod @@ -278,13 +273,13 @@ class DatabaseCleanupService: await db.commit() if deleted_count > 0: - logger.debug(f"[Cleanup Service] Cleaned up {deleted_count} expired OAuth tokens") + logger.debug(f"Cleaned up {deleted_count} expired OAuth tokens") return deleted_count except Exception as e: await db.rollback() - logger.error(f"[Cleanup Service] Error cleaning expired OAuth tokens: {e!s}") + logger.error(f"Error cleaning expired OAuth tokens: {e!s}") return 0 @staticmethod @@ -323,9 +318,7 @@ class DatabaseCleanupService: total_cleaned = sum(results.values()) if total_cleaned > 0: - logger.debug( - f"[Cleanup Service] Full cleanup completed, total cleaned: {total_cleaned} records - {results}" - ) + logger.debug(f"Full cleanup completed, total cleaned: {total_cleaned} records - {results}") return results @@ -421,7 +414,7 @@ class DatabaseCleanupService: } except Exception as e: - logger.error(f"[Cleanup Service] Error getting cleanup statistics: {e!s}") + logger.error(f"Error getting cleanup statistics: {e!s}") return { "expired_verification_codes": 0, "expired_login_sessions": 0, diff --git a/app/service/email_queue.py b/app/service/email_queue.py index 7a07291..be946fa 100644 --- a/app/service/email_queue.py +++ b/app/service/email_queue.py @@ -221,11 +221,6 @@ class EmailQueue: 是否发送成功 """ try: - # 如果邮件发送功能被禁用,则只记录日志 - if not getattr(settings, "enable_email_sending", True): - logger.info(f"[Mock Email] 
Would send to {email_data.get('to_email')}: {email_data.get('subject')}") - return True - # 创建邮件 msg = MIMEMultipart("alternative") msg["From"] = f"{self.from_name} <{self.from_email}>" diff --git a/app/service/email_service.py b/app/service/email_service.py index 73c4b7c..8e8314f 100644 --- a/app/service/email_service.py +++ b/app/service/email_service.py @@ -147,11 +147,11 @@ class EmailService: server.send_message(msg) - logger.info(f"[Email Verification] Successfully sent verification code to {email}") + logger.info(f"Successfully sent verification code to {email}") return True except Exception as e: - logger.error(f"[Email Verification] Failed to send email: {e}") + logger.error(f"Failed to send email: {e}") return False diff --git a/app/service/password_reset_service.py b/app/service/password_reset_service.py index b822329..5d831d6 100644 --- a/app/service/password_reset_service.py +++ b/app/service/password_reset_service.py @@ -105,13 +105,13 @@ class PasswordResetService: email_sent = await self.send_password_reset_email(email=email, code=reset_code, username=user.username) if email_sent: - logger.info(f"[Password Reset] Sent reset code to user {user.id} ({email})") + logger.info(f"Sent reset code to user {user.id} ({email})") return True, "密码重置邮件已发送,请查收邮箱" else: # 邮件发送失败,清理Redis中的数据 await redis.delete(reset_code_key) await redis.delete(rate_limit_key) - logger.warning(f"[Password Reset] Email sending failed, cleaned up Redis data for {email}") + logger.warning(f"Email sending failed, cleaned up Redis data for {email}") return False, "邮件发送失败,请稍后重试" except Exception: @@ -121,7 +121,7 @@ class PasswordResetService: await redis.delete(rate_limit_key) except Exception: pass - logger.exception("[Password Reset] Redis operation failed") + logger.exception("Redis operation failed") return False, "服务暂时不可用,请稍后重试" async def send_password_reset_email(self, email: str, code: str, username: str) -> bool: @@ -269,11 +269,11 @@ class PasswordResetService: 
metadata=metadata, ) - logger.info(f"[Password Reset] Enqueued reset code email to {email}") + logger.info(f"Enqueued reset code email to {email}") return True except Exception as e: - logger.error(f"[Password Reset] Failed to enqueue email: {e}") + logger.error(f"Failed to enqueue email: {e}") return False async def reset_password( @@ -366,7 +366,7 @@ class PasswordResetService: await redis.setex(reset_code_key, 300, json.dumps(reset_data)) # 保留5分钟用于日志记录 logger.info( - f"[Password Reset] User {user_id} ({email}) successfully reset password from IP {ip_address}," + f"User {user_id} ({email}) successfully reset password from IP {ip_address}," f" invalidated {tokens_deleted} tokens" ) return True, "密码重置成功,所有设备已被登出" @@ -374,7 +374,7 @@ class PasswordResetService: except Exception as e: # 不要在异常处理中访问user.id,可能触发数据库操作 user_id = reset_data.get("user_id", "未知") - logger.error(f"[Password Reset] Failed to reset password for user {user_id}: {e}") + logger.error(f"Failed to reset password for user {user_id}: {e}") await session.rollback() # 数据库回滚时,需要恢复Redis中的验证码状态 @@ -401,14 +401,14 @@ class PasswordResetService: remaining_ttl, json.dumps(original_reset_data), ) - logger.info(f"[Password Reset] Restored Redis state after database rollback for {email}") + logger.info(f"Restored Redis state after database rollback for {email}") else: # 如果已经过期,直接删除 await redis.delete(reset_code_key) - logger.info(f"[Password Reset] Removed expired reset code after database rollback for {email}") + logger.info(f"Removed expired reset code after database rollback for {email}") except Exception as redis_error: - logger.error(f"[Password Reset] Failed to restore Redis state after rollback: {redis_error}") + logger.error(f"Failed to restore Redis state after rollback: {redis_error}") return False, "密码重置失败,请稍后重试" @@ -428,7 +428,7 @@ class PasswordResetService: ttl = await redis.ttl(rate_limit_key) return 1 if ttl > 0 else 0 except Exception as e: - logger.error(f"[Password Reset] Failed to get 
attempts count: {e}") + logger.error(f"Failed to get attempts count: {e}") return 0 diff --git a/app/service/subscribers/chat.py b/app/service/subscribers/chat.py index 060ac3f..9512f9c 100644 --- a/app/service/subscribers/chat.py +++ b/app/service/subscribers/chat.py @@ -2,7 +2,7 @@ from __future__ import annotations from typing import TYPE_CHECKING -from app.log import logger +from app.log import log from app.models.notification import NotificationDetails from .base import RedisSubscriber @@ -17,6 +17,8 @@ JOIN_CHANNEL = "chat:room:joined" EXIT_CHANNEL = "chat:room:left" ON_NOTIFICATION = "chat:notification" +logger = log("Chat") + class ChatSubscriber(RedisSubscriber): def __init__(self): @@ -49,7 +51,7 @@ class ChatSubscriber(RedisSubscriber): try: detail = TypeAdapter(NotificationDetails).validate_json(s) except ValueError: - logger.exception("") + logger.exception("Failed to parse notification detail") return except Exception: logger.exception("Failed to parse notification detail") diff --git a/app/service/verification_service.py b/app/service/verification_service.py index 778dc02..d1f2ee4 100644 --- a/app/service/verification_service.py +++ b/app/service/verification_service.py @@ -180,7 +180,7 @@ This email was sent automatically, please do not reply. return True except Exception as e: - logger.error(f"[Email Verification] Failed to enqueue email: {e}") + logger.error(f"Failed to enqueue email: {e}") return False @staticmethod @@ -237,7 +237,7 @@ This email was sent automatically, please do not reply. str(verification.id) if verification.id else "0", ) - logger.info(f"[Email Verification] Created verification code for user {user_id}: {code}") + logger.info(f"Created verification code for user {user_id}: {code}") return verification, code @staticmethod @@ -254,11 +254,11 @@ This email was sent automatically, please do not reply. 
try: # 检查是否启用邮件验证功能 if not settings.enable_email_verification: - logger.debug(f"[Email Verification] Email verification is disabled, skipping for user {user_id}") + logger.debug(f"Email verification is disabled, skipping for user {user_id}") return True # 返回成功,但不执行验证流程 # 检测客户端信息 - logger.info(f"[Email Verification] Detected client for user {user_id}: {user_agent}") + logger.info(f"Detected client for user {user_id}: {user_agent}") # 创建验证记录 ( @@ -272,16 +272,14 @@ This email was sent automatically, please do not reply. success = await EmailVerificationService.send_verification_email_via_queue(email, code, username, user_id) if success: - logger.info( - f"[Email Verification] Successfully enqueued verification email to {email} (user: {username})" - ) + logger.info(f"Successfully enqueued verification email to {email} (user: {username})") return True else: - logger.error(f"[Email Verification] Failed to enqueue verification email: {email} (user: {username})") + logger.error(f"Failed to enqueue verification email: {email} (user: {username})") return False except Exception as e: - logger.error(f"[Email Verification] Exception during sending verification email: {e}") + logger.error(f"Exception during sending verification email: {e}") return False @staticmethod @@ -299,7 +297,7 @@ This email was sent automatically, please do not reply. try: # 检查是否启用邮件验证功能 if not settings.enable_email_verification: - logger.debug(f"[Email Verification] Email verification is disabled, auto-approving for user {user_id}") + logger.debug(f"Email verification is disabled, auto-approving for user {user_id}") return True, "验证成功(邮件验证功能已禁用)" # 先从 Redis 检查 @@ -331,11 +329,11 @@ This email was sent automatically, please do not reply. 
# 删除 Redis 记录 await redis.delete(f"email_verification:{user_id}:{code}") - logger.info(f"[Email Verification] User {user_id} verification code verified successfully") + logger.info(f"User {user_id} verification code verified successfully") return True, "验证成功" except Exception as e: - logger.error(f"[Email Verification] Exception during verification code validation: {e}") + logger.error(f"Exception during verification code validation: {e}") return False, "验证过程中发生错误" @staticmethod @@ -354,7 +352,7 @@ This email was sent automatically, please do not reply. _ = user_agent # 检查是否启用邮件验证功能 if not settings.enable_email_verification: - logger.debug(f"[Email Verification] Email verification is disabled, skipping resend for user {user_id}") + logger.debug(f"Email verification is disabled, skipping resend for user {user_id}") return True, "验证码已发送(邮件验证功能已禁用)" # 检查重发频率限制(60秒内只能发送一次) @@ -376,7 +374,7 @@ This email was sent automatically, please do not reply. return False, "重新发送失败,请稍后再试" except Exception as e: - logger.error(f"[Email Verification] Exception during resending verification code: {e}") + logger.error(f"Exception during resending verification code: {e}") return False, "重新发送过程中发生错误" diff --git a/app/tasks/beatmapset_update.py b/app/tasks/beatmapset_update.py index 4e1492e..e04e860 100644 --- a/app/tasks/beatmapset_update.py +++ b/app/tasks/beatmapset_update.py @@ -3,7 +3,7 @@ from __future__ import annotations from datetime import datetime, timedelta from app.dependencies.scheduler import get_scheduler -from app.service.beatmapset_update_service import service +from app.service.beatmapset_update_service import get_beatmapset_update_service from app.utils import bg_tasks SCHEDULER_INTERVAL_MINUTES = 2 @@ -16,6 +16,6 @@ SCHEDULER_INTERVAL_MINUTES = 2 next_run_time=datetime.now() + timedelta(minutes=1), ) async def beatmapset_update_job(): - if service is not None: - bg_tasks.add_task(service.add_missing_beatmapsets) - await service._update_beatmaps() + service = 
get_beatmapset_update_service() + bg_tasks.add_task(service.add_missing_beatmapsets) + await service._update_beatmaps() diff --git a/app/tasks/daily_challenge.py b/app/tasks/daily_challenge.py index e8a7fa3..3057fcb 100644 --- a/app/tasks/daily_challenge.py +++ b/app/tasks/daily_challenge.py @@ -80,9 +80,7 @@ async def daily_challenge_job(): allowed_mods = await redis.hget(key, "allowed_mods") # pyright: ignore[reportGeneralTypeIssues] if beatmap is None or ruleset_id is None: - logger.warning( - f"[DailyChallenge] Missing required data for daily challenge {now}. Will try again in 5 minutes." - ) + logger.warning(f"Missing required data for daily challenge {now}. Will try again in 5 minutes.") get_scheduler().add_job( daily_challenge_job, "date", @@ -111,12 +109,12 @@ async def daily_challenge_job(): duration=int((next_day - now - timedelta(minutes=2)).total_seconds() / 60), ) await MetadataHubs.broadcast_call("DailyChallengeUpdated", DailyChallengeInfo(room_id=room.id)) - logger.success(f"[DailyChallenge] Added today's daily challenge: {beatmap=}, {ruleset_id=}, {required_mods=}") + logger.success(f"Added today's daily challenge: {beatmap=}, {ruleset_id=}, {required_mods=}") return except (ValueError, json.JSONDecodeError) as e: - logger.warning(f"[DailyChallenge] Error processing daily challenge data: {e} Will try again in 5 minutes.") + logger.warning(f"Error processing daily challenge data: {e} Will try again in 5 minutes.") except Exception as e: - logger.exception(f"[DailyChallenge] Unexpected error in daily challenge job: {e} Will try again in 5 minutes.") + logger.exception(f"Unexpected error in daily challenge job: {e} Will try again in 5 minutes.") get_scheduler().add_job( daily_challenge_job, "date", diff --git a/main.py b/main.py index d74acfa..ee8dcb4 100644 --- a/main.py +++ b/main.py @@ -8,7 +8,7 @@ from app.database import User from app.dependencies.database import Database, engine, get_redis, redis_client from app.dependencies.fetcher import 
get_fetcher from app.dependencies.scheduler import start_scheduler, stop_scheduler -from app.log import logger +from app.log import logger, system_logger from app.middleware.verify_session import VerifySessionMiddleware from app.models.mods import init_mods, init_ranked_mods from app.router import ( @@ -136,13 +136,9 @@ if newrelic_config_path.exists(): environment = settings.new_relic_environment or ("production" if not settings.debug else "development") newrelic.agent.initialize(newrelic_config_path, environment) - logger.info(f"[NewRelic] Enabled, environment: {environment}") - except ImportError: - logger.warning("[NewRelic] Config file found but 'newrelic' package is not installed") + system_logger("NewRelic").info(f"Enabled, environment: {environment}") except Exception as e: - logger.error(f"[NewRelic] Initialization failed: {e}") -else: - logger.info("[NewRelic] No newrelic.ini config file found, skipping initialization") + system_logger("NewRelic").error(f"Initialization failed: {e}") if settings.sentry_dsn is not None: sentry_sdk.init( @@ -245,10 +241,14 @@ async def http_exception_handler(requst: Request, exc: HTTPException): if settings.secret_key == "your_jwt_secret_here": - logger.warning("jwt_secret_key is unset. Your server is unsafe. Use this command to generate: openssl rand -hex 32") + system_logger("Security").opt(colors=True).warning( + "jwt_secret_key is unset. Your server is unsafe. " + "Use this command to generate: openssl rand -hex 32." + ) if settings.osu_web_client_secret == "your_osu_web_client_secret_here": - logger.warning( - "osu_web_client_secret is unset. Your server is unsafe. Use this command to generate: openssl rand -hex 40" + system_logger("Security").opt(colors=True).warning( + "osu_web_client_secret is unset. Your server is unsafe. " + "Use this command to generate: openssl rand -hex 40." 
) if __name__ == "__main__": From 0d9019c6cc24ed100650bf88a6657f64030d1178 Mon Sep 17 00:00:00 2001 From: MingxuanGame Date: Fri, 3 Oct 2025 13:20:12 +0000 Subject: [PATCH 06/26] refactor(signalr): remove SignalR server & `msgpack_lazer_api` Maybe we can make `msgpack_lazer_api` independent? --- .devcontainer/devcontainer.json | 2 +- app/config.py | 12 - app/database/playlists.py | 11 +- app/database/room.py | 23 - app/exception.py | 10 - app/interfaces/session_verification.py | 73 - app/models/metadata_hub.py | 157 -- app/models/multiplayer_hub.py | 840 ---------- app/models/playlist.py | 22 + app/models/signalr.py | 36 - app/models/spectator_hub.py | 131 -- app/router/__init__.py | 2 - app/router/lio.py | 6 +- app/router/notification/banchobot.py | 361 +---- app/router/v2/room.py | 15 +- app/service/message_queue_processor.py | 4 +- app/service/subscribers/score_processed.py | 85 - app/signalr/__init__.py | 5 - app/signalr/hub/__init__.py | 15 - app/signalr/hub/hub.py | 322 ---- app/signalr/hub/metadata.py | 296 ---- app/signalr/hub/multiplayer.py | 1393 ----------------- app/signalr/hub/spectator.py | 585 ------- app/signalr/packet.py | 492 ------ app/signalr/router.py | 119 -- app/signalr/store.py | 37 - app/signalr/utils.py | 42 - app/tasks/daily_challenge.py | 4 - main.py | 3 - osu_lazer_api.code-workspace | 7 +- packages/msgpack_lazer_api/Cargo.lock | 424 ----- packages/msgpack_lazer_api/Cargo.toml | 14 - .../msgpack_lazer_api/msgpack_lazer_api.pyi | 4 - packages/msgpack_lazer_api/pyproject.toml | 16 - packages/msgpack_lazer_api/src/decode.rs | 312 ---- packages/msgpack_lazer_api/src/encode.rs | 156 -- packages/msgpack_lazer_api/src/lib.rs | 26 - pyproject.toml | 9 +- uv.lock | 493 +++--- 39 files changed, 312 insertions(+), 6252 deletions(-) delete mode 100644 app/exception.py delete mode 100644 app/interfaces/session_verification.py create mode 100644 app/models/playlist.py delete mode 100644 app/service/subscribers/score_processed.py delete mode 100644 
app/signalr/__init__.py delete mode 100644 app/signalr/hub/__init__.py delete mode 100644 app/signalr/hub/hub.py delete mode 100644 app/signalr/hub/metadata.py delete mode 100644 app/signalr/hub/multiplayer.py delete mode 100644 app/signalr/hub/spectator.py delete mode 100644 app/signalr/packet.py delete mode 100644 app/signalr/router.py delete mode 100644 app/signalr/store.py delete mode 100644 app/signalr/utils.py delete mode 100644 packages/msgpack_lazer_api/Cargo.lock delete mode 100644 packages/msgpack_lazer_api/Cargo.toml delete mode 100644 packages/msgpack_lazer_api/msgpack_lazer_api.pyi delete mode 100644 packages/msgpack_lazer_api/pyproject.toml delete mode 100644 packages/msgpack_lazer_api/src/decode.rs delete mode 100644 packages/msgpack_lazer_api/src/encode.rs delete mode 100644 packages/msgpack_lazer_api/src/lib.rs diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index 32a1e09..c29fdab 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -107,6 +107,6 @@ 80, 8080 ], - "postCreateCommand": "uv sync --dev && uv run alembic upgrade head && uv run pre-commit install && cd packages/msgpack_lazer_api && cargo check && cd ../../spectator-server && dotnet restore", + "postCreateCommand": "uv sync --dev && uv run alembic upgrade head && uv run pre-commit install && cd spectator-server && dotnet restore", "remoteUser": "vscode" } diff --git a/app/config.py b/app/config.py index 1761d7d..adb7794 100644 --- a/app/config.py +++ b/app/config.py @@ -266,18 +266,6 @@ STORAGE_SETTINGS='{ else: return "/" - # SignalR 设置 - signalr_negotiate_timeout: Annotated[ - int, - Field(default=30, description="SignalR 协商超时时间(秒)"), - "SignalR 服务器设置", - ] - signalr_ping_interval: Annotated[ - int, - Field(default=15, description="SignalR ping 间隔(秒)"), - "SignalR 服务器设置", - ] - # Fetcher 设置 fetcher_client_id: Annotated[ str, diff --git a/app/database/playlists.py b/app/database/playlists.py index b589e96..4eeb2ef 100644 --- 
a/app/database/playlists.py +++ b/app/database/playlists.py @@ -3,6 +3,7 @@ from typing import TYPE_CHECKING from app.models.model import UTCBaseModel from app.models.mods import APIMod +from app.models.playlist import PlaylistItem from .beatmap import Beatmap, BeatmapResp @@ -21,8 +22,6 @@ from sqlmodel import ( from sqlmodel.ext.asyncio.session import AsyncSession if TYPE_CHECKING: - from app.models.multiplayer_hub import PlaylistItem - from .room import Room @@ -73,7 +72,7 @@ class Playlist(PlaylistBase, table=True): return result.one() @classmethod - async def from_hub(cls, playlist: "PlaylistItem", room_id: int, session: AsyncSession) -> "Playlist": + async def from_model(cls, playlist: PlaylistItem, room_id: int, session: AsyncSession) -> "Playlist": next_id = await cls.get_next_id_for_room(room_id, session=session) return cls( id=next_id, @@ -90,7 +89,7 @@ class Playlist(PlaylistBase, table=True): ) @classmethod - async def update(cls, playlist: "PlaylistItem", room_id: int, session: AsyncSession): + async def update(cls, playlist: PlaylistItem, room_id: int, session: AsyncSession): db_playlist = await session.exec(select(cls).where(cls.id == playlist.id, cls.room_id == room_id)) db_playlist = db_playlist.first() if db_playlist is None: @@ -107,8 +106,8 @@ class Playlist(PlaylistBase, table=True): await session.commit() @classmethod - async def add_to_db(cls, playlist: "PlaylistItem", room_id: int, session: AsyncSession): - db_playlist = await cls.from_hub(playlist, room_id, session) + async def add_to_db(cls, playlist: PlaylistItem, room_id: int, session: AsyncSession): + db_playlist = await cls.from_model(playlist, room_id, session) session.add(db_playlist) await session.commit() await session.refresh(db_playlist) diff --git a/app/database/room.py b/app/database/room.py index 2729e37..add0dbc 100644 --- a/app/database/room.py +++ b/app/database/room.py @@ -1,5 +1,4 @@ from datetime import datetime -from typing import TYPE_CHECKING from 
app.database.item_attempts_count import PlaylistAggregateScore from app.database.room_participated_user import RoomParticipatedUser @@ -32,9 +31,6 @@ from sqlmodel import ( ) from sqlmodel.ext.asyncio.session import AsyncSession -if TYPE_CHECKING: - from app.models.multiplayer_hub import ServerMultiplayerRoom - class RoomBase(SQLModel, UTCBaseModel): name: str = Field(index=True) @@ -163,25 +159,6 @@ class RoomResp(RoomBase): resp.current_user_score = await PlaylistAggregateScore.from_db(room.id, user.id, session) return resp - @classmethod - async def from_hub(cls, server_room: "ServerMultiplayerRoom") -> "RoomResp": - room = server_room.room - resp = cls( - id=room.room_id, - name=room.settings.name, - type=room.settings.match_type, - queue_mode=room.settings.queue_mode, - auto_skip=room.settings.auto_skip, - auto_start_duration=int(room.settings.auto_start_duration.total_seconds()), - status=server_room.status, - category=server_room.category, - # duration = room.settings.duration, - starts_at=server_room.start_at, - participant_count=len(room.users), - channel_id=server_room.room.channel_id or 0, - ) - return resp - class APIUploadedRoom(RoomBase): def to_room(self) -> Room: diff --git a/app/exception.py b/app/exception.py deleted file mode 100644 index 8510bf1..0000000 --- a/app/exception.py +++ /dev/null @@ -1,10 +0,0 @@ -from __future__ import annotations - - -class SignalRException(Exception): - pass - - -class InvokeException(SignalRException): - def __init__(self, message: str) -> None: - self.message = message diff --git a/app/interfaces/session_verification.py b/app/interfaces/session_verification.py deleted file mode 100644 index 2953086..0000000 --- a/app/interfaces/session_verification.py +++ /dev/null @@ -1,73 +0,0 @@ -""" -会话验证接口 - -基于osu-web的SessionVerificationInterface实现 -用于标准化会话验证行为 -""" - -from __future__ import annotations - -from abc import ABC, abstractmethod - - -class SessionVerificationInterface(ABC): - """会话验证接口 - - 
定义了会话验证所需的基本操作,参考osu-web的实现 - """ - - @classmethod - @abstractmethod - async def find_for_verification(cls, session_id: str) -> SessionVerificationInterface | None: - """根据会话ID查找会话用于验证 - - Args: - session_id: 会话ID - - Returns: - 会话实例或None - """ - pass - - @abstractmethod - def get_key(self) -> str: - """获取会话密钥/ID""" - pass - - @abstractmethod - def get_key_for_event(self) -> str: - """获取用于事件广播的会话密钥""" - pass - - @abstractmethod - def get_verification_method(self) -> str | None: - """获取当前验证方法 - - Returns: - 验证方法 ('totp', 'mail') 或 None - """ - pass - - @abstractmethod - def is_verified(self) -> bool: - """检查会话是否已验证""" - pass - - @abstractmethod - async def mark_verified(self) -> None: - """标记会话为已验证""" - pass - - @abstractmethod - async def set_verification_method(self, method: str) -> None: - """设置验证方法 - - Args: - method: 验证方法 ('totp', 'mail') - """ - pass - - @abstractmethod - def user_id(self) -> int | None: - """获取关联的用户ID""" - pass diff --git a/app/models/metadata_hub.py b/app/models/metadata_hub.py index 7188235..e69de29 100644 --- a/app/models/metadata_hub.py +++ b/app/models/metadata_hub.py @@ -1,157 +0,0 @@ -from __future__ import annotations - -from enum import IntEnum -from typing import ClassVar, Literal - -from app.models.signalr import SignalRUnionMessage, UserState - -from pydantic import BaseModel, Field - -TOTAL_SCORE_DISTRIBUTION_BINS = 13 - - -class _UserActivity(SignalRUnionMessage): ... 
- - -class ChoosingBeatmap(_UserActivity): - union_type: ClassVar[Literal[11]] = 11 - - -class _InGame(_UserActivity): - beatmap_id: int - beatmap_display_title: str - ruleset_id: int - ruleset_playing_verb: str - - -class InSoloGame(_InGame): - union_type: ClassVar[Literal[12]] = 12 - - -class InMultiplayerGame(_InGame): - union_type: ClassVar[Literal[23]] = 23 - - -class SpectatingMultiplayerGame(_InGame): - union_type: ClassVar[Literal[24]] = 24 - - -class InPlaylistGame(_InGame): - union_type: ClassVar[Literal[31]] = 31 - - -class PlayingDailyChallenge(_InGame): - union_type: ClassVar[Literal[52]] = 52 - - -class EditingBeatmap(_UserActivity): - union_type: ClassVar[Literal[41]] = 41 - beatmap_id: int - beatmap_display_title: str - - -class TestingBeatmap(EditingBeatmap): - union_type: ClassVar[Literal[43]] = 43 - - -class ModdingBeatmap(EditingBeatmap): - union_type: ClassVar[Literal[42]] = 42 - - -class WatchingReplay(_UserActivity): - union_type: ClassVar[Literal[13]] = 13 - score_id: int - player_name: str - beatmap_id: int - beatmap_display_title: str - - -class SpectatingUser(WatchingReplay): - union_type: ClassVar[Literal[14]] = 14 - - -class SearchingForLobby(_UserActivity): - union_type: ClassVar[Literal[21]] = 21 - - -class InLobby(_UserActivity): - union_type: ClassVar[Literal[22]] = 22 - room_id: int - room_name: str - - -class InDailyChallengeLobby(_UserActivity): - union_type: ClassVar[Literal[51]] = 51 - - -UserActivity = ( - ChoosingBeatmap - | InSoloGame - | WatchingReplay - | SpectatingUser - | SearchingForLobby - | InLobby - | InMultiplayerGame - | SpectatingMultiplayerGame - | InPlaylistGame - | EditingBeatmap - | ModdingBeatmap - | TestingBeatmap - | InDailyChallengeLobby - | PlayingDailyChallenge -) - - -class UserPresence(BaseModel): - activity: UserActivity | None = None - - status: OnlineStatus | None = None - - @property - def pushable(self) -> bool: - return self.status is not None and self.status != OnlineStatus.OFFLINE - - @property 
- def for_push(self) -> "UserPresence | None": - return UserPresence( - activity=self.activity, - status=self.status, - ) - - -class MetadataClientState(UserPresence, UserState): ... - - -class OnlineStatus(IntEnum): - OFFLINE = 0 # 隐身 - DO_NOT_DISTURB = 1 - ONLINE = 2 - - -class DailyChallengeInfo(BaseModel): - room_id: int - - -class MultiplayerPlaylistItemStats(BaseModel): - playlist_item_id: int = 0 - total_score_distribution: list[int] = Field( - default_factory=list, - min_length=TOTAL_SCORE_DISTRIBUTION_BINS, - max_length=TOTAL_SCORE_DISTRIBUTION_BINS, - ) - cumulative_score: int = 0 - last_processed_score_id: int = 0 - - -class MultiplayerRoomStats(BaseModel): - room_id: int - playlist_item_stats: dict[int, MultiplayerPlaylistItemStats] = Field(default_factory=dict) - - -class MultiplayerRoomScoreSetEvent(BaseModel): - room_id: int - playlist_item_id: int - score_id: int - user_id: int - total_score: int - new_rank: int | None = None diff --git a/app/models/multiplayer_hub.py b/app/models/multiplayer_hub.py index 7df4949..e69de29 100644 --- a/app/models/multiplayer_hub.py +++ b/app/models/multiplayer_hub.py @@ -1,840 +0,0 @@ -from __future__ import annotations - -from abc import ABC, abstractmethod -import asyncio -from collections.abc import Awaitable, Callable -from dataclasses import dataclass -from datetime import datetime, timedelta -from enum import IntEnum -from typing import ( - TYPE_CHECKING, - Annotated, - Any, - ClassVar, - Literal, - TypedDict, - cast, - override, -) - -from app.database.beatmap import Beatmap -from app.dependencies.database import with_db -from app.dependencies.fetcher import get_fetcher -from app.exception import InvokeException -from app.utils import utcnow - -from .mods import API_MODS, APIMod -from .room import ( - DownloadState, - MatchType, - MultiplayerRoomState, - MultiplayerUserState, - QueueMode, - RoomCategory, - RoomStatus, -) -from .signalr import ( - SignalRMeta, - SignalRUnionMessage, - UserState, -) - -from 
pydantic import BaseModel, Field -from sqlalchemy import update -from sqlmodel import col - -if TYPE_CHECKING: - from app.database.room import Room - from app.signalr.hub import MultiplayerHub - -HOST_LIMIT = 50 -PER_USER_LIMIT = 3 - - -class MultiplayerClientState(UserState): - room_id: int = 0 - - -class MultiplayerRoomSettings(BaseModel): - name: str = "Unnamed Room" - playlist_item_id: Annotated[int, Field(default=0), SignalRMeta(use_abbr=False)] - password: str = "" - match_type: MatchType = MatchType.HEAD_TO_HEAD - queue_mode: QueueMode = QueueMode.HOST_ONLY - auto_start_duration: timedelta = timedelta(seconds=0) - auto_skip: bool = False - - @property - def auto_start_enabled(self) -> bool: - return self.auto_start_duration != timedelta(seconds=0) - - -class BeatmapAvailability(BaseModel): - state: DownloadState = DownloadState.UNKNOWN - download_progress: float | None = None - - -class _MatchUserState(SignalRUnionMessage): ... - - -class TeamVersusUserState(_MatchUserState): - team_id: int - - union_type: ClassVar[Literal[0]] = 0 - - -MatchUserState = TeamVersusUserState - - -class _MatchRoomState(SignalRUnionMessage): ... 
- - -class MultiplayerTeam(BaseModel): - id: int - name: str - - -class TeamVersusRoomState(_MatchRoomState): - teams: list[MultiplayerTeam] = Field( - default_factory=lambda: [ - MultiplayerTeam(id=0, name="Team Red"), - MultiplayerTeam(id=1, name="Team Blue"), - ] - ) - - union_type: ClassVar[Literal[0]] = 0 - - -MatchRoomState = TeamVersusRoomState - - -class PlaylistItem(BaseModel): - id: Annotated[int, Field(default=0), SignalRMeta(use_abbr=False)] - owner_id: int - beatmap_id: int - beatmap_checksum: str - ruleset_id: int - required_mods: list[APIMod] = Field(default_factory=list) - allowed_mods: list[APIMod] = Field(default_factory=list) - expired: bool - playlist_order: int - played_at: datetime | None = None - star_rating: float - freestyle: bool - - def _validate_mod_for_ruleset(self, mod: APIMod, ruleset_key: int, context: str = "mod") -> None: - typed_ruleset_key = cast(Literal[0, 1, 2, 3], ruleset_key) - - # Check if mod is valid for ruleset - if typed_ruleset_key not in API_MODS or mod["acronym"] not in API_MODS[typed_ruleset_key]: - raise InvokeException(f"{context} {mod['acronym']} is invalid for this ruleset") - - mod_settings = API_MODS[typed_ruleset_key][mod["acronym"]] - - # Check if mod is unplayable in multiplayer - if mod_settings.get("UserPlayable", True) is False: - raise InvokeException(f"{context} {mod['acronym']} is not playable by users") - - if mod_settings.get("ValidForMultiplayer", True) is False: - raise InvokeException(f"{context} {mod['acronym']} is not valid for multiplayer") - - def _check_mod_compatibility(self, mods: list[APIMod], ruleset_key: int) -> None: - from typing import Literal, cast - - typed_ruleset_key = cast(Literal[0, 1, 2, 3], ruleset_key) - - for i, mod1 in enumerate(mods): - mod1_settings = API_MODS[typed_ruleset_key].get(mod1["acronym"]) - if mod1_settings: - incompatible = set(mod1_settings.get("IncompatibleMods", [])) - for mod2 in mods[i + 1 :]: - if mod2["acronym"] in incompatible: - raise 
InvokeException(f"Mods {mod1['acronym']} and {mod2['acronym']} are incompatible") - - def _check_required_allowed_compatibility(self, ruleset_key: int) -> None: - from typing import Literal, cast - - typed_ruleset_key = cast(Literal[0, 1, 2, 3], ruleset_key) - allowed_acronyms = {mod["acronym"] for mod in self.allowed_mods} - - for req_mod in self.required_mods: - req_acronym = req_mod["acronym"] - req_settings = API_MODS[typed_ruleset_key].get(req_acronym) - if req_settings: - incompatible = set(req_settings.get("IncompatibleMods", [])) - conflicting_allowed = allowed_acronyms & incompatible - if conflicting_allowed: - conflict_list = ", ".join(conflicting_allowed) - raise InvokeException(f"Required mod {req_acronym} conflicts with allowed mods: {conflict_list}") - - def validate_playlist_item_mods(self) -> None: - ruleset_key = cast(Literal[0, 1, 2, 3], self.ruleset_id) - - # Validate required mods - for mod in self.required_mods: - self._validate_mod_for_ruleset(mod, ruleset_key, "Required mod") - - # Validate allowed mods - for mod in self.allowed_mods: - self._validate_mod_for_ruleset(mod, ruleset_key, "Allowed mod") - - # Check internal compatibility of required mods - self._check_mod_compatibility(self.required_mods, ruleset_key) - - # Check compatibility between required and allowed mods - self._check_required_allowed_compatibility(ruleset_key) - - def validate_user_mods( - self, - user: "MultiplayerRoomUser", - proposed_mods: list[APIMod], - ) -> tuple[bool, list[APIMod]]: - """ - Validates user mods against playlist item rules and returns valid mods. - Returns (is_valid, valid_mods). 
- """ - from typing import Literal, cast - - ruleset_id = user.ruleset_id if user.ruleset_id is not None else self.ruleset_id - ruleset_key = cast(Literal[0, 1, 2, 3], ruleset_id) - - valid_mods = [] - all_proposed_valid = True - - # Check if mods are valid for the ruleset - for mod in proposed_mods: - if ruleset_key not in API_MODS or mod["acronym"] not in API_MODS[ruleset_key]: - all_proposed_valid = False - continue - valid_mods.append(mod) - - # Check mod compatibility within user mods - incompatible_mods = set() - final_valid_mods = [] - for mod in valid_mods: - if mod["acronym"] in incompatible_mods: - all_proposed_valid = False - continue - setting_mods = API_MODS[ruleset_key].get(mod["acronym"]) - if setting_mods: - incompatible_mods.update(setting_mods["IncompatibleMods"]) - final_valid_mods.append(mod) - - # If not freestyle, check against allowed mods - if not self.freestyle: - allowed_acronyms = {mod["acronym"] for mod in self.allowed_mods} - filtered_valid_mods = [] - for mod in final_valid_mods: - if mod["acronym"] not in allowed_acronyms: - all_proposed_valid = False - else: - filtered_valid_mods.append(mod) - final_valid_mods = filtered_valid_mods - - # Check compatibility with required mods - required_mod_acronyms = {mod["acronym"] for mod in self.required_mods} - all_mod_acronyms = {mod["acronym"] for mod in final_valid_mods} | required_mod_acronyms - - # Check for incompatibility between required and user mods - filtered_valid_mods = [] - for mod in final_valid_mods: - mod_acronym = mod["acronym"] - is_compatible = True - - for other_acronym in all_mod_acronyms: - if other_acronym == mod_acronym: - continue - setting_mods = API_MODS[ruleset_key].get(mod_acronym) - if setting_mods and other_acronym in setting_mods["IncompatibleMods"]: - is_compatible = False - all_proposed_valid = False - break - - if is_compatible: - filtered_valid_mods.append(mod) - - return all_proposed_valid, filtered_valid_mods - - def clone(self) -> "PlaylistItem": - copy = 
self.model_copy() - copy.required_mods = list(self.required_mods) - copy.allowed_mods = list(self.allowed_mods) - copy.expired = False - copy.played_at = None - return copy - - -class _MultiplayerCountdown(SignalRUnionMessage): - id: int = 0 - time_remaining: timedelta - is_exclusive: Annotated[bool, Field(default=True), SignalRMeta(member_ignore=True)] = True - - -class MatchStartCountdown(_MultiplayerCountdown): - union_type: ClassVar[Literal[0]] = 0 - - -class ForceGameplayStartCountdown(_MultiplayerCountdown): - union_type: ClassVar[Literal[1]] = 1 - - -class ServerShuttingDownCountdown(_MultiplayerCountdown): - union_type: ClassVar[Literal[2]] = 2 - - -MultiplayerCountdown = MatchStartCountdown | ForceGameplayStartCountdown | ServerShuttingDownCountdown - - -class MultiplayerRoomUser(BaseModel): - user_id: int - state: MultiplayerUserState = MultiplayerUserState.IDLE - availability: BeatmapAvailability = BeatmapAvailability(state=DownloadState.UNKNOWN, download_progress=None) - mods: list[APIMod] = Field(default_factory=list) - match_state: MatchUserState | None = None - ruleset_id: int | None = None # freestyle - beatmap_id: int | None = None # freestyle - - -class MultiplayerRoom(BaseModel): - room_id: int - state: MultiplayerRoomState - settings: MultiplayerRoomSettings - users: list[MultiplayerRoomUser] = Field(default_factory=list) - host: MultiplayerRoomUser | None = None - match_state: MatchRoomState | None = None - playlist: list[PlaylistItem] = Field(default_factory=list) - active_countdowns: list[MultiplayerCountdown] = Field(default_factory=list) - channel_id: int - - @classmethod - def from_db(cls, room: "Room") -> "MultiplayerRoom": - """ - 将 Room (数据库模型) 转换为 MultiplayerRoom (业务模型) - """ - - # 用户列表 - users = [MultiplayerRoomUser(user_id=room.host_id)] - host_user = MultiplayerRoomUser(user_id=room.host_id) - # playlist 转换 - playlist = [] - if room.playlist: - for item in room.playlist: - playlist.append( - PlaylistItem( - id=item.id, - 
owner_id=item.owner_id, - beatmap_id=item.beatmap_id, - beatmap_checksum=item.beatmap.checksum if item.beatmap else "", - ruleset_id=item.ruleset_id, - required_mods=item.required_mods, - allowed_mods=item.allowed_mods, - expired=item.expired, - playlist_order=item.playlist_order, - played_at=item.played_at, - star_rating=item.beatmap.difficulty_rating if item.beatmap is not None else 0.0, - freestyle=item.freestyle, - ) - ) - - return cls( - room_id=room.id, - state=getattr(room, "state", MultiplayerRoomState.OPEN), - settings=MultiplayerRoomSettings( - name=room.name, - playlist_item_id=playlist[0].id if playlist else 0, - password=getattr(room, "password", ""), - match_type=room.type, - queue_mode=room.queue_mode, - auto_start_duration=timedelta(seconds=room.auto_start_duration), - auto_skip=room.auto_skip, - ), - users=users, - host=host_user, - match_state=None, - playlist=playlist, - active_countdowns=[], - channel_id=room.channel_id or 0, - ) - - -class MultiplayerQueue: - def __init__(self, room: "ServerMultiplayerRoom"): - self.server_room = room - self.current_index = 0 - - @property - def hub(self) -> "MultiplayerHub": - return self.server_room.hub - - @property - def upcoming_items(self): - return sorted( - (item for item in self.room.playlist if not item.expired), - key=lambda i: i.playlist_order, - ) - - @property - def room(self): - return self.server_room.room - - async def update_order(self): - from app.database import Playlist - - match self.room.settings.queue_mode: - case QueueMode.ALL_PLAYERS_ROUND_ROBIN: - ordered_active_items = [] - - is_first_set = True - first_set_order_by_user_id = {} - - active_items = [item for item in self.room.playlist if not item.expired] - active_items.sort(key=lambda x: x.id) - - user_item_groups = {} - for item in active_items: - if item.owner_id not in user_item_groups: - user_item_groups[item.owner_id] = [] - user_item_groups[item.owner_id].append(item) - - max_items = max((len(items) for items in 
user_item_groups.values()), default=0) - - for i in range(max_items): - current_set = [] - for user_id, items in user_item_groups.items(): - if i < len(items): - current_set.append(items[i]) - - if is_first_set: - current_set.sort(key=lambda item: (item.playlist_order, item.id)) - ordered_active_items.extend(current_set) - first_set_order_by_user_id = { - item.owner_id: idx for idx, item in enumerate(ordered_active_items) - } - else: - current_set.sort(key=lambda item: first_set_order_by_user_id.get(item.owner_id, 0)) - ordered_active_items.extend(current_set) - - is_first_set = False - case _: - ordered_active_items = sorted( - (item for item in self.room.playlist if not item.expired), - key=lambda x: x.id, - ) - async with with_db() as session: - for idx, item in enumerate(ordered_active_items): - if item.playlist_order == idx: - continue - item.playlist_order = idx - await Playlist.update(item, self.room.room_id, session) - await self.hub.playlist_changed(self.server_room, item, beatmap_changed=False) - - async def update_current_item(self): - upcoming_items = self.upcoming_items - if upcoming_items: - # 优先选择未过期的项目 - next_item = upcoming_items[0] - else: - # 如果所有项目都过期了,选择最近添加的项目(played_at 为 None 或最新的) - # 优先选择 expired=False 的项目,然后是 played_at 最晚的 - next_item = max( - self.room.playlist, - key=lambda i: (not i.expired, i.played_at or datetime.min), - ) - self.current_index = self.room.playlist.index(next_item) - last_id = self.room.settings.playlist_item_id - self.room.settings.playlist_item_id = next_item.id - if last_id != next_item.id: - await self.hub.setting_changed(self.server_room, True) - - async def add_item(self, item: PlaylistItem, user: MultiplayerRoomUser): - from app.database import Playlist - - is_host = self.room.host and self.room.host.user_id == user.user_id - if self.room.settings.queue_mode == QueueMode.HOST_ONLY and not is_host: - raise InvokeException("You are not the host") - - limit = HOST_LIMIT if is_host else PER_USER_LIMIT - if len([True 
for u in self.room.playlist if u.owner_id == user.user_id and not u.expired]) >= limit: - raise InvokeException(f"You can only have {limit} items in the queue") - - if item.freestyle and len(item.allowed_mods) > 0: - raise InvokeException("Freestyle items cannot have allowed mods") - - async with with_db() as session: - fetcher = await get_fetcher() - async with session: - beatmap = await Beatmap.get_or_fetch(session, fetcher, bid=item.beatmap_id) - if beatmap is None: - raise InvokeException("Beatmap not found") - if item.beatmap_checksum != beatmap.checksum: - raise InvokeException("Checksum mismatch") - - item.validate_playlist_item_mods() - item.owner_id = user.user_id - item.star_rating = beatmap.difficulty_rating - await Playlist.add_to_db(item, self.room.room_id, session) - self.room.playlist.append(item) - await self.hub.playlist_added(self.server_room, item) - await self.update_order() - await self.update_current_item() - - async def edit_item(self, item: PlaylistItem, user: MultiplayerRoomUser): - from app.database import Playlist - - if item.freestyle and len(item.allowed_mods) > 0: - raise InvokeException("Freestyle items cannot have allowed mods") - - async with with_db() as session: - fetcher = await get_fetcher() - async with session: - beatmap = await Beatmap.get_or_fetch(session, fetcher, bid=item.beatmap_id) - if item.beatmap_checksum != beatmap.checksum: - raise InvokeException("Checksum mismatch") - - existing_item = next((i for i in self.room.playlist if i.id == item.id), None) - if existing_item is None: - raise InvokeException("Attempted to change an item that doesn't exist") - - if existing_item.owner_id != user.user_id and self.room.host != user: - raise InvokeException("Attempted to change an item which is not owned by the user") - - if existing_item.expired: - raise InvokeException("Attempted to change an item which has already been played") - - item.validate_playlist_item_mods() - item.owner_id = user.user_id - item.star_rating = 
float(beatmap.difficulty_rating) - item.playlist_order = existing_item.playlist_order - - await Playlist.update(item, self.room.room_id, session) - - # Update item in playlist - for idx, playlist_item in enumerate(self.room.playlist): - if playlist_item.id == item.id: - self.room.playlist[idx] = item - break - - await self.hub.playlist_changed( - self.server_room, - item, - beatmap_changed=item.beatmap_checksum != existing_item.beatmap_checksum, - ) - - async def remove_item(self, playlist_item_id: int, user: MultiplayerRoomUser): - from app.database import Playlist - - item = next( - (i for i in self.room.playlist if i.id == playlist_item_id), - None, - ) - - if item is None: - raise InvokeException("Item does not exist in the room") - - # Check if it's the only item and current item - if item == self.current_item: - upcoming_items = [i for i in self.room.playlist if not i.expired] - if len(upcoming_items) == 1: - raise InvokeException("The only item in the room cannot be removed") - - if item.owner_id != user.user_id and self.room.host != user: - raise InvokeException("Attempted to remove an item which is not owned by the user") - - if item.expired: - raise InvokeException("Attempted to remove an item which has already been played") - - async with with_db() as session: - await Playlist.delete_item(item.id, self.room.room_id, session) - - found_item = next((i for i in self.room.playlist if i.id == item.id), None) - if found_item: - self.room.playlist.remove(found_item) - self.current_index = self.room.playlist.index(self.upcoming_items[0]) - - await self.update_order() - await self.update_current_item() - await self.hub.playlist_removed(self.server_room, item.id) - - async def finish_current_item(self): - from app.database import Playlist - - async with with_db() as session: - played_at = utcnow() - await session.execute( - update(Playlist) - .where( - col(Playlist.id) == self.current_item.id, - col(Playlist.room_id) == self.room.room_id, - ) - 
.values(expired=True, played_at=played_at) - ) - self.room.playlist[self.current_index].expired = True - self.room.playlist[self.current_index].played_at = played_at - await self.hub.playlist_changed(self.server_room, self.current_item, True) - await self.update_order() - if self.room.settings.queue_mode == QueueMode.HOST_ONLY and all( - playitem.expired for playitem in self.room.playlist - ): - assert self.room.host - await self.add_item(self.current_item.clone(), self.room.host) - await self.update_current_item() - - async def update_queue_mode(self): - if self.room.settings.queue_mode == QueueMode.HOST_ONLY and all( - playitem.expired for playitem in self.room.playlist - ): - assert self.room.host - await self.add_item(self.current_item.clone(), self.room.host) - await self.update_order() - await self.update_current_item() - - @property - def current_item(self): - return self.room.playlist[self.current_index] - - -@dataclass -class CountdownInfo: - countdown: MultiplayerCountdown - duration: timedelta - task: asyncio.Task | None = None - - def __init__(self, countdown: MultiplayerCountdown): - self.countdown = countdown - self.duration = ( - countdown.time_remaining if countdown.time_remaining > timedelta(seconds=0) else timedelta(seconds=0) - ) - - -class _MatchRequest(SignalRUnionMessage): ... - - -class ChangeTeamRequest(_MatchRequest): - union_type: ClassVar[Literal[0]] = 0 - team_id: int - - -class StartMatchCountdownRequest(_MatchRequest): - union_type: ClassVar[Literal[1]] = 1 - duration: timedelta - - -class StopCountdownRequest(_MatchRequest): - union_type: ClassVar[Literal[2]] = 2 - id: int - - -MatchRequest = ChangeTeamRequest | StartMatchCountdownRequest | StopCountdownRequest - - -class MatchTypeHandler(ABC): - def __init__(self, room: "ServerMultiplayerRoom"): - self.room = room - self.hub = room.hub - - @abstractmethod - async def handle_join(self, user: MultiplayerRoomUser): ... 
- - @abstractmethod - async def handle_request(self, user: MultiplayerRoomUser, request: MatchRequest): ... - - @abstractmethod - async def handle_leave(self, user: MultiplayerRoomUser): ... - - @abstractmethod - def get_details(self) -> MatchStartedEventDetail: ... - - -class HeadToHeadHandler(MatchTypeHandler): - @override - async def handle_join(self, user: MultiplayerRoomUser): - if user.match_state is not None: - user.match_state = None - await self.hub.change_user_match_state(self.room, user) - - @override - async def handle_request(self, user: MultiplayerRoomUser, request: MatchRequest): ... - - @override - async def handle_leave(self, user: MultiplayerRoomUser): ... - - @override - def get_details(self) -> MatchStartedEventDetail: - detail = MatchStartedEventDetail(room_type="head_to_head", team=None) - return detail - - -class TeamVersusHandler(MatchTypeHandler): - @override - def __init__(self, room: "ServerMultiplayerRoom"): - super().__init__(room) - self.state = TeamVersusRoomState() - room.room.match_state = self.state - task = asyncio.create_task(self.hub.change_room_match_state(self.room)) - self.hub.tasks.add(task) - task.add_done_callback(self.hub.tasks.discard) - - def _get_best_available_team(self) -> int: - for team in self.state.teams: - if all( - ( - user.match_state is None - or not isinstance(user.match_state, TeamVersusUserState) - or user.match_state.team_id != team.id - ) - for user in self.room.room.users - ): - return team.id - - from collections import defaultdict - - team_counts = defaultdict(int) - for user in self.room.room.users: - if user.match_state is not None and isinstance(user.match_state, TeamVersusUserState): - team_counts[user.match_state.team_id] += 1 - - if team_counts: - min_count = min(team_counts.values()) - for team_id, count in team_counts.items(): - if count == min_count: - return team_id - return self.state.teams[0].id if self.state.teams else 0 - - @override - async def handle_join(self, user: 
MultiplayerRoomUser): - best_team_id = self._get_best_available_team() - user.match_state = TeamVersusUserState(team_id=best_team_id) - await self.hub.change_user_match_state(self.room, user) - - @override - async def handle_request(self, user: MultiplayerRoomUser, request: MatchRequest): - if not isinstance(request, ChangeTeamRequest): - return - - if request.team_id not in [team.id for team in self.state.teams]: - raise InvokeException("Invalid team ID") - - user.match_state = TeamVersusUserState(team_id=request.team_id) - await self.hub.change_user_match_state(self.room, user) - - @override - async def handle_leave(self, user: MultiplayerRoomUser): ... - - @override - def get_details(self) -> MatchStartedEventDetail: - teams: dict[int, Literal["blue", "red"]] = {} - for user in self.room.room.users: - if user.match_state is not None and isinstance(user.match_state, TeamVersusUserState): - teams[user.user_id] = "blue" if user.match_state.team_id == 1 else "red" - detail = MatchStartedEventDetail(room_type="team_versus", team=teams) - return detail - - -MATCH_TYPE_HANDLERS = { - MatchType.HEAD_TO_HEAD: HeadToHeadHandler, - MatchType.TEAM_VERSUS: TeamVersusHandler, -} - - -@dataclass -class ServerMultiplayerRoom: - room: MultiplayerRoom - category: RoomCategory - status: RoomStatus - start_at: datetime - hub: "MultiplayerHub" - match_type_handler: MatchTypeHandler - queue: MultiplayerQueue - _next_countdown_id: int - _countdown_id_lock: asyncio.Lock - _tracked_countdown: dict[int, CountdownInfo] - - def __init__( - self, - room: MultiplayerRoom, - category: RoomCategory, - start_at: datetime, - hub: "MultiplayerHub", - ): - self.room = room - self.category = category - self.status = RoomStatus.IDLE - self.start_at = start_at - self.hub = hub - self.queue = MultiplayerQueue(self) - self._next_countdown_id = 0 - self._countdown_id_lock = asyncio.Lock() - self._tracked_countdown = {} - - async def set_handler(self): - self.match_type_handler = 
MATCH_TYPE_HANDLERS[self.room.settings.match_type](self) - for i in self.room.users: - await self.match_type_handler.handle_join(i) - - async def get_next_countdown_id(self) -> int: - async with self._countdown_id_lock: - self._next_countdown_id += 1 - return self._next_countdown_id - - async def start_countdown( - self, - countdown: MultiplayerCountdown, - on_complete: Callable[["ServerMultiplayerRoom"], Awaitable[Any]] | None = None, - ): - async def _countdown_task(self: "ServerMultiplayerRoom"): - await asyncio.sleep(info.duration.total_seconds()) - if on_complete is not None: - await on_complete(self) - await self.stop_countdown(countdown) - - if countdown.is_exclusive: - await self.stop_all_countdowns(countdown.__class__) - countdown.id = await self.get_next_countdown_id() - info = CountdownInfo(countdown) - self.room.active_countdowns.append(info.countdown) - self._tracked_countdown[countdown.id] = info - await self.hub.send_match_event(self, CountdownStartedEvent(countdown=info.countdown)) - info.task = asyncio.create_task(_countdown_task(self)) - - async def stop_countdown(self, countdown: MultiplayerCountdown): - info = self._tracked_countdown.get(countdown.id) - if info is None: - return - del self._tracked_countdown[countdown.id] - self.room.active_countdowns.remove(countdown) - await self.hub.send_match_event(self, CountdownStoppedEvent(id=countdown.id)) - if info.task is not None and not info.task.done(): - info.task.cancel() - - async def stop_all_countdowns(self, typ: type[MultiplayerCountdown]): - for countdown in list(self._tracked_countdown.values()): - if isinstance(countdown.countdown, typ): - await self.stop_countdown(countdown.countdown) - - -class _MatchServerEvent(SignalRUnionMessage): ... 
- - -class CountdownStartedEvent(_MatchServerEvent): - countdown: MultiplayerCountdown - - union_type: ClassVar[Literal[0]] = 0 - - -class CountdownStoppedEvent(_MatchServerEvent): - id: int - - union_type: ClassVar[Literal[1]] = 1 - - -MatchServerEvent = CountdownStartedEvent | CountdownStoppedEvent - - -class GameplayAbortReason(IntEnum): - LOAD_TOOK_TOO_LONG = 0 - HOST_ABORTED = 1 - - -class MatchStartedEventDetail(TypedDict): - room_type: Literal["playlists", "head_to_head", "team_versus"] - team: dict[int, Literal["blue", "red"]] | None diff --git a/app/models/playlist.py b/app/models/playlist.py new file mode 100644 index 0000000..d938f4b --- /dev/null +++ b/app/models/playlist.py @@ -0,0 +1,22 @@ +from __future__ import annotations + +from datetime import datetime + +from app.models.mods import APIMod + +from pydantic import BaseModel, Field + + +class PlaylistItem(BaseModel): + id: int = Field(default=0, ge=-1) + owner_id: int + beatmap_id: int + beatmap_checksum: str = "" + ruleset_id: int = 0 + required_mods: list[APIMod] = Field(default_factory=list) + allowed_mods: list[APIMod] = Field(default_factory=list) + expired: bool = False + playlist_order: int = 0 + played_at: datetime | None = None + star_rating: float = 0.0 + freestyle: bool = False diff --git a/app/models/signalr.py b/app/models/signalr.py index 8a60b26..8b13789 100644 --- a/app/models/signalr.py +++ b/app/models/signalr.py @@ -1,37 +1 @@ -from __future__ import annotations -from dataclasses import dataclass -from typing import ClassVar - -from pydantic import ( - BaseModel, - Field, -) - - -@dataclass -class SignalRMeta: - member_ignore: bool = False # implement of IgnoreMember (msgpack) attribute - json_ignore: bool = False # implement of JsonIgnore (json) attribute - use_abbr: bool = True - - -class SignalRUnionMessage(BaseModel): - union_type: ClassVar[int] - - -class Transport(BaseModel): - transport: str - transfer_formats: list[str] = Field(default_factory=lambda: ["Binary", "Text"], 
alias="transferFormats") - - -class NegotiateResponse(BaseModel): - connectionId: str - connectionToken: str - negotiateVersion: int = 1 - availableTransports: list[Transport] - - -class UserState(BaseModel): - connection_id: str - connection_token: str diff --git a/app/models/spectator_hub.py b/app/models/spectator_hub.py index 8a5eb71..e69de29 100644 --- a/app/models/spectator_hub.py +++ b/app/models/spectator_hub.py @@ -1,131 +0,0 @@ -from __future__ import annotations - -import datetime -from enum import IntEnum -from typing import Annotated, Any - -from app.models.beatmap import BeatmapRankStatus -from app.models.mods import APIMod - -from .score import ( - ScoreStatistics, -) -from .signalr import SignalRMeta, UserState - -from pydantic import BaseModel, Field, field_validator - - -class SpectatedUserState(IntEnum): - Idle = 0 - Playing = 1 - Paused = 2 - Passed = 3 - Failed = 4 - Quit = 5 - - -class SpectatorState(BaseModel): - beatmap_id: int | None = None - ruleset_id: int | None = None # 0,1,2,3 - mods: list[APIMod] = Field(default_factory=list) - state: SpectatedUserState - maximum_statistics: ScoreStatistics = Field(default_factory=dict) - - def __eq__(self, other: object) -> bool: - if not isinstance(other, SpectatorState): - return False - return ( - self.beatmap_id == other.beatmap_id - and self.ruleset_id == other.ruleset_id - and self.mods == other.mods - and self.state == other.state - ) - - -class ScoreProcessorStatistics(BaseModel): - base_score: float - maximum_base_score: float - accuracy_judgement_count: int - combo_portion: float - bonus_portion: float - - -class FrameHeader(BaseModel): - total_score: int - accuracy: float - combo: int - max_combo: int - statistics: ScoreStatistics = Field(default_factory=dict) - score_processor_statistics: ScoreProcessorStatistics - received_time: datetime.datetime - mods: list[APIMod] = Field(default_factory=list) - - @field_validator("received_time", mode="before") - @classmethod - def 
validate_timestamp(cls, v: Any) -> datetime.datetime: - if isinstance(v, list): - return v[0] - if isinstance(v, datetime.datetime): - return v - if isinstance(v, int | float): - return datetime.datetime.fromtimestamp(v, tz=datetime.UTC) - if isinstance(v, str): - return datetime.datetime.fromisoformat(v) - raise ValueError(f"Cannot convert {type(v)} to datetime") - - -# class ReplayButtonState(IntEnum): -# NONE = 0 -# LEFT1 = 1 -# RIGHT1 = 2 -# LEFT2 = 4 -# RIGHT2 = 8 -# SMOKE = 16 - - -class LegacyReplayFrame(BaseModel): - time: float # from ReplayFrame,the parent of LegacyReplayFrame - mouse_x: float | None = None - mouse_y: float | None = None - button_state: int - - header: Annotated[FrameHeader | None, Field(default=None), SignalRMeta(member_ignore=True)] - - -class FrameDataBundle(BaseModel): - header: FrameHeader - frames: list[LegacyReplayFrame] - - -# Use for server -class APIUser(BaseModel): - id: int - name: str - - -class ScoreInfo(BaseModel): - mods: list[APIMod] - user: APIUser - ruleset: int - maximum_statistics: ScoreStatistics - id: int | None = None - total_score: int | None = None - accuracy: float | None = None - max_combo: int | None = None - combo: int | None = None - statistics: ScoreStatistics = Field(default_factory=dict) - - -class StoreScore(BaseModel): - score_info: ScoreInfo - replay_frames: list[LegacyReplayFrame] = Field(default_factory=list) - - -class StoreClientState(UserState): - state: SpectatorState | None = None - beatmap_status: BeatmapRankStatus | None = None - checksum: str | None = None - ruleset_id: int | None = None - score_token: int | None = None - watched_user: set[int] = Field(default_factory=set) - score: StoreScore | None = None diff --git a/app/router/__init__.py b/app/router/__init__.py index 4316997..fc5d2a7 100644 --- a/app/router/__init__.py +++ b/app/router/__init__.py @@ -1,6 +1,5 @@ from __future__ import annotations -# from app.signalr import signalr_router as signalr_router from .auth import router as 
auth_router from .fetcher import fetcher_router as fetcher_router from .file import file_router as file_router @@ -25,5 +24,4 @@ __all__ = [ "private_router", "redirect_api_router", "redirect_router", - # "signalr_router", ] diff --git a/app/router/lio.py b/app/router/lio.py index 28d8f35..969b214 100644 --- a/app/router/lio.py +++ b/app/router/lio.py @@ -15,7 +15,7 @@ from app.dependencies.database import Database, Redis from app.dependencies.fetcher import Fetcher from app.dependencies.storage import StorageService from app.log import log -from app.models.multiplayer_hub import PlaylistItem as HubPlaylistItem +from app.models.playlist import PlaylistItem from app.models.room import MatchType, QueueMode, RoomCategory, RoomStatus from app.utils import utcnow @@ -216,7 +216,7 @@ async def _add_playlist_items(db: Database, room_id: int, room_data: dict[str, A # Insert playlist items for item_data in items_raw: - hub_item = HubPlaylistItem( + playlist_item = PlaylistItem( id=-1, # Placeholder, will be assigned by add_to_db owner_id=item_data["owner_id"], ruleset_id=item_data["ruleset_id"], @@ -230,7 +230,7 @@ async def _add_playlist_items(db: Database, room_id: int, room_data: dict[str, A beatmap_checksum=item_data["beatmap_checksum"], star_rating=item_data["star_rating"], ) - await DBPlaylist.add_to_db(hub_item, room_id=room_id, session=db) + await DBPlaylist.add_to_db(playlist_item, room_id=room_id, session=db) async def _add_host_as_participant(db: Database, room_id: int, host_user_id: int) -> None: diff --git a/app/router/notification/banchobot.py b/app/router/notification/banchobot.py index c140650..7b01347 100644 --- a/app/router/notification/banchobot.py +++ b/app/router/notification/banchobot.py @@ -2,7 +2,6 @@ from __future__ import annotations import asyncio from collections.abc import Awaitable, Callable -from datetime import timedelta from math import ceil import random import shlex @@ -10,27 +9,15 @@ import shlex from app.calculator import 
calculate_weighted_pp from app.const import BANCHOBOT_ID from app.database import ChatMessageResp -from app.database.beatmap import Beatmap from app.database.chat import ChannelType, ChatChannel, ChatMessage, MessageType from app.database.score import Score, get_best_id from app.database.statistics import UserStatistics, get_rank from app.database.user import User -from app.dependencies.fetcher import get_fetcher -from app.exception import InvokeException -from app.models.mods import APIMod, get_available_mods, mod_to_save -from app.models.multiplayer_hub import ( - ChangeTeamRequest, - ServerMultiplayerRoom, - StartMatchCountdownRequest, -) -from app.models.room import MatchType, QueueMode, RoomStatus +from app.models.mods import mod_to_save from app.models.score import GameMode -from app.signalr.hub import MultiplayerHubs -from app.signalr.hub.hub import Client from .server import server -from httpx import HTTPError from sqlalchemy.orm import joinedload from sqlmodel import col, func, select from sqlmodel.ext.asyncio.session import AsyncSession @@ -216,352 +203,6 @@ PP: {statistics.pp:.2f} """ -async def _mp_name( - signalr_client: Client, - room: ServerMultiplayerRoom, - args: list[str], - session: AsyncSession, -) -> str: - if len(args) < 1: - return "Usage: !mp name " - - name = args[0] - try: - settings = room.room.settings.model_copy() - settings.name = name - await MultiplayerHubs.ChangeSettings(signalr_client, settings) - return f"Room name has changed to {name}" - except InvokeException as e: - return e.message - - -async def _mp_set( - signalr_client: Client, - room: ServerMultiplayerRoom, - args: list[str], - session: AsyncSession, -) -> str: - if len(args) < 1: - return "Usage: !mp set []" - - teammode = {"0": MatchType.HEAD_TO_HEAD, "2": MatchType.TEAM_VERSUS}.get(args[0]) - if not teammode: - return "Invalid teammode. Use 0 for Head-to-Head or 2 for Team Versus." 
- queuemode = ( - { - "0": QueueMode.HOST_ONLY, - "1": QueueMode.ALL_PLAYERS, - "2": QueueMode.ALL_PLAYERS_ROUND_ROBIN, - }.get(args[1]) - if len(args) >= 2 - else None - ) - try: - settings = room.room.settings.model_copy() - settings.match_type = teammode - if queuemode: - settings.queue_mode = queuemode - await MultiplayerHubs.ChangeSettings(signalr_client, settings) - return f"Room setting 'teammode' has been changed to {teammode.name.lower()}" - except InvokeException as e: - return e.message - - -async def _mp_host( - signalr_client: Client, - room: ServerMultiplayerRoom, - args: list[str], - session: AsyncSession, -) -> str: - if len(args) < 1: - return "Usage: !mp host " - - username = args[0] - user_id = (await session.exec(select(User.id).where(User.username == username))).first() - if not user_id: - return f"User '{username}' not found." - - try: - await MultiplayerHubs.TransferHost(signalr_client, user_id) - return f"User '{username}' has been hosted in the room." - except InvokeException as e: - return e.message - - -async def _mp_start( - signalr_client: Client, - room: ServerMultiplayerRoom, - args: list[str], - session: AsyncSession, -) -> str: - timer = None - if len(args) >= 1 and args[0].isdigit(): - timer = int(args[0]) - - try: - if timer is not None: - await MultiplayerHubs.SendMatchRequest( - signalr_client, - StartMatchCountdownRequest(duration=timedelta(seconds=timer)), - ) - return "" - else: - await MultiplayerHubs.StartMatch(signalr_client) - return "Good luck! Enjoy game!" - except InvokeException as e: - return e.message - - -async def _mp_abort( - signalr_client: Client, - room: ServerMultiplayerRoom, - args: list[str], - session: AsyncSession, -) -> str: - try: - await MultiplayerHubs.AbortMatch(signalr_client) - return "Match aborted." 
- except InvokeException as e: - return e.message - - -async def _mp_team( - signalr_client: Client, - room: ServerMultiplayerRoom, - args: list[str], - session: AsyncSession, -): - if room.room.settings.match_type != MatchType.TEAM_VERSUS: - return "This command is only available in Team Versus mode." - - if len(args) < 2: - return "Usage: !mp team " - - username = args[0] - team = {"red": 0, "blue": 1}.get(args[1]) - if team is None: - return "Invalid team colour. Use 'red' or 'blue'." - - user_id = (await session.exec(select(User.id).where(User.username == username))).first() - if not user_id: - return f"User '{username}' not found." - user_client = MultiplayerHubs.get_client_by_id(str(user_id)) - if not user_client: - return f"User '{username}' is not in the room." - assert room.room.host - if user_client.user_id != signalr_client.user_id and room.room.host.user_id != signalr_client.user_id: - return "You are not allowed to change other users' teams." - - try: - await MultiplayerHubs.SendMatchRequest(user_client, ChangeTeamRequest(team_id=team)) - return "" - except InvokeException as e: - return e.message - - -async def _mp_password( - signalr_client: Client, - room: ServerMultiplayerRoom, - args: list[str], - session: AsyncSession, -) -> str: - password = "" - if len(args) >= 1: - password = args[0] - - try: - settings = room.room.settings.model_copy() - settings.password = password - await MultiplayerHubs.ChangeSettings(signalr_client, settings) - return "Room password has been set." - except InvokeException as e: - return e.message - - -async def _mp_kick( - signalr_client: Client, - room: ServerMultiplayerRoom, - args: list[str], - session: AsyncSession, -) -> str: - if len(args) < 1: - return "Usage: !mp kick " - - username = args[0] - user_id = (await session.exec(select(User.id).where(User.username == username))).first() - if not user_id: - return f"User '{username}' not found." 
- - try: - await MultiplayerHubs.KickUser(signalr_client, user_id) - return f"User '{username}' has been kicked from the room." - except InvokeException as e: - return e.message - - -async def _mp_map( - signalr_client: Client, - room: ServerMultiplayerRoom, - args: list[str], - session: AsyncSession, -) -> str: - if len(args) < 1: - return "Usage: !mp map []" - - if room.status != RoomStatus.IDLE: - return "Cannot change map while the game is running." - - map_id = args[0] - if not map_id.isdigit(): - return "Invalid map ID." - map_id = int(map_id) - playmode = GameMode.parse(args[1].upper()) if len(args) >= 2 else None - if playmode not in ( - GameMode.OSU, - GameMode.TAIKO, - GameMode.FRUITS, - GameMode.MANIA, - None, - ): - return "Invalid playmode." - - try: - beatmap = await Beatmap.get_or_fetch(session, await get_fetcher(), bid=map_id) - if beatmap.mode != GameMode.OSU and playmode and playmode != beatmap.mode: - return f"Cannot convert to {playmode.value}. Original mode is {beatmap.mode.value}." - except HTTPError: - return "Beatmap not found" - - try: - current_item = room.queue.current_item - item = current_item.model_copy(deep=True) - item.owner_id = signalr_client.user_id - item.beatmap_checksum = beatmap.checksum - item.required_mods = [] - item.allowed_mods = [] - item.freestyle = False - item.beatmap_id = map_id - if playmode is not None: - item.ruleset_id = int(playmode) - if item.expired: - item.id = 0 - item.expired = False - item.played_at = None - await MultiplayerHubs.AddPlaylistItem(signalr_client, item) - else: - await MultiplayerHubs.EditPlaylistItem(signalr_client, item) - return "" - except InvokeException as e: - return e.message - - -async def _mp_mods( - signalr_client: Client, - room: ServerMultiplayerRoom, - args: list[str], - session: AsyncSession, -) -> str: - if len(args) < 1: - return "Usage: !mp mods [ ...]" - - if room.status != RoomStatus.IDLE: - return "Cannot change mods while the game is running." 
- - required_mods = [] - allowed_mods = [] - freestyle = False - freemod = False - for arg in args: - arg = arg.upper() - if arg == "NONE": - required_mods.clear() - allowed_mods.clear() - break - elif arg == "FREESTYLE": - freestyle = True - elif arg == "FREEMOD": - freemod = True - elif arg.startswith("+"): - mod = arg.removeprefix("+") - if len(mod) != 2: - return f"Invalid mod: {mod}." - allowed_mods.append(APIMod(acronym=mod)) - else: - if len(arg) != 2: - return f"Invalid mod: {arg}." - required_mods.append(APIMod(acronym=arg)) - - try: - current_item = room.queue.current_item - item = current_item.model_copy(deep=True) - item.owner_id = signalr_client.user_id - item.freestyle = freestyle - if freestyle: - item.allowed_mods = [] - elif freemod: - item.allowed_mods = get_available_mods(current_item.ruleset_id, required_mods) - else: - item.allowed_mods = allowed_mods - item.required_mods = required_mods - if item.expired: - item.id = 0 - item.expired = False - item.played_at = None - await MultiplayerHubs.AddPlaylistItem(signalr_client, item) - else: - await MultiplayerHubs.EditPlaylistItem(signalr_client, item) - return "" - except InvokeException as e: - return e.message - - -_MP_COMMANDS = { - "name": _mp_name, - "set": _mp_set, - "host": _mp_host, - "start": _mp_start, - "abort": _mp_abort, - "map": _mp_map, - "mods": _mp_mods, - "kick": _mp_kick, - "password": _mp_password, - "team": _mp_team, -} -_MP_HELP = """!mp name -!mp set [] -!mp host -!mp start [] -!mp abort -!mp map [] -!mp mods [ ...] 
-!mp kick -!mp password [] -!mp team """ - - -@bot.command("mp") -async def _mp(user: User, args: list[str], session: AsyncSession, channel: ChatChannel): - if not channel.name.startswith("room_"): - return - - room_id = int(channel.name[5:]) - room = MultiplayerHubs.rooms.get(room_id) - if not room: - return - signalr_client = MultiplayerHubs.get_client_by_id(str(user.id)) - if not signalr_client: - return - - if len(args) < 1: - return f"Usage: !mp <{'|'.join(_MP_COMMANDS.keys())}> [args]" - - command = args[0].lower() - if command not in _MP_COMMANDS: - return f"No such command: {command}" - - return await _MP_COMMANDS[command](signalr_client, room, args[1:], session) - - async def _score( user_id: int, session: AsyncSession, diff --git a/app/router/v2/room.py b/app/router/v2/room.py index 21ea109..decf936 100644 --- a/app/router/v2/room.py +++ b/app/router/v2/room.py @@ -16,7 +16,6 @@ from app.dependencies.database import Database, Redis from app.dependencies.user import ClientUser, get_current_user from app.models.room import RoomCategory, RoomStatus from app.service.room import create_playlist_room_from_api -from app.signalr.hub import MultiplayerHubs from app.utils import utcnow from .router import router @@ -391,14 +390,12 @@ async def get_room_events( first_event_id = min(first_event_id, event.id) last_event_id = max(last_event_id, event.id) - if room := MultiplayerHubs.rooms.get(room_id): - current_playlist_item_id = room.queue.current_item.id - room_resp = await RoomResp.from_hub(room) - else: - room = (await db.exec(select(Room).where(Room.id == room_id))).first() - if room is None: - raise HTTPException(404, "Room not found") - room_resp = await RoomResp.from_db(room, db) + room = (await db.exec(select(Room).where(Room.id == room_id))).first() + if room is None: + raise HTTPException(404, "Room not found") + room_resp = await RoomResp.from_db(room, db) + if room.category == RoomCategory.REALTIME and room_resp.current_playlist_item: + 
current_playlist_item_id = room_resp.current_playlist_item.id users = await db.exec(select(User).where(col(User.id).in_(user_ids))) user_resps = [await UserResp.from_db(user, db) for user in users] diff --git a/app/service/message_queue_processor.py b/app/service/message_queue_processor.py index 41ef9bf..199d8ee 100644 --- a/app/service/message_queue_processor.py +++ b/app/service/message_queue_processor.py @@ -217,8 +217,7 @@ class MessageQueueProcessor: ): """通知客户端消息ID已更新""" try: - # 这里我们需要通过 SignalR 发送消息更新通知 - # 但为了避免循环依赖,我们将通过 Redis 发布消息更新事件 + # 通过 Redis 发布消息更新事件,由聊天通知服务分发到客户端 update_event = { "event": "chat.message.update", "data": { @@ -229,7 +228,6 @@ class MessageQueueProcessor: }, } - # 发布到 Redis 频道,让 SignalR 服务处理 await self._redis_exec( self.redis_message.publish, f"chat_updates:{channel_id}", diff --git a/app/service/subscribers/score_processed.py b/app/service/subscribers/score_processed.py deleted file mode 100644 index 6775b97..0000000 --- a/app/service/subscribers/score_processed.py +++ /dev/null @@ -1,85 +0,0 @@ -from __future__ import annotations - -import json -from typing import TYPE_CHECKING - -from app.database import PlaylistBestScore, Score -from app.database.playlist_best_score import get_position -from app.dependencies.database import with_db -from app.models.metadata_hub import MultiplayerRoomScoreSetEvent - -from .base import RedisSubscriber - -from sqlmodel import select - -if TYPE_CHECKING: - from app.signalr.hub import MetadataHub - - -CHANNEL = "osu-channel:score:processed" - - -class ScoreSubscriber(RedisSubscriber): - def __init__(self): - super().__init__() - self.room_subscriber: dict[int, list[int]] = {} - self.metadata_hub: "MetadataHub | None " = None - self.subscribed = False - self.handlers[CHANNEL] = [self._handler] - - async def subscribe_room_score(self, room_id: int, user_id: int): - if room_id not in self.room_subscriber: - await self.subscribe(CHANNEL) - self.start() - self.room_subscriber.setdefault(room_id, 
[]).append(user_id) - - async def unsubscribe_room_score(self, room_id: int, user_id: int): - if room_id in self.room_subscriber: - try: - self.room_subscriber[room_id].remove(user_id) - except ValueError: - pass - if not self.room_subscriber[room_id]: - del self.room_subscriber[room_id] - - async def _notify_room_score_processed(self, score_id: int): - if not self.metadata_hub: - return - async with with_db() as session: - score = await session.get(Score, score_id) - if not score or not score.passed or score.room_id is None or score.playlist_item_id is None: - return - if not self.room_subscriber.get(score.room_id, []): - return - - new_rank = None - user_best = ( - await session.exec( - select(PlaylistBestScore).where( - PlaylistBestScore.user_id == score.user_id, - PlaylistBestScore.room_id == score.room_id, - ) - ) - ).first() - if user_best and user_best.score_id == score_id: - new_rank = await get_position( - user_best.room_id, - user_best.playlist_id, - user_best.score_id, - session, - ) - - event = MultiplayerRoomScoreSetEvent( - room_id=score.room_id, - playlist_item_id=score.playlist_item_id, - score_id=score_id, - user_id=score.user_id, - total_score=score.total_score, - new_rank=new_rank, - ) - await self.metadata_hub.notify_room_score_processed(event) - - async def _handler(self, channel: str, data: str): - score_id = json.loads(data)["ScoreId"] - if self.metadata_hub: - await self._notify_room_score_processed(score_id) diff --git a/app/signalr/__init__.py b/app/signalr/__init__.py deleted file mode 100644 index 5938238..0000000 --- a/app/signalr/__init__.py +++ /dev/null @@ -1,5 +0,0 @@ -from __future__ import annotations - -from .router import router as signalr_router - -__all__ = ["signalr_router"] diff --git a/app/signalr/hub/__init__.py b/app/signalr/hub/__init__.py deleted file mode 100644 index 231ecf4..0000000 --- a/app/signalr/hub/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -from __future__ import annotations - -from .hub import Hub -from 
.metadata import MetadataHub -from .multiplayer import MultiplayerHub -from .spectator import SpectatorHub - -SpectatorHubs = SpectatorHub() -MultiplayerHubs = MultiplayerHub() -MetadataHubs = MetadataHub() -Hubs: dict[str, Hub] = { - "spectator": SpectatorHubs, - "multiplayer": MultiplayerHubs, - "metadata": MetadataHubs, -} diff --git a/app/signalr/hub/hub.py b/app/signalr/hub/hub.py deleted file mode 100644 index daeba34..0000000 --- a/app/signalr/hub/hub.py +++ /dev/null @@ -1,322 +0,0 @@ -from __future__ import annotations - -from abc import abstractmethod -import asyncio -import time -from typing import Any - -from app.config import settings -from app.exception import InvokeException -from app.log import logger -from app.models.signalr import UserState -from app.signalr.packet import ( - ClosePacket, - CompletionPacket, - InvocationPacket, - Packet, - PingPacket, - Protocol, -) -from app.signalr.store import ResultStore -from app.signalr.utils import get_signature - -from fastapi import WebSocket -from starlette.websockets import WebSocketDisconnect - - -class CloseConnection(Exception): - def __init__( - self, - message: str = "Connection closed", - allow_reconnect: bool = False, - from_client: bool = False, - ) -> None: - super().__init__(message) - self.message = message - self.allow_reconnect = allow_reconnect - self.from_client = from_client - - -class Client: - def __init__( - self, - connection_id: str, - connection_token: str, - connection: WebSocket, - protocol: Protocol, - ) -> None: - self.connection_id = connection_id - self.connection_token = connection_token - self.connection = connection - self.protocol = protocol - self._listen_task: asyncio.Task | None = None - self._ping_task: asyncio.Task | None = None - self._store = ResultStore() - - def __hash__(self) -> int: - return hash(self.connection_token) - - @property - def user_id(self) -> int: - return int(self.connection_id) - - async def send_packet(self, packet: Packet): - await 
self.connection.send_bytes(self.protocol.encode(packet)) - - async def receive_packets(self) -> list[Packet]: - message = await self.connection.receive() - d = message.get("bytes") or message.get("text", "").encode() - if not d: - return [] - return self.protocol.decode(d) - - async def _ping(self): - while True: - try: - await self.send_packet(PingPacket()) - await asyncio.sleep(settings.signalr_ping_interval) - except WebSocketDisconnect: - break - except RuntimeError as e: - if "disconnect message" in str(e) or "close message" in str(e): - break - else: - logger.error(f"Error in ping task for {self.connection_id}: {e}") - break - except Exception: - logger.exception(f"Error in client {self.connection_id}") - - -class Hub[TState: UserState]: - def __init__(self) -> None: - self.clients: dict[str, Client] = {} - self.waited_clients: dict[str, int] = {} - self.tasks: set[asyncio.Task] = set() - self.groups: dict[str, set[Client]] = {} - self.state: dict[int, TState] = {} - - def add_waited_client(self, connection_token: str, timestamp: int) -> None: - self.waited_clients[connection_token] = timestamp - - def get_client_by_id(self, id: str, default: Any = None) -> Client: - for client in self.clients.values(): - if client.connection_id == id: - return client - return default - - def get_before_clients(self, id: str, current_token: str) -> list[Client]: - clients = [] - for client in self.clients.values(): - if client.connection_id != id: - continue - if client.connection_token == current_token: - continue - clients.append(client) - return clients - - @abstractmethod - def create_state(self, client: Client) -> TState: - raise NotImplementedError - - def get_or_create_state(self, client: Client) -> TState: - if (state := self.state.get(client.user_id)) is not None: - return state - state = self.create_state(client) - self.state[client.user_id] = state - return state - - def add_to_group(self, client: Client, group_id: str) -> None: - self.groups.setdefault(group_id, 
set()).add(client) - - def remove_from_group(self, client: Client, group_id: str) -> None: - if group_id in self.groups: - self.groups[group_id].discard(client) - - async def kick_client(self, client: Client) -> None: - await self.call_noblock(client, "DisconnectRequested") - await client.send_packet(ClosePacket(allow_reconnect=False)) - await client.connection.close(code=1000, reason="Disconnected by server") - - async def add_client( - self, - connection_id: str, - connection_token: str, - protocol: Protocol, - connection: WebSocket, - ) -> Client: - if connection_token in self.clients: - raise ValueError(f"Client with connection token {connection_token} already exists.") - if connection_token in self.waited_clients: - if self.waited_clients[connection_token] < time.time() - settings.signalr_negotiate_timeout: - raise TimeoutError(f"Connection {connection_id} has waited too long.") - del self.waited_clients[connection_token] - client = Client(connection_id, connection_token, connection, protocol) - self.clients[connection_token] = client - task = asyncio.create_task(client._ping()) - self.tasks.add(task) - client._ping_task = task - return client - - async def remove_client(self, client: Client) -> None: - if client.connection_token not in self.clients: - return - del self.clients[client.connection_token] - if client._listen_task: - client._listen_task.cancel() - if client._ping_task: - client._ping_task.cancel() - for group in self.groups.values(): - group.discard(client) - await self.clean_state(client, False) - - @abstractmethod - async def _clean_state(self, state: TState) -> None: - return - - async def clean_state(self, client: Client, disconnected: bool) -> None: - if (state := self.state.get(client.user_id)) is None: - return - if disconnected and client.connection_token != state.connection_token: - return - try: - await self._clean_state(state) - del self.state[client.user_id] - except Exception: - ... 
- - async def on_connect(self, client: Client) -> None: - if method := getattr(self, "on_client_connect", None): - await method(client) - - async def send_packet(self, client: Client, packet: Packet) -> None: - logger.trace(f"[SignalR] send to {client.connection_id} packet {packet}") - try: - await client.send_packet(packet) - except WebSocketDisconnect as e: - logger.info(f"Client {client.connection_id} disconnected: {e.code}, {e.reason}") - await self.remove_client(client) - except RuntimeError as e: - if "disconnect message" in str(e): - logger.info(f"Client {client.connection_id} closed the connection.") - else: - logger.exception(f"RuntimeError in client {client.connection_id}: {e}") - await self.remove_client(client) - except Exception: - logger.exception(f"Error in client {client.connection_id}") - await self.remove_client(client) - - async def broadcast_call(self, method: str, *args: Any) -> None: - tasks = [] - for client in self.clients.values(): - tasks.append(self.call_noblock(client, method, *args)) - await asyncio.gather(*tasks) - - async def broadcast_group_call(self, group_id: str, method: str, *args: Any) -> None: - tasks = [] - for client in self.groups.get(group_id, []): - tasks.append(self.call_noblock(client, method, *args)) - await asyncio.gather(*tasks) - - async def _listen_client(self, client: Client) -> None: - try: - while True: - packets = await client.receive_packets() - for packet in packets: - if isinstance(packet, PingPacket): - continue - elif isinstance(packet, ClosePacket): - raise CloseConnection( - packet.error or "Connection closed by client", - packet.allow_reconnect, - True, - ) - task = asyncio.create_task(self._handle_packet(client, packet)) - self.tasks.add(task) - task.add_done_callback(self.tasks.discard) - except WebSocketDisconnect as e: - logger.info(f"Client {client.connection_id} disconnected: {e.code}, {e.reason}") - except RuntimeError as e: - if "disconnect message" in str(e): - logger.info(f"Client 
{client.connection_id} closed the connection.") - else: - logger.exception(f"RuntimeError in client {client.connection_id}: {e}") - except CloseConnection as e: - if not e.from_client: - await client.send_packet(ClosePacket(error=e.message, allow_reconnect=e.allow_reconnect)) - logger.info(f"Client {client.connection_id} closed the connection: {e.message}") - except Exception: - logger.exception(f"Error in client {client.connection_id}") - - await self.remove_client(client) - - async def _handle_packet(self, client: Client, packet: Packet) -> None: - if isinstance(packet, PingPacket): - return - elif isinstance(packet, InvocationPacket): - args = packet.arguments or [] - error = None - result = None - try: - result = await self.invoke_method(client, packet.target, args) - except InvokeException as e: - error = e.message - logger.debug(f"Client {client.connection_token} call {packet.target} failed: {error}") - except Exception: - logger.exception(f"Error invoking method {packet.target} for client {client.connection_id}") - error = "Unknown error occured in server" - if packet.invocation_id is not None: - await self.send_packet( - client, - CompletionPacket( - invocation_id=packet.invocation_id, - error=error, - result=result, - ), - ) - elif isinstance(packet, CompletionPacket): - client._store.add_result(packet.invocation_id, packet.result, packet.error) - - async def invoke_method(self, client: Client, method: str, args: list[Any]) -> Any: - method_ = getattr(self, method, None) - call_params = [] - if not method_: - raise InvokeException(f"Method '{method}' not found in hub.") - signature = get_signature(method_) - for name, param in signature.parameters.items(): - if name == "self" or param.annotation is Client: - continue - call_params.append(client.protocol.validate_object(args.pop(0), param.annotation)) - return await method_(client, *call_params) - - async def call(self, client: Client, method: str, *args: Any) -> Any: - invocation_id = 
client._store.get_invocation_id() - await self.send_packet( - client, - InvocationPacket( - header={}, - invocation_id=invocation_id, - target=method, - arguments=list(args), - stream_ids=None, - ), - ) - r = await client._store.fetch(invocation_id, None) - if r[1]: - raise InvokeException(r[1]) - return r[0] - - async def call_noblock(self, client: Client, method: str, *args: Any) -> None: - await self.send_packet( - client, - InvocationPacket( - header={}, - invocation_id=None, - target=method, - arguments=list(args), - stream_ids=None, - ), - ) - return None - - def __contains__(self, item: str) -> bool: - return item in self.clients or item in self.waited_clients diff --git a/app/signalr/hub/metadata.py b/app/signalr/hub/metadata.py deleted file mode 100644 index 1052be5..0000000 --- a/app/signalr/hub/metadata.py +++ /dev/null @@ -1,296 +0,0 @@ -from __future__ import annotations - -import asyncio -from collections import defaultdict -from collections.abc import Coroutine -import math -from typing import override - -from app.calculator import clamp -from app.database import Relationship, RelationshipType, User -from app.database.playlist_best_score import PlaylistBestScore -from app.database.playlists import Playlist -from app.database.room import Room -from app.database.score import Score -from app.dependencies.database import with_db -from app.log import logger -from app.models.metadata_hub import ( - TOTAL_SCORE_DISTRIBUTION_BINS, - DailyChallengeInfo, - MetadataClientState, - MultiplayerPlaylistItemStats, - MultiplayerRoomScoreSetEvent, - MultiplayerRoomStats, - OnlineStatus, - UserActivity, -) -from app.models.room import RoomCategory -from app.service.subscribers.score_processed import ScoreSubscriber -from app.utils import utcnow - -from .hub import Client, Hub - -from sqlmodel import col, select - -ONLINE_PRESENCE_WATCHERS_GROUP = "metadata:online-presence-watchers" - - -class MetadataHub(Hub[MetadataClientState]): - def __init__(self) -> None: - 
super().__init__() - self.subscriber = ScoreSubscriber() - self.subscriber.metadata_hub = self - self._daily_challenge_stats: MultiplayerRoomStats | None = None - self._today = utcnow().date() - self._lock = asyncio.Lock() - - def get_daily_challenge_stats(self, daily_challenge_room: int) -> MultiplayerRoomStats: - if self._daily_challenge_stats is None or self._today != utcnow().date(): - self._daily_challenge_stats = MultiplayerRoomStats( - room_id=daily_challenge_room, - playlist_item_stats={}, - ) - return self._daily_challenge_stats - - @staticmethod - def online_presence_watchers_group() -> str: - return ONLINE_PRESENCE_WATCHERS_GROUP - - @staticmethod - def room_watcher_group(room_id: int) -> str: - return f"metadata:multiplayer-room-watchers:{room_id}" - - def broadcast_tasks(self, user_id: int, store: MetadataClientState | None) -> set[Coroutine]: - if store is not None and not store.pushable: - return set() - data = store.for_push if store else None - return { - self.broadcast_group_call( - self.online_presence_watchers_group(), - "UserPresenceUpdated", - user_id, - data, - ), - self.broadcast_group_call( - self.friend_presence_watchers_group(user_id), - "FriendPresenceUpdated", - user_id, - data, - ), - } - - @staticmethod - def friend_presence_watchers_group(user_id: int): - return f"metadata:friend-presence-watchers:{user_id}" - - @override - async def _clean_state(self, state: MetadataClientState) -> None: - user_id = int(state.connection_id) - - if state.pushable: - await asyncio.gather(*self.broadcast_tasks(user_id, None)) - - async with with_db() as session: - async with session.begin(): - user = (await session.exec(select(User).where(User.id == int(state.connection_id)))).one() - user.last_visit = utcnow() - await session.commit() - - @override - def create_state(self, client: Client) -> MetadataClientState: - return MetadataClientState( - connection_id=client.connection_id, - connection_token=client.connection_token, - ) - - async def 
on_client_connect(self, client: Client) -> None: - user_id = int(client.connection_id) - store = self.get_or_create_state(client) - - # CRITICAL FIX: Set online status IMMEDIATELY upon connection - # This matches the C# official implementation behavior - store.status = OnlineStatus.ONLINE - logger.info(f"[MetadataHub] Set user {user_id} status to ONLINE upon connection") - - async with with_db() as session: - async with session.begin(): - friends = ( - await session.exec( - select(Relationship.target_id).where( - Relationship.user_id == user_id, - Relationship.type == RelationshipType.FOLLOW, - ) - ) - ).all() - tasks = [] - for friend_id in friends: - self.groups.setdefault(self.friend_presence_watchers_group(friend_id), set()).add(client) - if (friend_state := self.state.get(friend_id)) and friend_state.pushable: - tasks.append( - self.broadcast_group_call( - self.friend_presence_watchers_group(friend_id), - "FriendPresenceUpdated", - friend_id, - friend_state.for_push if friend_state.pushable else None, - ) - ) - await asyncio.gather(*tasks) - - daily_challenge_room = ( - await session.exec( - select(Room).where( - col(Room.ends_at) > utcnow(), - Room.category == RoomCategory.DAILY_CHALLENGE, - ) - ) - ).first() - if daily_challenge_room: - await self.call_noblock( - client, - "DailyChallengeUpdated", - DailyChallengeInfo( - room_id=daily_challenge_room.id, - ), - ) - - # CRITICAL FIX: Immediately broadcast the user's online status to all watchers - # This ensures the user appears as "currently online" right after connection - # Similar to the C# implementation's immediate broadcast logic - online_presence_tasks = self.broadcast_tasks(user_id, store) - if online_presence_tasks: - await asyncio.gather(*online_presence_tasks) - logger.info(f"[MetadataHub] Broadcasted online status for user {user_id} to watchers") - - # Also send the user's own presence update to confirm online status - await self.call_noblock( - client, - "UserPresenceUpdated", - user_id, - 
store.for_push, - ) - logger.info(f"[MetadataHub] User {user_id} is now ONLINE and visible to other clients") - - async def UpdateStatus(self, client: Client, status: int) -> None: - status_ = OnlineStatus(status) - user_id = int(client.connection_id) - store = self.get_or_create_state(client) - if store.status is not None and store.status == status_: - return - store.status = OnlineStatus(status_) - tasks = self.broadcast_tasks(user_id, store) - tasks.add( - self.call_noblock( - client, - "UserPresenceUpdated", - user_id, - store.for_push, - ) - ) - await asyncio.gather(*tasks) - - async def UpdateActivity(self, client: Client, activity: UserActivity | None) -> None: - user_id = int(client.connection_id) - store = self.get_or_create_state(client) - store.activity = activity - tasks = self.broadcast_tasks(user_id, store) - tasks.add( - self.call_noblock( - client, - "UserPresenceUpdated", - user_id, - store.for_push, - ) - ) - await asyncio.gather(*tasks) - - async def BeginWatchingUserPresence(self, client: Client) -> None: - # Critical fix: Send all currently online users to the new watcher - # Must use for_push to get the correct UserPresence format - await asyncio.gather( - *[ - self.call_noblock( - client, - "UserPresenceUpdated", - user_id, - store.for_push, # Fixed: use for_push instead of store - ) - for user_id, store in self.state.items() - if store.pushable - ] - ) - self.add_to_group(client, self.online_presence_watchers_group()) - logger.info( - f"[MetadataHub] Client {client.connection_id} now watching user presence, " - f"sent {len([s for s in self.state.values() if s.pushable])} online users" - ) - - async def EndWatchingUserPresence(self, client: Client) -> None: - self.remove_from_group(client, self.online_presence_watchers_group()) - - async def notify_room_score_processed(self, event: MultiplayerRoomScoreSetEvent): - await self.broadcast_group_call(self.room_watcher_group(event.room_id), "MultiplayerRoomScoreSet", event) - - async def 
BeginWatchingMultiplayerRoom(self, client: Client, room_id: int): - self.add_to_group(client, self.room_watcher_group(room_id)) - await self.subscriber.subscribe_room_score(room_id, client.user_id) - stats = self.get_daily_challenge_stats(room_id) - await self.update_daily_challenge_stats(stats) - return list(stats.playlist_item_stats.values()) - - async def update_daily_challenge_stats(self, stats: MultiplayerRoomStats) -> None: - async with with_db() as session: - playlist_ids = ( - await session.exec( - select(Playlist.id).where( - Playlist.room_id == stats.room_id, - ) - ) - ).all() - for playlist_id in playlist_ids: - item = stats.playlist_item_stats.get(playlist_id, None) - if item is None: - item = MultiplayerPlaylistItemStats( - playlist_item_id=playlist_id, - total_score_distribution=[0] * TOTAL_SCORE_DISTRIBUTION_BINS, - cumulative_score=0, - last_processed_score_id=0, - ) - stats.playlist_item_stats[playlist_id] = item - last_processed_score_id = item.last_processed_score_id - scores = ( - await session.exec( - select(PlaylistBestScore).where( - PlaylistBestScore.room_id == stats.room_id, - PlaylistBestScore.playlist_id == playlist_id, - PlaylistBestScore.score_id > last_processed_score_id, - col(PlaylistBestScore.score).has(col(Score.passed).is_(True)), - ) - ) - ).all() - if len(scores) == 0: - continue - - async with self._lock: - if item.last_processed_score_id == last_processed_score_id: - totals = defaultdict(int) - for score in scores: - bin_index = int( - clamp( - math.floor(score.total_score / 100000), - 0, - TOTAL_SCORE_DISTRIBUTION_BINS - 1, - ) - ) - totals[bin_index] += 1 - - item.cumulative_score += sum(score.total_score for score in scores) - - for j in range(TOTAL_SCORE_DISTRIBUTION_BINS): - item.total_score_distribution[j] += totals.get(j, 0) - - if scores: - item.last_processed_score_id = max(score.score_id for score in scores) - - async def EndWatchingMultiplayerRoom(self, client: Client, room_id: int): - self.remove_from_group(client, 
self.room_watcher_group(room_id)) - await self.subscriber.unsubscribe_room_score(room_id, client.user_id) diff --git a/app/signalr/hub/multiplayer.py b/app/signalr/hub/multiplayer.py deleted file mode 100644 index 345de1d..0000000 --- a/app/signalr/hub/multiplayer.py +++ /dev/null @@ -1,1393 +0,0 @@ -from __future__ import annotations - -import asyncio -from datetime import timedelta -from typing import override - -from app.database import Room -from app.database.beatmap import Beatmap -from app.database.chat import ChannelType, ChatChannel -from app.database.multiplayer_event import MultiplayerEvent -from app.database.playlists import Playlist -from app.database.relationship import Relationship, RelationshipType -from app.database.room_participated_user import RoomParticipatedUser -from app.database.user import User -from app.dependencies.database import get_redis, with_db -from app.dependencies.fetcher import get_fetcher -from app.exception import InvokeException -from app.log import logger -from app.models.mods import APIMod -from app.models.multiplayer_hub import ( - BeatmapAvailability, - ForceGameplayStartCountdown, - GameplayAbortReason, - MatchRequest, - MatchServerEvent, - MatchStartCountdown, - MatchStartedEventDetail, - MultiplayerClientState, - MultiplayerRoom, - MultiplayerRoomSettings, - MultiplayerRoomUser, - PlaylistItem, - ServerMultiplayerRoom, - ServerShuttingDownCountdown, - StartMatchCountdownRequest, - StopCountdownRequest, -) -from app.models.room import ( - DownloadState, - MatchType, - MultiplayerRoomState, - MultiplayerUserState, - RoomCategory, - RoomStatus, -) -from app.models.score import GameMode -from app.utils import utcnow - -from .hub import Client, Hub - -from httpx import HTTPError -from sqlalchemy import update -from sqlmodel import col, exists, select - -GAMEPLAY_LOAD_TIMEOUT = 30 - - -class MultiplayerEventLogger: - def __init__(self): - pass - - async def log_event(self, event: MultiplayerEvent): - try: - async with with_db() 
as session: - session.add(event) - await session.commit() - except Exception as e: - logger.warning(f"Failed to log multiplayer room event to database: {e}") - - async def room_created(self, room_id: int, user_id: int): - event = MultiplayerEvent( - room_id=room_id, - user_id=user_id, - event_type="room_created", - ) - await self.log_event(event) - - async def room_disbanded(self, room_id: int, user_id: int): - event = MultiplayerEvent( - room_id=room_id, - user_id=user_id, - event_type="room_disbanded", - ) - await self.log_event(event) - - async def player_joined(self, room_id: int, user_id: int): - event = MultiplayerEvent( - room_id=room_id, - user_id=user_id, - event_type="player_joined", - ) - await self.log_event(event) - - async def player_left(self, room_id: int, user_id: int): - event = MultiplayerEvent( - room_id=room_id, - user_id=user_id, - event_type="player_left", - ) - await self.log_event(event) - - async def player_kicked(self, room_id: int, user_id: int): - event = MultiplayerEvent( - room_id=room_id, - user_id=user_id, - event_type="player_kicked", - ) - await self.log_event(event) - - async def host_changed(self, room_id: int, user_id: int): - event = MultiplayerEvent( - room_id=room_id, - user_id=user_id, - event_type="host_changed", - ) - await self.log_event(event) - - async def game_started(self, room_id: int, playlist_item_id: int, details: MatchStartedEventDetail): - event = MultiplayerEvent( - room_id=room_id, - playlist_item_id=playlist_item_id, - event_type="game_started", - event_detail=details, # pyright: ignore[reportArgumentType] - ) - await self.log_event(event) - - async def game_aborted(self, room_id: int, playlist_item_id: int): - event = MultiplayerEvent( - room_id=room_id, - playlist_item_id=playlist_item_id, - event_type="game_aborted", - ) - await self.log_event(event) - - async def game_completed(self, room_id: int, playlist_item_id: int): - event = MultiplayerEvent( - room_id=room_id, - playlist_item_id=playlist_item_id, 
- event_type="game_completed", - ) - await self.log_event(event) - - -class MultiplayerHub(Hub[MultiplayerClientState]): - @override - def __init__(self): - super().__init__() - self.rooms: dict[int, ServerMultiplayerRoom] = {} - self.event_logger = MultiplayerEventLogger() - - @staticmethod - def group_id(room: int) -> str: - return f"room:{room}" - - @override - def create_state(self, client: Client) -> MultiplayerClientState: - return MultiplayerClientState( - connection_id=client.connection_id, - connection_token=client.connection_token, - ) - - @override - async def _clean_state(self, state: MultiplayerClientState): - user_id = int(state.connection_id) - - if state.room_id != 0 and state.room_id in self.rooms: - server_room = self.rooms[state.room_id] - room = server_room.room - user = next((u for u in room.users if u.user_id == user_id), None) - if user is not None: - await self.make_user_leave(self.get_client_by_id(str(user_id)), server_room, user) - - async def on_client_connect(self, client: Client) -> None: - """Track online users when connecting to multiplayer hub""" - logger.info(f"[MultiplayerHub] Client {client.user_id} connected") - - def _ensure_in_room(self, client: Client) -> ServerMultiplayerRoom: - store = self.get_or_create_state(client) - if store.room_id == 0: - raise InvokeException("You are not in a room") - if store.room_id not in self.rooms: - raise InvokeException("Room does not exist") - server_room = self.rooms[store.room_id] - return server_room - - def _ensure_host(self, client: Client, server_room: ServerMultiplayerRoom): - room = server_room.room - if room.host is None or room.host.user_id != client.user_id: - raise InvokeException("You are not the host of this room") - - async def CreateRoom(self, client: Client, room: MultiplayerRoom): - logger.info(f"[MultiplayerHub] {client.user_id} creating room") - store = self.get_or_create_state(client) - if store.room_id != 0: - raise InvokeException("You are already in a room") - async 
with with_db() as session: - async with session: - db_room = Room( - name=room.settings.name, - category=RoomCategory.REALTIME, - type=room.settings.match_type, - queue_mode=room.settings.queue_mode, - auto_skip=room.settings.auto_skip, - auto_start_duration=int(room.settings.auto_start_duration.total_seconds()), - host_id=client.user_id, - status=RoomStatus.IDLE, - ) - session.add(db_room) - await session.commit() - await session.refresh(db_room) - - channel = ChatChannel( - name=f"room_{db_room.id}", - description="Multiplayer room", - type=ChannelType.MULTIPLAYER, - ) - session.add(channel) - await session.commit() - await session.refresh(channel) - await session.refresh(db_room) - room.channel_id = channel.channel_id - db_room.channel_id = channel.channel_id - - item = room.playlist[0] - item.owner_id = client.user_id - room.room_id = db_room.id - starts_at = db_room.starts_at or utcnow() - beatmap_exists = await session.exec(select(exists().where(col(Beatmap.id) == item.beatmap_id))) - if not beatmap_exists.one(): - fetcher = await get_fetcher() - try: - await Beatmap.get_or_fetch(session, fetcher, bid=item.beatmap_id) - except HTTPError: - raise InvokeException("Failed to fetch beatmap, please retry later") - await Playlist.add_to_db(item, room.room_id, session) - - server_room = ServerMultiplayerRoom( - room=room, - category=RoomCategory.NORMAL, - start_at=starts_at, - hub=self, - ) - self.rooms[room.room_id] = server_room - await server_room.set_handler() - await self.event_logger.room_created(room.room_id, client.user_id) - return await self.JoinRoomWithPassword(client, room.room_id, room.settings.password) - - async def JoinRoom(self, client: Client, room_id: int): - return self.JoinRoomWithPassword(client, room_id, "") - - async def JoinRoomWithPassword(self, client: Client, room_id: int, password: str): - logger.info(f"[MultiplayerHub] {client.user_id} joining room {room_id}") - store = self.get_or_create_state(client) - if store.room_id != 0: - raise 
InvokeException("You are already in a room") - user = MultiplayerRoomUser(user_id=client.user_id) - if room_id not in self.rooms: - raise InvokeException("Room does not exist") - server_room = self.rooms[room_id] - room = server_room.room - for u in room.users: - if u.user_id == client.user_id: - raise InvokeException("You are already in this room") - if room.settings.password != password: - raise InvokeException("Incorrect password") - if room.host is None: - # from CreateRoom - room.host = user - store.room_id = room_id - await self.broadcast_group_call(self.group_id(room_id), "UserJoined", user) - room.users.append(user) - self.add_to_group(client, self.group_id(room_id)) - await server_room.match_type_handler.handle_join(user) - - # Critical fix: Send current room and gameplay state to new user - # This ensures spectators joining ongoing games get proper state sync - await self._send_room_state_to_new_user(client, server_room) - - await self.event_logger.player_joined(room_id, user.user_id) - - async with with_db() as session: - async with session.begin(): - if ( - participated_user := ( - await session.exec( - select(RoomParticipatedUser).where( - RoomParticipatedUser.room_id == room_id, - RoomParticipatedUser.user_id == client.user_id, - ) - ) - ).first() - ) is None: - participated_user = RoomParticipatedUser( - room_id=room_id, - user_id=client.user_id, - ) - session.add(participated_user) - else: - participated_user.left_at = None - participated_user.joined_at = utcnow() - - db_room = await session.get(Room, room_id) - if db_room is None: - raise InvokeException("Room does not exist in database") - db_room.participant_count += 1 - - redis = get_redis() - await redis.publish("chat:room:joined", f"{room.channel_id}:{user.user_id}") - - return room - - async def change_beatmap_availability( - self, - room_id: int, - user: MultiplayerRoomUser, - beatmap_availability: BeatmapAvailability, - ): - availability = user.availability - if ( - availability.state == 
beatmap_availability.state - and availability.download_progress == beatmap_availability.download_progress - ): - return - user.availability = beatmap_availability - await self.broadcast_group_call( - self.group_id(room_id), - "UserBeatmapAvailabilityChanged", - user.user_id, - beatmap_availability, - ) - - async def ChangeBeatmapAvailability(self, client: Client, beatmap_availability: BeatmapAvailability): - server_room = self._ensure_in_room(client) - room = server_room.room - user = next((u for u in room.users if u.user_id == client.user_id), None) - if user is None: - raise InvokeException("You are not in this room") - await self.change_beatmap_availability( - room.room_id, - user, - beatmap_availability, - ) - - async def AddPlaylistItem(self, client: Client, item: PlaylistItem): - server_room = self._ensure_in_room(client) - room = server_room.room - - user = next((u for u in room.users if u.user_id == client.user_id), None) - if user is None: - raise InvokeException("You are not in this room") - logger.info(f"[MultiplayerHub] {client.user_id} adding beatmap {item.beatmap_id} to room {room.room_id}") - await server_room.queue.add_item( - item, - user, - ) - - async def EditPlaylistItem(self, client: Client, item: PlaylistItem): - server_room = self._ensure_in_room(client) - room = server_room.room - - user = next((u for u in room.users if u.user_id == client.user_id), None) - if user is None: - raise InvokeException("You are not in this room") - - logger.info(f"[MultiplayerHub] {client.user_id} editing item {item.id} in room {room.room_id}") - await server_room.queue.edit_item( - item, - user, - ) - - async def RemovePlaylistItem(self, client: Client, item_id: int): - server_room = self._ensure_in_room(client) - room = server_room.room - - user = next((u for u in room.users if u.user_id == client.user_id), None) - if user is None: - raise InvokeException("You are not in this room") - - logger.info(f"[MultiplayerHub] {client.user_id} removing item {item_id} 
from room {room.room_id}") - await server_room.queue.remove_item( - item_id, - user, - ) - - async def change_db_settings(self, room: ServerMultiplayerRoom): - async with with_db() as session: - await session.execute( - update(Room) - .where(col(Room.id) == room.room.room_id) - .values( - name=room.room.settings.name, - type=room.room.settings.match_type, - queue_mode=room.room.settings.queue_mode, - auto_skip=room.room.settings.auto_skip, - auto_start_duration=int(room.room.settings.auto_start_duration.total_seconds()), - host_id=room.room.host.user_id if room.room.host else None, - ) - ) - await session.commit() - - async def setting_changed(self, room: ServerMultiplayerRoom, beatmap_changed: bool): - await self.change_db_settings(room) - await self.validate_styles(room) - await self.unready_all_users(room, beatmap_changed) - await self.broadcast_group_call( - self.group_id(room.room.room_id), - "SettingsChanged", - room.room.settings, - ) - - async def playlist_added(self, room: ServerMultiplayerRoom, item: PlaylistItem): - await self.broadcast_group_call( - self.group_id(room.room.room_id), - "PlaylistItemAdded", - item, - ) - - async def playlist_removed(self, room: ServerMultiplayerRoom, item_id: int): - await self.broadcast_group_call( - self.group_id(room.room.room_id), - "PlaylistItemRemoved", - item_id, - ) - - async def playlist_changed(self, room: ServerMultiplayerRoom, item: PlaylistItem, beatmap_changed: bool): - if item.id == room.room.settings.playlist_item_id: - await self.validate_styles(room) - await self.unready_all_users(room, beatmap_changed) - await self.broadcast_group_call( - self.group_id(room.room.room_id), - "PlaylistItemChanged", - item, - ) - - async def ChangeUserStyle(self, client: Client, beatmap_id: int | None, ruleset_id: int | None): - server_room = self._ensure_in_room(client) - room = server_room.room - user = next((u for u in room.users if u.user_id == client.user_id), None) - if user is None: - raise InvokeException("You are 
not in this room") - - await self.change_user_style( - beatmap_id, - ruleset_id, - server_room, - user, - ) - - async def validate_styles(self, room: ServerMultiplayerRoom): - fetcher = await get_fetcher() - if not room.queue.current_item.freestyle: - for user in room.room.users: - await self.change_user_style( - None, - None, - room, - user, - ) - async with with_db() as session: - try: - beatmap = await Beatmap.get_or_fetch(session, fetcher, bid=room.queue.current_item.beatmap_id) - except HTTPError: - raise InvokeException("Current item beatmap not found") - beatmap_ids = ( - await session.exec( - select(Beatmap.id, Beatmap.mode).where( - Beatmap.beatmapset_id == beatmap.beatmapset_id, - ) - ) - ).all() - for user in room.room.users: - beatmap_id = user.beatmap_id - ruleset_id = user.ruleset_id - user_beatmap = next( - (b for b in beatmap_ids if b[0] == beatmap_id), - None, - ) - if beatmap_id is not None and user_beatmap is None: - beatmap_id = None - beatmap_ruleset = user_beatmap[1] if user_beatmap else beatmap.mode - if ruleset_id is not None and beatmap_ruleset != GameMode.OSU and ruleset_id != beatmap_ruleset: - ruleset_id = None - await self.change_user_style( - beatmap_id, - ruleset_id, - room, - user, - ) - - for user in room.room.users: - is_valid, valid_mods = room.queue.current_item.validate_user_mods(user, user.mods) - if not is_valid: - await self.change_user_mods(valid_mods, room, user) - - async def change_user_style( - self, - beatmap_id: int | None, - ruleset_id: int | None, - room: ServerMultiplayerRoom, - user: MultiplayerRoomUser, - ): - if user.beatmap_id == beatmap_id and user.ruleset_id == ruleset_id: - return - - if beatmap_id is not None or ruleset_id is not None: - if not room.queue.current_item.freestyle: - raise InvokeException("Current item does not allow free user styles.") - - async with with_db() as session: - item_beatmap = await session.get(Beatmap, room.queue.current_item.beatmap_id) - if item_beatmap is None: - raise 
InvokeException("Item beatmap not found") - - user_beatmap = item_beatmap if beatmap_id is None else await session.get(Beatmap, beatmap_id) - - if user_beatmap is None: - raise InvokeException("Invalid beatmap selected.") - - if user_beatmap.beatmapset_id != item_beatmap.beatmapset_id: - raise InvokeException("Selected beatmap is not from the same beatmap set.") - - if ( - ruleset_id is not None - and user_beatmap.mode != GameMode.OSU - and ruleset_id != int(user_beatmap.mode) - ): - raise InvokeException("Selected ruleset is not supported for the given beatmap.") - - user.beatmap_id = beatmap_id - user.ruleset_id = ruleset_id - - await self.broadcast_group_call( - self.group_id(room.room.room_id), - "UserStyleChanged", - user.user_id, - beatmap_id, - ruleset_id, - ) - - async def ChangeUserMods(self, client: Client, new_mods: list[APIMod]): - server_room = self._ensure_in_room(client) - room = server_room.room - user = next((u for u in room.users if u.user_id == client.user_id), None) - if user is None: - raise InvokeException("You are not in this room") - - await self.change_user_mods(new_mods, server_room, user) - - async def change_user_mods( - self, - new_mods: list[APIMod], - room: ServerMultiplayerRoom, - user: MultiplayerRoomUser, - ): - is_valid, valid_mods = room.queue.current_item.validate_user_mods(user, new_mods) - if not is_valid: - incompatible_mods = [mod["acronym"] for mod in new_mods if mod not in valid_mods] - raise InvokeException(f"Incompatible mods were selected: {','.join(incompatible_mods)}") - - if user.mods == valid_mods: - return - - user.mods = valid_mods - - await self.broadcast_group_call( - self.group_id(room.room.room_id), - "UserModsChanged", - user.user_id, - valid_mods, - ) - - async def validate_user_stare( - self, - room: ServerMultiplayerRoom, - old: MultiplayerUserState, - new: MultiplayerUserState, - ): - match new: - case MultiplayerUserState.IDLE: - if old.is_playing: - raise InvokeException("Cannot return to idle without 
aborting gameplay.") - case MultiplayerUserState.READY: - if old != MultiplayerUserState.IDLE: - raise InvokeException(f"Cannot change state from {old} to {new}") - if room.queue.current_item.expired: - raise InvokeException("Cannot ready up while all items have been played.") - case MultiplayerUserState.WAITING_FOR_LOAD: - raise InvokeException(f"Cannot change state from {old} to {new}") - case MultiplayerUserState.LOADED: - if old != MultiplayerUserState.WAITING_FOR_LOAD: - raise InvokeException(f"Cannot change state from {old} to {new}") - case MultiplayerUserState.READY_FOR_GAMEPLAY: - if old != MultiplayerUserState.LOADED: - raise InvokeException(f"Cannot change state from {old} to {new}") - case MultiplayerUserState.PLAYING: - raise InvokeException("State is managed by the server.") - case MultiplayerUserState.FINISHED_PLAY: - if old != MultiplayerUserState.PLAYING: - raise InvokeException(f"Cannot change state from {old} to {new}") - case MultiplayerUserState.RESULTS: - # Allow server-managed transitions to RESULTS state - # This includes spectators who need to see results - if old not in ( - MultiplayerUserState.FINISHED_PLAY, - MultiplayerUserState.SPECTATING, # Allow spectators to see results - ): - raise InvokeException(f"Cannot change state from {old} to {new}") - case MultiplayerUserState.SPECTATING: - # Enhanced spectator validation - allow transitions from more states - # This matches official osu-server-spectator behavior - if old not in ( - MultiplayerUserState.IDLE, - MultiplayerUserState.READY, - MultiplayerUserState.RESULTS, # Allow spectating after results - ): - # Allow spectating during gameplay states only if the room is in appropriate state - if not ( - old.is_playing - and room.room.state - in ( - MultiplayerRoomState.WAITING_FOR_LOAD, - MultiplayerRoomState.PLAYING, - ) - ): - raise InvokeException(f"Cannot change state from {old} to {new}") - case _: - raise InvokeException(f"Invalid state transition from {old} to {new}") - - async def 
ChangeState(self, client: Client, state: MultiplayerUserState): - server_room = self._ensure_in_room(client) - room = server_room.room - user = next((u for u in room.users if u.user_id == client.user_id), None) - if user is None: - raise InvokeException("You are not in this room") - - if user.state == state: - return - - # Special handling for state changes during gameplay - match state: - case MultiplayerUserState.IDLE: - if user.state.is_playing: - return - case MultiplayerUserState.LOADED | MultiplayerUserState.READY_FOR_GAMEPLAY: - if not user.state.is_playing: - return - - logger.info(f"[MultiplayerHub] User {user.user_id} changing state from {user.state} to {state}") - - await self.validate_user_stare( - server_room, - user.state, - state, - ) - - await self.change_user_state(server_room, user, state) - - # Enhanced spectator handling based on official implementation - if state == MultiplayerUserState.SPECTATING: - await self.handle_spectator_state_change(client, server_room, user) - - await self.update_room_state(server_room) - - async def change_user_state( - self, - room: ServerMultiplayerRoom, - user: MultiplayerRoomUser, - state: MultiplayerUserState, - ): - logger.info(f"[MultiplayerHub] {user.user_id}'s state changed from {user.state} to {state}") - user.state = state - await self.broadcast_group_call( - self.group_id(room.room.room_id), - "UserStateChanged", - user.user_id, - user.state, - ) - - async def handle_spectator_state_change( - self, client: Client, room: ServerMultiplayerRoom, user: MultiplayerRoomUser - ): - """ - Handle special logic for users entering spectator mode during ongoing gameplay. - Based on official osu-server-spectator implementation. 
- """ - room_state = room.room.state - - # If switching to spectating during gameplay, immediately request load - if room_state == MultiplayerRoomState.WAITING_FOR_LOAD: - logger.info(f"[MultiplayerHub] Spectator {user.user_id} joining during load phase") - await self.call_noblock(client, "LoadRequested") - - elif room_state == MultiplayerRoomState.PLAYING: - logger.info(f"[MultiplayerHub] Spectator {user.user_id} joining during active gameplay") - await self.call_noblock(client, "LoadRequested") - - # Also sync the spectator with current game state - await self._send_current_gameplay_state_to_spectator(client, room) - - async def _send_current_gameplay_state_to_spectator(self, client: Client, room: ServerMultiplayerRoom): - """ - Send current gameplay state information to a newly joined spectator. - This helps spectators sync with ongoing gameplay. - """ - try: - # Send current room state - await self.call_noblock(client, "RoomStateChanged", room.room.state) - - # Send current user states for all players - for room_user in room.room.users: - if room_user.state.is_playing or room_user.state == MultiplayerUserState.RESULTS: - await self.call_noblock( - client, - "UserStateChanged", - room_user.user_id, - room_user.state, - ) - - # If the room is in OPEN state but we have users in RESULTS state, - # this means the game just finished and we should send ResultsReady - if room.room.state == MultiplayerRoomState.OPEN and any( - u.state == MultiplayerUserState.RESULTS for u in room.room.users - ): - logger.debug(f"[MultiplayerHub] Sending ResultsReady to new spectator {client.user_id}") - await self.call_noblock(client, "ResultsReady") - - logger.debug(f"[MultiplayerHub] Sent current gameplay state to spectator {client.user_id}") - except Exception as e: - logger.error(f"[MultiplayerHub] Failed to send gameplay state to spectator {client.user_id}: {e}") - - async def _send_room_state_to_new_user(self, client: Client, room: ServerMultiplayerRoom): - """ - Send complete 
room state to a newly joined user. - Critical for spectators joining ongoing games. - """ - try: - # Send current room state - if room.room.state != MultiplayerRoomState.OPEN: - await self.call_noblock(client, "RoomStateChanged", room.room.state) - - # If room is in gameplay state, send LoadRequested immediately - if room.room.state in ( - MultiplayerRoomState.WAITING_FOR_LOAD, - MultiplayerRoomState.PLAYING, - ): - logger.info( - f"[MultiplayerHub] Sending LoadRequested to user {client.user_id} " - f"joining ongoing game (room state: {room.room.state})" - ) - await self.call_noblock(client, "LoadRequested") - - # Send all user states to help with synchronization - for room_user in room.room.users: - if room_user.user_id != client.user_id: # Don't send own state - await self.call_noblock( - client, - "UserStateChanged", - room_user.user_id, - room_user.state, - ) - - # Critical fix: If room is OPEN but has users in RESULTS state, - # send ResultsReady to new joiners (including spectators) - if room.room.state == MultiplayerRoomState.OPEN and any( - u.state == MultiplayerUserState.RESULTS for u in room.room.users - ): - logger.info(f"[MultiplayerHub] Sending ResultsReady to newly joined user {client.user_id}") - await self.call_noblock(client, "ResultsReady") - - # Critical addition: Send current playing users to SpectatorHub for cross-hub sync - # This ensures spectators can watch multiplayer players properly - await self._sync_with_spectator_hub(client, room) - - logger.debug(f"[MultiplayerHub] Sent complete room state to new user {client.user_id}") - except Exception as e: - logger.error(f"[MultiplayerHub] Failed to send room state to user {client.user_id}: {e}") - - async def _sync_with_spectator_hub(self, client: Client, room: ServerMultiplayerRoom): - """ - Sync with SpectatorHub to ensure cross-hub spectating works properly. - This is crucial for users watching multiplayer players from other pages. 
- """ - try: - # Import here to avoid circular imports - from app.signalr.hub import SpectatorHubs - - # For each user in the room, check their state and sync appropriately - for room_user in room.room.users: - if room_user.state.is_playing: - spectator_state = SpectatorHubs.state.get(room_user.user_id) - if spectator_state and spectator_state.state: - # Send the spectator state to help with cross-hub watching - await self.call_noblock( - client, - "UserBeganPlaying", - room_user.user_id, - spectator_state.state, - ) - logger.debug( - f"[MultiplayerHub] Synced spectator state for user {room_user.user_id} " - f"to new client {client.user_id}" - ) - - # Critical addition: Notify SpectatorHub about users in RESULTS state - elif room_user.state == MultiplayerUserState.RESULTS: - # Create a synthetic finished state for cross-hub spectating - try: - from app.models.spectator_hub import ( - SpectatedUserState, - SpectatorState, - ) - - finished_state = SpectatorState( - beatmap_id=room.queue.current_item.beatmap_id, - ruleset_id=room_user.ruleset_id or 0, - mods=room_user.mods, - state=SpectatedUserState.Passed, # Assume passed for results - maximum_statistics={}, - ) - - await self.call_noblock( - client, - "UserFinishedPlaying", - room_user.user_id, - finished_state, - ) - logger.debug( - f"[MultiplayerHub] Sent synthetic finished state for user {room_user.user_id} " - f"to client {client.user_id}" - ) - except Exception as e: - logger.debug(f"[MultiplayerHub] Failed to create synthetic finished state: {e}") - - except Exception as e: - logger.debug(f"[MultiplayerHub] Failed to sync with SpectatorHub: {e}") - # This is not critical, so we don't raise the exception - - async def update_room_state(self, room: ServerMultiplayerRoom): - match room.room.state: - case MultiplayerRoomState.OPEN: - if room.room.settings.auto_start_enabled: - if ( - not room.queue.current_item.expired - and any(u.state == MultiplayerUserState.READY for u in room.room.users) - and not any( - 
isinstance(countdown, MatchStartCountdown) for countdown in room.room.active_countdowns - ) - ): - await room.start_countdown( - MatchStartCountdown(time_remaining=room.room.settings.auto_start_duration), - self.start_match, - ) - case MultiplayerRoomState.WAITING_FOR_LOAD: - played_count = len([True for user in room.room.users if user.state.is_playing]) - ready_count = len( - [True for user in room.room.users if user.state == MultiplayerUserState.READY_FOR_GAMEPLAY] - ) - if played_count == ready_count: - await self.start_gameplay(room) - case MultiplayerRoomState.PLAYING: - if all(u.state != MultiplayerUserState.PLAYING for u in room.room.users): - any_user_finished_playing = False - - # Handle finished players first - for u in filter( - lambda u: u.state == MultiplayerUserState.FINISHED_PLAY, - room.room.users, - ): - any_user_finished_playing = True - await self.change_user_state(room, u, MultiplayerUserState.RESULTS) - - # Critical fix: Handle spectators who should also see results - # Move spectators to RESULTS state so they can see the results screen - for u in filter( - lambda u: u.state == MultiplayerUserState.SPECTATING, - room.room.users, - ): - logger.debug(f"[MultiplayerHub] Moving spectator {u.user_id} to RESULTS state") - await self.change_user_state(room, u, MultiplayerUserState.RESULTS) - - await self.change_room_state(room, MultiplayerRoomState.OPEN) - - # Send ResultsReady to all room members - await self.broadcast_group_call( - self.group_id(room.room.room_id), - "ResultsReady", - ) - - # Critical addition: Notify SpectatorHub about finished games - # This ensures cross-hub spectating works properly - await self._notify_spectator_hub_game_ended(room) - - if any_user_finished_playing: - await self.event_logger.game_completed( - room.room.room_id, - room.queue.current_item.id, - ) - else: - await self.event_logger.game_aborted( - room.room.room_id, - room.queue.current_item.id, - ) - await room.queue.finish_current_item() - - async def 
change_room_state(self, room: ServerMultiplayerRoom, state: MultiplayerRoomState): - logger.debug(f"[MultiplayerHub] Room {room.room.room_id} state changed from {room.room.state} to {state}") - room.room.state = state - await self.broadcast_group_call( - self.group_id(room.room.room_id), - "RoomStateChanged", - state, - ) - - async def StartMatch(self, client: Client): - server_room = self._ensure_in_room(client) - room = server_room.room - user = next((u for u in room.users if u.user_id == client.user_id), None) - if user is None: - raise InvokeException("You are not in this room") - self._ensure_host(client, server_room) - - # Check host state - host must be ready or spectating - if room.host and room.host.state not in ( - MultiplayerUserState.SPECTATING, - MultiplayerUserState.READY, - ): - raise InvokeException("Can't start match when the host is not ready.") - - # Check if any users are ready - if all(u.state != MultiplayerUserState.READY for u in room.users): - raise InvokeException("Can't start match when no users are ready.") - - await self.start_match(server_room) - - async def start_match(self, room: ServerMultiplayerRoom): - if room.room.state != MultiplayerRoomState.OPEN: - raise InvokeException("Can't start match when already in a running state.") - if room.queue.current_item.expired: - raise InvokeException("Current playlist item is expired") - - if all(u.state != MultiplayerUserState.READY for u in room.room.users): - await room.queue.finish_current_item() - - logger.info(f"[MultiplayerHub] Room {room.room.room_id} match started") - - ready_users = [ - u - for u in room.room.users - if u.availability.state == DownloadState.LOCALLY_AVAILABLE - and (u.state == MultiplayerUserState.READY or u.state == MultiplayerUserState.IDLE) - ] - for u in ready_users: - await self.change_user_state(room, u, MultiplayerUserState.WAITING_FOR_LOAD) - await self.change_room_state( - room, - MultiplayerRoomState.WAITING_FOR_LOAD, - ) - await self.broadcast_group_call( - 
self.group_id(room.room.room_id), - "LoadRequested", - ) - await room.start_countdown( - ForceGameplayStartCountdown(time_remaining=timedelta(seconds=GAMEPLAY_LOAD_TIMEOUT)), - self.start_gameplay, - ) - await self.event_logger.game_started( - room.room.room_id, - room.queue.current_item.id, - details=room.match_type_handler.get_details(), - ) - - async def start_gameplay(self, room: ServerMultiplayerRoom): - if room.room.state != MultiplayerRoomState.WAITING_FOR_LOAD: - raise InvokeException("Room is not ready for gameplay") - if room.queue.current_item.expired: - raise InvokeException("Current playlist item is expired") - await room.stop_all_countdowns(ForceGameplayStartCountdown) - playing = False - played_user = 0 - for user in room.room.users: - client = self.get_client_by_id(str(user.user_id)) - if client is None: - continue - - if user.state in ( - MultiplayerUserState.READY_FOR_GAMEPLAY, - MultiplayerUserState.LOADED, - ): - playing = True - played_user += 1 - await self.change_user_state(room, user, MultiplayerUserState.PLAYING) - await self.call_noblock(client, "GameplayStarted") - elif user.state == MultiplayerUserState.WAITING_FOR_LOAD: - await self.change_user_state(room, user, MultiplayerUserState.IDLE) - await self.broadcast_group_call( - self.group_id(room.room.room_id), - "GameplayAborted", - GameplayAbortReason.LOAD_TOOK_TOO_LONG, - ) - await self.change_room_state( - room, - (MultiplayerRoomState.PLAYING if playing else MultiplayerRoomState.OPEN), - ) - if playing: - redis = get_redis() - await redis.set( - f"multiplayer:{room.room.room_id}:gameplay:players", - played_user, - ex=3600, - ) - - # Ensure spectator hub is aware of all active players for the new game. - # This helps spectators receive score data for every participant, - # especially in subsequent rounds where state may get out of sync. 
- for room_user in room.room.users: - if (client := self.get_client_by_id(str(room_user.user_id))) is not None: - try: - await self._sync_with_spectator_hub(client, room) - except Exception as e: - logger.debug( - f"[MultiplayerHub] Failed to resync spectator hub for user {room_user.user_id}: {e}" - ) - else: - await room.queue.finish_current_item() - - async def send_match_event(self, room: ServerMultiplayerRoom, event: MatchServerEvent): - await self.broadcast_group_call( - self.group_id(room.room.room_id), - "MatchEvent", - event, - ) - - async def make_user_leave( - self, - client: Client | None, - room: ServerMultiplayerRoom, - user: MultiplayerRoomUser, - kicked: bool = False, - ): - if client: - self.remove_from_group(client, self.group_id(room.room.room_id)) - room.room.users.remove(user) - - target_store = self.state.get(user.user_id) - if target_store: - target_store.room_id = 0 - - redis = get_redis() - await redis.publish("chat:room:left", f"{room.room.channel_id}:{user.user_id}") - - async with with_db() as session: - async with session.begin(): - participated_user = ( - await session.exec( - select(RoomParticipatedUser).where( - RoomParticipatedUser.room_id == room.room.room_id, - RoomParticipatedUser.user_id == user.user_id, - ) - ) - ).first() - if participated_user is not None: - participated_user.left_at = utcnow() - - db_room = await session.get(Room, room.room.room_id) - if db_room is None: - raise InvokeException("Room does not exist in database") - if db_room.participant_count > 0: - db_room.participant_count -= 1 - - if len(room.room.users) == 0: - await self.end_room(room) - return - await self.update_room_state(room) - if len(room.room.users) != 0 and room.room.host and room.room.host.user_id == user.user_id: - next_host = room.room.users[0] - await self.set_host(room, next_host) - - if kicked: - if client: - await self.call_noblock(client, "UserKicked", user) - await self.broadcast_group_call(self.group_id(room.room.room_id), "UserKicked", 
user) - else: - await self.broadcast_group_call(self.group_id(room.room.room_id), "UserLeft", user) - - async def end_room(self, room: ServerMultiplayerRoom): - assert room.room.host - async with with_db() as session: - await session.execute( - update(Room) - .where(col(Room.id) == room.room.room_id) - .values( - name=room.room.settings.name, - ends_at=utcnow(), - type=room.room.settings.match_type, - queue_mode=room.room.settings.queue_mode, - auto_skip=room.room.settings.auto_skip, - auto_start_duration=int(room.room.settings.auto_start_duration.total_seconds()), - host_id=room.room.host.user_id, - ) - ) - await self.event_logger.room_disbanded( - room.room.room_id, - room.room.host.user_id, - ) - del self.rooms[room.room.room_id] - logger.info(f"[MultiplayerHub] Room {room.room.room_id} ended") - - async def LeaveRoom(self, client: Client): - store = self.get_or_create_state(client) - if store.room_id == 0: - return - server_room = self._ensure_in_room(client) - room = server_room.room - user = next((u for u in room.users if u.user_id == client.user_id), None) - if user is None: - raise InvokeException("You are not in this room") - - await self.event_logger.player_left( - room.room_id, - user.user_id, - ) - await self.make_user_leave(client, server_room, user) - logger.info(f"[MultiplayerHub] {client.user_id} left room {room.room_id}") - - async def KickUser(self, client: Client, user_id: int): - server_room = self._ensure_in_room(client) - room = server_room.room - self._ensure_host(client, server_room) - - if user_id == client.user_id: - raise InvokeException("Can't kick self") - - user = next((u for u in room.users if u.user_id == user_id), None) - if user is None: - raise InvokeException("User not found in this room") - - await self.event_logger.player_kicked( - room.room_id, - user.user_id, - ) - target_client = self.get_client_by_id(str(user.user_id)) - await self.make_user_leave(target_client, server_room, user, kicked=True) - 
logger.info(f"[MultiplayerHub] {user.user_id} was kicked from room {room.room_id}by {client.user_id}") - - async def set_host(self, room: ServerMultiplayerRoom, user: MultiplayerRoomUser): - room.room.host = user - await self.change_db_settings(room) - await self.broadcast_group_call( - self.group_id(room.room.room_id), - "HostChanged", - user.user_id, - ) - - async def TransferHost(self, client: Client, user_id: int): - server_room = self._ensure_in_room(client) - room = server_room.room - self._ensure_host(client, server_room) - - new_host = next((u for u in room.users if u.user_id == user_id), None) - if new_host is None: - raise InvokeException("User not found in this room") - await self.event_logger.host_changed( - room.room_id, - new_host.user_id, - ) - await self.set_host(server_room, new_host) - logger.info(f"[MultiplayerHub] {client.user_id} transferred host to {new_host.user_id} in room {room.room_id}") - - async def AbortGameplay(self, client: Client): - server_room = self._ensure_in_room(client) - room = server_room.room - user = next((u for u in room.users if u.user_id == client.user_id), None) - if user is None: - raise InvokeException("You are not in this room") - - if not user.state.is_playing: - raise InvokeException("Cannot abort gameplay while not in a gameplay state") - - await self.change_user_state( - server_room, - user, - MultiplayerUserState.IDLE, - ) - await self.update_room_state(server_room) - - async def AbortMatch(self, client: Client): - server_room = self._ensure_in_room(client) - room = server_room.room - self._ensure_host(client, server_room) - - if room.state != MultiplayerRoomState.PLAYING and room.state != MultiplayerRoomState.WAITING_FOR_LOAD: - raise InvokeException("Cannot abort a match that hasn't started.") - - await asyncio.gather( - *[ - self.change_user_state(server_room, u, MultiplayerUserState.IDLE) - for u in room.users - if u.state.is_playing - ] - ) - await self.broadcast_group_call( - self.group_id(room.room_id), - 
"GameplayAborted", - GameplayAbortReason.HOST_ABORTED, - ) - await self.update_room_state(server_room) - logger.info(f"[MultiplayerHub] {client.user_id} aborted match in room {room.room_id}") - - async def change_user_match_state(self, room: ServerMultiplayerRoom, user: MultiplayerRoomUser): - await self.broadcast_group_call( - self.group_id(room.room.room_id), - "MatchUserStateChanged", - user.user_id, - user.match_state, - ) - - async def change_room_match_state(self, room: ServerMultiplayerRoom): - await self.broadcast_group_call( - self.group_id(room.room.room_id), - "MatchRoomStateChanged", - room.room.match_state, - ) - - async def ChangeSettings(self, client: Client, settings: MultiplayerRoomSettings): - server_room = self._ensure_in_room(client) - self._ensure_host(client, server_room) - room = server_room.room - - if room.state != MultiplayerRoomState.OPEN: - raise InvokeException("Cannot change settings while playing") - - if settings.match_type == MatchType.PLAYLISTS: - raise InvokeException("Invalid match type selected") - - settings.playlist_item_id = room.settings.playlist_item_id - previous_settings = room.settings - room.settings = settings - - if previous_settings.match_type != settings.match_type: - await server_room.set_handler() - if previous_settings.queue_mode != settings.queue_mode: - await server_room.queue.update_queue_mode() - - await self.setting_changed(server_room, beatmap_changed=False) - await self.update_room_state(server_room) - - async def SendMatchRequest(self, client: Client, request: MatchRequest): - server_room = self._ensure_in_room(client) - room = server_room.room - user = next((u for u in room.users if u.user_id == client.user_id), None) - if user is None: - raise InvokeException("You are not in this room") - - if isinstance(request, StartMatchCountdownRequest): - if room.host and room.host.user_id != user.user_id: - raise InvokeException("You are not the host of this room") - if room.state != MultiplayerRoomState.OPEN: - 
raise InvokeException("Cannot start match countdown when not open") - await server_room.start_countdown( - MatchStartCountdown(time_remaining=request.duration), - self.start_match, - ) - elif isinstance(request, StopCountdownRequest): - countdown = next( - (c for c in room.active_countdowns if c.id == request.id), - None, - ) - if countdown is None: - return - if (isinstance(countdown, MatchStartCountdown) and room.settings.auto_start_enabled) or isinstance( - countdown, (ForceGameplayStartCountdown | ServerShuttingDownCountdown) - ): - raise InvokeException("Cannot stop the requested countdown") - - await server_room.stop_countdown(countdown) - else: - await server_room.match_type_handler.handle_request(user, request) - - async def InvitePlayer(self, client: Client, user_id: int): - server_room = self._ensure_in_room(client) - room = server_room.room - user = next((u for u in room.users if u.user_id == client.user_id), None) - if user is None: - raise InvokeException("You are not in this room") - - async with with_db() as session: - db_user = await session.get(User, user_id) - target_relationship = ( - await session.exec( - select(Relationship).where( - Relationship.user_id == user_id, - Relationship.target_id == client.user_id, - ) - ) - ).first() - inviter_relationship = ( - await session.exec( - select(Relationship).where( - Relationship.user_id == client.user_id, - Relationship.target_id == user_id, - ) - ) - ).first() - if db_user is None: - raise InvokeException("User not found") - if db_user.id == client.user_id: - raise InvokeException("You cannot invite yourself") - if db_user.id in [u.user_id for u in room.users]: - raise InvokeException("User already invited") - if db_user.is_restricted: - raise InvokeException("User is restricted") - if inviter_relationship and inviter_relationship.type == RelationshipType.BLOCK: - raise InvokeException("Cannot perform action due to user being blocked") - if target_relationship and target_relationship.type == 
RelationshipType.BLOCK: - raise InvokeException("Cannot perform action due to user being blocked") - if ( - db_user.pm_friends_only - and target_relationship is not None - and target_relationship.type != RelationshipType.FOLLOW - ): - raise InvokeException("Cannot perform action because user has disabled non-friend communications") - - target_client = self.get_client_by_id(str(user_id)) - if target_client is None: - raise InvokeException("User is not online") - await self.call_noblock( - target_client, - "Invited", - client.user_id, - room.room_id, - room.settings.password, - ) - - async def unready_all_users(self, room: ServerMultiplayerRoom, reset_beatmap_availability: bool): - await asyncio.gather( - *[ - self.change_user_state( - room, - user, - MultiplayerUserState.IDLE, - ) - for user in room.room.users - if user.state == MultiplayerUserState.READY - ] - ) - if reset_beatmap_availability: - await asyncio.gather( - *[ - self.change_beatmap_availability( - room.room.room_id, - user, - BeatmapAvailability(state=DownloadState.UNKNOWN), - ) - for user in room.room.users - ] - ) - await room.stop_all_countdowns(MatchStartCountdown) - - async def _notify_spectator_hub_game_ended(self, room: ServerMultiplayerRoom): - """ - Notify SpectatorHub about ended multiplayer game. - This ensures cross-hub spectating works properly when games end. 
- """ - try: - # Import here to avoid circular imports - from app.models.spectator_hub import SpectatedUserState, SpectatorState - from app.signalr.hub import SpectatorHubs - - # For each user who finished the game, notify SpectatorHub - for room_user in room.room.users: - if room_user.state == MultiplayerUserState.RESULTS: - # Create a synthetic finished state - finished_state = SpectatorState( - beatmap_id=room.queue.current_item.beatmap_id, - ruleset_id=room_user.ruleset_id or 0, - mods=room_user.mods, - state=SpectatedUserState.Passed, # Assume passed for results - maximum_statistics={}, - ) - - # Notify all SpectatorHub watchers that this user finished - await SpectatorHubs.broadcast_group_call( - SpectatorHubs.group_id(room_user.user_id), - "UserFinishedPlaying", - room_user.user_id, - finished_state, - ) - - logger.debug(f"[MultiplayerHub] Notified SpectatorHub that user {room_user.user_id} finished game") - - except Exception as e: - logger.debug(f"[MultiplayerHub] Failed to notify SpectatorHub about game end: {e}") - # This is not critical, so we don't raise the exception diff --git a/app/signalr/hub/spectator.py b/app/signalr/hub/spectator.py deleted file mode 100644 index 05fbc42..0000000 --- a/app/signalr/hub/spectator.py +++ /dev/null @@ -1,585 +0,0 @@ -from __future__ import annotations - -import asyncio -import json -import lzma -import struct -import time -from typing import override - -from app.calculator import clamp -from app.config import settings -from app.database import Beatmap, User -from app.database.failtime import FailTime, FailTimeResp -from app.database.score import Score -from app.database.score_token import ScoreToken -from app.database.statistics import UserStatistics -from app.dependencies.database import get_redis, with_db -from app.dependencies.fetcher import get_fetcher -from app.dependencies.storage import get_storage_service -from app.exception import InvokeException -from app.log import logger -from app.models.mods import 
APIMod, mods_to_int -from app.models.score import GameMode, LegacyReplaySoloScoreInfo, ScoreStatistics -from app.models.spectator_hub import ( - APIUser, - FrameDataBundle, - LegacyReplayFrame, - ScoreInfo, - SpectatedUserState, - SpectatorState, - StoreClientState, - StoreScore, -) -from app.utils import unix_timestamp_to_windows - -from .hub import Client, Hub - -from httpx import HTTPError -from sqlalchemy.orm import joinedload -from sqlmodel import select - -READ_SCORE_TIMEOUT = 30 -REPLAY_LATEST_VER = 30000016 - - -def encode_uleb128(num: int) -> bytes | bytearray: - if num == 0: - return b"\x00" - - ret = bytearray() - - while num != 0: - ret.append(num & 0x7F) - num >>= 7 - if num != 0: - ret[-1] |= 0x80 - - return ret - - -def encode_string(s: str) -> bytes: - """Write `s` into bytes (ULEB128 & string).""" - if s: - encoded = s.encode() - ret = b"\x0b" + encode_uleb128(len(encoded)) + encoded - else: - ret = b"\x00" - - return ret - - -async def save_replay( - ruleset_id: int, - md5: str, - username: str, - score: Score, - statistics: ScoreStatistics, - maximum_statistics: ScoreStatistics, - frames: list[LegacyReplayFrame], -) -> None: - data = bytearray() - data.extend(struct.pack(" str: - return f"watch:{user_id}" - - @override - def create_state(self, client: Client) -> StoreClientState: - return StoreClientState( - connection_id=client.connection_id, - connection_token=client.connection_token, - ) - - @override - async def _clean_state(self, state: StoreClientState) -> None: - """ - Enhanced cleanup based on official osu-server-spectator implementation. - Properly notifies watched users when spectator disconnects. 
- """ - user_id = int(state.connection_id) - if state.state: - await self._end_session(user_id, state.state, state) - - # Critical fix: Notify all watched users that this spectator has disconnected - # This matches the official CleanUpState implementation - for watched_user_id in state.watched_user: - if (target_client := self.get_client_by_id(str(watched_user_id))) is not None: - await self.call_noblock(target_client, "UserEndedWatching", user_id) - logger.debug(f"[SpectatorHub] Notified {watched_user_id} that {user_id} stopped watching") - - async def on_client_connect(self, client: Client) -> None: - """ - Enhanced connection handling based on official implementation. - Send all active player states to newly connected clients. - """ - logger.info(f"[SpectatorHub] Client {client.user_id} connected") - - # Send all current player states to the new client - # This matches the official OnConnectedAsync behavior - active_states = [] - for user_id, store in self.state.items(): - if store.state is not None: - active_states.append((user_id, store.state)) - - if active_states: - logger.debug(f"[SpectatorHub] Sending {len(active_states)} active player states to {client.user_id}") - # Send states sequentially to avoid overwhelming the client - for user_id, state in active_states: - try: - await self.call_noblock(client, "UserBeganPlaying", user_id, state) - except Exception as e: - logger.debug(f"[SpectatorHub] Failed to send state for user {user_id}: {e}") - - # Also sync with MultiplayerHub for cross-hub spectating - await self._sync_with_multiplayer_hub(client) - - async def _sync_with_multiplayer_hub(self, client: Client) -> None: - """ - Sync with MultiplayerHub to get active multiplayer game states. - This ensures spectators can see multiplayer games from other pages. 
- """ - try: - # Import here to avoid circular imports - from app.signalr.hub import MultiplayerHubs - - # Check all active multiplayer rooms for playing users - for room_id, server_room in MultiplayerHubs.rooms.items(): - for room_user in server_room.room.users: - # Send state for users who are playing or in results - if room_user.state.is_playing and room_user.user_id not in self.state: - # Create a synthetic SpectatorState for multiplayer players - # This helps with cross-hub spectating - try: - synthetic_state = SpectatorState( - beatmap_id=server_room.queue.current_item.beatmap_id, - ruleset_id=room_user.ruleset_id or 0, # Default to osu! - mods=room_user.mods, - state=SpectatedUserState.Playing, - maximum_statistics={}, - ) - - await self.call_noblock( - client, - "UserBeganPlaying", - room_user.user_id, - synthetic_state, - ) - logger.debug( - f"[SpectatorHub] Sent synthetic multiplayer state for user {room_user.user_id}" - ) - except Exception as e: - logger.debug(f"[SpectatorHub] Failed to create synthetic state: {e}") - - # Critical addition: Notify about finished players in multiplayer games - elif ( - hasattr(room_user.state, "name") - and room_user.state.name == "RESULTS" - and room_user.user_id not in self.state - ): - try: - # Create a synthetic finished state - finished_state = SpectatorState( - beatmap_id=server_room.queue.current_item.beatmap_id, - ruleset_id=room_user.ruleset_id or 0, - mods=room_user.mods, - state=SpectatedUserState.Passed, # Assume passed for results - maximum_statistics={}, - ) - - await self.call_noblock( - client, - "UserFinishedPlaying", - room_user.user_id, - finished_state, - ) - logger.debug(f"[SpectatorHub] Sent synthetic finished state for user {room_user.user_id}") - except Exception as e: - logger.debug(f"[SpectatorHub] Failed to create synthetic finished state: {e}") - - except Exception as e: - logger.debug(f"[SpectatorHub] Failed to sync with MultiplayerHub: {e}") - # This is not critical, so we don't raise the 
exception - - async def BeginPlaySession(self, client: Client, score_token: int, state: SpectatorState) -> None: - user_id = int(client.connection_id) - store = self.get_or_create_state(client) - if store.state is not None: - logger.warning(f"[SpectatorHub] User {user_id} began new session without ending previous one; cleaning up") - try: - await self._end_session(user_id, store.state, store) - finally: - store.state = None - store.beatmap_status = None - store.checksum = None - store.ruleset_id = None - store.score_token = None - store.score = None - if state.beatmap_id is None or state.ruleset_id is None: - return - - fetcher = await get_fetcher() - async with with_db() as session: - async with session.begin(): - try: - beatmap = await Beatmap.get_or_fetch(session, fetcher, bid=state.beatmap_id) - except HTTPError: - raise InvokeException(f"Beatmap {state.beatmap_id} not found.") - user = (await session.exec(select(User).where(User.id == user_id))).first() - if not user: - return - name = user.username - store.state = state - store.beatmap_status = beatmap.beatmap_status - store.checksum = beatmap.checksum - store.ruleset_id = state.ruleset_id - store.score_token = score_token - store.score = StoreScore( - score_info=ScoreInfo( - mods=state.mods, - user=APIUser(id=user_id, name=name), - ruleset=state.ruleset_id, - maximum_statistics=state.maximum_statistics, - ) - ) - logger.info(f"[SpectatorHub] {client.user_id} began playing {state.beatmap_id}") - - await self.broadcast_group_call( - self.group_id(user_id), - "UserBeganPlaying", - user_id, - state, - ) - - async def SendFrameData(self, client: Client, frame_data: FrameDataBundle) -> None: - user_id = int(client.connection_id) - store = self.get_or_create_state(client) - if store.state is None or store.score is None: - return - - header = frame_data.header - score_info = store.score.score_info - score_info.accuracy = header.accuracy - score_info.combo = header.combo - score_info.max_combo = header.max_combo - 
score_info.statistics = header.statistics - store.score.replay_frames.extend(frame_data.frames) - - await self.broadcast_group_call(self.group_id(user_id), "UserSentFrames", user_id, frame_data) - - async def EndPlaySession(self, client: Client, state: SpectatorState) -> None: - user_id = int(client.connection_id) - store = self.get_or_create_state(client) - score = store.score - - # Early return if no active session - if ( - score is None - or store.score_token is None - or store.beatmap_status is None - or store.state is None - or store.score is None - ): - return - - try: - # Process score if conditions are met - if (settings.enable_all_beatmap_leaderboard and store.beatmap_status.has_leaderboard()) and any( - k.is_hit() and v > 0 for k, v in score.score_info.statistics.items() - ): - await self._process_score(store, client) - - # End the play session and notify watchers - await self._end_session(user_id, state, store) - - finally: - # CRITICAL FIX: Always clear state in finally block to ensure cleanup - # This matches the official C# implementation pattern - store.state = None - store.beatmap_status = None - store.checksum = None - store.ruleset_id = None - store.score_token = None - store.score = None - logger.info(f"[SpectatorHub] Cleared all session state for user {user_id}") - - async def _process_score(self, store: StoreClientState, client: Client) -> None: - user_id = int(client.connection_id) - assert store.state is not None - assert store.score_token is not None - assert store.checksum is not None - assert store.ruleset_id is not None - assert store.score is not None - async with with_db() as session: - async with session: - start_time = time.time() - score_record = None - while time.time() - start_time < READ_SCORE_TIMEOUT: - sub_query = select(ScoreToken.score_id).where( - ScoreToken.id == store.score_token, - ) - result = await session.exec( - select(Score) - .options(joinedload(Score.beatmap)) - .where( - Score.id == sub_query.scalar_subquery(), - 
Score.user_id == user_id, - ) - ) - score_record = result.first() - if score_record: - break - if not score_record: - return - if not score_record.passed: - return - await self.call_noblock( - client, - "UserScoreProcessed", - user_id, - score_record.id, - ) - # save replay - score_record.has_replay = True - await session.commit() - await session.refresh(score_record) - await save_replay( - ruleset_id=store.ruleset_id, - md5=store.checksum, - username=store.score.score_info.user.name, - score=score_record, - statistics=store.score.score_info.statistics, - maximum_statistics=store.score.score_info.maximum_statistics, - frames=store.score.replay_frames, - ) - - async def _end_session(self, user_id: int, state: SpectatorState, store: StoreClientState) -> None: - async def _add_failtime(): - async with with_db() as session: - failtime = await session.get(FailTime, state.beatmap_id) - total_length = ( - await session.exec(select(Beatmap.total_length).where(Beatmap.id == state.beatmap_id)) - ).one() - index = clamp(round((exit_time / total_length) * 100), 0, 99) - if failtime is not None: - resp = FailTimeResp.from_db(failtime) - else: - resp = FailTimeResp() - if state.state == SpectatedUserState.Failed: - resp.fail[index] += 1 - elif state.state == SpectatedUserState.Quit: - resp.exit[index] += 1 - - assert state.beatmap_id - new_failtime = FailTime.from_resp(state.beatmap_id, resp) - if failtime is not None: - await session.merge(new_failtime) - else: - session.add(new_failtime) - await session.commit() - - async def _edit_playtime(token: int, ruleset_id: int, mods: list[APIMod]): - redis = get_redis() - key = f"score:existed_time:{token}" - messages = await redis.xrange(key, min="-", max="+", count=1) - if not messages: - return - before_time = int(messages[0][1]["time"]) - await redis.delete(key) - async with with_db() as session: - gamemode = GameMode.from_int(ruleset_id).to_special_mode(mods) - statistics = ( - await session.exec( - select(UserStatistics).where( - 
UserStatistics.user_id == user_id, - UserStatistics.mode == gamemode, - ) - ) - ).first() - if statistics is None: - return - statistics.play_time -= before_time - statistics.play_time += round(min(before_time, exit_time)) - - if state.state == SpectatedUserState.Playing: - state.state = SpectatedUserState.Quit - logger.debug(f"[SpectatorHub] Changed state from Playing to Quit for user {user_id}") - - # Calculate exit time safely - exit_time = 0 - if store.score and store.score.replay_frames: - exit_time = max(frame.time for frame in store.score.replay_frames) // 1000 - - # Background task for playtime editing - only if we have valid data - if store.score_token and store.ruleset_id and store.score: - task = asyncio.create_task( - _edit_playtime( - store.score_token, - store.ruleset_id, - store.score.score_info.mods, - ) - ) - self.tasks.add(task) - task.add_done_callback(self.tasks.discard) - - # Background task for failtime tracking - only for failed/quit states with valid data - if ( - state.beatmap_id is not None - and exit_time > 0 - and state.state in (SpectatedUserState.Failed, SpectatedUserState.Quit) - ): - task = asyncio.create_task(_add_failtime()) - self.tasks.add(task) - task.add_done_callback(self.tasks.discard) - - logger.info(f"[SpectatorHub] {user_id} finished playing {state.beatmap_id} with {state.state}") - await self.broadcast_group_call( - self.group_id(user_id), - "UserFinishedPlaying", - user_id, - state, - ) - - async def StartWatchingUser(self, client: Client, target_id: int) -> None: - """ - Enhanced StartWatchingUser based on official osu-server-spectator implementation. - Properly handles state synchronization and watcher notifications. 
- """ - user_id = int(client.connection_id) - - logger.info(f"[SpectatorHub] {user_id} started watching {target_id}") - - try: - # Get target user's current state if it exists - target_store = self.state.get(target_id) - if not target_store or not target_store.state: - logger.info(f"[SpectatorHub] Rejecting watch request for {target_id}: user not playing") - raise InvokeException("Target user is not currently playing") - - if target_store.state.state != SpectatedUserState.Playing: - logger.info( - f"[SpectatorHub] Rejecting watch request for {target_id}: state is {target_store.state.state}" - ) - raise InvokeException("Target user is not currently playing") - - logger.debug(f"[SpectatorHub] {target_id} is currently playing, sending state") - # Send current state to the watcher immediately - await self.call_noblock( - client, - "UserBeganPlaying", - target_id, - target_store.state, - ) - except InvokeException: - # Re-raise to inform caller without adding to group - raise - except Exception as e: - # User isn't tracked or error occurred - this is not critical - logger.debug(f"[SpectatorHub] Could not get state for {target_id}: {e}") - raise InvokeException("Target user is not currently playing") from e - - # Add watcher to our tracked users only after validation - store = self.get_or_create_state(client) - store.watched_user.add(target_id) - - # Add to SignalR group for this target user - self.add_to_group(client, self.group_id(target_id)) - - # Get watcher's username and notify the target user - try: - async with with_db() as session: - username = (await session.exec(select(User.username).where(User.id == user_id))).first() - if not username: - logger.warning(f"[SpectatorHub] Could not find username for user {user_id}") - return - - # Notify target user that someone started watching - if (target_client := self.get_client_by_id(str(target_id))) is not None: - # Create watcher info array (matches official format) - watcher_info = [[user_id, username]] - await 
self.call_noblock(target_client, "UserStartedWatching", watcher_info) - logger.debug(f"[SpectatorHub] Notified {target_id} that {username} started watching") - except Exception as e: - logger.error(f"[SpectatorHub] Error notifying target user {target_id}: {e}") - - async def EndWatchingUser(self, client: Client, target_id: int) -> None: - """ - Enhanced EndWatchingUser based on official osu-server-spectator implementation. - Properly cleans up watcher state and notifies target user. - """ - user_id = int(client.connection_id) - - logger.info(f"[SpectatorHub] {user_id} ended watching {target_id}") - - # Remove from SignalR group - self.remove_from_group(client, self.group_id(target_id)) - - # Remove from our tracked watched users - store = self.get_or_create_state(client) - store.watched_user.discard(target_id) - - # Notify target user that watcher stopped watching - if (target_client := self.get_client_by_id(str(target_id))) is not None: - await self.call_noblock(target_client, "UserEndedWatching", user_id) - logger.debug(f"[SpectatorHub] Notified {target_id} that {user_id} stopped watching") - else: - logger.debug(f"[SpectatorHub] Target user {target_id} not found for end watching notification") diff --git a/app/signalr/packet.py b/app/signalr/packet.py deleted file mode 100644 index 4c0acdd..0000000 --- a/app/signalr/packet.py +++ /dev/null @@ -1,492 +0,0 @@ -from __future__ import annotations - -from dataclasses import dataclass -import datetime -from enum import Enum, IntEnum -import inspect -import json -from types import NoneType, UnionType -from typing import ( - Any, - Protocol as TypingProtocol, - Union, - get_args, - get_origin, -) - -from app.models.signalr import SignalRMeta, SignalRUnionMessage -from app.utils import camel_to_snake, snake_to_camel, snake_to_pascal - -import msgpack_lazer_api as m -from pydantic import BaseModel - -SEP = b"\x1e" - - -class PacketType(IntEnum): - INVOCATION = 1 - STREAM_ITEM = 2 - COMPLETION = 3 - STREAM_INVOCATION = 4 - 
CANCEL_INVOCATION = 5 - PING = 6 - CLOSE = 7 - - -@dataclass(kw_only=True) -class Packet: - type: PacketType - header: dict[str, Any] | None = None - - -@dataclass(kw_only=True) -class InvocationPacket(Packet): - type: PacketType = PacketType.INVOCATION - invocation_id: str | None - target: str - arguments: list[Any] | None = None - stream_ids: list[str] | None = None - - -@dataclass(kw_only=True) -class CompletionPacket(Packet): - type: PacketType = PacketType.COMPLETION - invocation_id: str - result: Any - error: str | None = None - - -@dataclass(kw_only=True) -class PingPacket(Packet): - type: PacketType = PacketType.PING - - -@dataclass(kw_only=True) -class ClosePacket(Packet): - type: PacketType = PacketType.CLOSE - error: str | None = None - allow_reconnect: bool = False - - -PACKETS = { - PacketType.INVOCATION: InvocationPacket, - PacketType.COMPLETION: CompletionPacket, - PacketType.PING: PingPacket, - PacketType.CLOSE: ClosePacket, -} - - -class Protocol(TypingProtocol): - @staticmethod - def decode(input: bytes) -> list[Packet]: ... - - @staticmethod - def encode(packet: Packet) -> bytes: ... - - @classmethod - def validate_object(cls, v: Any, typ: type) -> Any: ... 
- - -class MsgpackProtocol: - @classmethod - def serialize_msgpack(cls, v: Any) -> Any: - typ = v.__class__ - if issubclass(typ, BaseModel): - return cls.serialize_to_list(v) - elif issubclass(typ, list): - return [cls.serialize_msgpack(item) for item in v] - elif issubclass(typ, datetime.datetime): - return [v, 0] - elif issubclass(typ, datetime.timedelta): - return int(v.total_seconds() * 10_000_000) - elif isinstance(v, dict): - return {cls.serialize_msgpack(k): cls.serialize_msgpack(value) for k, value in v.items()} - elif issubclass(typ, Enum): - list_ = list(typ) - return list_.index(v) if v in list_ else v.value - return v - - @classmethod - def serialize_to_list(cls, value: BaseModel) -> list[Any]: - values = [] - for field, info in value.__class__.model_fields.items(): - metadata = next((m for m in info.metadata if isinstance(m, SignalRMeta)), None) - if metadata and metadata.member_ignore: - continue - values.append(cls.serialize_msgpack(v=getattr(value, field))) - if issubclass(value.__class__, SignalRUnionMessage): - return [value.__class__.union_type, values] - else: - return values - - @staticmethod - def process_object(v: Any, typ: type[BaseModel]) -> Any: - if isinstance(v, list): - d = {} - i = 0 - for field, info in typ.model_fields.items(): - metadata = next((m for m in info.metadata if isinstance(m, SignalRMeta)), None) - if metadata and metadata.member_ignore: - continue - anno = info.annotation - if anno is None: - d[camel_to_snake(field)] = v[i] - else: - d[field] = MsgpackProtocol.validate_object(v[i], anno) - i += 1 - return d - return v - - @staticmethod - def _encode_varint(value: int) -> bytes: - result = [] - while value >= 0x80: - result.append((value & 0x7F) | 0x80) - value >>= 7 - result.append(value & 0x7F) - return bytes(result) - - @staticmethod - def _decode_varint(data: bytes, offset: int = 0) -> tuple[int, int]: - result = 0 - shift = 0 - pos = offset - - while pos < len(data): - byte = data[pos] - result |= (byte & 0x7F) << 
shift - pos += 1 - if (byte & 0x80) == 0: - break - shift += 7 - - return result, pos - - @staticmethod - def decode(input: bytes) -> list[Packet]: - length, offset = MsgpackProtocol._decode_varint(input) - message_data = input[offset : offset + length] - unpacked = m.decode(message_data) - packet_type = PacketType(unpacked[0]) - if packet_type not in PACKETS: - raise ValueError(f"Unknown packet type: {packet_type}") - match packet_type: - case PacketType.INVOCATION: - return [ - InvocationPacket( - header=unpacked[1], - invocation_id=unpacked[2], - target=unpacked[3], - arguments=unpacked[4] if len(unpacked) > 4 else None, - stream_ids=unpacked[5] if len(unpacked) > 5 else None, - ) - ] - case PacketType.COMPLETION: - result_kind = unpacked[3] - return [ - CompletionPacket( - header=unpacked[1], - invocation_id=unpacked[2], - error=unpacked[4] if result_kind == 1 else None, - result=unpacked[5] if result_kind == 3 else None, - ) - ] - case PacketType.PING: - return [PingPacket()] - case PacketType.CLOSE: - return [ - ClosePacket( - error=unpacked[1], - allow_reconnect=unpacked[2] if len(unpacked) > 2 else False, - ) - ] - raise ValueError(f"Unsupported packet type: {packet_type}") - - @classmethod - def validate_object(cls, v: Any, typ: type) -> Any: - if issubclass(typ, BaseModel): - return typ.model_validate(obj=cls.process_object(v, typ)) - elif inspect.isclass(typ) and issubclass(typ, datetime.datetime): - return v[0] - elif inspect.isclass(typ) and issubclass(typ, datetime.timedelta): - return datetime.timedelta(seconds=int(v / 10_000_000)) - elif get_origin(typ) is list: - return [cls.validate_object(item, get_args(typ)[0]) for item in v] - elif inspect.isclass(typ) and issubclass(typ, Enum): - list_ = list(typ) - return list_[v] if isinstance(v, int) and 0 <= v < len(list_) else typ(v) - elif get_origin(typ) is dict: - return { - cls.validate_object(k, get_args(typ)[0]): cls.validate_object(v, get_args(typ)[1]) for k, v in v.items() - } - elif (origin := 
get_origin(typ)) is Union or origin is UnionType: - args = get_args(typ) - if len(args) == 2 and NoneType in args: - non_none_args = [arg for arg in args if arg is not NoneType] - if len(non_none_args) == 1: - if v is None: - return None - return cls.validate_object(v, non_none_args[0]) - - # suppose use `MessagePack-CSharp Union | None` - # except `X (Other Type) | None` - if NoneType in args and v is None: - return None - if not all(issubclass(arg, SignalRUnionMessage) or arg is NoneType for arg in args): - raise ValueError(f"Cannot validate {v} to {typ}, only SignalRUnionMessage subclasses are supported") - union_type = v[0] - for arg in args: - assert issubclass(arg, SignalRUnionMessage) - if arg.union_type == union_type: - return cls.validate_object(v[1], arg) - return v - - @staticmethod - def encode(packet: Packet) -> bytes: - payload = [packet.type.value, packet.header or {}] - if isinstance(packet, InvocationPacket): - payload.extend( - [ - packet.invocation_id, - packet.target, - ] - ) - if packet.arguments is not None: - payload.append([MsgpackProtocol.serialize_msgpack(arg) for arg in packet.arguments]) - if packet.stream_ids is not None: - payload.append(packet.stream_ids) - elif isinstance(packet, CompletionPacket): - result_kind = 2 - if packet.error: - result_kind = 1 - elif packet.result is not None: - result_kind = 3 - payload.extend( - [ - packet.invocation_id, - result_kind, - packet.error or MsgpackProtocol.serialize_msgpack(packet.result) or None, - ] - ) - elif isinstance(packet, ClosePacket): - payload.extend( - [ - packet.error or "", - packet.allow_reconnect, - ] - ) - elif isinstance(packet, PingPacket): - payload.pop(-1) - data = m.encode(payload) - return MsgpackProtocol._encode_varint(len(data)) + data - - -class JSONProtocol: - @classmethod - def serialize_to_json(cls, v: Any, dict_key: bool = False, in_union: bool = False): - typ = v.__class__ - if issubclass(typ, BaseModel): - return cls.serialize_model(v, in_union) - elif 
isinstance(v, dict): - return {cls.serialize_to_json(k, True): cls.serialize_to_json(value) for k, value in v.items()} - elif isinstance(v, list): - return [cls.serialize_to_json(item) for item in v] - elif isinstance(v, datetime.datetime): - return v.isoformat() - elif isinstance(v, datetime.timedelta): - # d.hh:mm:ss - total_seconds = int(v.total_seconds()) - hours, remainder = divmod(total_seconds, 3600) - minutes, seconds = divmod(remainder, 60) - return f"{hours:02}:{minutes:02}:{seconds:02}" - elif isinstance(v, Enum) and dict_key: - return v.value - elif isinstance(v, Enum): - list_ = list(typ) - return list_.index(v) - return v - - @classmethod - def serialize_model(cls, v: BaseModel, in_union: bool = False) -> dict[str, Any]: - d = {} - is_union = issubclass(v.__class__, SignalRUnionMessage) - for field, info in v.__class__.model_fields.items(): - metadata = next((m for m in info.metadata if isinstance(m, SignalRMeta)), None) - if metadata and metadata.json_ignore: - continue - name = ( - snake_to_camel( - field, - metadata.use_abbr if metadata else True, - ) - if not is_union - else snake_to_pascal( - field, - metadata.use_abbr if metadata else True, - ) - ) - d[name] = cls.serialize_to_json(getattr(v, field), in_union=is_union) - if is_union and not in_union: - return { - "$dtype": v.__class__.__name__, - "$value": d, - } - return d - - @staticmethod - def process_object(v: Any, typ: type[BaseModel], from_union: bool = False) -> dict[str, Any]: - d = {} - for field, info in typ.model_fields.items(): - metadata = next((m for m in info.metadata if isinstance(m, SignalRMeta)), None) - if metadata and metadata.json_ignore: - continue - name = ( - snake_to_camel(field, metadata.use_abbr if metadata else True) - if not from_union - else snake_to_pascal(field, metadata.use_abbr if metadata else True) - ) - value = v.get(name) - anno = typ.model_fields[field].annotation - if anno is None: - d[field] = value - continue - d[field] = 
JSONProtocol.validate_object(value, anno) - return d - - @staticmethod - def decode(input: bytes) -> list[Packet]: - packets_raw = input.removesuffix(SEP).split(SEP) - packets = [] - if len(packets_raw) > 1: - for packet_raw in packets_raw: - packets.extend(JSONProtocol.decode(packet_raw)) - return packets - else: - data = json.loads(packets_raw[0]) - packet_type = PacketType(data["type"]) - if packet_type not in PACKETS: - raise ValueError(f"Unknown packet type: {packet_type}") - match packet_type: - case PacketType.INVOCATION: - return [ - InvocationPacket( - header=data.get("header"), - invocation_id=data.get("invocationId"), - target=data["target"], - arguments=data.get("arguments"), - stream_ids=data.get("streamIds"), - ) - ] - case PacketType.COMPLETION: - return [ - CompletionPacket( - header=data.get("header"), - invocation_id=data["invocationId"], - error=data.get("error"), - result=data.get("result"), - ) - ] - case PacketType.PING: - return [PingPacket()] - case PacketType.CLOSE: - return [ - ClosePacket( - error=data.get("error"), - allow_reconnect=data.get("allowReconnect", False), - ) - ] - raise ValueError(f"Unsupported packet type: {packet_type}") - - @classmethod - def validate_object(cls, v: Any, typ: type, from_union: bool = False) -> Any: - if issubclass(typ, BaseModel): - return typ.model_validate(JSONProtocol.process_object(v, typ, from_union)) - elif inspect.isclass(typ) and issubclass(typ, datetime.datetime): - return datetime.datetime.fromisoformat(v) - elif inspect.isclass(typ) and issubclass(typ, datetime.timedelta): - # d.hh:mm:ss - parts = v.split(":") - if len(parts) == 3: - return datetime.timedelta(hours=int(parts[0]), minutes=int(parts[1]), seconds=int(parts[2])) - elif len(parts) == 2: - return datetime.timedelta(minutes=int(parts[0]), seconds=int(parts[1])) - elif len(parts) == 1: - return datetime.timedelta(seconds=int(parts[0])) - elif get_origin(typ) is list: - return [cls.validate_object(item, get_args(typ)[0]) for item in v] 
- elif inspect.isclass(typ) and issubclass(typ, Enum): - list_ = list(typ) - return list_[v] if isinstance(v, int) and 0 <= v < len(list_) else typ(v) - elif get_origin(typ) is dict: - return { - cls.validate_object(k, get_args(typ)[0]): cls.validate_object(v, get_args(typ)[1]) for k, v in v.items() - } - elif (origin := get_origin(typ)) is Union or origin is UnionType: - args = get_args(typ) - if len(args) == 2 and NoneType in args: - non_none_args = [arg for arg in args if arg is not NoneType] - if len(non_none_args) == 1: - if v is None: - return None - return cls.validate_object(v, non_none_args[0]) - - # suppose use `MessagePack-CSharp Union | None` - # except `X (Other Type) | None` - if NoneType in args and v is None: - return None - if not all(issubclass(arg, SignalRUnionMessage) or arg is NoneType for arg in args): - raise ValueError(f"Cannot validate {v} to {typ}, only SignalRUnionMessage subclasses are supported") - # https://github.com/ppy/osu/blob/98acd9/osu.Game/Online/SignalRDerivedTypeWorkaroundJsonConverter.cs - union_type = v["$dtype"] - for arg in args: - assert issubclass(arg, SignalRUnionMessage) - if arg.__name__ == union_type: - return cls.validate_object(v["$value"], arg, True) - return v - - @staticmethod - def encode(packet: Packet) -> bytes: - payload: dict[str, Any] = { - "type": packet.type.value, - } - if packet.header: - payload["header"] = packet.header - if isinstance(packet, InvocationPacket): - payload.update( - { - "target": packet.target, - } - ) - if packet.invocation_id is not None: - payload["invocationId"] = packet.invocation_id - if packet.arguments is not None: - payload["arguments"] = [JSONProtocol.serialize_to_json(arg) for arg in packet.arguments] - if packet.stream_ids is not None: - payload["streamIds"] = packet.stream_ids - elif isinstance(packet, CompletionPacket): - payload.update( - { - "invocationId": packet.invocation_id, - } - ) - if packet.error is not None: - payload["error"] = packet.error - if packet.result 
is not None: - payload["result"] = JSONProtocol.serialize_to_json(packet.result) - elif isinstance(packet, PingPacket): - pass - elif isinstance(packet, ClosePacket): - payload.update( - { - "allowReconnect": packet.allow_reconnect, - } - ) - if packet.error is not None: - payload["error"] = packet.error - return json.dumps(payload).encode("utf-8") + SEP - - -PROTOCOLS: dict[str, Protocol] = { - "json": JSONProtocol, - "messagepack": MsgpackProtocol, -} diff --git a/app/signalr/router.py b/app/signalr/router.py deleted file mode 100644 index 753bea3..0000000 --- a/app/signalr/router.py +++ /dev/null @@ -1,119 +0,0 @@ -from __future__ import annotations - -import asyncio -import json -import time -from typing import Literal -import uuid - -from app.database import User as DBUser -from app.dependencies.database import DBFactory, get_db_factory -from app.dependencies.user import get_current_user, get_current_user_and_token -from app.log import logger -from app.models.signalr import NegotiateResponse, Transport - -from .hub import Hubs -from .packet import PROTOCOLS, SEP - -from fastapi import APIRouter, Depends, Header, HTTPException, Query, WebSocket -from fastapi.security import SecurityScopes - -router = APIRouter(prefix="/signalr", include_in_schema=False) -logger.warning( - "The Python version of SignalR server is deprecated. 
" - "Maybe it will be removed or be fixed to continuously use in the future" -) - - -@router.post("/{hub}/negotiate", response_model=NegotiateResponse) -async def negotiate( - hub: Literal["spectator", "multiplayer", "metadata"], - negotiate_version: int = Query(1, alias="negotiateVersion"), - user: DBUser = Depends(get_current_user), -): - connectionId = str(user.id) - connectionToken = f"{connectionId}:{uuid.uuid4()}" - Hubs[hub].add_waited_client( - connection_token=connectionToken, - timestamp=int(time.time()), - ) - return NegotiateResponse( - connectionId=connectionId, - connectionToken=connectionToken, - negotiateVersion=negotiate_version, - availableTransports=[Transport(transport="WebSockets")], - ) - - -@router.websocket("/{hub}") -async def connect( - hub: Literal["spectator", "multiplayer", "metadata"], - websocket: WebSocket, - id: str, - authorization: str = Header(...), - factory: DBFactory = Depends(get_db_factory), -): - token = authorization[7:] - user_id = id.split(":")[0] - hub_ = Hubs[hub] - if id not in hub_: - await websocket.close(code=1008) - return - try: - async for session in factory(): - if ( - user_and_token := await get_current_user_and_token( - session, SecurityScopes(scopes=["*"]), token_pw=token - ) - ) is None or str(user_and_token[0].id) != user_id: - await websocket.close(code=1008) - return - except HTTPException: - await websocket.close(code=1008) - return - await websocket.accept() - - # handshake - handshake = await websocket.receive() - message = handshake.get("bytes") or handshake.get("text") - if not message: - await websocket.close(code=1008) - return - handshake_payload = json.loads(message[:-1]) - error = "" - protocol = handshake_payload.get("protocol", "json") - - client = None - try: - client = await hub_.add_client( - connection_id=user_id, - connection_token=id, - connection=websocket, - protocol=PROTOCOLS[protocol], - ) - except KeyError: - error = f"Protocol '{protocol}' is not supported." 
- except TimeoutError: - error = f"Connection {id} has waited too long." - except ValueError as e: - error = str(e) - payload = {"error": error} if error else {} - # finish handshake - await websocket.send_bytes(json.dumps(payload).encode() + SEP) - if error or not client: - await websocket.close(code=1008) - return - - connected_clients = hub_.get_before_clients(user_id, id) - for connected_client in connected_clients: - await hub_.kick_client(connected_client) - - await hub_.clean_state(client, False) - task = asyncio.create_task(hub_.on_connect(client)) - hub_.tasks.add(task) - task.add_done_callback(hub_.tasks.discard) - await hub_._listen_client(client) - try: - await websocket.close() - except Exception: - ... diff --git a/app/signalr/store.py b/app/signalr/store.py deleted file mode 100644 index 3d5591a..0000000 --- a/app/signalr/store.py +++ /dev/null @@ -1,37 +0,0 @@ -from __future__ import annotations - -import asyncio -import sys -from typing import Any - - -class ResultStore: - def __init__(self) -> None: - self._seq: int = 1 - self._futures: dict[str, asyncio.Future] = {} - - @property - def current_invocation_id(self) -> int: - return self._seq - - def get_invocation_id(self) -> str: - s = self._seq - self._seq = (self._seq + 1) % sys.maxsize - return str(s) - - def add_result(self, invocation_id: str, result: Any, error: str | None = None) -> None: - if isinstance(invocation_id, str) and invocation_id.isdecimal(): - if future := self._futures.get(invocation_id): - future.set_result((result, error)) - - async def fetch( - self, - invocation_id: str, - timeout: float | None, # noqa: ASYNC109 - ) -> tuple[Any, str | None]: - future = asyncio.get_event_loop().create_future() - self._futures[invocation_id] = future - try: - return await asyncio.wait_for(future, timeout) - finally: - del self._futures[invocation_id] diff --git a/app/signalr/utils.py b/app/signalr/utils.py deleted file mode 100644 index d7d23cf..0000000 --- a/app/signalr/utils.py +++ 
/dev/null @@ -1,42 +0,0 @@ -from __future__ import annotations - -from collections.abc import Callable -import inspect -import sys -from typing import Any, ForwardRef, cast - -# https://github.com/pydantic/pydantic/blob/main/pydantic/v1/typing.py#L61-L75 -if sys.version_info < (3, 12, 4): - - def evaluate_forwardref(type_: ForwardRef, globalns: Any, localns: Any) -> Any: - return cast(Any, type_)._evaluate(globalns, localns, recursive_guard=set()) -else: - - def evaluate_forwardref(type_: ForwardRef, globalns: Any, localns: Any) -> Any: - return cast(Any, type_)._evaluate(globalns, localns, type_params=(), recursive_guard=set()) - - -def get_annotation(param: inspect.Parameter, globalns: dict[str, Any]) -> Any: - annotation = param.annotation - if isinstance(annotation, str): - annotation = ForwardRef(annotation) - try: - annotation = evaluate_forwardref(annotation, globalns, globalns) - except Exception: - return inspect.Parameter.empty - return annotation - - -def get_signature(call: Callable[..., Any]) -> inspect.Signature: - signature = inspect.signature(call) - globalns = getattr(call, "__globals__", {}) - typed_params = [ - inspect.Parameter( - name=param.name, - kind=param.kind, - default=param.default, - annotation=get_annotation(param, globalns), - ) - for param in signature.parameters.values() - ] - return inspect.Signature(typed_params) diff --git a/app/tasks/daily_challenge.py b/app/tasks/daily_challenge.py index 3057fcb..8dd5e1c 100644 --- a/app/tasks/daily_challenge.py +++ b/app/tasks/daily_challenge.py @@ -14,7 +14,6 @@ from app.database.user import User from app.dependencies.database import get_redis, with_db from app.dependencies.scheduler import get_scheduler from app.log import logger -from app.models.metadata_hub import DailyChallengeInfo from app.models.mods import APIMod, get_available_mods from app.models.room import RoomCategory from app.service.room import create_playlist_room @@ -54,8 +53,6 @@ async def create_daily_challenge_room( 
@get_scheduler().scheduled_job("cron", hour=0, minute=0, second=0, id="daily_challenge") async def daily_challenge_job(): - from app.signalr.hub import MetadataHubs - now = utcnow() redis = get_redis() key = f"daily_challenge:{now.date()}" @@ -108,7 +105,6 @@ async def daily_challenge_job(): allowed_mods=allowed_mods_list, duration=int((next_day - now - timedelta(minutes=2)).total_seconds() / 60), ) - await MetadataHubs.broadcast_call("DailyChallengeUpdated", DailyChallengeInfo(room_id=room.id)) logger.success(f"Added today's daily challenge: {beatmap=}, {ruleset_id=}, {required_mods=}") return except (ValueError, json.JSONDecodeError) as e: diff --git a/main.py b/main.py index ee8dcb4..282fd18 100644 --- a/main.py +++ b/main.py @@ -166,9 +166,6 @@ app.include_router(auth_router) app.include_router(private_router) app.include_router(lio_router) -# from app.signalr import signalr_router -# app.include_router(signalr_router) - # 会话验证中间件 if settings.enable_session_verification: app.add_middleware(VerifySessionMiddleware) diff --git a/osu_lazer_api.code-workspace b/osu_lazer_api.code-workspace index 7796233..a224bcb 100644 --- a/osu_lazer_api.code-workspace +++ b/osu_lazer_api.code-workspace @@ -4,10 +4,7 @@ "path": "." }, { - "path": "packages/msgpack_lazer_api" - }, - { - "path": "spectator-server" - } + "path": "spectator-server" + } ] } diff --git a/packages/msgpack_lazer_api/Cargo.lock b/packages/msgpack_lazer_api/Cargo.lock deleted file mode 100644 index 7672a52..0000000 --- a/packages/msgpack_lazer_api/Cargo.lock +++ /dev/null @@ -1,424 +0,0 @@ -# This file is automatically @generated by Cargo. -# It is not intended for manual editing. 
-version = 4 - -[[package]] -name = "android-tzdata" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e999941b234f3131b00bc13c22d06e8c5ff726d1b6318ac7eb276997bbb4fef0" - -[[package]] -name = "android_system_properties" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "819e7219dbd41043ac279b19830f2efc897156490d7fd6ea916720117ee66311" -dependencies = [ - "libc", -] - -[[package]] -name = "autocfg" -version = "1.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c08606f8c3cbf4ce6ec8e28fb0014a2c086708fe954eaa885384a6165172e7e8" - -[[package]] -name = "bumpalo" -version = "3.19.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "46c5e41b57b8bba42a04676d81cb89e9ee8e859a1a66f80a5a72e1cb76b34d43" - -[[package]] -name = "byteorder" -version = "1.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" - -[[package]] -name = "cc" -version = "1.2.30" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "deec109607ca693028562ed836a5f1c4b8bd77755c4e132fc5ce11b0b6211ae7" -dependencies = [ - "shlex", -] - -[[package]] -name = "cfg-if" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9555578bc9e57714c812a1f84e4fc5b4d21fcb063490c624de019f7464c91268" - -[[package]] -name = "chrono" -version = "0.4.41" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c469d952047f47f91b68d1cba3f10d63c11d73e4636f24f08daf0278abf01c4d" -dependencies = [ - "android-tzdata", - "iana-time-zone", - "js-sys", - "num-traits", - "wasm-bindgen", - "windows-link", -] - -[[package]] -name = "core-foundation-sys" -version = "0.8.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b" - -[[package]] -name = "heck" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" - -[[package]] -name = "iana-time-zone" -version = "0.1.63" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b0c919e5debc312ad217002b8048a17b7d83f80703865bbfcfebb0458b0b27d8" -dependencies = [ - "android_system_properties", - "core-foundation-sys", - "iana-time-zone-haiku", - "js-sys", - "log", - "wasm-bindgen", - "windows-core", -] - -[[package]] -name = "iana-time-zone-haiku" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f31827a206f56af32e590ba56d5d2d085f558508192593743f16b2306495269f" -dependencies = [ - "cc", -] - -[[package]] -name = "indoc" -version = "2.0.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f4c7245a08504955605670dbf141fceab975f15ca21570696aebe9d2e71576bd" - -[[package]] -name = "js-sys" -version = "0.3.77" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1cfaf33c695fc6e08064efbc1f72ec937429614f25eef83af942d0e227c3a28f" -dependencies = [ - "once_cell", - "wasm-bindgen", -] - -[[package]] -name = "libc" -version = "0.2.174" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1171693293099992e19cddea4e8b849964e9846f4acee11b3948bcc337be8776" - -[[package]] -name = "log" -version = "0.4.27" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "13dc2df351e3202783a1fe0d44375f7295ffb4049267b0f3018346dc122a1d94" - -[[package]] -name = "memoffset" -version = "0.9.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "488016bfae457b036d996092f6cb448677611ce4449e970ceaf42695203f218a" -dependencies = [ - "autocfg", -] - -[[package]] -name = "msgpack-lazer-api" -version = "0.1.0" 
-dependencies = [ - "chrono", - "pyo3", - "rmp", -] - -[[package]] -name = "num-traits" -version = "0.2.19" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841" -dependencies = [ - "autocfg", -] - -[[package]] -name = "once_cell" -version = "1.21.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d" - -[[package]] -name = "paste" -version = "1.0.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a" - -[[package]] -name = "portable-atomic" -version = "1.11.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f84267b20a16ea918e43c6a88433c2d54fa145c92a811b5b047ccbe153674483" - -[[package]] -name = "proc-macro2" -version = "1.0.95" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "02b3e5e68a3a1a02aad3ec490a98007cbc13c37cbe84a3cd7b8e406d76e7f778" -dependencies = [ - "unicode-ident", -] - -[[package]] -name = "pyo3" -version = "0.25.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8970a78afe0628a3e3430376fc5fd76b6b45c4d43360ffd6cdd40bdde72b682a" -dependencies = [ - "chrono", - "indoc", - "libc", - "memoffset", - "once_cell", - "portable-atomic", - "pyo3-build-config", - "pyo3-ffi", - "pyo3-macros", - "unindent", -] - -[[package]] -name = "pyo3-build-config" -version = "0.25.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "458eb0c55e7ece017adeba38f2248ff3ac615e53660d7c71a238d7d2a01c7598" -dependencies = [ - "once_cell", - "target-lexicon", -] - -[[package]] -name = "pyo3-ffi" -version = "0.25.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7114fe5457c61b276ab77c5055f206295b812608083644a5c5b2640c3102565c" -dependencies = [ - "libc", - 
"pyo3-build-config", -] - -[[package]] -name = "pyo3-macros" -version = "0.25.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a8725c0a622b374d6cb051d11a0983786448f7785336139c3c94f5aa6bef7e50" -dependencies = [ - "proc-macro2", - "pyo3-macros-backend", - "quote", - "syn", -] - -[[package]] -name = "pyo3-macros-backend" -version = "0.25.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4109984c22491085343c05b0dbc54ddc405c3cf7b4374fc533f5c3313a572ccc" -dependencies = [ - "heck", - "proc-macro2", - "pyo3-build-config", - "quote", - "syn", -] - -[[package]] -name = "quote" -version = "1.0.40" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1885c039570dc00dcb4ff087a89e185fd56bae234ddc7f056a945bf36467248d" -dependencies = [ - "proc-macro2", -] - -[[package]] -name = "rmp" -version = "0.8.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "228ed7c16fa39782c3b3468e974aec2795e9089153cd08ee2e9aefb3613334c4" -dependencies = [ - "byteorder", - "num-traits", - "paste", -] - -[[package]] -name = "rustversion" -version = "1.0.21" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8a0d197bd2c9dc6e53b84da9556a69ba4cdfab8619eb41a8bd1cc2027a0f6b1d" - -[[package]] -name = "shlex" -version = "1.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" - -[[package]] -name = "syn" -version = "2.0.104" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "17b6f705963418cdb9927482fa304bc562ece2fdd4f616084c50b7023b435a40" -dependencies = [ - "proc-macro2", - "quote", - "unicode-ident", -] - -[[package]] -name = "target-lexicon" -version = "0.13.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e502f78cdbb8ba4718f566c418c52bc729126ffd16baee5baa718cf25dd5a69a" - -[[package]] -name = 
"unicode-ident" -version = "1.0.18" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a5f39404a5da50712a4c1eecf25e90dd62b613502b7e925fd4e4d19b5c96512" - -[[package]] -name = "unindent" -version = "0.2.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7264e107f553ccae879d21fbea1d6724ac785e8c3bfc762137959b5802826ef3" - -[[package]] -name = "wasm-bindgen" -version = "0.2.100" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1edc8929d7499fc4e8f0be2262a241556cfc54a0bea223790e71446f2aab1ef5" -dependencies = [ - "cfg-if", - "once_cell", - "rustversion", - "wasm-bindgen-macro", -] - -[[package]] -name = "wasm-bindgen-backend" -version = "0.2.100" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2f0a0651a5c2bc21487bde11ee802ccaf4c51935d0d3d42a6101f98161700bc6" -dependencies = [ - "bumpalo", - "log", - "proc-macro2", - "quote", - "syn", - "wasm-bindgen-shared", -] - -[[package]] -name = "wasm-bindgen-macro" -version = "0.2.100" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7fe63fc6d09ed3792bd0897b314f53de8e16568c2b3f7982f468c0bf9bd0b407" -dependencies = [ - "quote", - "wasm-bindgen-macro-support", -] - -[[package]] -name = "wasm-bindgen-macro-support" -version = "0.2.100" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ae87ea40c9f689fc23f209965b6fb8a99ad69aeeb0231408be24920604395de" -dependencies = [ - "proc-macro2", - "quote", - "syn", - "wasm-bindgen-backend", - "wasm-bindgen-shared", -] - -[[package]] -name = "wasm-bindgen-shared" -version = "0.2.100" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1a05d73b933a847d6cccdda8f838a22ff101ad9bf93e33684f39c1f5f0eece3d" -dependencies = [ - "unicode-ident", -] - -[[package]] -name = "windows-core" -version = "0.61.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"c0fdd3ddb90610c7638aa2b3a3ab2904fb9e5cdbecc643ddb3647212781c4ae3" -dependencies = [ - "windows-implement", - "windows-interface", - "windows-link", - "windows-result", - "windows-strings", -] - -[[package]] -name = "windows-implement" -version = "0.60.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a47fddd13af08290e67f4acabf4b459f647552718f683a7b415d290ac744a836" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "windows-interface" -version = "0.59.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bd9211b69f8dcdfa817bfd14bf1c97c9188afa36f4750130fcdf3f400eca9fa8" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "windows-link" -version = "0.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5e6ad25900d524eaabdbbb96d20b4311e1e7ae1699af4fb28c17ae66c80d798a" - -[[package]] -name = "windows-result" -version = "0.3.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "56f42bd332cc6c8eac5af113fc0c1fd6a8fd2aa08a0119358686e5160d0586c6" -dependencies = [ - "windows-link", -] - -[[package]] -name = "windows-strings" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "56e6c93f3a0c3b36176cb1327a4958a0353d5d166c2a35cb268ace15e91d3b57" -dependencies = [ - "windows-link", -] diff --git a/packages/msgpack_lazer_api/Cargo.toml b/packages/msgpack_lazer_api/Cargo.toml deleted file mode 100644 index b1722aa..0000000 --- a/packages/msgpack_lazer_api/Cargo.toml +++ /dev/null @@ -1,14 +0,0 @@ -[package] -name = "msgpack-lazer-api" -version = "0.1.0" -edition = "2021" - -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html -[lib] -name = "msgpack_lazer_api" -crate-type = ["cdylib"] - -[dependencies] -chrono = "0.4.41" -pyo3 = { version = "0.25.0", features = ["extension-module", "chrono"] } -rmp = "0.8.14" diff 
--git a/packages/msgpack_lazer_api/msgpack_lazer_api.pyi b/packages/msgpack_lazer_api/msgpack_lazer_api.pyi deleted file mode 100644 index 433c53b..0000000 --- a/packages/msgpack_lazer_api/msgpack_lazer_api.pyi +++ /dev/null @@ -1,4 +0,0 @@ -from typing import Any - -def encode(obj: Any) -> bytes: ... -def decode(data: bytes) -> Any: ... diff --git a/packages/msgpack_lazer_api/pyproject.toml b/packages/msgpack_lazer_api/pyproject.toml deleted file mode 100644 index e3b5305..0000000 --- a/packages/msgpack_lazer_api/pyproject.toml +++ /dev/null @@ -1,16 +0,0 @@ -[build-system] -requires = ["maturin>=1.9,<2.0"] -build-backend = "maturin" - -[project] -name = "msgpack-lazer-api" -requires-python = ">=3.12" -classifiers = [ - "Programming Language :: Rust", - "Programming Language :: Python :: Implementation :: CPython", - "Programming Language :: Python :: Implementation :: PyPy", -] -dynamic = ["version"] - -[tool.maturin] -features = ["pyo3/extension-module"] diff --git a/packages/msgpack_lazer_api/src/decode.rs b/packages/msgpack_lazer_api/src/decode.rs deleted file mode 100644 index 1e36c42..0000000 --- a/packages/msgpack_lazer_api/src/decode.rs +++ /dev/null @@ -1,312 +0,0 @@ -use chrono::{TimeZone, Utc}; -use pyo3::types::PyDict; -use pyo3::{prelude::*, IntoPyObjectExt}; -use std::io::Read; - -pub fn read_object( - py: Python<'_>, - cursor: &mut std::io::Cursor<&[u8]>, - api_mod: bool, -) -> PyResult { - match rmp::decode::read_marker(cursor) { - Ok(marker) => match marker { - rmp::Marker::Null => Ok(py.None()), - rmp::Marker::True => Ok(true.into_py_any(py)?), - rmp::Marker::False => Ok(false.into_py_any(py)?), - rmp::Marker::FixPos(val) => Ok(val.into_pyobject(py)?.into_any().unbind()), - rmp::Marker::FixNeg(val) => Ok(val.into_pyobject(py)?.into_any().unbind()), - rmp::Marker::U8 => { - let mut buf = [0u8; 1]; - cursor.read_exact(&mut buf).map_err(to_py_err)?; - Ok(buf[0].into_pyobject(py)?.into_any().unbind()) - } - rmp::Marker::U16 => { - let mut buf = [0u8; 
2]; - cursor.read_exact(&mut buf).map_err(to_py_err)?; - let val = u16::from_be_bytes(buf); - Ok(val.into_pyobject(py)?.into_any().unbind()) - } - rmp::Marker::U32 => { - let mut buf = [0u8; 4]; - cursor.read_exact(&mut buf).map_err(to_py_err)?; - let val = u32::from_be_bytes(buf); - Ok(val.into_pyobject(py)?.into_any().unbind()) - } - rmp::Marker::U64 => { - let mut buf = [0u8; 8]; - cursor.read_exact(&mut buf).map_err(to_py_err)?; - let val = u64::from_be_bytes(buf); - Ok(val.into_pyobject(py)?.into_any().unbind()) - } - rmp::Marker::I8 => { - let mut buf = [0u8; 1]; - cursor.read_exact(&mut buf).map_err(to_py_err)?; - let val = i8::from_be_bytes(buf); - Ok(val.into_pyobject(py)?.into_any().unbind()) - } - rmp::Marker::I16 => { - let mut buf = [0u8; 2]; - cursor.read_exact(&mut buf).map_err(to_py_err)?; - let val = i16::from_be_bytes(buf); - Ok(val.into_pyobject(py)?.into_any().unbind()) - } - rmp::Marker::I32 => { - let mut buf = [0u8; 4]; - cursor.read_exact(&mut buf).map_err(to_py_err)?; - let val = i32::from_be_bytes(buf); - Ok(val.into_pyobject(py)?.into_any().unbind()) - } - rmp::Marker::I64 => { - let mut buf = [0u8; 8]; - cursor.read_exact(&mut buf).map_err(to_py_err)?; - let val = i64::from_be_bytes(buf); - Ok(val.into_pyobject(py)?.into_any().unbind()) - } - rmp::Marker::Bin8 => { - let mut buf = [0u8; 1]; - cursor.read_exact(&mut buf).map_err(to_py_err)?; - let len = buf[0] as u32; - let mut data = vec![0u8; len as usize]; - cursor.read_exact(&mut data).map_err(to_py_err)?; - Ok(data.into_pyobject(py)?.into_any().unbind()) - } - rmp::Marker::Bin16 => { - let mut buf = [0u8; 2]; - cursor.read_exact(&mut buf).map_err(to_py_err)?; - let len = u16::from_be_bytes(buf) as u32; - let mut data = vec![0u8; len as usize]; - cursor.read_exact(&mut data).map_err(to_py_err)?; - Ok(data.into_pyobject(py)?.into_any().unbind()) - } - rmp::Marker::Bin32 => { - let mut buf = [0u8; 4]; - cursor.read_exact(&mut buf).map_err(to_py_err)?; - let len = 
u32::from_be_bytes(buf); - let mut data = vec![0u8; len as usize]; - cursor.read_exact(&mut data).map_err(to_py_err)?; - Ok(data.into_pyobject(py)?.into_any().unbind()) - } - rmp::Marker::FixStr(len) => read_string(py, cursor, len as u32), - rmp::Marker::Str8 => { - let mut buf = [0u8; 1]; - cursor.read_exact(&mut buf).map_err(to_py_err)?; - let len = buf[0] as u32; - read_string(py, cursor, len) - } - rmp::Marker::Str16 => { - let mut buf = [0u8; 2]; - cursor.read_exact(&mut buf).map_err(to_py_err)?; - let len = u16::from_be_bytes(buf) as u32; - read_string(py, cursor, len) - } - rmp::Marker::Str32 => { - let mut buf = [0u8; 4]; - cursor.read_exact(&mut buf).map_err(to_py_err)?; - let len = u32::from_be_bytes(buf); - read_string(py, cursor, len) - } - rmp::Marker::FixArray(len) => read_array(py, cursor, len as u32, api_mod), - rmp::Marker::Array16 => { - let mut buf = [0u8; 2]; - cursor.read_exact(&mut buf).map_err(to_py_err)?; - let len = u16::from_be_bytes(buf) as u32; - read_array(py, cursor, len, api_mod) - } - rmp::Marker::Array32 => { - let mut buf = [0u8; 4]; - cursor.read_exact(&mut buf).map_err(to_py_err)?; - let len = u32::from_be_bytes(buf); - read_array(py, cursor, len, api_mod) - } - rmp::Marker::FixMap(len) => read_map(py, cursor, len as u32), - rmp::Marker::Map16 => { - let mut buf = [0u8; 2]; - cursor.read_exact(&mut buf).map_err(to_py_err)?; - let len = u16::from_be_bytes(buf) as u32; - read_map(py, cursor, len) - } - rmp::Marker::Map32 => { - let mut buf = [0u8; 4]; - cursor.read_exact(&mut buf).map_err(to_py_err)?; - let len = u32::from_be_bytes(buf); - read_map(py, cursor, len) - } - rmp::Marker::F32 => { - let mut buf = [0u8; 4]; - cursor.read_exact(&mut buf).map_err(to_py_err)?; - let val = f32::from_be_bytes(buf); - Ok(val.into_pyobject(py)?.into_any().unbind()) - } - rmp::Marker::F64 => { - let mut buf = [0u8; 8]; - cursor.read_exact(&mut buf).map_err(to_py_err)?; - let val = f64::from_be_bytes(buf); - 
Ok(val.into_pyobject(py)?.into_any().unbind()) - } - rmp::Marker::FixExt1 => read_ext(py, cursor, 1), - rmp::Marker::FixExt2 => read_ext(py, cursor, 2), - rmp::Marker::FixExt4 => read_ext(py, cursor, 4), - rmp::Marker::FixExt8 => read_ext(py, cursor, 8), - rmp::Marker::FixExt16 => read_ext(py, cursor, 16), - rmp::Marker::Ext8 => { - let mut buf = [0u8; 1]; - cursor.read_exact(&mut buf).map_err(to_py_err)?; - let len = buf[0] as u32; - read_ext(py, cursor, len) - } - rmp::Marker::Ext16 => { - let mut buf = [0u8; 2]; - cursor.read_exact(&mut buf).map_err(to_py_err)?; - let len = u16::from_be_bytes(buf) as u32; - read_ext(py, cursor, len) - } - rmp::Marker::Ext32 => { - let mut buf = [0u8; 4]; - cursor.read_exact(&mut buf).map_err(to_py_err)?; - let len = u32::from_be_bytes(buf); - read_ext(py, cursor, len) - } - _ => Err(PyErr::new::( - "Unsupported MessagePack marker", - )), - }, - Err(e) => Err(PyErr::new::(format!( - "Failed to read marker: {:?}", - e - ))), - } -} - -fn read_string( - py: Python<'_>, - cursor: &mut std::io::Cursor<&[u8]>, - len: u32, -) -> PyResult { - let mut buf = vec![0u8; len as usize]; - cursor.read_exact(&mut buf).map_err(to_py_err)?; - let s = String::from_utf8(buf) - .map_err(|_| PyErr::new::("Invalid UTF-8"))?; - Ok(s.into_pyobject(py)?.into_any().unbind()) -} - -fn read_array( - py: Python, - cursor: &mut std::io::Cursor<&[u8]>, - len: u32, - api_mod: bool, -) -> PyResult { - let mut items = Vec::new(); - let array_len = if api_mod { len * 2 } else { len }; - let dict = PyDict::new(py); - let mut i = 0; - if len == 2 && !api_mod { - // 姑且这样判断:列表长度为2,第一个元素为长度为2的字符串,api_mod 模式未启用(不存在嵌套 APIMod) - let obj1 = read_object(py, cursor, false)?; - if obj1.extract::(py).map_or(false, |k| k.len() == 2) { - let obj2 = read_object(py, cursor, true)?; - - let api_mod_dict = PyDict::new(py); - api_mod_dict.set_item("acronym", obj1)?; - api_mod_dict.set_item("settings", obj2)?; - - return Ok(api_mod_dict.into_pyobject(py)?.into_any().unbind()); - } 
else { - items.push(obj1); - i += 1; - } - } - while i < array_len { - if api_mod && i % 2 == 0 { - let key = read_object(py, cursor, false)?; - let value = read_object(py, cursor, false)?; - dict.set_item(key, value)?; - i += 2; - } else { - let item = read_object(py, cursor, api_mod)?; - items.push(item); - i += 1; - } - } - - if api_mod { - return Ok(dict.into_pyobject(py)?.into_any().unbind()); - } else { - Ok(items.into_pyobject(py)?.into_any().unbind()) - } -} - -fn read_map(py: Python, cursor: &mut std::io::Cursor<&[u8]>, len: u32) -> PyResult { - let mut pairs = Vec::new(); - for _ in 0..len { - let key = read_object(py, cursor, false)?; - let value = read_object(py, cursor, false)?; - pairs.push((key, value)); - } - - let dict = PyDict::new(py); - for (key, value) in pairs { - dict.set_item(key, value)?; - } - return Ok(dict.into_pyobject(py)?.into_any().unbind()); -} - -fn to_py_err(err: std::io::Error) -> PyErr { - PyErr::new::(format!("IO error: {}", err)) -} - -fn read_ext(py: Python, cursor: &mut std::io::Cursor<&[u8]>, len: u32) -> PyResult { - // Read the extension type - let mut type_buf = [0u8; 1]; - cursor.read_exact(&mut type_buf).map_err(to_py_err)?; - let ext_type = type_buf[0] as i8; - - // Read the extension data - let mut data = vec![0u8; len as usize]; - cursor.read_exact(&mut data).map_err(to_py_err)?; - - // Handle timestamp extension (type = -1) - if ext_type == -1 { - read_timestamp(py, &data) - } else { - // For other extension types, return as bytes or handle as needed - Err(PyErr::new::(format!( - "Unsupported extension type: {}", - ext_type - ))) - } -} - -fn read_timestamp(py: Python, data: &[u8]) -> PyResult { - let (secs, nsec) = match data.len() { - 4 => { - // timestamp32: 4-byte big endian seconds - let secs = u32::from_be_bytes([data[0], data[1], data[2], data[3]]) as u64; - (secs, 0u32) - } - 8 => { - // timestamp64: 8-byte packed => upper 34 bits nsec, lower 30 bits secs - let packed = u64::from_be_bytes([ - data[0], 
data[1], data[2], data[3], data[4], data[5], data[6], data[7], - ]); - let nsec = (packed >> 34) as u32; - let secs = packed & 0x3FFFFFFFF; // lower 34 bits - (secs, nsec) - } - 12 => { - // timestamp96: 12 bytes = 4-byte nsec + 8-byte seconds signed - let nsec = u32::from_be_bytes([data[0], data[1], data[2], data[3]]); - let secs = i64::from_be_bytes([ - data[4], data[5], data[6], data[7], data[8], data[9], data[10], data[11], - ]) as u64; - (secs, nsec) - } - _ => { - return Err(PyErr::new::(format!( - "Invalid timestamp data length: {}", - data.len() - ))); - } - }; - let time = Utc.timestamp_opt(secs as i64, nsec).single(); - Ok(time.into_pyobject(py)?.into_any().unbind()) -} diff --git a/packages/msgpack_lazer_api/src/encode.rs b/packages/msgpack_lazer_api/src/encode.rs deleted file mode 100644 index 3ff4864..0000000 --- a/packages/msgpack_lazer_api/src/encode.rs +++ /dev/null @@ -1,156 +0,0 @@ -use chrono::{DateTime, Utc}; -use pyo3::prelude::{PyAnyMethods, PyDictMethods, PyListMethods, PyResult, PyStringMethods}; -use pyo3::types::{PyBool, PyBytes, PyDateTime, PyDict, PyFloat, PyInt, PyList, PyNone, PyString}; -use pyo3::{Bound, PyAny}; -use std::io::Write; - -fn write_list(buf: &mut Vec, obj: &Bound<'_, PyList>) { - rmp::encode::write_array_len(buf, obj.len() as u32).unwrap(); - for item in obj.iter() { - write_object(buf, &item); - } -} - -fn write_string(buf: &mut Vec, obj: &Bound<'_, PyString>) { - let s = obj.to_string_lossy(); - rmp::encode::write_str(buf, &s).unwrap(); -} - -fn write_integer(buf: &mut Vec, obj: &Bound<'_, PyInt>) { - if let Ok(val) = obj.extract::() { - rmp::encode::write_i32(buf, val).unwrap(); - } else if let Ok(val) = obj.extract::() { - rmp::encode::write_i64(buf, val).unwrap(); - } else { - panic!("Unsupported integer type"); - } -} - -fn write_float(buf: &mut Vec, obj: &Bound<'_, PyAny>) { - if let Ok(val) = obj.extract::() { - rmp::encode::write_f32(buf, val).unwrap(); - } else if let Ok(val) = obj.extract::() { - 
rmp::encode::write_f64(buf, val).unwrap(); - } else { - panic!("Unsupported float type"); - } -} - -fn write_bool(buf: &mut Vec, obj: &Bound<'_, PyBool>) { - if let Ok(b) = obj.extract::() { - rmp::encode::write_bool(buf, b).unwrap(); - } else { - panic!("Unsupported boolean type"); - } -} - -fn write_bin(buf: &mut Vec, obj: &Bound<'_, PyBytes>) { - if let Ok(bytes) = obj.extract::>() { - rmp::encode::write_bin(buf, &bytes).unwrap(); - } else { - panic!("Unsupported binary type"); - } -} - -fn write_hashmap(buf: &mut Vec, obj: &Bound<'_, PyDict>) { - rmp::encode::write_map_len(buf, obj.len() as u32).unwrap(); - for (key, value) in obj.iter() { - write_object(buf, &key); - write_object(buf, &value); - } -} - -fn write_nil(buf: &mut Vec) { - rmp::encode::write_nil(buf).unwrap(); -} - -fn is_api_mod(dict: &Bound<'_, PyDict>) -> bool { - if let Ok(Some(acronym)) = dict.get_item("acronym") { - if let Ok(acronym_str) = acronym.extract::() { - return acronym_str.len() == 2; - } - } - false -} - -// https://github.com/ppy/osu/blob/3dced3/osu.Game/Online/API/ModSettingsDictionaryFormatter.cs -fn write_api_mod(buf: &mut Vec, api_mod: &Bound<'_, PyDict>) -> PyResult<()> { - let acronym = api_mod - .get_item("acronym")? - .ok_or_else(|| pyo3::exceptions::PyKeyError::new_err("APIMod missing 'acronym' field"))?; - let acronym_str = acronym.extract::()?; - - let settings = api_mod - .get_item("settings")? 
- .unwrap_or_else(|| PyDict::new(acronym.py()).into_any()); - let settings_dict = settings.downcast::()?; - - rmp::encode::write_array_len(buf, 2).unwrap(); - rmp::encode::write_str(buf, &acronym_str).unwrap(); - rmp::encode::write_array_len(buf, settings_dict.len() as u32).unwrap(); - - for (k, v) in settings_dict.iter() { - let key_str = k.extract::()?; - rmp::encode::write_str(buf, &key_str).unwrap(); - write_object(buf, &v); - } - - Ok(()) -} - -fn write_datetime(buf: &mut Vec, obj: &Bound<'_, PyDateTime>) { - if let Ok(dt) = obj.extract::>() { - let secs = dt.timestamp(); - let nsec = dt.timestamp_subsec_nanos(); - write_timestamp(buf, secs, nsec); - } else { - panic!("Unsupported datetime type. Check your input, timezone is needed."); - } -} - -fn write_timestamp(wr: &mut Vec, secs: i64, nsec: u32) { - let buf: Vec = if nsec == 0 && secs >= 0 && secs <= u32::MAX as i64 { - // timestamp32: 4-byte big endian seconds - secs.to_be_bytes()[4..].to_vec() - } else if secs >= -(1 << 34) && secs < (1 << 34) { - // timestamp64: 8-byte packed => upper 34 bits nsec, lower 34 bits secs - let packed = ((nsec as u64) << 34) | (secs as u64 & ((1 << 34) - 1)); - packed.to_be_bytes().to_vec() - } else { - // timestamp96: 12 bytes = 4-byte nsec + 8-byte seconds signed - let mut v = Vec::with_capacity(12); - v.extend_from_slice(&nsec.to_be_bytes()); - v.extend_from_slice(&secs.to_be_bytes()); - v - }; - rmp::encode::write_ext_meta(wr, buf.len() as u32, -1).unwrap(); - wr.write_all(&buf).unwrap(); -} - -pub fn write_object(buf: &mut Vec, obj: &Bound<'_, PyAny>) { - if let Ok(list) = obj.downcast::() { - write_list(buf, list); - } else if let Ok(string) = obj.downcast::() { - write_string(buf, string); - } else if let Ok(boolean) = obj.downcast::() { - write_bool(buf, boolean); - } else if let Ok(float) = obj.downcast::() { - write_float(buf, float); - } else if let Ok(integer) = obj.downcast::() { - write_integer(buf, integer); - } else if let Ok(bytes) = obj.downcast::() { - 
write_bin(buf, bytes); - } else if let Ok(dict) = obj.downcast::() { - if is_api_mod(dict) { - write_api_mod(buf, dict).unwrap_or_else(|_| write_hashmap(buf, dict)); - } else { - write_hashmap(buf, dict); - } - } else if let Ok(_none) = obj.downcast::() { - write_nil(buf); - } else if let Ok(datetime) = obj.downcast::() { - write_datetime(buf, datetime); - } else { - panic!("Unsupported type"); - } -} diff --git a/packages/msgpack_lazer_api/src/lib.rs b/packages/msgpack_lazer_api/src/lib.rs deleted file mode 100644 index 220e645..0000000 --- a/packages/msgpack_lazer_api/src/lib.rs +++ /dev/null @@ -1,26 +0,0 @@ -mod decode; -mod encode; - -use pyo3::prelude::*; - -#[pyfunction] -#[pyo3(name = "encode")] -fn encode_py(obj: &Bound<'_, PyAny>) -> PyResult> { - let mut buf = Vec::new(); - encode::write_object(&mut buf, obj); - Ok(buf) -} - -#[pyfunction] -#[pyo3(name = "decode")] -fn decode_py(py: Python, data: &[u8]) -> PyResult { - let mut cursor = std::io::Cursor::new(data); - decode::read_object(py, &mut cursor, false) -} - -#[pymodule] -fn msgpack_lazer_api(m: &Bound<'_, PyModule>) -> PyResult<()> { - m.add_function(wrap_pyfunction!(encode_py, m)?)?; - m.add_function(wrap_pyfunction!(decode_py, m)?)?; - Ok(()) -} diff --git a/pyproject.toml b/pyproject.toml index 3527b16..9862e00 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,7 +1,7 @@ [project] name = "g0v0-server" version = "0.1.0" -description = "3rd-party osu!lazer server which supports the latest osu!lazer." +description = "3rd-party osu!lazer private server which supports the latest osu!lazer." 
readme = "README.md" requires-python = ">=3.12" dependencies = [ @@ -18,7 +18,6 @@ dependencies = [ "httpx>=0.28.1", "loguru>=0.7.3", "maxminddb>=2.8.2", - "msgpack-lazer-api", "newrelic>=10.1.0", "osupyparser>=1.0.7", "passlib[bcrypt]>=1.7.4", @@ -103,20 +102,14 @@ exclude = ["migrations/", ".venv/", "venv/"] [tool.uv.workspace] members = [ - "packages/msgpack_lazer_api", "packages/osupyparser", ] [tool.uv.sources] -msgpack-lazer-api = { workspace = true } osupyparser = { git = "https://github.com/MingxuanGame/osupyparser.git" } -[tool.uv] -cache-keys = [{file = "pyproject.toml"}, {file = "packages/msgpack_lazer_api/Cargo.toml"}, {file = "**/*.rs"}] - [dependency-groups] dev = [ - "maturin>=1.9.2", "pre-commit>=4.2.0", "pyright>=1.1.404", "ruff>=0.12.4", diff --git a/uv.lock b/uv.lock index f48d358..66d910d 100644 --- a/uv.lock +++ b/uv.lock @@ -2,12 +2,6 @@ version = 1 revision = 3 requires-python = ">=3.12" -[manifest] -members = [ - "g0v0-server", - "msgpack-lazer-api", -] - [[package]] name = "aioboto3" version = "15.1.0" @@ -172,16 +166,16 @@ wheels = [ [[package]] name = "anyio" -version = "4.10.0" +version = "4.11.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "idna" }, { name = "sniffio" }, { name = "typing-extensions", marker = "python_full_version < '3.13'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/f1/b4/636b3b65173d3ce9a38ef5f0522789614e590dab6a8d505340a4efe4c567/anyio-4.10.0.tar.gz", hash = "sha256:3f3fae35c96039744587aa5b8371e7e8e603c0702999535961dd336026973ba6", size = 213252, upload-time = "2025-08-04T08:54:26.451Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c6/78/7d432127c41b50bccba979505f272c16cbcadcc33645d5fa3a738110ae75/anyio-4.11.0.tar.gz", hash = "sha256:82a8d0b81e318cc5ce71a5f1f8b5c4e63619620b63141ef8c995fa0db95a57c4", size = 219094, upload-time = "2025-09-23T09:19:12.58Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/6f/12/e5e0282d673bb9746bacfb6e2dba8719989d3660cdb2ea79aee9a9651afb/anyio-4.10.0-py3-none-any.whl", hash = "sha256:60e474ac86736bbfd6f210f7a61218939c318f43f9972497381f1c5e930ed3d1", size = 107213, upload-time = "2025-08-04T08:54:24.882Z" }, + { url = "https://files.pythonhosted.org/packages/15/b3/9b1a8074496371342ec1e796a96f99c82c945a339cd81a8e73de28b4cf9e/anyio-4.11.0-py3-none-any.whl", hash = "sha256:0287e96f4d26d4149305414d4e3bc32f0dcd0862365a4bddea19d7a1ec38c4fc", size = 109097, upload-time = "2025-09-23T09:19:10.601Z" }, ] [[package]] @@ -207,52 +201,68 @@ wheels = [ [[package]] name = "bcrypt" -version = "4.3.0" +version = "5.0.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/bb/5d/6d7433e0f3cd46ce0b43cd65e1db465ea024dbb8216fb2404e919c2ad77b/bcrypt-4.3.0.tar.gz", hash = "sha256:3a3fd2204178b6d2adcf09cb4f6426ffef54762577a7c9b54c159008cb288c18", size = 25697, upload-time = "2025-02-28T01:24:09.174Z" } +sdist = { url = "https://files.pythonhosted.org/packages/d4/36/3329e2518d70ad8e2e5817d5a4cac6bba05a47767ec416c7d020a965f408/bcrypt-5.0.0.tar.gz", hash = "sha256:f748f7c2d6fd375cc93d3fba7ef4a9e3a092421b8dbf34d8d4dc06be9492dfdd", size = 25386, upload-time = "2025-09-25T19:50:47.829Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/bf/2c/3d44e853d1fe969d229bd58d39ae6902b3d924af0e2b5a60d17d4b809ded/bcrypt-4.3.0-cp313-cp313t-macosx_10_12_universal2.whl", hash = "sha256:f01e060f14b6b57bbb72fc5b4a83ac21c443c9a2ee708e04a10e9192f90a6281", size = 483719, upload-time = "2025-02-28T01:22:34.539Z" }, - { url = "https://files.pythonhosted.org/packages/a1/e2/58ff6e2a22eca2e2cff5370ae56dba29d70b1ea6fc08ee9115c3ae367795/bcrypt-4.3.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5eeac541cefd0bb887a371ef73c62c3cd78535e4887b310626036a7c0a817bb", size = 272001, upload-time = "2025-02-28T01:22:38.078Z" }, - { url = 
"https://files.pythonhosted.org/packages/37/1f/c55ed8dbe994b1d088309e366749633c9eb90d139af3c0a50c102ba68a1a/bcrypt-4.3.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:59e1aa0e2cd871b08ca146ed08445038f42ff75968c7ae50d2fdd7860ade2180", size = 277451, upload-time = "2025-02-28T01:22:40.787Z" }, - { url = "https://files.pythonhosted.org/packages/d7/1c/794feb2ecf22fe73dcfb697ea7057f632061faceb7dcf0f155f3443b4d79/bcrypt-4.3.0-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:0042b2e342e9ae3d2ed22727c1262f76cc4f345683b5c1715f0250cf4277294f", size = 272792, upload-time = "2025-02-28T01:22:43.144Z" }, - { url = "https://files.pythonhosted.org/packages/13/b7/0b289506a3f3598c2ae2bdfa0ea66969812ed200264e3f61df77753eee6d/bcrypt-4.3.0-cp313-cp313t-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74a8d21a09f5e025a9a23e7c0fd2c7fe8e7503e4d356c0a2c1486ba010619f09", size = 289752, upload-time = "2025-02-28T01:22:45.56Z" }, - { url = "https://files.pythonhosted.org/packages/dc/24/d0fb023788afe9e83cc118895a9f6c57e1044e7e1672f045e46733421fe6/bcrypt-4.3.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:0142b2cb84a009f8452c8c5a33ace5e3dfec4159e7735f5afe9a4d50a8ea722d", size = 277762, upload-time = "2025-02-28T01:22:47.023Z" }, - { url = "https://files.pythonhosted.org/packages/e4/38/cde58089492e55ac4ef6c49fea7027600c84fd23f7520c62118c03b4625e/bcrypt-4.3.0-cp313-cp313t-manylinux_2_34_aarch64.whl", hash = "sha256:12fa6ce40cde3f0b899729dbd7d5e8811cb892d31b6f7d0334a1f37748b789fd", size = 272384, upload-time = "2025-02-28T01:22:49.221Z" }, - { url = "https://files.pythonhosted.org/packages/de/6a/d5026520843490cfc8135d03012a413e4532a400e471e6188b01b2de853f/bcrypt-4.3.0-cp313-cp313t-manylinux_2_34_x86_64.whl", hash = "sha256:5bd3cca1f2aa5dbcf39e2aa13dd094ea181f48959e1071265de49cc2b82525af", size = 277329, upload-time = "2025-02-28T01:22:51.603Z" }, - { url = 
"https://files.pythonhosted.org/packages/b3/a3/4fc5255e60486466c389e28c12579d2829b28a527360e9430b4041df4cf9/bcrypt-4.3.0-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:335a420cfd63fc5bc27308e929bee231c15c85cc4c496610ffb17923abf7f231", size = 305241, upload-time = "2025-02-28T01:22:53.283Z" }, - { url = "https://files.pythonhosted.org/packages/c7/15/2b37bc07d6ce27cc94e5b10fd5058900eb8fb11642300e932c8c82e25c4a/bcrypt-4.3.0-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:0e30e5e67aed0187a1764911af023043b4542e70a7461ad20e837e94d23e1d6c", size = 309617, upload-time = "2025-02-28T01:22:55.461Z" }, - { url = "https://files.pythonhosted.org/packages/5f/1f/99f65edb09e6c935232ba0430c8c13bb98cb3194b6d636e61d93fe60ac59/bcrypt-4.3.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:3b8d62290ebefd49ee0b3ce7500f5dbdcf13b81402c05f6dafab9a1e1b27212f", size = 335751, upload-time = "2025-02-28T01:22:57.81Z" }, - { url = "https://files.pythonhosted.org/packages/00/1b/b324030c706711c99769988fcb694b3cb23f247ad39a7823a78e361bdbb8/bcrypt-4.3.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:2ef6630e0ec01376f59a006dc72918b1bf436c3b571b80fa1968d775fa02fe7d", size = 355965, upload-time = "2025-02-28T01:22:59.181Z" }, - { url = "https://files.pythonhosted.org/packages/aa/dd/20372a0579dd915dfc3b1cd4943b3bca431866fcb1dfdfd7518c3caddea6/bcrypt-4.3.0-cp313-cp313t-win32.whl", hash = "sha256:7a4be4cbf241afee43f1c3969b9103a41b40bcb3a3f467ab19f891d9bc4642e4", size = 155316, upload-time = "2025-02-28T01:23:00.763Z" }, - { url = "https://files.pythonhosted.org/packages/6d/52/45d969fcff6b5577c2bf17098dc36269b4c02197d551371c023130c0f890/bcrypt-4.3.0-cp313-cp313t-win_amd64.whl", hash = "sha256:5c1949bf259a388863ced887c7861da1df681cb2388645766c89fdfd9004c669", size = 147752, upload-time = "2025-02-28T01:23:02.908Z" }, - { url = 
"https://files.pythonhosted.org/packages/11/22/5ada0b9af72b60cbc4c9a399fdde4af0feaa609d27eb0adc61607997a3fa/bcrypt-4.3.0-cp38-abi3-macosx_10_12_universal2.whl", hash = "sha256:f81b0ed2639568bf14749112298f9e4e2b28853dab50a8b357e31798686a036d", size = 498019, upload-time = "2025-02-28T01:23:05.838Z" }, - { url = "https://files.pythonhosted.org/packages/b8/8c/252a1edc598dc1ce57905be173328eda073083826955ee3c97c7ff5ba584/bcrypt-4.3.0-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:864f8f19adbe13b7de11ba15d85d4a428c7e2f344bac110f667676a0ff84924b", size = 279174, upload-time = "2025-02-28T01:23:07.274Z" }, - { url = "https://files.pythonhosted.org/packages/29/5b/4547d5c49b85f0337c13929f2ccbe08b7283069eea3550a457914fc078aa/bcrypt-4.3.0-cp38-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3e36506d001e93bffe59754397572f21bb5dc7c83f54454c990c74a468cd589e", size = 283870, upload-time = "2025-02-28T01:23:09.151Z" }, - { url = "https://files.pythonhosted.org/packages/be/21/7dbaf3fa1745cb63f776bb046e481fbababd7d344c5324eab47f5ca92dd2/bcrypt-4.3.0-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:842d08d75d9fe9fb94b18b071090220697f9f184d4547179b60734846461ed59", size = 279601, upload-time = "2025-02-28T01:23:11.461Z" }, - { url = "https://files.pythonhosted.org/packages/6d/64/e042fc8262e971347d9230d9abbe70d68b0a549acd8611c83cebd3eaec67/bcrypt-4.3.0-cp38-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:7c03296b85cb87db865d91da79bf63d5609284fc0cab9472fdd8367bbd830753", size = 297660, upload-time = "2025-02-28T01:23:12.989Z" }, - { url = "https://files.pythonhosted.org/packages/50/b8/6294eb84a3fef3b67c69b4470fcdd5326676806bf2519cda79331ab3c3a9/bcrypt-4.3.0-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:62f26585e8b219cdc909b6a0069efc5e4267e25d4a3770a364ac58024f62a761", size = 284083, upload-time = "2025-02-28T01:23:14.5Z" }, - { url = 
"https://files.pythonhosted.org/packages/62/e6/baff635a4f2c42e8788fe1b1633911c38551ecca9a749d1052d296329da6/bcrypt-4.3.0-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:beeefe437218a65322fbd0069eb437e7c98137e08f22c4660ac2dc795c31f8bb", size = 279237, upload-time = "2025-02-28T01:23:16.686Z" }, - { url = "https://files.pythonhosted.org/packages/39/48/46f623f1b0c7dc2e5de0b8af5e6f5ac4cc26408ac33f3d424e5ad8da4a90/bcrypt-4.3.0-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:97eea7408db3a5bcce4a55d13245ab3fa566e23b4c67cd227062bb49e26c585d", size = 283737, upload-time = "2025-02-28T01:23:18.897Z" }, - { url = "https://files.pythonhosted.org/packages/49/8b/70671c3ce9c0fca4a6cc3cc6ccbaa7e948875a2e62cbd146e04a4011899c/bcrypt-4.3.0-cp38-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:191354ebfe305e84f344c5964c7cd5f924a3bfc5d405c75ad07f232b6dffb49f", size = 312741, upload-time = "2025-02-28T01:23:21.041Z" }, - { url = "https://files.pythonhosted.org/packages/27/fb/910d3a1caa2d249b6040a5caf9f9866c52114d51523ac2fb47578a27faee/bcrypt-4.3.0-cp38-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:41261d64150858eeb5ff43c753c4b216991e0ae16614a308a15d909503617732", size = 316472, upload-time = "2025-02-28T01:23:23.183Z" }, - { url = "https://files.pythonhosted.org/packages/dc/cf/7cf3a05b66ce466cfb575dbbda39718d45a609daa78500f57fa9f36fa3c0/bcrypt-4.3.0-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:33752b1ba962ee793fa2b6321404bf20011fe45b9afd2a842139de3011898fef", size = 343606, upload-time = "2025-02-28T01:23:25.361Z" }, - { url = "https://files.pythonhosted.org/packages/e3/b8/e970ecc6d7e355c0d892b7f733480f4aa8509f99b33e71550242cf0b7e63/bcrypt-4.3.0-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:50e6e80a4bfd23a25f5c05b90167c19030cf9f87930f7cb2eacb99f45d1c3304", size = 362867, upload-time = "2025-02-28T01:23:26.875Z" }, - { url = 
"https://files.pythonhosted.org/packages/a9/97/8d3118efd8354c555a3422d544163f40d9f236be5b96c714086463f11699/bcrypt-4.3.0-cp38-abi3-win32.whl", hash = "sha256:67a561c4d9fb9465ec866177e7aebcad08fe23aaf6fbd692a6fab69088abfc51", size = 160589, upload-time = "2025-02-28T01:23:28.381Z" }, - { url = "https://files.pythonhosted.org/packages/29/07/416f0b99f7f3997c69815365babbc2e8754181a4b1899d921b3c7d5b6f12/bcrypt-4.3.0-cp38-abi3-win_amd64.whl", hash = "sha256:584027857bc2843772114717a7490a37f68da563b3620f78a849bcb54dc11e62", size = 152794, upload-time = "2025-02-28T01:23:30.187Z" }, - { url = "https://files.pythonhosted.org/packages/6e/c1/3fa0e9e4e0bfd3fd77eb8b52ec198fd6e1fd7e9402052e43f23483f956dd/bcrypt-4.3.0-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:0d3efb1157edebfd9128e4e46e2ac1a64e0c1fe46fb023158a407c7892b0f8c3", size = 498969, upload-time = "2025-02-28T01:23:31.945Z" }, - { url = "https://files.pythonhosted.org/packages/ce/d4/755ce19b6743394787fbd7dff6bf271b27ee9b5912a97242e3caf125885b/bcrypt-4.3.0-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:08bacc884fd302b611226c01014eca277d48f0a05187666bca23aac0dad6fe24", size = 279158, upload-time = "2025-02-28T01:23:34.161Z" }, - { url = "https://files.pythonhosted.org/packages/9b/5d/805ef1a749c965c46b28285dfb5cd272a7ed9fa971f970435a5133250182/bcrypt-4.3.0-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6746e6fec103fcd509b96bacdfdaa2fbde9a553245dbada284435173a6f1aef", size = 284285, upload-time = "2025-02-28T01:23:35.765Z" }, - { url = "https://files.pythonhosted.org/packages/ab/2b/698580547a4a4988e415721b71eb45e80c879f0fb04a62da131f45987b96/bcrypt-4.3.0-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:afe327968aaf13fc143a56a3360cb27d4ad0345e34da12c7290f1b00b8fe9a8b", size = 279583, upload-time = "2025-02-28T01:23:38.021Z" }, - { url = 
"https://files.pythonhosted.org/packages/f2/87/62e1e426418204db520f955ffd06f1efd389feca893dad7095bf35612eec/bcrypt-4.3.0-cp39-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:d9af79d322e735b1fc33404b5765108ae0ff232d4b54666d46730f8ac1a43676", size = 297896, upload-time = "2025-02-28T01:23:39.575Z" }, - { url = "https://files.pythonhosted.org/packages/cb/c6/8fedca4c2ada1b6e889c52d2943b2f968d3427e5d65f595620ec4c06fa2f/bcrypt-4.3.0-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f1e3ffa1365e8702dc48c8b360fef8d7afeca482809c5e45e653af82ccd088c1", size = 284492, upload-time = "2025-02-28T01:23:40.901Z" }, - { url = "https://files.pythonhosted.org/packages/4d/4d/c43332dcaaddb7710a8ff5269fcccba97ed3c85987ddaa808db084267b9a/bcrypt-4.3.0-cp39-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:3004df1b323d10021fda07a813fd33e0fd57bef0e9a480bb143877f6cba996fe", size = 279213, upload-time = "2025-02-28T01:23:42.653Z" }, - { url = "https://files.pythonhosted.org/packages/dc/7f/1e36379e169a7df3a14a1c160a49b7b918600a6008de43ff20d479e6f4b5/bcrypt-4.3.0-cp39-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:531457e5c839d8caea9b589a1bcfe3756b0547d7814e9ce3d437f17da75c32b0", size = 284162, upload-time = "2025-02-28T01:23:43.964Z" }, - { url = "https://files.pythonhosted.org/packages/1c/0a/644b2731194b0d7646f3210dc4d80c7fee3ecb3a1f791a6e0ae6bb8684e3/bcrypt-4.3.0-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:17a854d9a7a476a89dcef6c8bd119ad23e0f82557afbd2c442777a16408e614f", size = 312856, upload-time = "2025-02-28T01:23:46.011Z" }, - { url = "https://files.pythonhosted.org/packages/dc/62/2a871837c0bb6ab0c9a88bf54de0fc021a6a08832d4ea313ed92a669d437/bcrypt-4.3.0-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:6fb1fd3ab08c0cbc6826a2e0447610c6f09e983a281b919ed721ad32236b8b23", size = 316726, upload-time = "2025-02-28T01:23:47.575Z" }, - { url = 
"https://files.pythonhosted.org/packages/0c/a1/9898ea3faac0b156d457fd73a3cb9c2855c6fd063e44b8522925cdd8ce46/bcrypt-4.3.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:e965a9c1e9a393b8005031ff52583cedc15b7884fce7deb8b0346388837d6cfe", size = 343664, upload-time = "2025-02-28T01:23:49.059Z" }, - { url = "https://files.pythonhosted.org/packages/40/f2/71b4ed65ce38982ecdda0ff20c3ad1b15e71949c78b2c053df53629ce940/bcrypt-4.3.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:79e70b8342a33b52b55d93b3a59223a844962bef479f6a0ea318ebbcadf71505", size = 363128, upload-time = "2025-02-28T01:23:50.399Z" }, - { url = "https://files.pythonhosted.org/packages/11/99/12f6a58eca6dea4be992d6c681b7ec9410a1d9f5cf368c61437e31daa879/bcrypt-4.3.0-cp39-abi3-win32.whl", hash = "sha256:b4d4e57f0a63fd0b358eb765063ff661328f69a04494427265950c71b992a39a", size = 160598, upload-time = "2025-02-28T01:23:51.775Z" }, - { url = "https://files.pythonhosted.org/packages/a9/cf/45fb5261ece3e6b9817d3d82b2f343a505fd58674a92577923bc500bd1aa/bcrypt-4.3.0-cp39-abi3-win_amd64.whl", hash = "sha256:e53e074b120f2877a35cc6c736b8eb161377caae8925c17688bd46ba56daaa5b", size = 152799, upload-time = "2025-02-28T01:23:53.139Z" }, + { url = "https://files.pythonhosted.org/packages/13/85/3e65e01985fddf25b64ca67275bb5bdb4040bd1a53b66d355c6c37c8a680/bcrypt-5.0.0-cp313-cp313t-macosx_10_12_universal2.whl", hash = "sha256:f3c08197f3039bec79cee59a606d62b96b16669cff3949f21e74796b6e3cd2be", size = 481806, upload-time = "2025-09-25T19:49:05.102Z" }, + { url = "https://files.pythonhosted.org/packages/44/dc/01eb79f12b177017a726cbf78330eb0eb442fae0e7b3dfd84ea2849552f3/bcrypt-5.0.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:200af71bc25f22006f4069060c88ed36f8aa4ff7f53e67ff04d2ab3f1e79a5b2", size = 268626, upload-time = "2025-09-25T19:49:06.723Z" }, + { url = 
"https://files.pythonhosted.org/packages/8c/cf/e82388ad5959c40d6afd94fb4743cc077129d45b952d46bdc3180310e2df/bcrypt-5.0.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:baade0a5657654c2984468efb7d6c110db87ea63ef5a4b54732e7e337253e44f", size = 271853, upload-time = "2025-09-25T19:49:08.028Z" }, + { url = "https://files.pythonhosted.org/packages/ec/86/7134b9dae7cf0efa85671651341f6afa695857fae172615e960fb6a466fa/bcrypt-5.0.0-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:c58b56cdfb03202b3bcc9fd8daee8e8e9b6d7e3163aa97c631dfcfcc24d36c86", size = 269793, upload-time = "2025-09-25T19:49:09.727Z" }, + { url = "https://files.pythonhosted.org/packages/cc/82/6296688ac1b9e503d034e7d0614d56e80c5d1a08402ff856a4549cb59207/bcrypt-5.0.0-cp313-cp313t-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:4bfd2a34de661f34d0bda43c3e4e79df586e4716ef401fe31ea39d69d581ef23", size = 289930, upload-time = "2025-09-25T19:49:11.204Z" }, + { url = "https://files.pythonhosted.org/packages/d1/18/884a44aa47f2a3b88dd09bc05a1e40b57878ecd111d17e5bba6f09f8bb77/bcrypt-5.0.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:ed2e1365e31fc73f1825fa830f1c8f8917ca1b3ca6185773b349c20fd606cec2", size = 272194, upload-time = "2025-09-25T19:49:12.524Z" }, + { url = "https://files.pythonhosted.org/packages/0e/8f/371a3ab33c6982070b674f1788e05b656cfbf5685894acbfef0c65483a59/bcrypt-5.0.0-cp313-cp313t-manylinux_2_34_aarch64.whl", hash = "sha256:83e787d7a84dbbfba6f250dd7a5efd689e935f03dd83b0f919d39349e1f23f83", size = 269381, upload-time = "2025-09-25T19:49:14.308Z" }, + { url = "https://files.pythonhosted.org/packages/b1/34/7e4e6abb7a8778db6422e88b1f06eb07c47682313997ee8a8f9352e5a6f1/bcrypt-5.0.0-cp313-cp313t-manylinux_2_34_x86_64.whl", hash = "sha256:137c5156524328a24b9fac1cb5db0ba618bc97d11970b39184c1d87dc4bf1746", size = 271750, upload-time = "2025-09-25T19:49:15.584Z" }, + { url = 
"https://files.pythonhosted.org/packages/c0/1b/54f416be2499bd72123c70d98d36c6cd61a4e33d9b89562c22481c81bb30/bcrypt-5.0.0-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:38cac74101777a6a7d3b3e3cfefa57089b5ada650dce2baf0cbdd9d65db22a9e", size = 303757, upload-time = "2025-09-25T19:49:17.244Z" }, + { url = "https://files.pythonhosted.org/packages/13/62/062c24c7bcf9d2826a1a843d0d605c65a755bc98002923d01fd61270705a/bcrypt-5.0.0-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:d8d65b564ec849643d9f7ea05c6d9f0cd7ca23bdd4ac0c2dbef1104ab504543d", size = 306740, upload-time = "2025-09-25T19:49:18.693Z" }, + { url = "https://files.pythonhosted.org/packages/d5/c8/1fdbfc8c0f20875b6b4020f3c7dc447b8de60aa0be5faaf009d24242aec9/bcrypt-5.0.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:741449132f64b3524e95cd30e5cd3343006ce146088f074f31ab26b94e6c75ba", size = 334197, upload-time = "2025-09-25T19:49:20.523Z" }, + { url = "https://files.pythonhosted.org/packages/a6/c1/8b84545382d75bef226fbc6588af0f7b7d095f7cd6a670b42a86243183cd/bcrypt-5.0.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:212139484ab3207b1f0c00633d3be92fef3c5f0af17cad155679d03ff2ee1e41", size = 352974, upload-time = "2025-09-25T19:49:22.254Z" }, + { url = "https://files.pythonhosted.org/packages/10/a6/ffb49d4254ed085e62e3e5dd05982b4393e32fe1e49bb1130186617c29cd/bcrypt-5.0.0-cp313-cp313t-win32.whl", hash = "sha256:9d52ed507c2488eddd6a95bccee4e808d3234fa78dd370e24bac65a21212b861", size = 148498, upload-time = "2025-09-25T19:49:24.134Z" }, + { url = "https://files.pythonhosted.org/packages/48/a9/259559edc85258b6d5fc5471a62a3299a6aa37a6611a169756bf4689323c/bcrypt-5.0.0-cp313-cp313t-win_amd64.whl", hash = "sha256:f6984a24db30548fd39a44360532898c33528b74aedf81c26cf29c51ee47057e", size = 145853, upload-time = "2025-09-25T19:49:25.702Z" }, + { url = "https://files.pythonhosted.org/packages/2d/df/9714173403c7e8b245acf8e4be8876aac64a209d1b392af457c79e60492e/bcrypt-5.0.0-cp313-cp313t-win_arm64.whl", 
hash = "sha256:9fffdb387abe6aa775af36ef16f55e318dcda4194ddbf82007a6f21da29de8f5", size = 139626, upload-time = "2025-09-25T19:49:26.928Z" }, + { url = "https://files.pythonhosted.org/packages/f8/14/c18006f91816606a4abe294ccc5d1e6f0e42304df5a33710e9e8e95416e1/bcrypt-5.0.0-cp314-cp314t-macosx_10_12_universal2.whl", hash = "sha256:4870a52610537037adb382444fefd3706d96d663ac44cbb2f37e3919dca3d7ef", size = 481862, upload-time = "2025-09-25T19:49:28.365Z" }, + { url = "https://files.pythonhosted.org/packages/67/49/dd074d831f00e589537e07a0725cf0e220d1f0d5d8e85ad5bbff251c45aa/bcrypt-5.0.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:48f753100931605686f74e27a7b49238122aa761a9aefe9373265b8b7aa43ea4", size = 268544, upload-time = "2025-09-25T19:49:30.39Z" }, + { url = "https://files.pythonhosted.org/packages/f5/91/50ccba088b8c474545b034a1424d05195d9fcbaaf802ab8bfe2be5a4e0d7/bcrypt-5.0.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f70aadb7a809305226daedf75d90379c397b094755a710d7014b8b117df1ebbf", size = 271787, upload-time = "2025-09-25T19:49:32.144Z" }, + { url = "https://files.pythonhosted.org/packages/aa/e7/d7dba133e02abcda3b52087a7eea8c0d4f64d3e593b4fffc10c31b7061f3/bcrypt-5.0.0-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:744d3c6b164caa658adcb72cb8cc9ad9b4b75c7db507ab4bc2480474a51989da", size = 269753, upload-time = "2025-09-25T19:49:33.885Z" }, + { url = "https://files.pythonhosted.org/packages/33/fc/5b145673c4b8d01018307b5c2c1fc87a6f5a436f0ad56607aee389de8ee3/bcrypt-5.0.0-cp314-cp314t-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:a28bc05039bdf3289d757f49d616ab3efe8cf40d8e8001ccdd621cd4f98f4fc9", size = 289587, upload-time = "2025-09-25T19:49:35.144Z" }, + { url = "https://files.pythonhosted.org/packages/27/d7/1ff22703ec6d4f90e62f1a5654b8867ef96bafb8e8102c2288333e1a6ca6/bcrypt-5.0.0-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = 
"sha256:7f277a4b3390ab4bebe597800a90da0edae882c6196d3038a73adf446c4f969f", size = 272178, upload-time = "2025-09-25T19:49:36.793Z" }, + { url = "https://files.pythonhosted.org/packages/c8/88/815b6d558a1e4d40ece04a2f84865b0fef233513bd85fd0e40c294272d62/bcrypt-5.0.0-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:79cfa161eda8d2ddf29acad370356b47f02387153b11d46042e93a0a95127493", size = 269295, upload-time = "2025-09-25T19:49:38.164Z" }, + { url = "https://files.pythonhosted.org/packages/51/8c/e0db387c79ab4931fc89827d37608c31cc57b6edc08ccd2386139028dc0d/bcrypt-5.0.0-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:a5393eae5722bcef046a990b84dff02b954904c36a194f6cfc817d7dca6c6f0b", size = 271700, upload-time = "2025-09-25T19:49:39.917Z" }, + { url = "https://files.pythonhosted.org/packages/06/83/1570edddd150f572dbe9fc00f6203a89fc7d4226821f67328a85c330f239/bcrypt-5.0.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:7f4c94dec1b5ab5d522750cb059bb9409ea8872d4494fd152b53cca99f1ddd8c", size = 334034, upload-time = "2025-09-25T19:49:41.227Z" }, + { url = "https://files.pythonhosted.org/packages/c9/f2/ea64e51a65e56ae7a8a4ec236c2bfbdd4b23008abd50ac33fbb2d1d15424/bcrypt-5.0.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:0cae4cb350934dfd74c020525eeae0a5f79257e8a201c0c176f4b84fdbf2a4b4", size = 352766, upload-time = "2025-09-25T19:49:43.08Z" }, + { url = "https://files.pythonhosted.org/packages/d7/d4/1a388d21ee66876f27d1a1f41287897d0c0f1712ef97d395d708ba93004c/bcrypt-5.0.0-cp314-cp314t-win32.whl", hash = "sha256:b17366316c654e1ad0306a6858e189fc835eca39f7eb2cafd6aaca8ce0c40a2e", size = 152449, upload-time = "2025-09-25T19:49:44.971Z" }, + { url = "https://files.pythonhosted.org/packages/3f/61/3291c2243ae0229e5bca5d19f4032cecad5dfb05a2557169d3a69dc0ba91/bcrypt-5.0.0-cp314-cp314t-win_amd64.whl", hash = "sha256:92864f54fb48b4c718fc92a32825d0e42265a627f956bc0361fe869f1adc3e7d", size = 149310, upload-time = "2025-09-25T19:49:46.162Z" }, + { url = 
"https://files.pythonhosted.org/packages/3e/89/4b01c52ae0c1a681d4021e5dd3e45b111a8fb47254a274fa9a378d8d834b/bcrypt-5.0.0-cp314-cp314t-win_arm64.whl", hash = "sha256:dd19cf5184a90c873009244586396a6a884d591a5323f0e8a5922560718d4993", size = 143761, upload-time = "2025-09-25T19:49:47.345Z" }, + { url = "https://files.pythonhosted.org/packages/84/29/6237f151fbfe295fe3e074ecc6d44228faa1e842a81f6d34a02937ee1736/bcrypt-5.0.0-cp38-abi3-macosx_10_12_universal2.whl", hash = "sha256:fc746432b951e92b58317af8e0ca746efe93e66555f1b40888865ef5bf56446b", size = 494553, upload-time = "2025-09-25T19:49:49.006Z" }, + { url = "https://files.pythonhosted.org/packages/45/b6/4c1205dde5e464ea3bd88e8742e19f899c16fa8916fb8510a851fae985b5/bcrypt-5.0.0-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c2388ca94ffee269b6038d48747f4ce8df0ffbea43f31abfa18ac72f0218effb", size = 275009, upload-time = "2025-09-25T19:49:50.581Z" }, + { url = "https://files.pythonhosted.org/packages/3b/71/427945e6ead72ccffe77894b2655b695ccf14ae1866cd977e185d606dd2f/bcrypt-5.0.0-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:560ddb6ec730386e7b3b26b8b4c88197aaed924430e7b74666a586ac997249ef", size = 278029, upload-time = "2025-09-25T19:49:52.533Z" }, + { url = "https://files.pythonhosted.org/packages/17/72/c344825e3b83c5389a369c8a8e58ffe1480b8a699f46c127c34580c4666b/bcrypt-5.0.0-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:d79e5c65dcc9af213594d6f7f1fa2c98ad3fc10431e7aa53c176b441943efbdd", size = 275907, upload-time = "2025-09-25T19:49:54.709Z" }, + { url = "https://files.pythonhosted.org/packages/0b/7e/d4e47d2df1641a36d1212e5c0514f5291e1a956a7749f1e595c07a972038/bcrypt-5.0.0-cp38-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:2b732e7d388fa22d48920baa267ba5d97cca38070b69c0e2d37087b381c681fd", size = 296500, upload-time = "2025-09-25T19:49:56.013Z" }, + { url = 
"https://files.pythonhosted.org/packages/0f/c3/0ae57a68be2039287ec28bc463b82e4b8dc23f9d12c0be331f4782e19108/bcrypt-5.0.0-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:0c8e093ea2532601a6f686edbc2c6b2ec24131ff5c52f7610dd64fa4553b5464", size = 278412, upload-time = "2025-09-25T19:49:57.356Z" }, + { url = "https://files.pythonhosted.org/packages/45/2b/77424511adb11e6a99e3a00dcc7745034bee89036ad7d7e255a7e47be7d8/bcrypt-5.0.0-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:5b1589f4839a0899c146e8892efe320c0fa096568abd9b95593efac50a87cb75", size = 275486, upload-time = "2025-09-25T19:49:59.116Z" }, + { url = "https://files.pythonhosted.org/packages/43/0a/405c753f6158e0f3f14b00b462d8bca31296f7ecfc8fc8bc7919c0c7d73a/bcrypt-5.0.0-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:89042e61b5e808b67daf24a434d89bab164d4de1746b37a8d173b6b14f3db9ff", size = 277940, upload-time = "2025-09-25T19:50:00.869Z" }, + { url = "https://files.pythonhosted.org/packages/62/83/b3efc285d4aadc1fa83db385ec64dcfa1707e890eb42f03b127d66ac1b7b/bcrypt-5.0.0-cp38-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:e3cf5b2560c7b5a142286f69bde914494b6d8f901aaa71e453078388a50881c4", size = 310776, upload-time = "2025-09-25T19:50:02.393Z" }, + { url = "https://files.pythonhosted.org/packages/95/7d/47ee337dacecde6d234890fe929936cb03ebc4c3a7460854bbd9c97780b8/bcrypt-5.0.0-cp38-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:f632fd56fc4e61564f78b46a2269153122db34988e78b6be8b32d28507b7eaeb", size = 312922, upload-time = "2025-09-25T19:50:04.232Z" }, + { url = "https://files.pythonhosted.org/packages/d6/3a/43d494dfb728f55f4e1cf8fd435d50c16a2d75493225b54c8d06122523c6/bcrypt-5.0.0-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:801cad5ccb6b87d1b430f183269b94c24f248dddbbc5c1f78b6ed231743e001c", size = 341367, upload-time = "2025-09-25T19:50:05.559Z" }, + { url = 
"https://files.pythonhosted.org/packages/55/ab/a0727a4547e383e2e22a630e0f908113db37904f58719dc48d4622139b5c/bcrypt-5.0.0-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:3cf67a804fc66fc217e6914a5635000259fbbbb12e78a99488e4d5ba445a71eb", size = 359187, upload-time = "2025-09-25T19:50:06.916Z" }, + { url = "https://files.pythonhosted.org/packages/1b/bb/461f352fdca663524b4643d8b09e8435b4990f17fbf4fea6bc2a90aa0cc7/bcrypt-5.0.0-cp38-abi3-win32.whl", hash = "sha256:3abeb543874b2c0524ff40c57a4e14e5d3a66ff33fb423529c88f180fd756538", size = 153752, upload-time = "2025-09-25T19:50:08.515Z" }, + { url = "https://files.pythonhosted.org/packages/41/aa/4190e60921927b7056820291f56fc57d00d04757c8b316b2d3c0d1d6da2c/bcrypt-5.0.0-cp38-abi3-win_amd64.whl", hash = "sha256:35a77ec55b541e5e583eb3436ffbbf53b0ffa1fa16ca6782279daf95d146dcd9", size = 150881, upload-time = "2025-09-25T19:50:09.742Z" }, + { url = "https://files.pythonhosted.org/packages/54/12/cd77221719d0b39ac0b55dbd39358db1cd1246e0282e104366ebbfb8266a/bcrypt-5.0.0-cp38-abi3-win_arm64.whl", hash = "sha256:cde08734f12c6a4e28dc6755cd11d3bdfea608d93d958fffbe95a7026ebe4980", size = 144931, upload-time = "2025-09-25T19:50:11.016Z" }, + { url = "https://files.pythonhosted.org/packages/5d/ba/2af136406e1c3839aea9ecadc2f6be2bcd1eff255bd451dd39bcf302c47a/bcrypt-5.0.0-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:0c418ca99fd47e9c59a301744d63328f17798b5947b0f791e9af3c1c499c2d0a", size = 495313, upload-time = "2025-09-25T19:50:12.309Z" }, + { url = "https://files.pythonhosted.org/packages/ac/ee/2f4985dbad090ace5ad1f7dd8ff94477fe089b5fab2040bd784a3d5f187b/bcrypt-5.0.0-cp39-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ddb4e1500f6efdd402218ffe34d040a1196c072e07929b9820f363a1fd1f4191", size = 275290, upload-time = "2025-09-25T19:50:13.673Z" }, + { url = 
"https://files.pythonhosted.org/packages/e4/6e/b77ade812672d15cf50842e167eead80ac3514f3beacac8902915417f8b7/bcrypt-5.0.0-cp39-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:7aeef54b60ceddb6f30ee3db090351ecf0d40ec6e2abf41430997407a46d2254", size = 278253, upload-time = "2025-09-25T19:50:15.089Z" }, + { url = "https://files.pythonhosted.org/packages/36/c4/ed00ed32f1040f7990dac7115f82273e3c03da1e1a1587a778d8cea496d8/bcrypt-5.0.0-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:f0ce778135f60799d89c9693b9b398819d15f1921ba15fe719acb3178215a7db", size = 276084, upload-time = "2025-09-25T19:50:16.699Z" }, + { url = "https://files.pythonhosted.org/packages/e7/c4/fa6e16145e145e87f1fa351bbd54b429354fd72145cd3d4e0c5157cf4c70/bcrypt-5.0.0-cp39-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:a71f70ee269671460b37a449f5ff26982a6f2ba493b3eabdd687b4bf35f875ac", size = 297185, upload-time = "2025-09-25T19:50:18.525Z" }, + { url = "https://files.pythonhosted.org/packages/24/b4/11f8a31d8b67cca3371e046db49baa7c0594d71eb40ac8121e2fc0888db0/bcrypt-5.0.0-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f8429e1c410b4073944f03bd778a9e066e7fad723564a52ff91841d278dfc822", size = 278656, upload-time = "2025-09-25T19:50:19.809Z" }, + { url = "https://files.pythonhosted.org/packages/ac/31/79f11865f8078e192847d2cb526e3fa27c200933c982c5b2869720fa5fce/bcrypt-5.0.0-cp39-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:edfcdcedd0d0f05850c52ba3127b1fce70b9f89e0fe5ff16517df7e81fa3cbb8", size = 275662, upload-time = "2025-09-25T19:50:21.567Z" }, + { url = "https://files.pythonhosted.org/packages/d4/8d/5e43d9584b3b3591a6f9b68f755a4da879a59712981ef5ad2a0ac1379f7a/bcrypt-5.0.0-cp39-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:611f0a17aa4a25a69362dcc299fda5c8a3d4f160e2abb3831041feb77393a14a", size = 278240, upload-time = "2025-09-25T19:50:23.305Z" }, + { url = 
"https://files.pythonhosted.org/packages/89/48/44590e3fc158620f680a978aafe8f87a4c4320da81ed11552f0323aa9a57/bcrypt-5.0.0-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:db99dca3b1fdc3db87d7c57eac0c82281242d1eabf19dcb8a6b10eb29a2e72d1", size = 311152, upload-time = "2025-09-25T19:50:24.597Z" }, + { url = "https://files.pythonhosted.org/packages/5f/85/e4fbfc46f14f47b0d20493669a625da5827d07e8a88ee460af6cd9768b44/bcrypt-5.0.0-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:5feebf85a9cefda32966d8171f5db7e3ba964b77fdfe31919622256f80f9cf42", size = 313284, upload-time = "2025-09-25T19:50:26.268Z" }, + { url = "https://files.pythonhosted.org/packages/25/ae/479f81d3f4594456a01ea2f05b132a519eff9ab5768a70430fa1132384b1/bcrypt-5.0.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:3ca8a166b1140436e058298a34d88032ab62f15aae1c598580333dc21d27ef10", size = 341643, upload-time = "2025-09-25T19:50:28.02Z" }, + { url = "https://files.pythonhosted.org/packages/df/d2/36a086dee1473b14276cd6ea7f61aef3b2648710b5d7f1c9e032c29b859f/bcrypt-5.0.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:61afc381250c3182d9078551e3ac3a41da14154fbff647ddf52a769f588c4172", size = 359698, upload-time = "2025-09-25T19:50:31.347Z" }, + { url = "https://files.pythonhosted.org/packages/c0/f6/688d2cd64bfd0b14d805ddb8a565e11ca1fb0fd6817175d58b10052b6d88/bcrypt-5.0.0-cp39-abi3-win32.whl", hash = "sha256:64d7ce196203e468c457c37ec22390f1a61c85c6f0b8160fd752940ccfb3a683", size = 153725, upload-time = "2025-09-25T19:50:34.384Z" }, + { url = "https://files.pythonhosted.org/packages/9f/b9/9d9a641194a730bda138b3dfe53f584d61c58cd5230e37566e83ec2ffa0d/bcrypt-5.0.0-cp39-abi3-win_amd64.whl", hash = "sha256:64ee8434b0da054d830fa8e89e1c8bf30061d539044a39524ff7dec90481e5c2", size = 150912, upload-time = "2025-09-25T19:50:35.69Z" }, + { url = "https://files.pythonhosted.org/packages/27/44/d2ef5e87509158ad2187f4dd0852df80695bb1ee0cfe0a684727b01a69e0/bcrypt-5.0.0-cp39-abi3-win_arm64.whl", hash = 
"sha256:f2347d3534e76bf50bca5500989d6c1d05ed64b440408057a37673282c654927", size = 144953, upload-time = "2025-09-25T19:50:37.32Z" }, ] [[package]] @@ -297,14 +307,14 @@ wheels = [ [[package]] name = "botocore-stubs" -version = "1.40.30" +version = "1.40.33" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "types-awscrt" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/b0/d8/a8cf273b0342ee4e8c8ed30cf2b32b00616e1090c4df12beba8bb65334a1/botocore_stubs-1.40.30.tar.gz", hash = "sha256:73baabaef96fa74af4034c22e37fd71a752075867dd31e06e5a3809ffbc151ec", size = 42768, upload-time = "2025-09-12T20:24:45.257Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ae/94/16f8e1f41feaa38f1350aa5a4c60c5724b6c8524ca0e6c28523bf5070e74/botocore_stubs-1.40.33.tar.gz", hash = "sha256:89c51ae0b28d9d79fde8c497cf908ddf872ce027d2737d4d4ba473fde9cdaa82", size = 42742, upload-time = "2025-09-17T20:25:56.388Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/12/42/7d20894547feccb3f0ce16150d98795a2aae3b469f8dd582d99078478b39/botocore_stubs-1.40.30-py3-none-any.whl", hash = "sha256:7d511dcd45f327446ebb130b71d9d124b026f572e12da956df58fdc56ab426ac", size = 66843, upload-time = "2025-09-12T20:24:42.841Z" }, + { url = "https://files.pythonhosted.org/packages/af/7b/6d8fe12a955b16094460e89ea7c4e063f131f4b3bd461b96bcd625d0c79e/botocore_stubs-1.40.33-py3-none-any.whl", hash = "sha256:ad21fee32cbdc7ad4730f29baf88424c7086bf88a745f8e43660ca3e9a7e5f89", size = 66843, upload-time = "2025-09-17T20:25:54.052Z" }, ] [[package]] @@ -384,14 +394,14 @@ wheels = [ [[package]] name = "click" -version = "8.2.1" +version = "8.3.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "colorama", marker = "sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/60/6c/8ca2efa64cf75a977a0d7fac081354553ebe483345c734fb6b6515d96bbc/click-8.2.1.tar.gz", hash = 
"sha256:27c491cc05d968d271d5a1db13e3b5a184636d9d930f148c50b038f0d0646202", size = 286342, upload-time = "2025-05-20T23:19:49.832Z" } +sdist = { url = "https://files.pythonhosted.org/packages/46/61/de6cd827efad202d7057d93e0fed9294b96952e188f7384832791c7b2254/click-8.3.0.tar.gz", hash = "sha256:e7b8232224eba16f4ebe410c25ced9f7875cb5f3263ffc93cc3e8da705e229c4", size = 276943, upload-time = "2025-09-18T17:32:23.696Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/85/32/10bb5764d90a8eee674e9dc6f4db6a0ab47c8c4d0d83c27f7c39ac415a4d/click-8.2.1-py3-none-any.whl", hash = "sha256:61a3265b914e850b85317d0b3109c7f8cd35a670f963866005d6ef1d5175a12b", size = 102215, upload-time = "2025-05-20T23:19:47.796Z" }, + { url = "https://files.pythonhosted.org/packages/db/d3/9dcc0f5797f070ec8edf30fbadfb200e71d9db6b84d211e3b2085a7589a0/click-8.3.0-py3-none-any.whl", hash = "sha256:9b9f285302c6e3064f4330c05f05b81945b2a39544279343e6e7c5f27a9baddc", size = 107295, upload-time = "2025-09-18T17:32:22.42Z" }, ] [[package]] @@ -405,37 +415,58 @@ wheels = [ [[package]] name = "cryptography" -version = "45.0.7" +version = "46.0.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "cffi", marker = "platform_python_implementation != 'PyPy'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/a7/35/c495bffc2056f2dadb32434f1feedd79abde2a7f8363e1974afa9c33c7e2/cryptography-45.0.7.tar.gz", hash = "sha256:4b1654dfc64ea479c242508eb8c724044f1e964a47d1d1cacc5132292d851971", size = 744980, upload-time = "2025-09-01T11:15:03.146Z" } +sdist = { url = "https://files.pythonhosted.org/packages/4a/9b/e301418629f7bfdf72db9e80ad6ed9d1b83c487c471803eaa6464c511a01/cryptography-46.0.2.tar.gz", hash = "sha256:21b6fc8c71a3f9a604f028a329e5560009cc4a3a828bfea5fcba8eb7647d88fe", size = 749293, upload-time = "2025-10-01T00:29:11.856Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/0c/91/925c0ac74362172ae4516000fe877912e33b5983df735ff290c653de4913/cryptography-45.0.7-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:3be4f21c6245930688bd9e162829480de027f8bf962ede33d4f8ba7d67a00cee", size = 7041105, upload-time = "2025-09-01T11:13:59.684Z" }, - { url = "https://files.pythonhosted.org/packages/fc/63/43641c5acce3a6105cf8bd5baeceeb1846bb63067d26dae3e5db59f1513a/cryptography-45.0.7-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:67285f8a611b0ebc0857ced2081e30302909f571a46bfa7a3cc0ad303fe015c6", size = 4205799, upload-time = "2025-09-01T11:14:02.517Z" }, - { url = "https://files.pythonhosted.org/packages/bc/29/c238dd9107f10bfde09a4d1c52fd38828b1aa353ced11f358b5dd2507d24/cryptography-45.0.7-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:577470e39e60a6cd7780793202e63536026d9b8641de011ed9d8174da9ca5339", size = 4430504, upload-time = "2025-09-01T11:14:04.522Z" }, - { url = "https://files.pythonhosted.org/packages/62/62/24203e7cbcc9bd7c94739428cd30680b18ae6b18377ae66075c8e4771b1b/cryptography-45.0.7-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:4bd3e5c4b9682bc112d634f2c6ccc6736ed3635fc3319ac2bb11d768cc5a00d8", size = 4209542, upload-time = "2025-09-01T11:14:06.309Z" }, - { url = "https://files.pythonhosted.org/packages/cd/e3/e7de4771a08620eef2389b86cd87a2c50326827dea5528feb70595439ce4/cryptography-45.0.7-cp311-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:465ccac9d70115cd4de7186e60cfe989de73f7bb23e8a7aa45af18f7412e75bf", size = 3889244, upload-time = "2025-09-01T11:14:08.152Z" }, - { url = "https://files.pythonhosted.org/packages/96/b8/bca71059e79a0bb2f8e4ec61d9c205fbe97876318566cde3b5092529faa9/cryptography-45.0.7-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:16ede8a4f7929b4b7ff3642eba2bf79aa1d71f24ab6ee443935c0d269b6bc513", size = 4461975, upload-time = "2025-09-01T11:14:09.755Z" }, - { url = 
"https://files.pythonhosted.org/packages/58/67/3f5b26937fe1218c40e95ef4ff8d23c8dc05aa950d54200cc7ea5fb58d28/cryptography-45.0.7-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:8978132287a9d3ad6b54fcd1e08548033cc09dc6aacacb6c004c73c3eb5d3ac3", size = 4209082, upload-time = "2025-09-01T11:14:11.229Z" }, - { url = "https://files.pythonhosted.org/packages/0e/e4/b3e68a4ac363406a56cf7b741eeb80d05284d8c60ee1a55cdc7587e2a553/cryptography-45.0.7-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:b6a0e535baec27b528cb07a119f321ac024592388c5681a5ced167ae98e9fff3", size = 4460397, upload-time = "2025-09-01T11:14:12.924Z" }, - { url = "https://files.pythonhosted.org/packages/22/49/2c93f3cd4e3efc8cb22b02678c1fad691cff9dd71bb889e030d100acbfe0/cryptography-45.0.7-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:a24ee598d10befaec178efdff6054bc4d7e883f615bfbcd08126a0f4931c83a6", size = 4337244, upload-time = "2025-09-01T11:14:14.431Z" }, - { url = "https://files.pythonhosted.org/packages/04/19/030f400de0bccccc09aa262706d90f2ec23d56bc4eb4f4e8268d0ddf3fb8/cryptography-45.0.7-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:fa26fa54c0a9384c27fcdc905a2fb7d60ac6e47d14bc2692145f2b3b1e2cfdbd", size = 4568862, upload-time = "2025-09-01T11:14:16.185Z" }, - { url = "https://files.pythonhosted.org/packages/29/56/3034a3a353efa65116fa20eb3c990a8c9f0d3db4085429040a7eef9ada5f/cryptography-45.0.7-cp311-abi3-win32.whl", hash = "sha256:bef32a5e327bd8e5af915d3416ffefdbe65ed975b646b3805be81b23580b57b8", size = 2936578, upload-time = "2025-09-01T11:14:17.638Z" }, - { url = "https://files.pythonhosted.org/packages/b3/61/0ab90f421c6194705a99d0fa9f6ee2045d916e4455fdbb095a9c2c9a520f/cryptography-45.0.7-cp311-abi3-win_amd64.whl", hash = "sha256:3808e6b2e5f0b46d981c24d79648e5c25c35e59902ea4391a0dcb3e667bf7443", size = 3405400, upload-time = "2025-09-01T11:14:18.958Z" }, - { url = 
"https://files.pythonhosted.org/packages/63/e8/c436233ddf19c5f15b25ace33979a9dd2e7aa1a59209a0ee8554179f1cc0/cryptography-45.0.7-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:bfb4c801f65dd61cedfc61a83732327fafbac55a47282e6f26f073ca7a41c3b2", size = 7021824, upload-time = "2025-09-01T11:14:20.954Z" }, - { url = "https://files.pythonhosted.org/packages/bc/4c/8f57f2500d0ccd2675c5d0cc462095adf3faa8c52294ba085c036befb901/cryptography-45.0.7-cp37-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:81823935e2f8d476707e85a78a405953a03ef7b7b4f55f93f7c2d9680e5e0691", size = 4202233, upload-time = "2025-09-01T11:14:22.454Z" }, - { url = "https://files.pythonhosted.org/packages/eb/ac/59b7790b4ccaed739fc44775ce4645c9b8ce54cbec53edf16c74fd80cb2b/cryptography-45.0.7-cp37-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3994c809c17fc570c2af12c9b840d7cea85a9fd3e5c0e0491f4fa3c029216d59", size = 4423075, upload-time = "2025-09-01T11:14:24.287Z" }, - { url = "https://files.pythonhosted.org/packages/b8/56/d4f07ea21434bf891faa088a6ac15d6d98093a66e75e30ad08e88aa2b9ba/cryptography-45.0.7-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:dad43797959a74103cb59c5dac71409f9c27d34c8a05921341fb64ea8ccb1dd4", size = 4204517, upload-time = "2025-09-01T11:14:25.679Z" }, - { url = "https://files.pythonhosted.org/packages/e8/ac/924a723299848b4c741c1059752c7cfe09473b6fd77d2920398fc26bfb53/cryptography-45.0.7-cp37-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:ce7a453385e4c4693985b4a4a3533e041558851eae061a58a5405363b098fcd3", size = 3882893, upload-time = "2025-09-01T11:14:27.1Z" }, - { url = "https://files.pythonhosted.org/packages/83/dc/4dab2ff0a871cc2d81d3ae6d780991c0192b259c35e4d83fe1de18b20c70/cryptography-45.0.7-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:b04f85ac3a90c227b6e5890acb0edbaf3140938dbecf07bff618bf3638578cf1", size = 4450132, upload-time = "2025-09-01T11:14:28.58Z" }, - { url = 
"https://files.pythonhosted.org/packages/12/dd/b2882b65db8fc944585d7fb00d67cf84a9cef4e77d9ba8f69082e911d0de/cryptography-45.0.7-cp37-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:48c41a44ef8b8c2e80ca4527ee81daa4c527df3ecbc9423c41a420a9559d0e27", size = 4204086, upload-time = "2025-09-01T11:14:30.572Z" }, - { url = "https://files.pythonhosted.org/packages/5d/fa/1d5745d878048699b8eb87c984d4ccc5da4f5008dfd3ad7a94040caca23a/cryptography-45.0.7-cp37-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:f3df7b3d0f91b88b2106031fd995802a2e9ae13e02c36c1fc075b43f420f3a17", size = 4449383, upload-time = "2025-09-01T11:14:32.046Z" }, - { url = "https://files.pythonhosted.org/packages/36/8b/fc61f87931bc030598e1876c45b936867bb72777eac693e905ab89832670/cryptography-45.0.7-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:dd342f085542f6eb894ca00ef70236ea46070c8a13824c6bde0dfdcd36065b9b", size = 4332186, upload-time = "2025-09-01T11:14:33.95Z" }, - { url = "https://files.pythonhosted.org/packages/0b/11/09700ddad7443ccb11d674efdbe9a832b4455dc1f16566d9bd3834922ce5/cryptography-45.0.7-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:1993a1bb7e4eccfb922b6cd414f072e08ff5816702a0bdb8941c247a6b1b287c", size = 4561639, upload-time = "2025-09-01T11:14:35.343Z" }, - { url = "https://files.pythonhosted.org/packages/71/ed/8f4c1337e9d3b94d8e50ae0b08ad0304a5709d483bfcadfcc77a23dbcb52/cryptography-45.0.7-cp37-abi3-win32.whl", hash = "sha256:18fcf70f243fe07252dcb1b268a687f2358025ce32f9f88028ca5c364b123ef5", size = 2926552, upload-time = "2025-09-01T11:14:36.929Z" }, - { url = "https://files.pythonhosted.org/packages/bc/ff/026513ecad58dacd45d1d24ebe52b852165a26e287177de1d545325c0c25/cryptography-45.0.7-cp37-abi3-win_amd64.whl", hash = "sha256:7285a89df4900ed3bfaad5679b1e668cb4b38a8de1ccbfc84b05f34512da0a90", size = 3392742, upload-time = "2025-09-01T11:14:38.368Z" }, + { url = 
"https://files.pythonhosted.org/packages/e0/98/7a8df8c19a335c8028414738490fc3955c0cecbfdd37fcc1b9c3d04bd561/cryptography-46.0.2-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:f3e32ab7dd1b1ef67b9232c4cf5e2ee4cd517d4316ea910acaaa9c5712a1c663", size = 7261255, upload-time = "2025-10-01T00:27:22.947Z" }, + { url = "https://files.pythonhosted.org/packages/c6/38/b2adb2aa1baa6706adc3eb746691edd6f90a656a9a65c3509e274d15a2b8/cryptography-46.0.2-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:1fd1a69086926b623ef8126b4c33d5399ce9e2f3fac07c9c734c2a4ec38b6d02", size = 4297596, upload-time = "2025-10-01T00:27:25.258Z" }, + { url = "https://files.pythonhosted.org/packages/e4/27/0f190ada240003119488ae66c897b5e97149292988f556aef4a6a2a57595/cryptography-46.0.2-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:bb7fb9cd44c2582aa5990cf61a4183e6f54eea3172e54963787ba47287edd135", size = 4450899, upload-time = "2025-10-01T00:27:27.458Z" }, + { url = "https://files.pythonhosted.org/packages/85/d5/e4744105ab02fdf6bb58ba9a816e23b7a633255987310b4187d6745533db/cryptography-46.0.2-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:9066cfd7f146f291869a9898b01df1c9b0e314bfa182cef432043f13fc462c92", size = 4300382, upload-time = "2025-10-01T00:27:29.091Z" }, + { url = "https://files.pythonhosted.org/packages/33/fb/bf9571065c18c04818cb07de90c43fc042c7977c68e5de6876049559c72f/cryptography-46.0.2-cp311-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:97e83bf4f2f2c084d8dd792d13841d0a9b241643151686010866bbd076b19659", size = 4017347, upload-time = "2025-10-01T00:27:30.767Z" }, + { url = "https://files.pythonhosted.org/packages/35/72/fc51856b9b16155ca071080e1a3ad0c3a8e86616daf7eb018d9565b99baa/cryptography-46.0.2-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:4a766d2a5d8127364fd936572c6e6757682fc5dfcbdba1632d4554943199f2fa", size = 4983500, upload-time = "2025-10-01T00:27:32.741Z" }, + { url = 
"https://files.pythonhosted.org/packages/c1/53/0f51e926799025e31746d454ab2e36f8c3f0d41592bc65cb9840368d3275/cryptography-46.0.2-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:fab8f805e9675e61ed8538f192aad70500fa6afb33a8803932999b1049363a08", size = 4482591, upload-time = "2025-10-01T00:27:34.869Z" }, + { url = "https://files.pythonhosted.org/packages/86/96/4302af40b23ab8aa360862251fb8fc450b2a06ff24bc5e261c2007f27014/cryptography-46.0.2-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:1e3b6428a3d56043bff0bb85b41c535734204e599c1c0977e1d0f261b02f3ad5", size = 4300019, upload-time = "2025-10-01T00:27:37.029Z" }, + { url = "https://files.pythonhosted.org/packages/9b/59/0be12c7fcc4c5e34fe2b665a75bc20958473047a30d095a7657c218fa9e8/cryptography-46.0.2-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:1a88634851d9b8de8bb53726f4300ab191d3b2f42595e2581a54b26aba71b7cc", size = 4950006, upload-time = "2025-10-01T00:27:40.272Z" }, + { url = "https://files.pythonhosted.org/packages/55/1d/42fda47b0111834b49e31590ae14fd020594d5e4dadd639bce89ad790fba/cryptography-46.0.2-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:be939b99d4e091eec9a2bcf41aaf8f351f312cd19ff74b5c83480f08a8a43e0b", size = 4482088, upload-time = "2025-10-01T00:27:42.668Z" }, + { url = "https://files.pythonhosted.org/packages/17/50/60f583f69aa1602c2bdc7022dae86a0d2b837276182f8c1ec825feb9b874/cryptography-46.0.2-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:9f13b040649bc18e7eb37936009b24fd31ca095a5c647be8bb6aaf1761142bd1", size = 4425599, upload-time = "2025-10-01T00:27:44.616Z" }, + { url = "https://files.pythonhosted.org/packages/d1/57/d8d4134cd27e6e94cf44adb3f3489f935bde85f3a5508e1b5b43095b917d/cryptography-46.0.2-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:9bdc25e4e01b261a8fda4e98618f1c9515febcecebc9566ddf4a70c63967043b", size = 4697458, upload-time = "2025-10-01T00:27:46.209Z" }, + { url = 
"https://files.pythonhosted.org/packages/d1/2b/531e37408573e1da33adfb4c58875013ee8ac7d548d1548967d94a0ae5c4/cryptography-46.0.2-cp311-abi3-win32.whl", hash = "sha256:8b9bf67b11ef9e28f4d78ff88b04ed0929fcd0e4f70bb0f704cfc32a5c6311ee", size = 3056077, upload-time = "2025-10-01T00:27:48.424Z" }, + { url = "https://files.pythonhosted.org/packages/a8/cd/2f83cafd47ed2dc5a3a9c783ff5d764e9e70d3a160e0df9a9dcd639414ce/cryptography-46.0.2-cp311-abi3-win_amd64.whl", hash = "sha256:758cfc7f4c38c5c5274b55a57ef1910107436f4ae842478c4989abbd24bd5acb", size = 3512585, upload-time = "2025-10-01T00:27:50.521Z" }, + { url = "https://files.pythonhosted.org/packages/00/36/676f94e10bfaa5c5b86c469ff46d3e0663c5dc89542f7afbadac241a3ee4/cryptography-46.0.2-cp311-abi3-win_arm64.whl", hash = "sha256:218abd64a2e72f8472c2102febb596793347a3e65fafbb4ad50519969da44470", size = 2927474, upload-time = "2025-10-01T00:27:52.91Z" }, + { url = "https://files.pythonhosted.org/packages/6f/cc/47fc6223a341f26d103cb6da2216805e08a37d3b52bee7f3b2aee8066f95/cryptography-46.0.2-cp314-cp314t-macosx_10_9_universal2.whl", hash = "sha256:bda55e8dbe8533937956c996beaa20266a8eca3570402e52ae52ed60de1faca8", size = 7198626, upload-time = "2025-10-01T00:27:54.8Z" }, + { url = "https://files.pythonhosted.org/packages/93/22/d66a8591207c28bbe4ac7afa25c4656dc19dc0db29a219f9809205639ede/cryptography-46.0.2-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:e7155c0b004e936d381b15425273aee1cebc94f879c0ce82b0d7fecbf755d53a", size = 4287584, upload-time = "2025-10-01T00:27:57.018Z" }, + { url = "https://files.pythonhosted.org/packages/8c/3e/fac3ab6302b928e0398c269eddab5978e6c1c50b2b77bb5365ffa8633b37/cryptography-46.0.2-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:a61c154cc5488272a6c4b86e8d5beff4639cdb173d75325ce464d723cda0052b", size = 4433796, upload-time = "2025-10-01T00:27:58.631Z" }, + { url = 
"https://files.pythonhosted.org/packages/7d/d8/24392e5d3c58e2d83f98fe5a2322ae343360ec5b5b93fe18bc52e47298f5/cryptography-46.0.2-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:9ec3f2e2173f36a9679d3b06d3d01121ab9b57c979de1e6a244b98d51fea1b20", size = 4292126, upload-time = "2025-10-01T00:28:00.643Z" }, + { url = "https://files.pythonhosted.org/packages/ed/38/3d9f9359b84c16c49a5a336ee8be8d322072a09fac17e737f3bb11f1ce64/cryptography-46.0.2-cp314-cp314t-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:2fafb6aa24e702bbf74de4cb23bfa2c3beb7ab7683a299062b69724c92e0fa73", size = 3993056, upload-time = "2025-10-01T00:28:02.8Z" }, + { url = "https://files.pythonhosted.org/packages/d6/a3/4c44fce0d49a4703cc94bfbe705adebf7ab36efe978053742957bc7ec324/cryptography-46.0.2-cp314-cp314t-manylinux_2_28_ppc64le.whl", hash = "sha256:0c7ffe8c9b1fcbb07a26d7c9fa5e857c2fe80d72d7b9e0353dcf1d2180ae60ee", size = 4967604, upload-time = "2025-10-01T00:28:04.783Z" }, + { url = "https://files.pythonhosted.org/packages/eb/c2/49d73218747c8cac16bb8318a5513fde3129e06a018af3bc4dc722aa4a98/cryptography-46.0.2-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:5840f05518caa86b09d23f8b9405a7b6d5400085aa14a72a98fdf5cf1568c0d2", size = 4465367, upload-time = "2025-10-01T00:28:06.864Z" }, + { url = "https://files.pythonhosted.org/packages/1b/64/9afa7d2ee742f55ca6285a54386ed2778556a4ed8871571cb1c1bfd8db9e/cryptography-46.0.2-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:27c53b4f6a682a1b645fbf1cd5058c72cf2f5aeba7d74314c36838c7cbc06e0f", size = 4291678, upload-time = "2025-10-01T00:28:08.982Z" }, + { url = "https://files.pythonhosted.org/packages/50/48/1696d5ea9623a7b72ace87608f6899ca3c331709ac7ebf80740abb8ac673/cryptography-46.0.2-cp314-cp314t-manylinux_2_34_ppc64le.whl", hash = "sha256:512c0250065e0a6b286b2db4bbcc2e67d810acd53eb81733e71314340366279e", size = 4931366, upload-time = "2025-10-01T00:28:10.74Z" }, + { url = 
"https://files.pythonhosted.org/packages/eb/3c/9dfc778401a334db3b24435ee0733dd005aefb74afe036e2d154547cb917/cryptography-46.0.2-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:07c0eb6657c0e9cca5891f4e35081dbf985c8131825e21d99b4f440a8f496f36", size = 4464738, upload-time = "2025-10-01T00:28:12.491Z" }, + { url = "https://files.pythonhosted.org/packages/dc/b1/abcde62072b8f3fd414e191a6238ce55a0050e9738090dc6cded24c12036/cryptography-46.0.2-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:48b983089378f50cba258f7f7aa28198c3f6e13e607eaf10472c26320332ca9a", size = 4419305, upload-time = "2025-10-01T00:28:14.145Z" }, + { url = "https://files.pythonhosted.org/packages/c7/1f/3d2228492f9391395ca34c677e8f2571fb5370fe13dc48c1014f8c509864/cryptography-46.0.2-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:e6f6775eaaa08c0eec73e301f7592f4367ccde5e4e4df8e58320f2ebf161ea2c", size = 4681201, upload-time = "2025-10-01T00:28:15.951Z" }, + { url = "https://files.pythonhosted.org/packages/de/77/b687745804a93a55054f391528fcfc76c3d6bfd082ce9fb62c12f0d29fc1/cryptography-46.0.2-cp314-cp314t-win32.whl", hash = "sha256:e8633996579961f9b5a3008683344c2558d38420029d3c0bc7ff77c17949a4e1", size = 3022492, upload-time = "2025-10-01T00:28:17.643Z" }, + { url = "https://files.pythonhosted.org/packages/60/a5/8d498ef2996e583de0bef1dcc5e70186376f00883ae27bf2133f490adf21/cryptography-46.0.2-cp314-cp314t-win_amd64.whl", hash = "sha256:48c01988ecbb32979bb98731f5c2b2f79042a6c58cc9a319c8c2f9987c7f68f9", size = 3496215, upload-time = "2025-10-01T00:28:19.272Z" }, + { url = "https://files.pythonhosted.org/packages/56/db/ee67aaef459a2706bc302b15889a1a8126ebe66877bab1487ae6ad00f33d/cryptography-46.0.2-cp314-cp314t-win_arm64.whl", hash = "sha256:8e2ad4d1a5899b7caa3a450e33ee2734be7cc0689010964703a7c4bcc8dd4fd0", size = 2919255, upload-time = "2025-10-01T00:28:21.115Z" }, + { url = 
"https://files.pythonhosted.org/packages/d5/bb/fa95abcf147a1b0bb94d95f53fbb09da77b24c776c5d87d36f3d94521d2c/cryptography-46.0.2-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:a08e7401a94c002e79dc3bc5231b6558cd4b2280ee525c4673f650a37e2c7685", size = 7248090, upload-time = "2025-10-01T00:28:22.846Z" }, + { url = "https://files.pythonhosted.org/packages/b7/66/f42071ce0e3ffbfa80a88feadb209c779fda92a23fbc1e14f74ebf72ef6b/cryptography-46.0.2-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d30bc11d35743bf4ddf76674a0a369ec8a21f87aaa09b0661b04c5f6c46e8d7b", size = 4293123, upload-time = "2025-10-01T00:28:25.072Z" }, + { url = "https://files.pythonhosted.org/packages/a8/5d/1fdbd2e5c1ba822828d250e5a966622ef00185e476d1cd2726b6dd135e53/cryptography-46.0.2-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:bca3f0ce67e5a2a2cf524e86f44697c4323a86e0fd7ba857de1c30d52c11ede1", size = 4439524, upload-time = "2025-10-01T00:28:26.808Z" }, + { url = "https://files.pythonhosted.org/packages/c8/c1/5e4989a7d102d4306053770d60f978c7b6b1ea2ff8c06e0265e305b23516/cryptography-46.0.2-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:ff798ad7a957a5021dcbab78dfff681f0cf15744d0e6af62bd6746984d9c9e9c", size = 4297264, upload-time = "2025-10-01T00:28:29.327Z" }, + { url = "https://files.pythonhosted.org/packages/28/78/b56f847d220cb1d6d6aef5a390e116ad603ce13a0945a3386a33abc80385/cryptography-46.0.2-cp38-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:cb5e8daac840e8879407acbe689a174f5ebaf344a062f8918e526824eb5d97af", size = 4011872, upload-time = "2025-10-01T00:28:31.479Z" }, + { url = "https://files.pythonhosted.org/packages/e1/80/2971f214b066b888944f7b57761bf709ee3f2cf805619a18b18cab9b263c/cryptography-46.0.2-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:3f37aa12b2d91e157827d90ce78f6180f0c02319468a0aea86ab5a9566da644b", size = 4978458, upload-time = "2025-10-01T00:28:33.267Z" }, + { url = 
"https://files.pythonhosted.org/packages/a5/84/0cb0a2beaa4f1cbe63ebec4e97cd7e0e9f835d0ba5ee143ed2523a1e0016/cryptography-46.0.2-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:5e38f203160a48b93010b07493c15f2babb4e0f2319bbd001885adb3f3696d21", size = 4472195, upload-time = "2025-10-01T00:28:36.039Z" }, + { url = "https://files.pythonhosted.org/packages/30/8b/2b542ddbf78835c7cd67b6fa79e95560023481213a060b92352a61a10efe/cryptography-46.0.2-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:d19f5f48883752b5ab34cff9e2f7e4a7f216296f33714e77d1beb03d108632b6", size = 4296791, upload-time = "2025-10-01T00:28:37.732Z" }, + { url = "https://files.pythonhosted.org/packages/78/12/9065b40201b4f4876e93b9b94d91feb18de9150d60bd842a16a21565007f/cryptography-46.0.2-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:04911b149eae142ccd8c9a68892a70c21613864afb47aba92d8c7ed9cc001023", size = 4939629, upload-time = "2025-10-01T00:28:39.654Z" }, + { url = "https://files.pythonhosted.org/packages/f6/9e/6507dc048c1b1530d372c483dfd34e7709fc542765015425f0442b08547f/cryptography-46.0.2-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:8b16c1ede6a937c291d41176934268e4ccac2c6521c69d3f5961c5a1e11e039e", size = 4471988, upload-time = "2025-10-01T00:28:41.822Z" }, + { url = "https://files.pythonhosted.org/packages/b1/86/d025584a5f7d5c5ec8d3633dbcdce83a0cd579f1141ceada7817a4c26934/cryptography-46.0.2-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:747b6f4a4a23d5a215aadd1d0b12233b4119c4313df83ab4137631d43672cc90", size = 4422989, upload-time = "2025-10-01T00:28:43.608Z" }, + { url = "https://files.pythonhosted.org/packages/4b/39/536370418b38a15a61bbe413006b79dfc3d2b4b0eafceb5581983f973c15/cryptography-46.0.2-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:6b275e398ab3a7905e168c036aad54b5969d63d3d9099a0a66cc147a3cc983be", size = 4685578, upload-time = "2025-10-01T00:28:45.361Z" }, + { url = 
"https://files.pythonhosted.org/packages/15/52/ea7e2b1910f547baed566c866fbb86de2402e501a89ecb4871ea7f169a81/cryptography-46.0.2-cp38-abi3-win32.whl", hash = "sha256:0b507c8e033307e37af61cb9f7159b416173bdf5b41d11c4df2e499a1d8e007c", size = 3036711, upload-time = "2025-10-01T00:28:47.096Z" }, + { url = "https://files.pythonhosted.org/packages/71/9e/171f40f9c70a873e73c2efcdbe91e1d4b1777a03398fa1c4af3c56a2477a/cryptography-46.0.2-cp38-abi3-win_amd64.whl", hash = "sha256:f9b2dc7668418fb6f221e4bf701f716e05e8eadb4f1988a2487b11aedf8abe62", size = 3500007, upload-time = "2025-10-01T00:28:48.967Z" }, + { url = "https://files.pythonhosted.org/packages/3e/7c/15ad426257615f9be8caf7f97990cf3dcbb5b8dd7ed7e0db581a1c4759dd/cryptography-46.0.2-cp38-abi3-win_arm64.whl", hash = "sha256:91447f2b17e83c9e0c89f133119d83f94ce6e0fb55dd47da0a959316e6e9cfa1", size = 2918153, upload-time = "2025-10-01T00:28:51.003Z" }, ] [[package]] @@ -483,16 +514,16 @@ wheels = [ [[package]] name = "fastapi" -version = "0.116.1" +version = "0.118.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pydantic" }, { name = "starlette" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/78/d7/6c8b3bfe33eeffa208183ec037fee0cce9f7f024089ab1c5d12ef04bd27c/fastapi-0.116.1.tar.gz", hash = "sha256:ed52cbf946abfd70c5a0dccb24673f0670deeb517a88b3544d03c2a6bf283143", size = 296485, upload-time = "2025-07-11T16:22:32.057Z" } +sdist = { url = "https://files.pythonhosted.org/packages/28/3c/2b9345a6504e4055eaa490e0b41c10e338ad61d9aeaae41d97807873cdf2/fastapi-0.118.0.tar.gz", hash = "sha256:5e81654d98c4d2f53790a7d32d25a7353b30c81441be7d0958a26b5d761fa1c8", size = 310536, upload-time = "2025-09-29T03:37:23.126Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e5/47/d63c60f59a59467fda0f93f46335c9d18526d7071f025cb5b89d5353ea42/fastapi-0.116.1-py3-none-any.whl", hash = "sha256:c46ac7c312df840f0c9e220f7964bada936781bc4e2e6eb71f1c4d7553786565", 
size = 95631, upload-time = "2025-07-11T16:22:30.485Z" }, + { url = "https://files.pythonhosted.org/packages/54/20/54e2bdaad22ca91a59455251998d43094d5c3d3567c52c7c04774b3f43f2/fastapi-0.118.0-py3-none-any.whl", hash = "sha256:705137a61e2ef71019d2445b123aa8845bd97273c395b744d5a7dfe559056855", size = 97694, upload-time = "2025-09-29T03:37:21.338Z" }, ] [[package]] @@ -595,7 +626,6 @@ dependencies = [ { name = "httpx" }, { name = "loguru" }, { name = "maxminddb" }, - { name = "msgpack-lazer-api" }, { name = "newrelic" }, { name = "osupyparser" }, { name = "passlib", extra = ["bcrypt"] }, @@ -617,7 +647,6 @@ dependencies = [ [package.dev-dependencies] dev = [ - { name = "maturin" }, { name = "pre-commit" }, { name = "pyright" }, { name = "ruff" }, @@ -639,7 +668,6 @@ requires-dist = [ { name = "httpx", specifier = ">=0.28.1" }, { name = "loguru", specifier = ">=0.7.3" }, { name = "maxminddb", specifier = ">=2.8.2" }, - { name = "msgpack-lazer-api", editable = "packages/msgpack_lazer_api" }, { name = "newrelic", specifier = ">=10.1.0" }, { name = "osupyparser", git = "https://github.com/MingxuanGame/osupyparser.git" }, { name = "passlib", extras = ["bcrypt"], specifier = ">=1.7.4" }, @@ -661,7 +689,6 @@ requires-dist = [ [package.metadata.requires-dev] dev = [ - { name = "maturin", specifier = ">=1.9.2" }, { name = "pre-commit", specifier = ">=4.2.0" }, { name = "pyright", specifier = ">=1.1.404" }, { name = "ruff", specifier = ">=0.12.4" }, @@ -762,11 +789,11 @@ wheels = [ [[package]] name = "identify" -version = "2.6.14" +version = "2.6.15" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/52/c4/62963f25a678f6a050fb0505a65e9e726996171e6dbe1547f79619eefb15/identify-2.6.14.tar.gz", hash = "sha256:663494103b4f717cb26921c52f8751363dc89db64364cd836a9bf1535f53cd6a", size = 99283, upload-time = "2025-09-06T19:30:52.938Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/ff/e7/685de97986c916a6d93b3876139e00eef26ad5bbbd61925d670ae8013449/identify-2.6.15.tar.gz", hash = "sha256:e4f4864b96c6557ef2a1e1c951771838f4edc9df3a72ec7118b338801b11c7bf", size = 99311, upload-time = "2025-10-02T17:43:40.631Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e5/ae/2ad30f4652712c82f1c23423d79136fbce338932ad166d70c1efb86a5998/identify-2.6.14-py2.py3-none-any.whl", hash = "sha256:11a073da82212c6646b1f39bb20d4483bfb9543bd5566fec60053c4bb309bf2e", size = 99172, upload-time = "2025-09-06T19:30:51.759Z" }, + { url = "https://files.pythonhosted.org/packages/0f/1c/e5fd8f973d4f375adb21565739498e2e9a1e54c858a97b9a8ccfdc81da9b/identify-2.6.15-py2.py3-none-any.whl", hash = "sha256:1181ef7608e00704db228516541eb83a88a9f94433a8c80bb9b5bd54b1d81757", size = 99183, upload-time = "2025-10-02T17:43:39.137Z" }, ] [[package]] @@ -814,61 +841,65 @@ wheels = [ [[package]] name = "markupsafe" -version = "3.0.2" +version = "3.0.3" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/b2/97/5d42485e71dfc078108a86d6de8fa46db44a1a9295e89c5d6d4a06e23a62/markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0", size = 20537, upload-time = "2024-10-18T15:21:54.129Z" } +sdist = { url = "https://files.pythonhosted.org/packages/7e/99/7690b6d4034fffd95959cbe0c02de8deb3098cc577c67bb6a24fe5d7caa7/markupsafe-3.0.3.tar.gz", hash = "sha256:722695808f4b6457b320fdc131280796bdceb04ab50fe1795cd540799ebe1698", size = 80313, upload-time = "2025-09-27T18:37:40.426Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/22/09/d1f21434c97fc42f09d290cbb6350d44eb12f09cc62c9476effdb33a18aa/MarkupSafe-3.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf", size = 14274, upload-time = "2024-10-18T15:21:13.777Z" }, - { url = 
"https://files.pythonhosted.org/packages/6b/b0/18f76bba336fa5aecf79d45dcd6c806c280ec44538b3c13671d49099fdd0/MarkupSafe-3.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225", size = 12348, upload-time = "2024-10-18T15:21:14.822Z" }, - { url = "https://files.pythonhosted.org/packages/e0/25/dd5c0f6ac1311e9b40f4af06c78efde0f3b5cbf02502f8ef9501294c425b/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028", size = 24149, upload-time = "2024-10-18T15:21:15.642Z" }, - { url = "https://files.pythonhosted.org/packages/f3/f0/89e7aadfb3749d0f52234a0c8c7867877876e0a20b60e2188e9850794c17/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8", size = 23118, upload-time = "2024-10-18T15:21:17.133Z" }, - { url = "https://files.pythonhosted.org/packages/d5/da/f2eeb64c723f5e3777bc081da884b414671982008c47dcc1873d81f625b6/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c", size = 22993, upload-time = "2024-10-18T15:21:18.064Z" }, - { url = "https://files.pythonhosted.org/packages/da/0e/1f32af846df486dce7c227fe0f2398dc7e2e51d4a370508281f3c1c5cddc/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557", size = 24178, upload-time = "2024-10-18T15:21:18.859Z" }, - { url = "https://files.pythonhosted.org/packages/c4/f6/bb3ca0532de8086cbff5f06d137064c8410d10779c4c127e0e47d17c0b71/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22", size = 23319, upload-time = "2024-10-18T15:21:19.671Z" }, - { url = 
"https://files.pythonhosted.org/packages/a2/82/8be4c96ffee03c5b4a034e60a31294daf481e12c7c43ab8e34a1453ee48b/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48", size = 23352, upload-time = "2024-10-18T15:21:20.971Z" }, - { url = "https://files.pythonhosted.org/packages/51/ae/97827349d3fcffee7e184bdf7f41cd6b88d9919c80f0263ba7acd1bbcb18/MarkupSafe-3.0.2-cp312-cp312-win32.whl", hash = "sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30", size = 15097, upload-time = "2024-10-18T15:21:22.646Z" }, - { url = "https://files.pythonhosted.org/packages/c1/80/a61f99dc3a936413c3ee4e1eecac96c0da5ed07ad56fd975f1a9da5bc630/MarkupSafe-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87", size = 15601, upload-time = "2024-10-18T15:21:23.499Z" }, - { url = "https://files.pythonhosted.org/packages/83/0e/67eb10a7ecc77a0c2bbe2b0235765b98d164d81600746914bebada795e97/MarkupSafe-3.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd", size = 14274, upload-time = "2024-10-18T15:21:24.577Z" }, - { url = "https://files.pythonhosted.org/packages/2b/6d/9409f3684d3335375d04e5f05744dfe7e9f120062c9857df4ab490a1031a/MarkupSafe-3.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430", size = 12352, upload-time = "2024-10-18T15:21:25.382Z" }, - { url = "https://files.pythonhosted.org/packages/d2/f5/6eadfcd3885ea85fe2a7c128315cc1bb7241e1987443d78c8fe712d03091/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094", size = 24122, upload-time = "2024-10-18T15:21:26.199Z" }, - { url = 
"https://files.pythonhosted.org/packages/0c/91/96cf928db8236f1bfab6ce15ad070dfdd02ed88261c2afafd4b43575e9e9/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396", size = 23085, upload-time = "2024-10-18T15:21:27.029Z" }, - { url = "https://files.pythonhosted.org/packages/c2/cf/c9d56af24d56ea04daae7ac0940232d31d5a8354f2b457c6d856b2057d69/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79", size = 22978, upload-time = "2024-10-18T15:21:27.846Z" }, - { url = "https://files.pythonhosted.org/packages/2a/9f/8619835cd6a711d6272d62abb78c033bda638fdc54c4e7f4272cf1c0962b/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a", size = 24208, upload-time = "2024-10-18T15:21:28.744Z" }, - { url = "https://files.pythonhosted.org/packages/f9/bf/176950a1792b2cd2102b8ffeb5133e1ed984547b75db47c25a67d3359f77/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca", size = 23357, upload-time = "2024-10-18T15:21:29.545Z" }, - { url = "https://files.pythonhosted.org/packages/ce/4f/9a02c1d335caabe5c4efb90e1b6e8ee944aa245c1aaaab8e8a618987d816/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c", size = 23344, upload-time = "2024-10-18T15:21:30.366Z" }, - { url = "https://files.pythonhosted.org/packages/ee/55/c271b57db36f748f0e04a759ace9f8f759ccf22b4960c270c78a394f58be/MarkupSafe-3.0.2-cp313-cp313-win32.whl", hash = "sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1", size = 15101, upload-time = "2024-10-18T15:21:31.207Z" }, - { url = 
"https://files.pythonhosted.org/packages/29/88/07df22d2dd4df40aba9f3e402e6dc1b8ee86297dddbad4872bd5e7b0094f/MarkupSafe-3.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f", size = 15603, upload-time = "2024-10-18T15:21:32.032Z" }, - { url = "https://files.pythonhosted.org/packages/62/6a/8b89d24db2d32d433dffcd6a8779159da109842434f1dd2f6e71f32f738c/MarkupSafe-3.0.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c", size = 14510, upload-time = "2024-10-18T15:21:33.625Z" }, - { url = "https://files.pythonhosted.org/packages/7a/06/a10f955f70a2e5a9bf78d11a161029d278eeacbd35ef806c3fd17b13060d/MarkupSafe-3.0.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb", size = 12486, upload-time = "2024-10-18T15:21:34.611Z" }, - { url = "https://files.pythonhosted.org/packages/34/cf/65d4a571869a1a9078198ca28f39fba5fbb910f952f9dbc5220afff9f5e6/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c", size = 25480, upload-time = "2024-10-18T15:21:35.398Z" }, - { url = "https://files.pythonhosted.org/packages/0c/e3/90e9651924c430b885468b56b3d597cabf6d72be4b24a0acd1fa0e12af67/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d", size = 23914, upload-time = "2024-10-18T15:21:36.231Z" }, - { url = "https://files.pythonhosted.org/packages/66/8c/6c7cf61f95d63bb866db39085150df1f2a5bd3335298f14a66b48e92659c/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe", size = 23796, upload-time = "2024-10-18T15:21:37.073Z" }, - { url = 
"https://files.pythonhosted.org/packages/bb/35/cbe9238ec3f47ac9a7c8b3df7a808e7cb50fe149dc7039f5f454b3fba218/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5", size = 25473, upload-time = "2024-10-18T15:21:37.932Z" }, - { url = "https://files.pythonhosted.org/packages/e6/32/7621a4382488aa283cc05e8984a9c219abad3bca087be9ec77e89939ded9/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a", size = 24114, upload-time = "2024-10-18T15:21:39.799Z" }, - { url = "https://files.pythonhosted.org/packages/0d/80/0985960e4b89922cb5a0bac0ed39c5b96cbc1a536a99f30e8c220a996ed9/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9", size = 24098, upload-time = "2024-10-18T15:21:40.813Z" }, - { url = "https://files.pythonhosted.org/packages/82/78/fedb03c7d5380df2427038ec8d973587e90561b2d90cd472ce9254cf348b/MarkupSafe-3.0.2-cp313-cp313t-win32.whl", hash = "sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6", size = 15208, upload-time = "2024-10-18T15:21:41.814Z" }, - { url = "https://files.pythonhosted.org/packages/4f/65/6079a46068dfceaeabb5dcad6d674f5f5c61a6fa5673746f42a9f4c233b3/MarkupSafe-3.0.2-cp313-cp313t-win_amd64.whl", hash = "sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f", size = 15739, upload-time = "2024-10-18T15:21:42.784Z" }, -] - -[[package]] -name = "maturin" -version = "1.9.4" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/13/7c/b11b870fc4fd84de2099906314ce45488ae17be32ff5493519a6cddc518a/maturin-1.9.4.tar.gz", hash = "sha256:235163a0c99bc6f380fb8786c04fd14dcf6cd622ff295ea3de525015e6ac40cf", size = 213647, upload-time = "2025-08-27T11:37:57.079Z" } -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/f2/90/0d99389eea1939116fca841cad0763600c8d3183a02a9478d066736c60e8/maturin-1.9.4-py3-none-linux_armv6l.whl", hash = "sha256:6ff37578e3f5fdbe685110d45f60af1f5a7dfce70a1e26dfe3810af66853ecae", size = 8276133, upload-time = "2025-08-27T11:37:23.325Z" }, - { url = "https://files.pythonhosted.org/packages/f4/ed/c8ec68b383e50f084bf1fa9605e62a90cd32a3f75d9894ed3a6e5d4cc5b3/maturin-1.9.4-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:f3837bb53611b2dafa1c090436c330f2d743ba305ef00d8801a371f4495e7e1b", size = 15994496, upload-time = "2025-08-27T11:37:27.092Z" }, - { url = "https://files.pythonhosted.org/packages/84/4e/401ff5f3cfc6b123364d4b94379bf910d7baee32c9c95b72784ff2329357/maturin-1.9.4-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:4227d627d8e3bfe45877a8d65e9d8351a9d01434549f0da75d2c06a1b570de58", size = 8362228, upload-time = "2025-08-27T11:37:31.181Z" }, - { url = "https://files.pythonhosted.org/packages/51/8e/c56176dd360da9650c62b8a5ecfb85432cf011e97e46c186901e6996002e/maturin-1.9.4-py3-none-manylinux_2_12_i686.manylinux2010_i686.musllinux_1_1_i686.whl", hash = "sha256:1bb2aa0fa29032e9c5aac03ac400396ddea12cadef242f8967e9c8ef715313a1", size = 8271397, upload-time = "2025-08-27T11:37:33.672Z" }, - { url = "https://files.pythonhosted.org/packages/d2/46/001fcc5c6ad509874896418d6169a61acd619df5b724f99766308c44a99f/maturin-1.9.4-py3-none-manylinux_2_12_x86_64.manylinux2010_x86_64.musllinux_1_1_x86_64.whl", hash = "sha256:a0868d52934c8a5d1411b42367633fdb5cd5515bec47a534192282167448ec30", size = 8775625, upload-time = "2025-08-27T11:37:35.86Z" }, - { url = "https://files.pythonhosted.org/packages/b4/2e/26fa7574f01c19b7a74680fd70e5bae2e8c40fed9683d1752e765062cc2b/maturin-1.9.4-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.musllinux_1_1_aarch64.whl", hash = "sha256:68b7b833b25741c0f553b78e8b9e095b31ae7c6611533b3c7b71f84c2cb8fc44", size = 8051117, upload-time = 
"2025-08-27T11:37:38.278Z" }, - { url = "https://files.pythonhosted.org/packages/73/ee/ca7308832d4f5b521c1aa176d9265f6f93e0bd1ad82a90fd9cd799f6b28c/maturin-1.9.4-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.musllinux_1_1_armv7l.whl", hash = "sha256:08dc86312afee55af778af919818632e35d8d0464ccd79cb86700d9ea560ccd7", size = 8132122, upload-time = "2025-08-27T11:37:40.499Z" }, - { url = "https://files.pythonhosted.org/packages/45/e8/c623955da75e801a06942edf1fdc4e772a9e8fbc1ceebbdc85d59584dc10/maturin-1.9.4-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.musllinux_1_1_ppc64le.whl", hash = "sha256:ef20ffdd943078c4c3699c29fb2ed722bb6b4419efdade6642d1dbf248f94a70", size = 10586762, upload-time = "2025-08-27T11:37:42.718Z" }, - { url = "https://files.pythonhosted.org/packages/3c/4b/19ad558fdf54e151b1b4916ed45f1952ada96684ee6db64f9cd91cabec09/maturin-1.9.4-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:368e958468431dfeec80f75eea9639b4356d8c42428b0128444424b083fecfb0", size = 8926988, upload-time = "2025-08-27T11:37:45.492Z" }, - { url = "https://files.pythonhosted.org/packages/7e/27/153ad15eccae26921e8a01812da9f3b7f9013368f8f92c36853f2043b2a3/maturin-1.9.4-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:273f879214f63f79bfe851cd7d541f8150bdbfae5dfdc3c0c4d125d02d1f41b4", size = 8536758, upload-time = "2025-08-27T11:37:48.213Z" }, - { url = "https://files.pythonhosted.org/packages/43/e3/f304c3bdc3fba9adebe5348d4d2dd015f1152c0a9027aaf52cae0bb182c8/maturin-1.9.4-py3-none-win32.whl", hash = "sha256:ed2e54d132ace7e61829bd49709331007dd9a2cc78937f598aa76a4f69b6804d", size = 7265200, upload-time = "2025-08-27T11:37:50.881Z" }, - { url = "https://files.pythonhosted.org/packages/14/14/f86d0124bf1816b99005c058a1dbdca7cb5850d9cf4b09dcae07a1bc6201/maturin-1.9.4-py3-none-win_amd64.whl", hash = "sha256:8e450bb2c9afdf38a0059ee2e1ec2b17323f152b59c16f33eb9c74edaf1f9f79", size = 8237391, upload-time = "2025-08-27T11:37:53.23Z" }, - { url = 
"https://files.pythonhosted.org/packages/3f/25/8320fc2591e45b750c3ae71fa596b47aefa802d07d6abaaa719034a85160/maturin-1.9.4-py3-none-win_arm64.whl", hash = "sha256:7a6f980a9b67a5c13c844c268eabd855b54a6a765df4b4bb07d15a990572a4c9", size = 6988277, upload-time = "2025-08-27T11:37:55.429Z" }, + { url = "https://files.pythonhosted.org/packages/5a/72/147da192e38635ada20e0a2e1a51cf8823d2119ce8883f7053879c2199b5/markupsafe-3.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d53197da72cc091b024dd97249dfc7794d6a56530370992a5e1a08983ad9230e", size = 11615, upload-time = "2025-09-27T18:36:30.854Z" }, + { url = "https://files.pythonhosted.org/packages/9a/81/7e4e08678a1f98521201c3079f77db69fb552acd56067661f8c2f534a718/markupsafe-3.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1872df69a4de6aead3491198eaf13810b565bdbeec3ae2dc8780f14458ec73ce", size = 12020, upload-time = "2025-09-27T18:36:31.971Z" }, + { url = "https://files.pythonhosted.org/packages/1e/2c/799f4742efc39633a1b54a92eec4082e4f815314869865d876824c257c1e/markupsafe-3.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3a7e8ae81ae39e62a41ec302f972ba6ae23a5c5396c8e60113e9066ef893da0d", size = 24332, upload-time = "2025-09-27T18:36:32.813Z" }, + { url = "https://files.pythonhosted.org/packages/3c/2e/8d0c2ab90a8c1d9a24f0399058ab8519a3279d1bd4289511d74e909f060e/markupsafe-3.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d6dd0be5b5b189d31db7cda48b91d7e0a9795f31430b7f271219ab30f1d3ac9d", size = 22947, upload-time = "2025-09-27T18:36:33.86Z" }, + { url = "https://files.pythonhosted.org/packages/2c/54/887f3092a85238093a0b2154bd629c89444f395618842e8b0c41783898ea/markupsafe-3.0.3-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:94c6f0bb423f739146aec64595853541634bde58b2135f27f61c1ffd1cd4d16a", size = 21962, upload-time = "2025-09-27T18:36:35.099Z" }, + { url = 
"https://files.pythonhosted.org/packages/c9/2f/336b8c7b6f4a4d95e91119dc8521402461b74a485558d8f238a68312f11c/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:be8813b57049a7dc738189df53d69395eba14fb99345e0a5994914a3864c8a4b", size = 23760, upload-time = "2025-09-27T18:36:36.001Z" }, + { url = "https://files.pythonhosted.org/packages/32/43/67935f2b7e4982ffb50a4d169b724d74b62a3964bc1a9a527f5ac4f1ee2b/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:83891d0e9fb81a825d9a6d61e3f07550ca70a076484292a70fde82c4b807286f", size = 21529, upload-time = "2025-09-27T18:36:36.906Z" }, + { url = "https://files.pythonhosted.org/packages/89/e0/4486f11e51bbba8b0c041098859e869e304d1c261e59244baa3d295d47b7/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:77f0643abe7495da77fb436f50f8dab76dbc6e5fd25d39589a0f1fe6548bfa2b", size = 23015, upload-time = "2025-09-27T18:36:37.868Z" }, + { url = "https://files.pythonhosted.org/packages/2f/e1/78ee7a023dac597a5825441ebd17170785a9dab23de95d2c7508ade94e0e/markupsafe-3.0.3-cp312-cp312-win32.whl", hash = "sha256:d88b440e37a16e651bda4c7c2b930eb586fd15ca7406cb39e211fcff3bf3017d", size = 14540, upload-time = "2025-09-27T18:36:38.761Z" }, + { url = "https://files.pythonhosted.org/packages/aa/5b/bec5aa9bbbb2c946ca2733ef9c4ca91c91b6a24580193e891b5f7dbe8e1e/markupsafe-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:26a5784ded40c9e318cfc2bdb30fe164bdb8665ded9cd64d500a34fb42067b1c", size = 15105, upload-time = "2025-09-27T18:36:39.701Z" }, + { url = "https://files.pythonhosted.org/packages/e5/f1/216fc1bbfd74011693a4fd837e7026152e89c4bcf3e77b6692fba9923123/markupsafe-3.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:35add3b638a5d900e807944a078b51922212fb3dedb01633a8defc4b01a3c85f", size = 13906, upload-time = "2025-09-27T18:36:40.689Z" }, + { url = 
"https://files.pythonhosted.org/packages/38/2f/907b9c7bbba283e68f20259574b13d005c121a0fa4c175f9bed27c4597ff/markupsafe-3.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e1cf1972137e83c5d4c136c43ced9ac51d0e124706ee1c8aa8532c1287fa8795", size = 11622, upload-time = "2025-09-27T18:36:41.777Z" }, + { url = "https://files.pythonhosted.org/packages/9c/d9/5f7756922cdd676869eca1c4e3c0cd0df60ed30199ffd775e319089cb3ed/markupsafe-3.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:116bb52f642a37c115f517494ea5feb03889e04df47eeff5b130b1808ce7c219", size = 12029, upload-time = "2025-09-27T18:36:43.257Z" }, + { url = "https://files.pythonhosted.org/packages/00/07/575a68c754943058c78f30db02ee03a64b3c638586fba6a6dd56830b30a3/markupsafe-3.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:133a43e73a802c5562be9bbcd03d090aa5a1fe899db609c29e8c8d815c5f6de6", size = 24374, upload-time = "2025-09-27T18:36:44.508Z" }, + { url = "https://files.pythonhosted.org/packages/a9/21/9b05698b46f218fc0e118e1f8168395c65c8a2c750ae2bab54fc4bd4e0e8/markupsafe-3.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ccfcd093f13f0f0b7fdd0f198b90053bf7b2f02a3927a30e63f3ccc9df56b676", size = 22980, upload-time = "2025-09-27T18:36:45.385Z" }, + { url = "https://files.pythonhosted.org/packages/7f/71/544260864f893f18b6827315b988c146b559391e6e7e8f7252839b1b846a/markupsafe-3.0.3-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:509fa21c6deb7a7a273d629cf5ec029bc209d1a51178615ddf718f5918992ab9", size = 21990, upload-time = "2025-09-27T18:36:46.916Z" }, + { url = "https://files.pythonhosted.org/packages/c2/28/b50fc2f74d1ad761af2f5dcce7492648b983d00a65b8c0e0cb457c82ebbe/markupsafe-3.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a4afe79fb3de0b7097d81da19090f4df4f8d3a2b3adaa8764138aac2e44f3af1", size = 23784, upload-time = "2025-09-27T18:36:47.884Z" }, + { url = 
"https://files.pythonhosted.org/packages/ed/76/104b2aa106a208da8b17a2fb72e033a5a9d7073c68f7e508b94916ed47a9/markupsafe-3.0.3-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:795e7751525cae078558e679d646ae45574b47ed6e7771863fcc079a6171a0fc", size = 21588, upload-time = "2025-09-27T18:36:48.82Z" }, + { url = "https://files.pythonhosted.org/packages/b5/99/16a5eb2d140087ebd97180d95249b00a03aa87e29cc224056274f2e45fd6/markupsafe-3.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8485f406a96febb5140bfeca44a73e3ce5116b2501ac54fe953e488fb1d03b12", size = 23041, upload-time = "2025-09-27T18:36:49.797Z" }, + { url = "https://files.pythonhosted.org/packages/19/bc/e7140ed90c5d61d77cea142eed9f9c303f4c4806f60a1044c13e3f1471d0/markupsafe-3.0.3-cp313-cp313-win32.whl", hash = "sha256:bdd37121970bfd8be76c5fb069c7751683bdf373db1ed6c010162b2a130248ed", size = 14543, upload-time = "2025-09-27T18:36:51.584Z" }, + { url = "https://files.pythonhosted.org/packages/05/73/c4abe620b841b6b791f2edc248f556900667a5a1cf023a6646967ae98335/markupsafe-3.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:9a1abfdc021a164803f4d485104931fb8f8c1efd55bc6b748d2f5774e78b62c5", size = 15113, upload-time = "2025-09-27T18:36:52.537Z" }, + { url = "https://files.pythonhosted.org/packages/f0/3a/fa34a0f7cfef23cf9500d68cb7c32dd64ffd58a12b09225fb03dd37d5b80/markupsafe-3.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:7e68f88e5b8799aa49c85cd116c932a1ac15caaa3f5db09087854d218359e485", size = 13911, upload-time = "2025-09-27T18:36:53.513Z" }, + { url = "https://files.pythonhosted.org/packages/e4/d7/e05cd7efe43a88a17a37b3ae96e79a19e846f3f456fe79c57ca61356ef01/markupsafe-3.0.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:218551f6df4868a8d527e3062d0fb968682fe92054e89978594c28e642c43a73", size = 11658, upload-time = "2025-09-27T18:36:54.819Z" }, + { url = 
"https://files.pythonhosted.org/packages/99/9e/e412117548182ce2148bdeacdda3bb494260c0b0184360fe0d56389b523b/markupsafe-3.0.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3524b778fe5cfb3452a09d31e7b5adefeea8c5be1d43c4f810ba09f2ceb29d37", size = 12066, upload-time = "2025-09-27T18:36:55.714Z" }, + { url = "https://files.pythonhosted.org/packages/bc/e6/fa0ffcda717ef64a5108eaa7b4f5ed28d56122c9a6d70ab8b72f9f715c80/markupsafe-3.0.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4e885a3d1efa2eadc93c894a21770e4bc67899e3543680313b09f139e149ab19", size = 25639, upload-time = "2025-09-27T18:36:56.908Z" }, + { url = "https://files.pythonhosted.org/packages/96/ec/2102e881fe9d25fc16cb4b25d5f5cde50970967ffa5dddafdb771237062d/markupsafe-3.0.3-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8709b08f4a89aa7586de0aadc8da56180242ee0ada3999749b183aa23df95025", size = 23569, upload-time = "2025-09-27T18:36:57.913Z" }, + { url = "https://files.pythonhosted.org/packages/4b/30/6f2fce1f1f205fc9323255b216ca8a235b15860c34b6798f810f05828e32/markupsafe-3.0.3-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:b8512a91625c9b3da6f127803b166b629725e68af71f8184ae7e7d54686a56d6", size = 23284, upload-time = "2025-09-27T18:36:58.833Z" }, + { url = "https://files.pythonhosted.org/packages/58/47/4a0ccea4ab9f5dcb6f79c0236d954acb382202721e704223a8aafa38b5c8/markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9b79b7a16f7fedff2495d684f2b59b0457c3b493778c9eed31111be64d58279f", size = 24801, upload-time = "2025-09-27T18:36:59.739Z" }, + { url = "https://files.pythonhosted.org/packages/6a/70/3780e9b72180b6fecb83a4814d84c3bf4b4ae4bf0b19c27196104149734c/markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:12c63dfb4a98206f045aa9563db46507995f7ef6d83b2f68eda65c307c6829eb", size = 22769, upload-time = "2025-09-27T18:37:00.719Z" }, + { 
url = "https://files.pythonhosted.org/packages/98/c5/c03c7f4125180fc215220c035beac6b9cb684bc7a067c84fc69414d315f5/markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:8f71bc33915be5186016f675cd83a1e08523649b0e33efdb898db577ef5bb009", size = 23642, upload-time = "2025-09-27T18:37:01.673Z" }, + { url = "https://files.pythonhosted.org/packages/80/d6/2d1b89f6ca4bff1036499b1e29a1d02d282259f3681540e16563f27ebc23/markupsafe-3.0.3-cp313-cp313t-win32.whl", hash = "sha256:69c0b73548bc525c8cb9a251cddf1931d1db4d2258e9599c28c07ef3580ef354", size = 14612, upload-time = "2025-09-27T18:37:02.639Z" }, + { url = "https://files.pythonhosted.org/packages/2b/98/e48a4bfba0a0ffcf9925fe2d69240bfaa19c6f7507b8cd09c70684a53c1e/markupsafe-3.0.3-cp313-cp313t-win_amd64.whl", hash = "sha256:1b4b79e8ebf6b55351f0d91fe80f893b4743f104bff22e90697db1590e47a218", size = 15200, upload-time = "2025-09-27T18:37:03.582Z" }, + { url = "https://files.pythonhosted.org/packages/0e/72/e3cc540f351f316e9ed0f092757459afbc595824ca724cbc5a5d4263713f/markupsafe-3.0.3-cp313-cp313t-win_arm64.whl", hash = "sha256:ad2cf8aa28b8c020ab2fc8287b0f823d0a7d8630784c31e9ee5edea20f406287", size = 13973, upload-time = "2025-09-27T18:37:04.929Z" }, + { url = "https://files.pythonhosted.org/packages/33/8a/8e42d4838cd89b7dde187011e97fe6c3af66d8c044997d2183fbd6d31352/markupsafe-3.0.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:eaa9599de571d72e2daf60164784109f19978b327a3910d3e9de8c97b5b70cfe", size = 11619, upload-time = "2025-09-27T18:37:06.342Z" }, + { url = "https://files.pythonhosted.org/packages/b5/64/7660f8a4a8e53c924d0fa05dc3a55c9cee10bbd82b11c5afb27d44b096ce/markupsafe-3.0.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c47a551199eb8eb2121d4f0f15ae0f923d31350ab9280078d1e5f12b249e0026", size = 12029, upload-time = "2025-09-27T18:37:07.213Z" }, + { url = 
"https://files.pythonhosted.org/packages/da/ef/e648bfd021127bef5fa12e1720ffed0c6cbb8310c8d9bea7266337ff06de/markupsafe-3.0.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f34c41761022dd093b4b6896d4810782ffbabe30f2d443ff5f083e0cbbb8c737", size = 24408, upload-time = "2025-09-27T18:37:09.572Z" }, + { url = "https://files.pythonhosted.org/packages/41/3c/a36c2450754618e62008bf7435ccb0f88053e07592e6028a34776213d877/markupsafe-3.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:457a69a9577064c05a97c41f4e65148652db078a3a509039e64d3467b9e7ef97", size = 23005, upload-time = "2025-09-27T18:37:10.58Z" }, + { url = "https://files.pythonhosted.org/packages/bc/20/b7fdf89a8456b099837cd1dc21974632a02a999ec9bf7ca3e490aacd98e7/markupsafe-3.0.3-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e8afc3f2ccfa24215f8cb28dcf43f0113ac3c37c2f0f0806d8c70e4228c5cf4d", size = 22048, upload-time = "2025-09-27T18:37:11.547Z" }, + { url = "https://files.pythonhosted.org/packages/9a/a7/591f592afdc734f47db08a75793a55d7fbcc6902a723ae4cfbab61010cc5/markupsafe-3.0.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:ec15a59cf5af7be74194f7ab02d0f59a62bdcf1a537677ce67a2537c9b87fcda", size = 23821, upload-time = "2025-09-27T18:37:12.48Z" }, + { url = "https://files.pythonhosted.org/packages/7d/33/45b24e4f44195b26521bc6f1a82197118f74df348556594bd2262bda1038/markupsafe-3.0.3-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:0eb9ff8191e8498cca014656ae6b8d61f39da5f95b488805da4bb029cccbfbaf", size = 21606, upload-time = "2025-09-27T18:37:13.485Z" }, + { url = "https://files.pythonhosted.org/packages/ff/0e/53dfaca23a69fbfbbf17a4b64072090e70717344c52eaaaa9c5ddff1e5f0/markupsafe-3.0.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:2713baf880df847f2bece4230d4d094280f4e67b1e813eec43b4c0e144a34ffe", size = 23043, upload-time = "2025-09-27T18:37:14.408Z" }, + { url = 
"https://files.pythonhosted.org/packages/46/11/f333a06fc16236d5238bfe74daccbca41459dcd8d1fa952e8fbd5dccfb70/markupsafe-3.0.3-cp314-cp314-win32.whl", hash = "sha256:729586769a26dbceff69f7a7dbbf59ab6572b99d94576a5592625d5b411576b9", size = 14747, upload-time = "2025-09-27T18:37:15.36Z" }, + { url = "https://files.pythonhosted.org/packages/28/52/182836104b33b444e400b14f797212f720cbc9ed6ba34c800639d154e821/markupsafe-3.0.3-cp314-cp314-win_amd64.whl", hash = "sha256:bdc919ead48f234740ad807933cdf545180bfbe9342c2bb451556db2ed958581", size = 15341, upload-time = "2025-09-27T18:37:16.496Z" }, + { url = "https://files.pythonhosted.org/packages/6f/18/acf23e91bd94fd7b3031558b1f013adfa21a8e407a3fdb32745538730382/markupsafe-3.0.3-cp314-cp314-win_arm64.whl", hash = "sha256:5a7d5dc5140555cf21a6fefbdbf8723f06fcd2f63ef108f2854de715e4422cb4", size = 14073, upload-time = "2025-09-27T18:37:17.476Z" }, + { url = "https://files.pythonhosted.org/packages/3c/f0/57689aa4076e1b43b15fdfa646b04653969d50cf30c32a102762be2485da/markupsafe-3.0.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:1353ef0c1b138e1907ae78e2f6c63ff67501122006b0f9abad68fda5f4ffc6ab", size = 11661, upload-time = "2025-09-27T18:37:18.453Z" }, + { url = "https://files.pythonhosted.org/packages/89/c3/2e67a7ca217c6912985ec766c6393b636fb0c2344443ff9d91404dc4c79f/markupsafe-3.0.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:1085e7fbddd3be5f89cc898938f42c0b3c711fdcb37d75221de2666af647c175", size = 12069, upload-time = "2025-09-27T18:37:19.332Z" }, + { url = "https://files.pythonhosted.org/packages/f0/00/be561dce4e6ca66b15276e184ce4b8aec61fe83662cce2f7d72bd3249d28/markupsafe-3.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1b52b4fb9df4eb9ae465f8d0c228a00624de2334f216f178a995ccdcf82c4634", size = 25670, upload-time = "2025-09-27T18:37:20.245Z" }, + { url = 
"https://files.pythonhosted.org/packages/50/09/c419f6f5a92e5fadde27efd190eca90f05e1261b10dbd8cbcb39cd8ea1dc/markupsafe-3.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fed51ac40f757d41b7c48425901843666a6677e3e8eb0abcff09e4ba6e664f50", size = 23598, upload-time = "2025-09-27T18:37:21.177Z" }, + { url = "https://files.pythonhosted.org/packages/22/44/a0681611106e0b2921b3033fc19bc53323e0b50bc70cffdd19f7d679bb66/markupsafe-3.0.3-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:f190daf01f13c72eac4efd5c430a8de82489d9cff23c364c3ea822545032993e", size = 23261, upload-time = "2025-09-27T18:37:22.167Z" }, + { url = "https://files.pythonhosted.org/packages/5f/57/1b0b3f100259dc9fffe780cfb60d4be71375510e435efec3d116b6436d43/markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:e56b7d45a839a697b5eb268c82a71bd8c7f6c94d6fd50c3d577fa39a9f1409f5", size = 24835, upload-time = "2025-09-27T18:37:23.296Z" }, + { url = "https://files.pythonhosted.org/packages/26/6a/4bf6d0c97c4920f1597cc14dd720705eca0bf7c787aebc6bb4d1bead5388/markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:f3e98bb3798ead92273dc0e5fd0f31ade220f59a266ffd8a4f6065e0a3ce0523", size = 22733, upload-time = "2025-09-27T18:37:24.237Z" }, + { url = "https://files.pythonhosted.org/packages/14/c7/ca723101509b518797fedc2fdf79ba57f886b4aca8a7d31857ba3ee8281f/markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:5678211cb9333a6468fb8d8be0305520aa073f50d17f089b5b4b477ea6e67fdc", size = 23672, upload-time = "2025-09-27T18:37:25.271Z" }, + { url = "https://files.pythonhosted.org/packages/fb/df/5bd7a48c256faecd1d36edc13133e51397e41b73bb77e1a69deab746ebac/markupsafe-3.0.3-cp314-cp314t-win32.whl", hash = "sha256:915c04ba3851909ce68ccc2b8e2cd691618c4dc4c4232fb7982bca3f41fd8c3d", size = 14819, upload-time = "2025-09-27T18:37:26.285Z" }, + { url = 
"https://files.pythonhosted.org/packages/1a/8a/0402ba61a2f16038b48b39bccca271134be00c5c9f0f623208399333c448/markupsafe-3.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:4faffd047e07c38848ce017e8725090413cd80cbc23d86e55c587bf979e579c9", size = 15426, upload-time = "2025-09-27T18:37:27.316Z" }, + { url = "https://files.pythonhosted.org/packages/70/bc/6f1c2f612465f5fa89b95bead1f44dcb607670fd42891d8fdcd5d039f4f4/markupsafe-3.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:32001d6a8fc98c8cb5c947787c5d08b0a50663d139f1305bac5885d98d9b40fa", size = 14146, upload-time = "2025-09-27T18:37:28.327Z" }, ] [[package]] @@ -925,10 +956,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/62/33/09601f476fd9d494e967f15c1e05aa1e35bdf5ee54555596e05e5c9ec8c9/maxminddb-2.8.2-cp314-cp314t-win_arm64.whl", hash = "sha256:929a00528db82ffa5aa928a9cd1a972e8f93c36243609c25574dfd920c21533b", size = 33990, upload-time = "2025-07-25T20:31:23.367Z" }, ] -[[package]] -name = "msgpack-lazer-api" -source = { editable = "packages/msgpack_lazer_api" } - [[package]] name = "multidict" version = "6.6.4" @@ -994,18 +1021,18 @@ wheels = [ [[package]] name = "newrelic" -version = "10.17.0" +version = "11.0.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/c7/82/71eb49483a2675eec2a5cfce2ea08c898c9329072a83f6671d2a6f68d101/newrelic-10.17.0.tar.gz", hash = "sha256:f092109ac024f9524fafdd06126924c0fbd2af54684571167c0ee1d1cc1bcb7d", size = 1286818, upload-time = "2025-09-04T22:15:25.988Z" } +sdist = { url = "https://files.pythonhosted.org/packages/f3/99/dffd5c418b3551b4598871c2e74c39933d2622e1996f30d751f9bd2f4de9/newrelic-11.0.0.tar.gz", hash = "sha256:3419599597dfcb5c7dd78dd46d12097d20c72b19cc1c89218783804976d0931e", size = 1287164, upload-time = "2025-09-25T22:50:28.4Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/48/e3/c99b99ebedccb960b835a656d27e0d062904fbe49f8d98bf77eae617b649/newrelic-10.17.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ab9cbce3f8da5bfcd085b8a59591cfb653d75834b362e3eccb86bdf21eea917", size = 863000, upload-time = "2025-09-04T22:14:55.091Z" }, - { url = "https://files.pythonhosted.org/packages/73/c8/7c5175f1e070063033a38aa50b920d3523c018a4244890068d211bd98c14/newrelic-10.17.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:889969407457a9344927b1a9e9d5afde77345857db4d2ea75b119d02c02d9174", size = 862657, upload-time = "2025-09-04T22:14:56.546Z" }, - { url = "https://files.pythonhosted.org/packages/1f/37/34d71dcf78c7f539381ba71e0b7624ce170822d13364768241f11832ad0d/newrelic-10.17.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d74c06863356b19c1fcf19900f85057ab33ef779aa3e0de3cb7e3d3f37ca8e20", size = 860626, upload-time = "2025-09-04T22:14:58.707Z" }, - { url = "https://files.pythonhosted.org/packages/7f/40/137ed6c2558ba38237193501e3da466dfefe019a7fdf43e7c57eda0bb293/newrelic-10.17.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:083777458f93d16ae9e605fd66a6298d856e32deea11cf3270ed237858dcbfe6", size = 860491, upload-time = "2025-09-04T22:15:00.497Z" }, - { url = "https://files.pythonhosted.org/packages/44/60/731dbb138b7ec21a292769f5bed36a207ea957fbd6bfabca8954bfb029b3/newrelic-10.17.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:165c7045da474342d4ceab67776e8aeb66e77674fab7484b4e2e4ea68d02ed4d", size = 862986, upload-time = "2025-09-04T22:15:01.831Z" }, - { url = "https://files.pythonhosted.org/packages/0a/d2/fb51ee95e13e00a81d526cdd12359dc543e66ccb975ac0e164ef1008a852/newrelic-10.17.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc3c3c82c95b57fe30fc1f684bf920bd8b0ecae70a16cc11f55d0867ffb7520d", size = 
862661, upload-time = "2025-09-04T22:15:03.303Z" }, - { url = "https://files.pythonhosted.org/packages/1f/e2/adea40e0e6d6ef16083fbb48aae45a353910b61edbd253969ded2042b1e0/newrelic-10.17.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:5789f739e6ca1c4a49649e406b538d67c063ec33ab4658d01049303dfad9398b", size = 860798, upload-time = "2025-09-04T22:15:05.018Z" }, - { url = "https://files.pythonhosted.org/packages/eb/bb/2ee2ef672ab1dafcc40c17a13fa33cd5055275426f8853fbfadde563ccf6/newrelic-10.17.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4144dd96fa55772456326b3a6032a53d54aa9245ffc5041b002ce9eb5dbd0992", size = 860672, upload-time = "2025-09-04T22:15:06.346Z" }, + { url = "https://files.pythonhosted.org/packages/56/f5/ecdd3300dda9168a207eeea53db63219d2b6400e16f2a53b89c3c1c973e1/newrelic-11.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b42fe1c4c9e35a9fdf4c264faa1e92f6918d0014c31dad69fede2798bc4482bb", size = 885585, upload-time = "2025-09-25T22:50:02.947Z" }, + { url = "https://files.pythonhosted.org/packages/78/98/35ac917abd6340e81395a12075f131e1b102bf1ca40a598f601b409bc80b/newrelic-11.0.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:927b150eefd9843a5abfc407a9eeac02f4119a842ea0597cd30e33c4b1d5bc61", size = 885753, upload-time = "2025-09-25T22:50:04.653Z" }, + { url = "https://files.pythonhosted.org/packages/c8/86/976281a0882ec357d5a67b3ea6e5c7a80552b42196e5b3c6a13f9ff1ba77/newrelic-11.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a1ca9deba4f44e13eab9eb9866757135a88544b33d971e0929c6884821286bb5", size = 883124, upload-time = "2025-09-25T22:50:06.085Z" }, + { url = "https://files.pythonhosted.org/packages/97/c7/4f5acac16611050200c555ad6ba49a3c95cc59b6fcab2dec45a2f45120c3/newrelic-11.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ee444805ea0df9bcd3bb62873e16f8c5fe3848e0169fc90f8f74b611a51ae2d0", size = 883631, upload-time = 
"2025-09-25T22:50:07.667Z" }, + { url = "https://files.pythonhosted.org/packages/e7/1a/70d4f214c652e7f9df357e0723b7ac2bb7361d89e2e0f403b6c458d8c7e8/newrelic-11.0.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a060dd3918e19e629c9e0e5c16b8c97c4ea1989c7fc12e1845abec2b688e80f2", size = 885596, upload-time = "2025-09-25T22:50:09.041Z" }, + { url = "https://files.pythonhosted.org/packages/08/02/aa6fdb9089c64686ca06d1faa7411083ff961aeccd37e6e8a9283c68b1a5/newrelic-11.0.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a5bbdc9c1062d8178dd47aaf26bda8c1ebc2700ed82927973ac005cdd1668a8", size = 885777, upload-time = "2025-09-25T22:50:10.574Z" }, + { url = "https://files.pythonhosted.org/packages/e5/0a/391f17dd79507a3082a7ab1c3580d947937f51578cdfe2e0aedbf5e9286b/newrelic-11.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:c27e089768c8883d4df35f5f1f406ec475ff5053f1463082e01115b25881e4a6", size = 883238, upload-time = "2025-09-25T22:50:12.364Z" }, + { url = "https://files.pythonhosted.org/packages/96/02/b878cc2f472dd9868eb46a3b6de40a5b04991db697ddf25179e967eb2967/newrelic-11.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:eb16e55ac25781812bfde1e7e4c181857f9578957919106e049e1a784b333942", size = 883744, upload-time = "2025-09-25T22:50:13.738Z" }, ] [[package]] @@ -1266,16 +1293,16 @@ wheels = [ [[package]] name = "pydantic-settings" -version = "2.10.1" +version = "2.11.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pydantic" }, { name = "python-dotenv" }, { name = "typing-inspection" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/68/85/1ea668bbab3c50071ca613c6ab30047fb36ab0da1b92fa8f17bbc38fd36c/pydantic_settings-2.10.1.tar.gz", hash = "sha256:06f0062169818d0f5524420a360d632d5857b83cffd4d42fe29597807a1614ee", size = 172583, upload-time = "2025-06-24T13:26:46.841Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/20/c5/dbbc27b814c71676593d1c3f718e6cd7d4f00652cefa24b75f7aa3efb25e/pydantic_settings-2.11.0.tar.gz", hash = "sha256:d0e87a1c7d33593beb7194adb8470fc426e95ba02af83a0f23474a04c9a08180", size = 188394, upload-time = "2025-09-24T14:19:11.764Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/58/f0/427018098906416f580e3cf1366d3b1abfb408a0652e9f31600c24a1903c/pydantic_settings-2.10.1-py3-none-any.whl", hash = "sha256:a60952460b99cf661dc25c29c0ef171721f98bfcb52ef8d9ea4c943d7c8cc796", size = 45235, upload-time = "2025-06-24T13:26:45.485Z" }, + { url = "https://files.pythonhosted.org/packages/83/d6/887a1ff844e64aa823fb4905978d882a633cfe295c32eacad582b78a7d8b/pydantic_settings-2.11.0-py3-none-any.whl", hash = "sha256:fe2cea3413b9530d10f3a5875adffb17ada5c1e1bab0b2885546d7310415207c", size = 48608, upload-time = "2025-09-24T14:19:10.015Z" }, ] [[package]] @@ -1298,15 +1325,15 @@ wheels = [ [[package]] name = "pyright" -version = "1.1.405" +version = "1.1.406" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "nodeenv" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/fb/6c/ba4bbee22e76af700ea593a1d8701e3225080956753bee9750dcc25e2649/pyright-1.1.405.tar.gz", hash = "sha256:5c2a30e1037af27eb463a1cc0b9f6d65fec48478ccf092c1ac28385a15c55763", size = 4068319, upload-time = "2025-09-04T03:37:06.776Z" } +sdist = { url = "https://files.pythonhosted.org/packages/f7/16/6b4fbdd1fef59a0292cbb99f790b44983e390321eccbc5921b4d161da5d1/pyright-1.1.406.tar.gz", hash = "sha256:c4872bc58c9643dac09e8a2e74d472c62036910b3bd37a32813989ef7576ea2c", size = 4113151, upload-time = "2025-10-02T01:04:45.488Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/d5/1a/524f832e1ff1962a22a1accc775ca7b143ba2e9f5924bb6749dce566784a/pyright-1.1.405-py3-none-any.whl", hash = "sha256:a2cb13700b5508ce8e5d4546034cb7ea4aedb60215c6c33f56cec7f53996035a", size = 5905038, upload-time = 
"2025-09-04T03:37:04.913Z" }, + { url = "https://files.pythonhosted.org/packages/f6/a2/e309afbb459f50507103793aaef85ca4348b66814c86bc73908bdeb66d12/pyright-1.1.406-py3-none-any.whl", hash = "sha256:1d81fb43c2407bf566e97e57abb01c811973fdb21b2df8df59f870f688bdca71", size = 5980982, upload-time = "2025-10-02T01:04:43.137Z" }, ] [[package]] @@ -1360,28 +1387,48 @@ wheels = [ [[package]] name = "pyyaml" -version = "6.0.2" +version = "6.0.3" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/54/ed/79a089b6be93607fa5cdaedf301d7dfb23af5f25c398d5ead2525b063e17/pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e", size = 130631, upload-time = "2024-08-06T20:33:50.674Z" } +sdist = { url = "https://files.pythonhosted.org/packages/05/8e/961c0007c59b8dd7729d542c61a4d537767a59645b82a0b521206e1e25c2/pyyaml-6.0.3.tar.gz", hash = "sha256:d76623373421df22fb4cf8817020cbb7ef15c725b9d5e45f17e189bfc384190f", size = 130960, upload-time = "2025-09-25T21:33:16.546Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/86/0c/c581167fc46d6d6d7ddcfb8c843a4de25bdd27e4466938109ca68492292c/PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab", size = 183873, upload-time = "2024-08-06T20:32:25.131Z" }, - { url = "https://files.pythonhosted.org/packages/a8/0c/38374f5bb272c051e2a69281d71cba6fdb983413e6758b84482905e29a5d/PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725", size = 173302, upload-time = "2024-08-06T20:32:26.511Z" }, - { url = "https://files.pythonhosted.org/packages/c3/93/9916574aa8c00aa06bbac729972eb1071d002b8e158bd0e83a3b9a20a1f7/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5", size = 739154, upload-time = 
"2024-08-06T20:32:28.363Z" }, - { url = "https://files.pythonhosted.org/packages/95/0f/b8938f1cbd09739c6da569d172531567dbcc9789e0029aa070856f123984/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425", size = 766223, upload-time = "2024-08-06T20:32:30.058Z" }, - { url = "https://files.pythonhosted.org/packages/b9/2b/614b4752f2e127db5cc206abc23a8c19678e92b23c3db30fc86ab731d3bd/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476", size = 767542, upload-time = "2024-08-06T20:32:31.881Z" }, - { url = "https://files.pythonhosted.org/packages/d4/00/dd137d5bcc7efea1836d6264f049359861cf548469d18da90cd8216cf05f/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48", size = 731164, upload-time = "2024-08-06T20:32:37.083Z" }, - { url = "https://files.pythonhosted.org/packages/c9/1f/4f998c900485e5c0ef43838363ba4a9723ac0ad73a9dc42068b12aaba4e4/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b", size = 756611, upload-time = "2024-08-06T20:32:38.898Z" }, - { url = "https://files.pythonhosted.org/packages/df/d1/f5a275fdb252768b7a11ec63585bc38d0e87c9e05668a139fea92b80634c/PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4", size = 140591, upload-time = "2024-08-06T20:32:40.241Z" }, - { url = "https://files.pythonhosted.org/packages/0c/e8/4f648c598b17c3d06e8753d7d13d57542b30d56e6c2dedf9c331ae56312e/PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8", size = 156338, upload-time = "2024-08-06T20:32:41.93Z" }, - { url = 
"https://files.pythonhosted.org/packages/ef/e3/3af305b830494fa85d95f6d95ef7fa73f2ee1cc8ef5b495c7c3269fb835f/PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba", size = 181309, upload-time = "2024-08-06T20:32:43.4Z" }, - { url = "https://files.pythonhosted.org/packages/45/9f/3b1c20a0b7a3200524eb0076cc027a970d320bd3a6592873c85c92a08731/PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1", size = 171679, upload-time = "2024-08-06T20:32:44.801Z" }, - { url = "https://files.pythonhosted.org/packages/7c/9a/337322f27005c33bcb656c655fa78325b730324c78620e8328ae28b64d0c/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133", size = 733428, upload-time = "2024-08-06T20:32:46.432Z" }, - { url = "https://files.pythonhosted.org/packages/a3/69/864fbe19e6c18ea3cc196cbe5d392175b4cf3d5d0ac1403ec3f2d237ebb5/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484", size = 763361, upload-time = "2024-08-06T20:32:51.188Z" }, - { url = "https://files.pythonhosted.org/packages/04/24/b7721e4845c2f162d26f50521b825fb061bc0a5afcf9a386840f23ea19fa/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5", size = 759523, upload-time = "2024-08-06T20:32:53.019Z" }, - { url = "https://files.pythonhosted.org/packages/2b/b2/e3234f59ba06559c6ff63c4e10baea10e5e7df868092bf9ab40e5b9c56b6/PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc", size = 726660, upload-time = "2024-08-06T20:32:54.708Z" }, - { url = 
"https://files.pythonhosted.org/packages/fe/0f/25911a9f080464c59fab9027482f822b86bf0608957a5fcc6eaac85aa515/PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652", size = 751597, upload-time = "2024-08-06T20:32:56.985Z" }, - { url = "https://files.pythonhosted.org/packages/14/0d/e2c3b43bbce3cf6bd97c840b46088a3031085179e596d4929729d8d68270/PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183", size = 140527, upload-time = "2024-08-06T20:33:03.001Z" }, - { url = "https://files.pythonhosted.org/packages/fa/de/02b54f42487e3d3c6efb3f89428677074ca7bf43aae402517bc7cca949f3/PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563", size = 156446, upload-time = "2024-08-06T20:33:04.33Z" }, + { url = "https://files.pythonhosted.org/packages/d1/33/422b98d2195232ca1826284a76852ad5a86fe23e31b009c9886b2d0fb8b2/pyyaml-6.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7f047e29dcae44602496db43be01ad42fc6f1cc0d8cd6c83d342306c32270196", size = 182063, upload-time = "2025-09-25T21:32:11.445Z" }, + { url = "https://files.pythonhosted.org/packages/89/a0/6cf41a19a1f2f3feab0e9c0b74134aa2ce6849093d5517a0c550fe37a648/pyyaml-6.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fc09d0aa354569bc501d4e787133afc08552722d3ab34836a80547331bb5d4a0", size = 173973, upload-time = "2025-09-25T21:32:12.492Z" }, + { url = "https://files.pythonhosted.org/packages/ed/23/7a778b6bd0b9a8039df8b1b1d80e2e2ad78aa04171592c8a5c43a56a6af4/pyyaml-6.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9149cad251584d5fb4981be1ecde53a1ca46c891a79788c0df828d2f166bda28", size = 775116, upload-time = "2025-09-25T21:32:13.652Z" }, + { url = 
"https://files.pythonhosted.org/packages/65/30/d7353c338e12baef4ecc1b09e877c1970bd3382789c159b4f89d6a70dc09/pyyaml-6.0.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5fdec68f91a0c6739b380c83b951e2c72ac0197ace422360e6d5a959d8d97b2c", size = 844011, upload-time = "2025-09-25T21:32:15.21Z" }, + { url = "https://files.pythonhosted.org/packages/8b/9d/b3589d3877982d4f2329302ef98a8026e7f4443c765c46cfecc8858c6b4b/pyyaml-6.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ba1cc08a7ccde2d2ec775841541641e4548226580ab850948cbfda66a1befcdc", size = 807870, upload-time = "2025-09-25T21:32:16.431Z" }, + { url = "https://files.pythonhosted.org/packages/05/c0/b3be26a015601b822b97d9149ff8cb5ead58c66f981e04fedf4e762f4bd4/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8dc52c23056b9ddd46818a57b78404882310fb473d63f17b07d5c40421e47f8e", size = 761089, upload-time = "2025-09-25T21:32:17.56Z" }, + { url = "https://files.pythonhosted.org/packages/be/8e/98435a21d1d4b46590d5459a22d88128103f8da4c2d4cb8f14f2a96504e1/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:41715c910c881bc081f1e8872880d3c650acf13dfa8214bad49ed4cede7c34ea", size = 790181, upload-time = "2025-09-25T21:32:18.834Z" }, + { url = "https://files.pythonhosted.org/packages/74/93/7baea19427dcfbe1e5a372d81473250b379f04b1bd3c4c5ff825e2327202/pyyaml-6.0.3-cp312-cp312-win32.whl", hash = "sha256:96b533f0e99f6579b3d4d4995707cf36df9100d67e0c8303a0c55b27b5f99bc5", size = 137658, upload-time = "2025-09-25T21:32:20.209Z" }, + { url = "https://files.pythonhosted.org/packages/86/bf/899e81e4cce32febab4fb42bb97dcdf66bc135272882d1987881a4b519e9/pyyaml-6.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:5fcd34e47f6e0b794d17de1b4ff496c00986e1c83f7ab2fb8fcfe9616ff7477b", size = 154003, upload-time = "2025-09-25T21:32:21.167Z" }, + { url = 
"https://files.pythonhosted.org/packages/1a/08/67bd04656199bbb51dbed1439b7f27601dfb576fb864099c7ef0c3e55531/pyyaml-6.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:64386e5e707d03a7e172c0701abfb7e10f0fb753ee1d773128192742712a98fd", size = 140344, upload-time = "2025-09-25T21:32:22.617Z" }, + { url = "https://files.pythonhosted.org/packages/d1/11/0fd08f8192109f7169db964b5707a2f1e8b745d4e239b784a5a1dd80d1db/pyyaml-6.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8da9669d359f02c0b91ccc01cac4a67f16afec0dac22c2ad09f46bee0697eba8", size = 181669, upload-time = "2025-09-25T21:32:23.673Z" }, + { url = "https://files.pythonhosted.org/packages/b1/16/95309993f1d3748cd644e02e38b75d50cbc0d9561d21f390a76242ce073f/pyyaml-6.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:2283a07e2c21a2aa78d9c4442724ec1eb15f5e42a723b99cb3d822d48f5f7ad1", size = 173252, upload-time = "2025-09-25T21:32:25.149Z" }, + { url = "https://files.pythonhosted.org/packages/50/31/b20f376d3f810b9b2371e72ef5adb33879b25edb7a6d072cb7ca0c486398/pyyaml-6.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ee2922902c45ae8ccada2c5b501ab86c36525b883eff4255313a253a3160861c", size = 767081, upload-time = "2025-09-25T21:32:26.575Z" }, + { url = "https://files.pythonhosted.org/packages/49/1e/a55ca81e949270d5d4432fbbd19dfea5321eda7c41a849d443dc92fd1ff7/pyyaml-6.0.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a33284e20b78bd4a18c8c2282d549d10bc8408a2a7ff57653c0cf0b9be0afce5", size = 841159, upload-time = "2025-09-25T21:32:27.727Z" }, + { url = "https://files.pythonhosted.org/packages/74/27/e5b8f34d02d9995b80abcef563ea1f8b56d20134d8f4e5e81733b1feceb2/pyyaml-6.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0f29edc409a6392443abf94b9cf89ce99889a1dd5376d94316ae5145dfedd5d6", size = 801626, upload-time = "2025-09-25T21:32:28.878Z" }, + { url = 
"https://files.pythonhosted.org/packages/f9/11/ba845c23988798f40e52ba45f34849aa8a1f2d4af4b798588010792ebad6/pyyaml-6.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f7057c9a337546edc7973c0d3ba84ddcdf0daa14533c2065749c9075001090e6", size = 753613, upload-time = "2025-09-25T21:32:30.178Z" }, + { url = "https://files.pythonhosted.org/packages/3d/e0/7966e1a7bfc0a45bf0a7fb6b98ea03fc9b8d84fa7f2229e9659680b69ee3/pyyaml-6.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:eda16858a3cab07b80edaf74336ece1f986ba330fdb8ee0d6c0d68fe82bc96be", size = 794115, upload-time = "2025-09-25T21:32:31.353Z" }, + { url = "https://files.pythonhosted.org/packages/de/94/980b50a6531b3019e45ddeada0626d45fa85cbe22300844a7983285bed3b/pyyaml-6.0.3-cp313-cp313-win32.whl", hash = "sha256:d0eae10f8159e8fdad514efdc92d74fd8d682c933a6dd088030f3834bc8e6b26", size = 137427, upload-time = "2025-09-25T21:32:32.58Z" }, + { url = "https://files.pythonhosted.org/packages/97/c9/39d5b874e8b28845e4ec2202b5da735d0199dbe5b8fb85f91398814a9a46/pyyaml-6.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:79005a0d97d5ddabfeeea4cf676af11e647e41d81c9a7722a193022accdb6b7c", size = 154090, upload-time = "2025-09-25T21:32:33.659Z" }, + { url = "https://files.pythonhosted.org/packages/73/e8/2bdf3ca2090f68bb3d75b44da7bbc71843b19c9f2b9cb9b0f4ab7a5a4329/pyyaml-6.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:5498cd1645aa724a7c71c8f378eb29ebe23da2fc0d7a08071d89469bf1d2defb", size = 140246, upload-time = "2025-09-25T21:32:34.663Z" }, + { url = "https://files.pythonhosted.org/packages/9d/8c/f4bd7f6465179953d3ac9bc44ac1a8a3e6122cf8ada906b4f96c60172d43/pyyaml-6.0.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:8d1fab6bb153a416f9aeb4b8763bc0f22a5586065f86f7664fc23339fc1c1fac", size = 181814, upload-time = "2025-09-25T21:32:35.712Z" }, + { url = "https://files.pythonhosted.org/packages/bd/9c/4d95bb87eb2063d20db7b60faa3840c1b18025517ae857371c4dd55a6b3a/pyyaml-6.0.3-cp314-cp314-macosx_11_0_arm64.whl", hash = 
"sha256:34d5fcd24b8445fadc33f9cf348c1047101756fd760b4dacb5c3e99755703310", size = 173809, upload-time = "2025-09-25T21:32:36.789Z" }, + { url = "https://files.pythonhosted.org/packages/92/b5/47e807c2623074914e29dabd16cbbdd4bf5e9b2db9f8090fa64411fc5382/pyyaml-6.0.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:501a031947e3a9025ed4405a168e6ef5ae3126c59f90ce0cd6f2bfc477be31b7", size = 766454, upload-time = "2025-09-25T21:32:37.966Z" }, + { url = "https://files.pythonhosted.org/packages/02/9e/e5e9b168be58564121efb3de6859c452fccde0ab093d8438905899a3a483/pyyaml-6.0.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:b3bc83488de33889877a0f2543ade9f70c67d66d9ebb4ac959502e12de895788", size = 836355, upload-time = "2025-09-25T21:32:39.178Z" }, + { url = "https://files.pythonhosted.org/packages/88/f9/16491d7ed2a919954993e48aa941b200f38040928474c9e85ea9e64222c3/pyyaml-6.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c458b6d084f9b935061bc36216e8a69a7e293a2f1e68bf956dcd9e6cbcd143f5", size = 794175, upload-time = "2025-09-25T21:32:40.865Z" }, + { url = "https://files.pythonhosted.org/packages/dd/3f/5989debef34dc6397317802b527dbbafb2b4760878a53d4166579111411e/pyyaml-6.0.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7c6610def4f163542a622a73fb39f534f8c101d690126992300bf3207eab9764", size = 755228, upload-time = "2025-09-25T21:32:42.084Z" }, + { url = "https://files.pythonhosted.org/packages/d7/ce/af88a49043cd2e265be63d083fc75b27b6ed062f5f9fd6cdc223ad62f03e/pyyaml-6.0.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:5190d403f121660ce8d1d2c1bb2ef1bd05b5f68533fc5c2ea899bd15f4399b35", size = 789194, upload-time = "2025-09-25T21:32:43.362Z" }, + { url = "https://files.pythonhosted.org/packages/23/20/bb6982b26a40bb43951265ba29d4c246ef0ff59c9fdcdf0ed04e0687de4d/pyyaml-6.0.3-cp314-cp314-win_amd64.whl", hash = 
"sha256:4a2e8cebe2ff6ab7d1050ecd59c25d4c8bd7e6f400f5f82b96557ac0abafd0ac", size = 156429, upload-time = "2025-09-25T21:32:57.844Z" }, + { url = "https://files.pythonhosted.org/packages/f4/f4/a4541072bb9422c8a883ab55255f918fa378ecf083f5b85e87fc2b4eda1b/pyyaml-6.0.3-cp314-cp314-win_arm64.whl", hash = "sha256:93dda82c9c22deb0a405ea4dc5f2d0cda384168e466364dec6255b293923b2f3", size = 143912, upload-time = "2025-09-25T21:32:59.247Z" }, + { url = "https://files.pythonhosted.org/packages/7c/f9/07dd09ae774e4616edf6cda684ee78f97777bdd15847253637a6f052a62f/pyyaml-6.0.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:02893d100e99e03eda1c8fd5c441d8c60103fd175728e23e431db1b589cf5ab3", size = 189108, upload-time = "2025-09-25T21:32:44.377Z" }, + { url = "https://files.pythonhosted.org/packages/4e/78/8d08c9fb7ce09ad8c38ad533c1191cf27f7ae1effe5bb9400a46d9437fcf/pyyaml-6.0.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c1ff362665ae507275af2853520967820d9124984e0f7466736aea23d8611fba", size = 183641, upload-time = "2025-09-25T21:32:45.407Z" }, + { url = "https://files.pythonhosted.org/packages/7b/5b/3babb19104a46945cf816d047db2788bcaf8c94527a805610b0289a01c6b/pyyaml-6.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6adc77889b628398debc7b65c073bcb99c4a0237b248cacaf3fe8a557563ef6c", size = 831901, upload-time = "2025-09-25T21:32:48.83Z" }, + { url = "https://files.pythonhosted.org/packages/8b/cc/dff0684d8dc44da4d22a13f35f073d558c268780ce3c6ba1b87055bb0b87/pyyaml-6.0.3-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a80cb027f6b349846a3bf6d73b5e95e782175e52f22108cfa17876aaeff93702", size = 861132, upload-time = "2025-09-25T21:32:50.149Z" }, + { url = "https://files.pythonhosted.org/packages/b1/5e/f77dc6b9036943e285ba76b49e118d9ea929885becb0a29ba8a7c75e29fe/pyyaml-6.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:00c4bdeba853cc34e7dd471f16b4114f4162dc03e6b7afcc2128711f0eca823c", size = 839261, upload-time = "2025-09-25T21:32:51.808Z" }, + { url = "https://files.pythonhosted.org/packages/ce/88/a9db1376aa2a228197c58b37302f284b5617f56a5d959fd1763fb1675ce6/pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:66e1674c3ef6f541c35191caae2d429b967b99e02040f5ba928632d9a7f0f065", size = 805272, upload-time = "2025-09-25T21:32:52.941Z" }, + { url = "https://files.pythonhosted.org/packages/da/92/1446574745d74df0c92e6aa4a7b0b3130706a4142b2d1a5869f2eaa423c6/pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:16249ee61e95f858e83976573de0f5b2893b3677ba71c9dd36b9cf8be9ac6d65", size = 829923, upload-time = "2025-09-25T21:32:54.537Z" }, + { url = "https://files.pythonhosted.org/packages/f0/7a/1c7270340330e575b92f397352af856a8c06f230aa3e76f86b39d01b416a/pyyaml-6.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:4ad1906908f2f5ae4e5a8ddfce73c320c2a1429ec52eafd27138b7f1cbe341c9", size = 174062, upload-time = "2025-09-25T21:32:55.767Z" }, + { url = "https://files.pythonhosted.org/packages/f1/12/de94a39c2ef588c7e6455cfbe7343d3b2dc9d6b6b2f40c4c6565744c873d/pyyaml-6.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:ebc55a14a21cb14062aa4162f906cd962b28e2e9ea38f9b4391244cd8de4ae0b", size = 149341, upload-time = "2025-09-25T21:32:56.828Z" }, ] [[package]] @@ -1433,28 +1480,28 @@ wheels = [ [[package]] name = "ruff" -version = "0.13.0" +version = "0.13.3" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/6e/1a/1f4b722862840295bcaba8c9e5261572347509548faaa99b2d57ee7bfe6a/ruff-0.13.0.tar.gz", hash = "sha256:5b4b1ee7eb35afae128ab94459b13b2baaed282b1fb0f472a73c82c996c8ae60", size = 5372863, upload-time = "2025-09-10T16:25:37.917Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c7/8e/f9f9ca747fea8e3ac954e3690d4698c9737c23b51731d02df999c150b1c9/ruff-0.13.3.tar.gz", hash = 
"sha256:5b0ba0db740eefdfbcce4299f49e9eaefc643d4d007749d77d047c2bab19908e", size = 5438533, upload-time = "2025-10-02T19:29:31.582Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/ac/fe/6f87b419dbe166fd30a991390221f14c5b68946f389ea07913e1719741e0/ruff-0.13.0-py3-none-linux_armv6l.whl", hash = "sha256:137f3d65d58ee828ae136a12d1dc33d992773d8f7644bc6b82714570f31b2004", size = 12187826, upload-time = "2025-09-10T16:24:39.5Z" }, - { url = "https://files.pythonhosted.org/packages/e4/25/c92296b1fc36d2499e12b74a3fdb230f77af7bdf048fad7b0a62e94ed56a/ruff-0.13.0-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:21ae48151b66e71fd111b7d79f9ad358814ed58c339631450c66a4be33cc28b9", size = 12933428, upload-time = "2025-09-10T16:24:43.866Z" }, - { url = "https://files.pythonhosted.org/packages/44/cf/40bc7221a949470307d9c35b4ef5810c294e6cfa3caafb57d882731a9f42/ruff-0.13.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:64de45f4ca5441209e41742d527944635a05a6e7c05798904f39c85bafa819e3", size = 12095543, upload-time = "2025-09-10T16:24:46.638Z" }, - { url = "https://files.pythonhosted.org/packages/f1/03/8b5ff2a211efb68c63a1d03d157e924997ada87d01bebffbd13a0f3fcdeb/ruff-0.13.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2b2c653ae9b9d46e0ef62fc6fbf5b979bda20a0b1d2b22f8f7eb0cde9f4963b8", size = 12312489, upload-time = "2025-09-10T16:24:49.556Z" }, - { url = "https://files.pythonhosted.org/packages/37/fc/2336ef6d5e9c8d8ea8305c5f91e767d795cd4fc171a6d97ef38a5302dadc/ruff-0.13.0-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4cec632534332062bc9eb5884a267b689085a1afea9801bf94e3ba7498a2d207", size = 11991631, upload-time = "2025-09-10T16:24:53.439Z" }, - { url = "https://files.pythonhosted.org/packages/39/7f/f6d574d100fca83d32637d7f5541bea2f5e473c40020bbc7fc4a4d5b7294/ruff-0.13.0-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dcd628101d9f7d122e120ac7c17e0a0f468b19bc925501dbe03c1cb7f5415b24", size = 
13720602, upload-time = "2025-09-10T16:24:56.392Z" }, - { url = "https://files.pythonhosted.org/packages/fd/c8/a8a5b81d8729b5d1f663348d11e2a9d65a7a9bd3c399763b1a51c72be1ce/ruff-0.13.0-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:afe37db8e1466acb173bb2a39ca92df00570e0fd7c94c72d87b51b21bb63efea", size = 14697751, upload-time = "2025-09-10T16:24:59.89Z" }, - { url = "https://files.pythonhosted.org/packages/57/f5/183ec292272ce7ec5e882aea74937f7288e88ecb500198b832c24debc6d3/ruff-0.13.0-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0f96a8d90bb258d7d3358b372905fe7333aaacf6c39e2408b9f8ba181f4b6ef2", size = 14095317, upload-time = "2025-09-10T16:25:03.025Z" }, - { url = "https://files.pythonhosted.org/packages/9f/8d/7f9771c971724701af7926c14dab31754e7b303d127b0d3f01116faef456/ruff-0.13.0-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94b5e3d883e4f924c5298e3f2ee0f3085819c14f68d1e5b6715597681433f153", size = 13144418, upload-time = "2025-09-10T16:25:06.272Z" }, - { url = "https://files.pythonhosted.org/packages/a8/a6/7985ad1778e60922d4bef546688cd8a25822c58873e9ff30189cfe5dc4ab/ruff-0.13.0-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:03447f3d18479df3d24917a92d768a89f873a7181a064858ea90a804a7538991", size = 13370843, upload-time = "2025-09-10T16:25:09.965Z" }, - { url = "https://files.pythonhosted.org/packages/64/1c/bafdd5a7a05a50cc51d9f5711da704942d8dd62df3d8c70c311e98ce9f8a/ruff-0.13.0-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:fbc6b1934eb1c0033da427c805e27d164bb713f8e273a024a7e86176d7f462cf", size = 13321891, upload-time = "2025-09-10T16:25:12.969Z" }, - { url = "https://files.pythonhosted.org/packages/bc/3e/7817f989cb9725ef7e8d2cee74186bf90555279e119de50c750c4b7a72fe/ruff-0.13.0-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:a8ab6a3e03665d39d4a25ee199d207a488724f022db0e1fe4002968abdb8001b", size = 12119119, upload-time = "2025-09-10T16:25:16.621Z" }, - { 
url = "https://files.pythonhosted.org/packages/58/07/9df080742e8d1080e60c426dce6e96a8faf9a371e2ce22eef662e3839c95/ruff-0.13.0-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:d2a5c62f8ccc6dd2fe259917482de7275cecc86141ee10432727c4816235bc41", size = 11961594, upload-time = "2025-09-10T16:25:19.49Z" }, - { url = "https://files.pythonhosted.org/packages/6a/f4/ae1185349197d26a2316840cb4d6c3fba61d4ac36ed728bf0228b222d71f/ruff-0.13.0-py3-none-musllinux_1_2_i686.whl", hash = "sha256:b7b85ca27aeeb1ab421bc787009831cffe6048faae08ad80867edab9f2760945", size = 12933377, upload-time = "2025-09-10T16:25:22.371Z" }, - { url = "https://files.pythonhosted.org/packages/b6/39/e776c10a3b349fc8209a905bfb327831d7516f6058339a613a8d2aaecacd/ruff-0.13.0-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:79ea0c44a3032af768cabfd9616e44c24303af49d633b43e3a5096e009ebe823", size = 13418555, upload-time = "2025-09-10T16:25:25.681Z" }, - { url = "https://files.pythonhosted.org/packages/46/09/dca8df3d48e8b3f4202bf20b1658898e74b6442ac835bfe2c1816d926697/ruff-0.13.0-py3-none-win32.whl", hash = "sha256:4e473e8f0e6a04e4113f2e1de12a5039579892329ecc49958424e5568ef4f768", size = 12141613, upload-time = "2025-09-10T16:25:28.664Z" }, - { url = "https://files.pythonhosted.org/packages/61/21/0647eb71ed99b888ad50e44d8ec65d7148babc0e242d531a499a0bbcda5f/ruff-0.13.0-py3-none-win_amd64.whl", hash = "sha256:48e5c25c7a3713eea9ce755995767f4dcd1b0b9599b638b12946e892123d1efb", size = 13258250, upload-time = "2025-09-10T16:25:31.773Z" }, - { url = "https://files.pythonhosted.org/packages/e1/a3/03216a6a86c706df54422612981fb0f9041dbb452c3401501d4a22b942c9/ruff-0.13.0-py3-none-win_arm64.whl", hash = "sha256:ab80525317b1e1d38614addec8ac954f1b3e662de9d59114ecbf771d00cf613e", size = 12312357, upload-time = "2025-09-10T16:25:35.595Z" }, + { url = "https://files.pythonhosted.org/packages/d2/33/8f7163553481466a92656d35dea9331095122bb84cf98210bef597dd2ecd/ruff-0.13.3-py3-none-linux_armv6l.whl", hash = 
"sha256:311860a4c5e19189c89d035638f500c1e191d283d0cc2f1600c8c80d6dcd430c", size = 12484040, upload-time = "2025-10-02T19:28:49.199Z" }, + { url = "https://files.pythonhosted.org/packages/b0/b5/4a21a4922e5dd6845e91896b0d9ef493574cbe061ef7d00a73c61db531af/ruff-0.13.3-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:2bdad6512fb666b40fcadb65e33add2b040fc18a24997d2e47fee7d66f7fcae2", size = 13122975, upload-time = "2025-10-02T19:28:52.446Z" }, + { url = "https://files.pythonhosted.org/packages/40/90/15649af836d88c9f154e5be87e64ae7d2b1baa5a3ef317cb0c8fafcd882d/ruff-0.13.3-py3-none-macosx_11_0_arm64.whl", hash = "sha256:fc6fa4637284708d6ed4e5e970d52fc3b76a557d7b4e85a53013d9d201d93286", size = 12346621, upload-time = "2025-10-02T19:28:54.712Z" }, + { url = "https://files.pythonhosted.org/packages/a5/42/bcbccb8141305f9a6d3f72549dd82d1134299177cc7eaf832599700f95a7/ruff-0.13.3-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c9e6469864f94a98f412f20ea143d547e4c652f45e44f369d7b74ee78185838", size = 12574408, upload-time = "2025-10-02T19:28:56.679Z" }, + { url = "https://files.pythonhosted.org/packages/ce/19/0f3681c941cdcfa2d110ce4515624c07a964dc315d3100d889fcad3bfc9e/ruff-0.13.3-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5bf62b705f319476c78891e0e97e965b21db468b3c999086de8ffb0d40fd2822", size = 12285330, upload-time = "2025-10-02T19:28:58.79Z" }, + { url = "https://files.pythonhosted.org/packages/10/f8/387976bf00d126b907bbd7725219257feea58650e6b055b29b224d8cb731/ruff-0.13.3-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:78cc1abed87ce40cb07ee0667ce99dbc766c9f519eabfd948ed87295d8737c60", size = 13980815, upload-time = "2025-10-02T19:29:01.577Z" }, + { url = "https://files.pythonhosted.org/packages/0c/a6/7c8ec09d62d5a406e2b17d159e4817b63c945a8b9188a771193b7e1cc0b5/ruff-0.13.3-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = 
"sha256:4fb75e7c402d504f7a9a259e0442b96403fa4a7310ffe3588d11d7e170d2b1e3", size = 14987733, upload-time = "2025-10-02T19:29:04.036Z" }, + { url = "https://files.pythonhosted.org/packages/97/e5/f403a60a12258e0fd0c2195341cfa170726f254c788673495d86ab5a9a9d/ruff-0.13.3-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:17b951f9d9afb39330b2bdd2dd144ce1c1335881c277837ac1b50bfd99985ed3", size = 14439848, upload-time = "2025-10-02T19:29:06.684Z" }, + { url = "https://files.pythonhosted.org/packages/39/49/3de381343e89364c2334c9f3268b0349dc734fc18b2d99a302d0935c8345/ruff-0.13.3-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6052f8088728898e0a449f0dde8fafc7ed47e4d878168b211977e3e7e854f662", size = 13421890, upload-time = "2025-10-02T19:29:08.767Z" }, + { url = "https://files.pythonhosted.org/packages/ab/b5/c0feca27d45ae74185a6bacc399f5d8920ab82df2d732a17213fb86a2c4c/ruff-0.13.3-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc742c50f4ba72ce2a3be362bd359aef7d0d302bf7637a6f942eaa763bd292af", size = 13444870, upload-time = "2025-10-02T19:29:11.234Z" }, + { url = "https://files.pythonhosted.org/packages/50/a1/b655298a1f3fda4fdc7340c3f671a4b260b009068fbeb3e4e151e9e3e1bf/ruff-0.13.3-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:8e5640349493b378431637019366bbd73c927e515c9c1babfea3e932f5e68e1d", size = 13691599, upload-time = "2025-10-02T19:29:13.353Z" }, + { url = "https://files.pythonhosted.org/packages/32/b0/a8705065b2dafae007bcae21354e6e2e832e03eb077bb6c8e523c2becb92/ruff-0.13.3-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:6b139f638a80eae7073c691a5dd8d581e0ba319540be97c343d60fb12949c8d0", size = 12421893, upload-time = "2025-10-02T19:29:15.668Z" }, + { url = "https://files.pythonhosted.org/packages/0d/1e/cbe7082588d025cddbb2f23e6dfef08b1a2ef6d6f8328584ad3015b5cebd/ruff-0.13.3-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:6b547def0a40054825de7cfa341039ebdfa51f3d4bfa6a0772940ed351d2746c", 
size = 12267220, upload-time = "2025-10-02T19:29:17.583Z" }, + { url = "https://files.pythonhosted.org/packages/a5/99/4086f9c43f85e0755996d09bdcb334b6fee9b1eabdf34e7d8b877fadf964/ruff-0.13.3-py3-none-musllinux_1_2_i686.whl", hash = "sha256:9cc48a3564423915c93573f1981d57d101e617839bef38504f85f3677b3a0a3e", size = 13177818, upload-time = "2025-10-02T19:29:19.943Z" }, + { url = "https://files.pythonhosted.org/packages/9b/de/7b5db7e39947d9dc1c5f9f17b838ad6e680527d45288eeb568e860467010/ruff-0.13.3-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:1a993b17ec03719c502881cb2d5f91771e8742f2ca6de740034433a97c561989", size = 13618715, upload-time = "2025-10-02T19:29:22.527Z" }, + { url = "https://files.pythonhosted.org/packages/28/d3/bb25ee567ce2f61ac52430cf99f446b0e6d49bdfa4188699ad005fdd16aa/ruff-0.13.3-py3-none-win32.whl", hash = "sha256:f14e0d1fe6460f07814d03c6e32e815bff411505178a1f539a38f6097d3e8ee3", size = 12334488, upload-time = "2025-10-02T19:29:24.782Z" }, + { url = "https://files.pythonhosted.org/packages/cf/49/12f5955818a1139eed288753479ba9d996f6ea0b101784bb1fe6977ec128/ruff-0.13.3-py3-none-win_amd64.whl", hash = "sha256:621e2e5812b691d4f244638d693e640f188bacbb9bc793ddd46837cea0503dd2", size = 13455262, upload-time = "2025-10-02T19:29:26.882Z" }, + { url = "https://files.pythonhosted.org/packages/fe/72/7b83242b26627a00e3af70d0394d68f8f02750d642567af12983031777fc/ruff-0.13.3-py3-none-win_arm64.whl", hash = "sha256:9e9e9d699841eaf4c2c798fa783df2fabc680b72059a02ca0ed81c460bc58330", size = 12538484, upload-time = "2025-10-02T19:29:28.951Z" }, ] [[package]] @@ -1471,15 +1518,15 @@ wheels = [ [[package]] name = "sentry-sdk" -version = "2.37.1" +version = "2.39.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "certifi" }, { name = "urllib3" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/78/be/ffc232c32d0be18f8e4eff7a22dffc1f1fef2894703d64cc281a80e75da6/sentry_sdk-2.37.1.tar.gz", hash = 
"sha256:531751da91aa62a909b42a7be155b41f6bb0de9df6ae98441d23b95de2f98475", size = 346235, upload-time = "2025-09-09T13:48:27.137Z" } +sdist = { url = "https://files.pythonhosted.org/packages/4c/72/43294fa4bdd75c51610b5104a3ff834459ba653abb415150aa7826a249dd/sentry_sdk-2.39.0.tar.gz", hash = "sha256:8c185854d111f47f329ab6bc35993f28f7a6b7114db64aa426b326998cfa14e9", size = 348556, upload-time = "2025-09-25T09:15:39.064Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/f3/c3/cba447ab531331d165d9003c04473be944a308ad916ca2345b5ef1969ed9/sentry_sdk-2.37.1-py2.py3-none-any.whl", hash = "sha256:baaaea6608ed3a639766a69ded06b254b106d32ad9d180bdbe58f3db9364592b", size = 368307, upload-time = "2025-09-09T13:48:25.271Z" }, + { url = "https://files.pythonhosted.org/packages/dd/44/4356cc64246ba7b2b920f7c97a85c3c52748e213e250b512ee8152eb559d/sentry_sdk-2.39.0-py2.py3-none-any.whl", hash = "sha256:ba655ca5e57b41569b18e2a5552cb3375209760a5d332cdd87c6c3f28f729602", size = 370851, upload-time = "2025-09-25T09:15:36.35Z" }, ] [package.optional-dependencies] @@ -1545,28 +1592,28 @@ wheels = [ [[package]] name = "sqlmodel" -version = "0.0.24" +version = "0.0.25" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pydantic" }, { name = "sqlalchemy" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/86/4b/c2ad0496f5bdc6073d9b4cef52be9c04f2b37a5773441cc6600b1857648b/sqlmodel-0.0.24.tar.gz", hash = "sha256:cc5c7613c1a5533c9c7867e1aab2fd489a76c9e8a061984da11b4e613c182423", size = 116780, upload-time = "2025-03-07T05:43:32.887Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ea/80/d9c098a88724ee4554907939cf39590cf67e10c6683723216e228d3315f7/sqlmodel-0.0.25.tar.gz", hash = "sha256:56548c2e645975b1ed94d6c53f0d13c85593f57926a575e2bf566650b2243fa4", size = 117075, upload-time = "2025-09-17T21:44:41.219Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/16/91/484cd2d05569892b7fef7f5ceab3bc89fb0f8a8c0cde1030d383dbc5449c/sqlmodel-0.0.24-py3-none-any.whl", hash = "sha256:6778852f09370908985b667d6a3ab92910d0d5ec88adcaf23dbc242715ff7193", size = 28622, upload-time = "2025-03-07T05:43:30.37Z" }, + { url = "https://files.pythonhosted.org/packages/57/cf/5d175ce8de07fe694ec4e3d4d65c2dd06cc30f6c79599b31f9d2f6dd2830/sqlmodel-0.0.25-py3-none-any.whl", hash = "sha256:c98234cda701fb77e9dcbd81688c23bb251c13bb98ce1dd8d4adc467374d45b7", size = 28893, upload-time = "2025-09-17T21:44:39.764Z" }, ] [[package]] name = "starlette" -version = "0.47.3" +version = "0.48.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio" }, { name = "typing-extensions", marker = "python_full_version < '3.13'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/15/b9/cc3017f9a9c9b6e27c5106cc10cc7904653c3eec0729793aec10479dd669/starlette-0.47.3.tar.gz", hash = "sha256:6bc94f839cc176c4858894f1f8908f0ab79dfec1a6b8402f6da9be26ebea52e9", size = 2584144, upload-time = "2025-08-24T13:36:42.122Z" } +sdist = { url = "https://files.pythonhosted.org/packages/a7/a5/d6f429d43394057b67a6b5bbe6eae2f77a6bf7459d961fdb224bf206eee6/starlette-0.48.0.tar.gz", hash = "sha256:7e8cee469a8ab2352911528110ce9088fdc6a37d9876926e73da7ce4aa4c7a46", size = 2652949, upload-time = "2025-09-13T08:41:05.699Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/ce/fd/901cfa59aaa5b30a99e16876f11abe38b59a1a2c51ffb3d7142bb6089069/starlette-0.47.3-py3-none-any.whl", hash = "sha256:89c0778ca62a76b826101e7c709e70680a1699ca7da6b44d38eb0a7e61fe4b51", size = 72991, upload-time = "2025-08-24T13:36:40.887Z" }, + { url = "https://files.pythonhosted.org/packages/be/72/2db2f49247d0a18b4f1bb9a5a39a0162869acf235f3a96418363947b3d46/starlette-0.48.0-py3-none-any.whl", hash = "sha256:0764ca97b097582558ecb498132ed0c7d942f233f365b86ba37770e026510659", size = 73736, upload-time = "2025-09-13T08:41:03.869Z" }, ] 
[[package]] @@ -1713,14 +1760,14 @@ wheels = [ [[package]] name = "typing-inspection" -version = "0.4.1" +version = "0.4.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/f8/b1/0c11f5058406b3af7609f121aaa6b609744687f1d158b3c3a5bf4cc94238/typing_inspection-0.4.1.tar.gz", hash = "sha256:6ae134cc0203c33377d43188d4064e9b357dba58cff3185f22924610e70a9d28", size = 75726, upload-time = "2025-05-21T18:55:23.885Z" } +sdist = { url = "https://files.pythonhosted.org/packages/55/e3/70399cb7dd41c10ac53367ae42139cf4b1ca5f36bb3dc6c9d33acdb43655/typing_inspection-0.4.2.tar.gz", hash = "sha256:ba561c48a67c5958007083d386c3295464928b01faa735ab8547c5692e87f464", size = 75949, upload-time = "2025-10-01T02:14:41.687Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/17/69/cd203477f944c353c31bade965f880aa1061fd6bf05ded0726ca845b6ff7/typing_inspection-0.4.1-py3-none-any.whl", hash = "sha256:389055682238f53b04f7badcb49b989835495a96700ced5dab2d8feae4b26f51", size = 14552, upload-time = "2025-05-21T18:55:22.152Z" }, + { url = "https://files.pythonhosted.org/packages/dc/9b/47798a6c91d8bdb567fe2698fe81e0c6b7cb7ef4d13da4114b41d239f65d/typing_inspection-0.4.2-py3-none-any.whl", hash = "sha256:4ed1cacbdc298c220f1bd249ed5287caa16f34d44ef4e9c3d0cbad5b521545e7", size = 14611, upload-time = "2025-10-01T02:14:40.154Z" }, ] [[package]] @@ -1755,15 +1802,15 @@ wheels = [ [[package]] name = "uvicorn" -version = "0.35.0" +version = "0.37.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "click" }, { name = "h11" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/5e/42/e0e305207bb88c6b8d3061399c6a961ffe5fbb7e2aa63c9234df7259e9cd/uvicorn-0.35.0.tar.gz", hash = "sha256:bc662f087f7cf2ce11a1d7fd70b90c9f98ef2e2831556dd078d131b96cc94a01", size = 78473, upload-time = "2025-06-28T16:15:46.058Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/71/57/1616c8274c3442d802621abf5deb230771c7a0fec9414cb6763900eb3868/uvicorn-0.37.0.tar.gz", hash = "sha256:4115c8add6d3fd536c8ee77f0e14a7fd2ebba939fed9b02583a97f80648f9e13", size = 80367, upload-time = "2025-09-23T13:33:47.486Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/d2/e2/dc81b1bd1dcfe91735810265e9d26bc8ec5da45b4c0f6237e286819194c3/uvicorn-0.35.0-py3-none-any.whl", hash = "sha256:197535216b25ff9b785e29a0b79199f55222193d47f820816e7da751e9bc8d4a", size = 66406, upload-time = "2025-06-28T16:15:44.816Z" }, + { url = "https://files.pythonhosted.org/packages/85/cd/584a2ceb5532af99dd09e50919e3615ba99aa127e9850eafe5f31ddfdb9a/uvicorn-0.37.0-py3-none-any.whl", hash = "sha256:913b2b88672343739927ce381ff9e2ad62541f9f8289664fa1d1d3803fa2ce6c", size = 67976, upload-time = "2025-09-23T13:33:45.842Z" }, ] [package.optional-dependencies] From 1e775c9a36d8a8f8d656f57ea35c89fbe64ff5fd Mon Sep 17 00:00:00 2001 From: MingxuanGame Date: Fri, 3 Oct 2025 13:25:31 +0000 Subject: [PATCH 07/26] fix(log): don't show traceback in log --- app/log.py | 4 ++- app/middleware/verify_session.py | 56 ++++++++++++++------------------ 2 files changed, 28 insertions(+), 32 deletions(-) diff --git a/app/log.py b/app/log.py index e171a0d..ba0ef95 100644 --- a/app/log.py +++ b/app/log.py @@ -200,7 +200,9 @@ def dynamic_format(record): real_name = record["extra"].get("real_name", "") or record["name"] name = f"{real_name}" - return f"{{time:YYYY-MM-DD HH:mm:ss}} [{{level}}] | {name} | {{message}}\n" + return ( + f"{{time:YYYY-MM-DD HH:mm:ss}} [{{level}}] | {name} | {{message}}{{exception}}\n" + ) logger.remove() diff --git a/app/middleware/verify_session.py b/app/middleware/verify_session.py index 2a9c911..58ed754 100644 --- a/app/middleware/verify_session.py +++ b/app/middleware/verify_session.py @@ -57,39 +57,33 @@ class VerifySessionMiddleware(BaseHTTPMiddleware): async def dispatch(self, request: Request, call_next: Callable) -> 
Response: """中间件主处理逻辑""" - try: - # 检查是否跳过验证 - if self._should_skip_verification(request): - return await call_next(request) - - # 获取当前用户 - user = await self._get_current_user(request) - if not user: - # 未登录用户跳过验证 - return await call_next(request) - - # 获取会话状态 - session_state = await self._get_session_state(request, user) - if not session_state: - # 无会话状态,继续请求 - return await call_next(request) - - # 检查是否已验证 - if session_state.is_verified(): - return await call_next(request) - - # 检查是否需要验证 - if not self._requires_verification(request, user): - return await call_next(request) - - # 启动验证流程 - return await self._initiate_verification(request, session_state) - - except Exception as e: - logger.error(f"Error: {e}") - # 出错时允许请求继续,避免阻塞 + # 检查是否跳过验证 + if self._should_skip_verification(request): return await call_next(request) + # 获取当前用户 + user = await self._get_current_user(request) + if not user: + # 未登录用户跳过验证 + return await call_next(request) + + # 获取会话状态 + session_state = await self._get_session_state(request, user) + if not session_state: + # 无会话状态,继续请求 + return await call_next(request) + + # 检查是否已验证 + if session_state.is_verified(): + return await call_next(request) + + # 检查是否需要验证 + if not self._requires_verification(request, user): + return await call_next(request) + + # 启动验证流程 + return await self._initiate_verification(request, session_state) + def _should_skip_verification(self, request: Request) -> bool: """检查是否应该跳过验证""" path = request.url.path From b10425ad9198f9b266f31f9f23ac4f9e93bd2fd8 Mon Sep 17 00:00:00 2001 From: MingxuanGame Date: Fri, 3 Oct 2025 13:40:18 +0000 Subject: [PATCH 08/26] refactor(service): remove unused services --- app/service/message_queue.py | 214 ------------------ app/service/message_queue_processor.py | 290 ------------------------- app/service/optimized_message.py | 148 ------------- 3 files changed, 652 deletions(-) delete mode 100644 app/service/message_queue.py delete mode 100644 app/service/message_queue_processor.py delete mode 
100644 app/service/optimized_message.py diff --git a/app/service/message_queue.py b/app/service/message_queue.py deleted file mode 100644 index 4f1c0f1..0000000 --- a/app/service/message_queue.py +++ /dev/null @@ -1,214 +0,0 @@ -""" -Redis 消息队列服务 -用于实现实时消息推送和异步数据库持久化 -""" - -from __future__ import annotations - -import asyncio -import concurrent.futures -from datetime import datetime -import uuid - -from app.database.chat import ChatMessage, MessageType -from app.dependencies.database import get_redis, with_db -from app.log import logger -from app.utils import bg_tasks - - -class MessageQueue: - """Redis 消息队列服务""" - - def __init__(self): - self.redis = get_redis() - self._processing = False - self._batch_size = 50 # 批量处理大小 - self._batch_timeout = 1.0 # 批量处理超时时间(秒) - self._executor = concurrent.futures.ThreadPoolExecutor(max_workers=4) - - async def _run_in_executor(self, func, *args): - """在线程池中运行同步 Redis 操作""" - loop = asyncio.get_event_loop() - return await loop.run_in_executor(self._executor, func, *args) - - async def start_processing(self): - """启动消息处理任务""" - if not self._processing: - self._processing = True - bg_tasks.add_task(self._process_message_queue) - logger.info("Message queue processing started") - - async def stop_processing(self): - """停止消息处理""" - self._processing = False - logger.info("Message queue processing stopped") - - async def enqueue_message(self, message_data: dict) -> str: - """ - 将消息加入 Redis 队列(实时响应) - - Args: - message_data: 消息数据字典,包含所有必要的字段 - - Returns: - 消息的临时 UUID - """ - # 生成临时 UUID - temp_uuid = str(uuid.uuid4()) - message_data["temp_uuid"] = temp_uuid - message_data["timestamp"] = datetime.now().isoformat() - message_data["status"] = "pending" # pending, processing, completed, failed - - # 将消息存储到 Redis - await self._run_in_executor(lambda: self.redis.hset(f"msg:{temp_uuid}", mapping=message_data)) - await self._run_in_executor(self.redis.expire, f"msg:{temp_uuid}", 3600) # 1小时过期 - - # 加入处理队列 - await 
self._run_in_executor(self.redis.lpush, "message_queue", temp_uuid) - - logger.info(f"Message enqueued with temp_uuid: {temp_uuid}") - return temp_uuid - - async def get_message_status(self, temp_uuid: str) -> dict | None: - """获取消息状态""" - message_data = await self._run_in_executor(self.redis.hgetall, f"msg:{temp_uuid}") - if not message_data: - return None - - return message_data - - async def get_cached_messages(self, channel_id: int, limit: int = 50, since: int = 0) -> list[dict]: - """ - 从 Redis 获取缓存的消息 - - Args: - channel_id: 频道 ID - limit: 限制数量 - since: 获取自此消息 ID 之后的消息 - - Returns: - 消息列表 - """ - # 从 Redis 获取频道最近的消息 UUID 列表 - message_uuids = await self._run_in_executor(self.redis.lrange, f"channel:{channel_id}:messages", 0, limit - 1) - - messages = [] - for uuid_str in message_uuids: - message_data = await self._run_in_executor(self.redis.hgetall, f"msg:{uuid_str}") - if message_data: - # 检查是否满足 since 条件 - if since > 0 and "message_id" in message_data: - if int(message_data["message_id"]) <= since: - continue - - messages.append(message_data) - - return messages[::-1] # 返回时间顺序 - - async def cache_channel_message(self, channel_id: int, temp_uuid: str, max_cache: int = 100): - """将消息 UUID 缓存到频道消息列表""" - # 添加到频道消息列表开头 - await self._run_in_executor(self.redis.lpush, f"channel:{channel_id}:messages", temp_uuid) - # 限制缓存大小 - await self._run_in_executor(self.redis.ltrim, f"channel:{channel_id}:messages", 0, max_cache - 1) - # 设置过期时间(24小时) - await self._run_in_executor(self.redis.expire, f"channel:{channel_id}:messages", 86400) - - async def _process_message_queue(self): - """异步处理消息队列,批量写入数据库""" - while self._processing: - try: - # 批量获取消息 - message_uuids = [] - for _ in range(self._batch_size): - result = await self._run_in_executor(lambda: self.redis.brpop(["message_queue"], timeout=1)) - if result: - message_uuids.append(result[1]) - else: - break - - if message_uuids: - await self._process_message_batch(message_uuids) - else: - # 没有消息时短暂等待 - await 
asyncio.sleep(0.1) - - except Exception as e: - logger.error(f"Error processing message queue: {e}") - await asyncio.sleep(1) # 错误时等待1秒再重试 - - async def _process_message_batch(self, message_uuids: list[str]): - """批量处理消息写入数据库""" - async with with_db() as session: - messages_to_insert = [] - - for temp_uuid in message_uuids: - try: - # 获取消息数据 - message_data = await self._run_in_executor(self.redis.hgetall, f"msg:{temp_uuid}") - if not message_data: - continue - - # 更新状态为处理中 - await self._run_in_executor(self.redis.hset, f"msg:{temp_uuid}", "status", "processing") - - # 创建数据库消息对象 - msg = ChatMessage( - channel_id=int(message_data["channel_id"]), - content=message_data["content"], - sender_id=int(message_data["sender_id"]), - type=MessageType(message_data["type"]), - uuid=message_data.get("user_uuid"), # 用户提供的 UUID(如果有) - ) - - messages_to_insert.append((msg, temp_uuid)) - - except Exception as e: - logger.error(f"Error preparing message {temp_uuid}: {e}") - await self._run_in_executor(self.redis.hset, f"msg:{temp_uuid}", "status", "failed") - - if messages_to_insert: - try: - # 批量插入数据库 - for msg, temp_uuid in messages_to_insert: - session.add(msg) - - await session.commit() - - # 更新所有消息状态和真实 ID - for msg, temp_uuid in messages_to_insert: - await session.refresh(msg) - await self._run_in_executor( - lambda: self.redis.hset( - f"msg:{temp_uuid}", - mapping={ - "status": "completed", - "message_id": str(msg.message_id), - "created_at": msg.timestamp.isoformat() if msg.timestamp else "", - }, - ) - ) - - logger.info(f"Message {temp_uuid} persisted to DB with ID {msg.message_id}") - - except Exception as e: - logger.error(f"Error inserting messages to database: {e}") - await session.rollback() - - # 标记所有消息为失败 - for _, temp_uuid in messages_to_insert: - await self._run_in_executor(self.redis.hset, f"msg:{temp_uuid}", "status", "failed") - - -# 全局消息队列实例 -message_queue = MessageQueue() - - -async def start_message_queue(): - """启动消息队列处理""" - await 
message_queue.start_processing() - - -async def stop_message_queue(): - """停止消息队列处理""" - await message_queue.stop_processing() diff --git a/app/service/message_queue_processor.py b/app/service/message_queue_processor.py deleted file mode 100644 index 199d8ee..0000000 --- a/app/service/message_queue_processor.py +++ /dev/null @@ -1,290 +0,0 @@ -""" -消息队列处理服务 -专门处理 Redis 消息队列的异步写入数据库 -""" - -from __future__ import annotations - -import asyncio -from concurrent.futures import ThreadPoolExecutor -from datetime import datetime -import json - -from app.database.chat import ChatMessage, MessageType -from app.dependencies.database import get_redis_message, with_db -from app.log import logger - - -class MessageQueueProcessor: - """消息队列处理器""" - - def __init__(self): - self.redis_message = get_redis_message() - self.executor = ThreadPoolExecutor(max_workers=2) - self._processing = False - self._queue_task = None - - async def _redis_exec(self, func, *args, **kwargs): - """在线程池中执行 Redis 操作""" - loop = asyncio.get_event_loop() - return await loop.run_in_executor(self.executor, lambda: func(*args, **kwargs)) - - async def cache_message(self, channel_id: int, message_data: dict, temp_uuid: str): - """将消息缓存到 Redis""" - try: - # 存储消息数据 - await self._redis_exec(self.redis_message.hset, f"msg:{temp_uuid}", mapping=message_data) - await self._redis_exec(self.redis_message.expire, f"msg:{temp_uuid}", 3600) # 1小时过期 - - # 加入频道消息列表 - await self._redis_exec(self.redis_message.lpush, f"channel:{channel_id}:messages", temp_uuid) - await self._redis_exec(self.redis_message.ltrim, f"channel:{channel_id}:messages", 0, 99) # 保持最新100条 - await self._redis_exec(self.redis_message.expire, f"channel:{channel_id}:messages", 86400) # 24小时过期 - - # 加入异步处理队列 - await self._redis_exec(self.redis_message.lpush, "message_write_queue", temp_uuid) - - logger.info(f"Message cached to Redis: {temp_uuid}") - except Exception as e: - logger.error(f"Failed to cache message to Redis: {e}") - - async def 
get_cached_messages(self, channel_id: int, limit: int = 50, since: int = 0) -> list[dict]: - """从 Redis 获取缓存的消息""" - try: - message_uuids = await self._redis_exec( - self.redis_message.lrange, - f"channel:{channel_id}:messages", - 0, - limit - 1, - ) - - messages = [] - for temp_uuid in message_uuids: - # 解码 UUID 如果它是字节类型 - if isinstance(temp_uuid, bytes): - temp_uuid = temp_uuid.decode("utf-8") - - raw_data = await self._redis_exec(self.redis_message.hgetall, f"msg:{temp_uuid}") - if raw_data: - # 解码 Redis 返回的字节数据 - message_data = { - k.decode("utf-8") if isinstance(k, bytes) else k: v.decode("utf-8") - if isinstance(v, bytes) - else v - for k, v in raw_data.items() - } - - # 检查 since 条件 - if since > 0 and message_data.get("message_id"): - if int(message_data["message_id"]) <= since: - continue - messages.append(message_data) - - return messages[::-1] # 按时间顺序返回 - except Exception as e: - logger.error(f"Failed to get cached messages: {e}") - return [] - - async def update_message_status(self, temp_uuid: str, status: str, message_id: int | None = None): - """更新消息状态""" - try: - update_data = {"status": status} - if message_id: - update_data["message_id"] = str(message_id) - update_data["db_timestamp"] = datetime.now().isoformat() - - await self._redis_exec(self.redis_message.hset, f"msg:{temp_uuid}", mapping=update_data) - except Exception as e: - logger.error(f"Failed to update message status: {e}") - - async def get_message_status(self, temp_uuid: str) -> dict | None: - """获取消息状态""" - try: - raw_data = await self._redis_exec(self.redis_message.hgetall, f"msg:{temp_uuid}") - if not raw_data: - return None - - # 解码 Redis 返回的字节数据 - return { - k.decode("utf-8") if isinstance(k, bytes) else k: v.decode("utf-8") if isinstance(v, bytes) else v - for k, v in raw_data.items() - } - except Exception as e: - logger.error(f"Failed to get message status: {e}") - return None - - async def _process_message_queue(self): - """处理消息队列,异步写入数据库""" - logger.info("Message queue 
processing started") - - while self._processing: - try: - # 批量获取消息 - message_uuids = [] - for _ in range(20): # 批量处理20条消息 - result = await self._redis_exec(self.redis_message.brpop, ["message_write_queue"], timeout=1) - if result: - # result是 (queue_name, value) 的元组,需要解码 - uuid_value = result[1] - if isinstance(uuid_value, bytes): - uuid_value = uuid_value.decode("utf-8") - message_uuids.append(uuid_value) - else: - break - - if not message_uuids: - await asyncio.sleep(0.5) - continue - - # 批量写入数据库 - await self._process_message_batch(message_uuids) - - except Exception as e: - logger.error(f"Error in message queue processing: {e}") - await asyncio.sleep(1) - - logger.info("Message queue processing stopped") - - async def _process_message_batch(self, message_uuids: list[str]): - """批量处理消息写入数据库""" - async with with_db() as session: - for temp_uuid in message_uuids: - try: - # 获取消息数据并解码 - raw_data = await self._redis_exec(self.redis_message.hgetall, f"msg:{temp_uuid}") - if not raw_data: - continue - - # 解码 Redis 返回的字节数据 - message_data = { - k.decode("utf-8") if isinstance(k, bytes) else k: v.decode("utf-8") - if isinstance(v, bytes) - else v - for k, v in raw_data.items() - } - - if message_data.get("status") != "pending": - continue - - # 更新状态为处理中 - await self.update_message_status(temp_uuid, "processing") - - # 创建数据库消息 - msg = ChatMessage( - channel_id=int(message_data["channel_id"]), - content=message_data["content"], - sender_id=int(message_data["sender_id"]), - type=MessageType(message_data["type"]), - uuid=message_data.get("user_uuid") or None, - ) - - session.add(msg) - await session.commit() - await session.refresh(msg) - - # 更新成功状态,包含临时消息ID映射 - await self.update_message_status(temp_uuid, "completed", msg.message_id) - - # 如果有临时消息ID,存储映射关系并通知客户端更新 - if message_data.get("temp_message_id"): - temp_msg_id = int(message_data["temp_message_id"]) - await self._redis_exec( - self.redis_message.set, - f"temp_to_real:{temp_msg_id}", - str(msg.message_id), - ex=3600, # 
1小时过期 - ) - - # 发送消息ID更新通知到频道 - channel_id = int(message_data["channel_id"]) - await self._notify_message_update(channel_id, temp_msg_id, msg.message_id, message_data) - - logger.info( - f"Message {temp_uuid} persisted to DB with ID {msg.message_id}, " - f"temp_id: {message_data.get('temp_message_id')}" - ) - - except Exception as e: - logger.error(f"Failed to process message {temp_uuid}: {e}") - await self.update_message_status(temp_uuid, "failed") - - async def _notify_message_update( - self, - channel_id: int, - temp_message_id: int, - real_message_id: int, - message_data: dict, - ): - """通知客户端消息ID已更新""" - try: - # 通过 Redis 发布消息更新事件,由聊天通知服务分发到客户端 - update_event = { - "event": "chat.message.update", - "data": { - "channel_id": channel_id, - "temp_message_id": temp_message_id, - "real_message_id": real_message_id, - "timestamp": message_data.get("timestamp"), - }, - } - - await self._redis_exec( - self.redis_message.publish, - f"chat_updates:{channel_id}", - json.dumps(update_event), - ) - - logger.info(f"Published message update: temp_id={temp_message_id}, real_id={real_message_id}") - - except Exception as e: - logger.error(f"Failed to notify message update: {e}") - - def start_processing(self): - """启动消息队列处理""" - if not self._processing: - self._processing = True - self._queue_task = asyncio.create_task(self._process_message_queue()) - logger.info("Message queue processor started") - - def stop_processing(self): - """停止消息队列处理""" - if self._processing: - self._processing = False - if self._queue_task: - self._queue_task.cancel() - self._queue_task = None - logger.info("Message queue processor stopped") - - def __del__(self): - """清理资源""" - if hasattr(self, "executor"): - self.executor.shutdown(wait=False) - - -# 全局消息队列处理器实例 -message_queue_processor = MessageQueueProcessor() - - -def start_message_processing(): - """启动消息队列处理""" - message_queue_processor.start_processing() - - -def stop_message_processing(): - """停止消息队列处理""" - 
message_queue_processor.stop_processing() - - -async def cache_message_to_redis(channel_id: int, message_data: dict, temp_uuid: str): - """将消息缓存到 Redis - 便捷接口""" - await message_queue_processor.cache_message(channel_id, message_data, temp_uuid) - - -async def get_cached_messages(channel_id: int, limit: int = 50, since: int = 0) -> list[dict]: - """从 Redis 获取缓存的消息 - 便捷接口""" - return await message_queue_processor.get_cached_messages(channel_id, limit, since) - - -async def get_message_status(temp_uuid: str) -> dict | None: - """获取消息状态 - 便捷接口""" - return await message_queue_processor.get_message_status(temp_uuid) diff --git a/app/service/optimized_message.py b/app/service/optimized_message.py deleted file mode 100644 index c68f5c1..0000000 --- a/app/service/optimized_message.py +++ /dev/null @@ -1,148 +0,0 @@ -""" -优化的消息服务 -结合 Redis 缓存和异步数据库写入实现实时消息传送 -""" - -from __future__ import annotations - -from app.database.chat import ( - ChannelType, - ChatMessageResp, - MessageType, -) -from app.database.user import User -from app.log import logger -from app.service.message_queue import message_queue - -from sqlalchemy.ext.asyncio import AsyncSession - - -class OptimizedMessageService: - """优化的消息服务""" - - def __init__(self): - self.message_queue = message_queue - - async def send_message_fast( - self, - channel_id: int, - channel_type: ChannelType, - channel_name: str, - content: str, - sender: User, - is_action: bool = False, - user_uuid: str | None = None, - session: AsyncSession | None = None, - ) -> ChatMessageResp: - """ - 快速发送消息(先缓存到 Redis,异步写入数据库) - - Args: - channel_id: 频道 ID - channel_type: 频道类型 - channel_name: 频道名称 - content: 消息内容 - sender: 发送者 - is_action: 是否为动作消息 - user_uuid: 用户提供的 UUID - session: 数据库会话(可选,用于一些验证) - - Returns: - 消息响应对象 - """ - - # 准备消息数据 - message_data = { - "channel_id": str(channel_id), - "content": content, - "sender_id": str(sender.id), - "type": MessageType.ACTION.value if is_action else MessageType.PLAIN.value, - "user_uuid": user_uuid or 
"", - "channel_type": channel_type.value, - "channel_name": channel_name, - } - - # 立即将消息加入 Redis 队列(实时响应) - temp_uuid = await self.message_queue.enqueue_message(message_data) - - # 缓存到频道消息列表 - await self.message_queue.cache_channel_message(channel_id, temp_uuid) - - # 创建临时响应对象(简化版本,用于立即响应) - from datetime import datetime - - from app.database.user import UserResp - - # 创建基本的用户响应对象 - user_resp = UserResp( - id=sender.id, - username=sender.username, - country_code=getattr(sender, "country_code", "XX"), - # 基本字段,其他复杂字段可以后续异步加载 - ) - - temp_response = ChatMessageResp( - message_id=0, # 临时 ID,等数据库写入后会更新 - channel_id=channel_id, - content=content, - timestamp=datetime.now(), - sender_id=sender.id, - sender=user_resp, - is_action=is_action, - uuid=user_uuid, - ) - temp_response.temp_uuid = temp_uuid # 添加临时 UUID 用于后续更新 - - logger.info(f"Message sent to channel {channel_id} with temp_uuid {temp_uuid}") - return temp_response - - async def get_cached_messages(self, channel_id: int, limit: int = 50, since: int = 0) -> list[dict]: - """ - 获取缓存的消息 - - Args: - channel_id: 频道 ID - limit: 限制数量 - since: 获取自此消息 ID 之后的消息 - - Returns: - 消息列表 - """ - return await self.message_queue.get_cached_messages(channel_id, limit, since) - - async def get_message_status(self, temp_uuid: str) -> dict | None: - """ - 获取消息状态 - - Args: - temp_uuid: 临时消息 UUID - - Returns: - 消息状态信息 - """ - return await self.message_queue.get_message_status(temp_uuid) - - async def wait_for_message_persisted(self, temp_uuid: str, timeout: int = 30) -> dict | None: # noqa: ASYNC109 - """ - 等待消息持久化到数据库 - - Args: - temp_uuid: 临时消息 UUID - timeout: 超时时间(秒) - - Returns: - 完成后的消息状态 - """ - import asyncio - - for _ in range(timeout * 10): # 每100ms检查一次 - status = await self.get_message_status(temp_uuid) - if status and status.get("status") in ["completed", "failed"]: - return status - await asyncio.sleep(0.1) - - return None - - -# 全局优化消息服务实例 -optimized_message_service = OptimizedMessageService() From 
d490239f46256589f0c266e08c98ee8a3e31e087 Mon Sep 17 00:00:00 2001 From: MingxuanGame Date: Fri, 3 Oct 2025 15:46:53 +0000 Subject: [PATCH 09/26] chore(linter): update ruff rules --- app/achievements/daily_challenge.py | 8 +- app/achievements/hush_hush.py | 25 +- app/achievements/mods.py | 4 +- app/achievements/osu_combo.py | 6 +- app/achievements/osu_playcount.py | 6 +- app/achievements/skill.py | 4 +- app/achievements/total_hits.py | 6 +- app/auth.py | 13 +- app/calculator.py | 10 +- app/config.py | 20 +- app/database/beatmap.py | 9 +- app/database/beatmapset.py | 11 +- app/database/chat.py | 14 +- app/database/score.py | 11 +- app/database/user.py | 16 +- app/dependencies/param.py | 2 +- app/dependencies/user.py | 5 +- app/exceptions/__init__.py | 0 app/fetcher/beatmapset.py | 32 +- app/helpers/__init__.py | 0 app/helpers/geoip_helper.py | 305 ++++++++++-------- app/log.py | 4 +- app/middleware/verify_session.py | 9 +- app/models/extended_auth.py | 13 +- app/models/notification.py | 1 + app/models/oauth.py | 6 +- app/models/v1_user.py | 8 +- app/router/auth.py | 19 +- app/router/lio.py | 2 +- app/router/notification/banchobot.py | 10 +- app/router/notification/server.py | 5 +- app/router/redirect.py | 2 +- app/router/v1/beatmap.py | 5 +- app/router/v2/beatmap.py | 5 +- app/router/v2/beatmapset.py | 13 +- app/router/v2/room.py | 1 - app/router/v2/score.py | 5 +- app/router/v2/session_verify.py | 23 +- app/router/v2/tags.py | 11 +- app/router/v2/user.py | 12 +- app/service/asset_proxy_helper.py | 4 +- app/service/beatmap_cache_service.py | 1 + app/service/beatmapset_cache_service.py | 7 +- app/service/beatmapset_update_service.py | 8 +- app/service/login_log_service.py | 6 +- app/service/password_reset_service.py | 2 +- app/service/ranking_cache_service.py | 11 +- app/service/subscribers/__init__.py | 0 app/service/subscribers/chat.py | 6 +- app/service/user_cache_service.py | 1 + app/service/verification_service.py | 4 - app/tasks/cache.py | 24 +- 
app/tasks/geoip.py | 23 +- app/tasks/osu_rx_statistics.py | 6 +- app/utils.py | 5 +- main.py | 13 +- ...2_d103d442dc24_add_password_reset_table.py | 1 + pyproject.toml | 17 +- tools/fix_user_rank_event.py | 18 +- 59 files changed, 393 insertions(+), 425 deletions(-) create mode 100644 app/exceptions/__init__.py create mode 100644 app/helpers/__init__.py create mode 100644 app/service/subscribers/__init__.py diff --git a/app/achievements/daily_challenge.py b/app/achievements/daily_challenge.py index b2b3f4c..fcca594 100644 --- a/app/achievements/daily_challenge.py +++ b/app/achievements/daily_challenge.py @@ -32,11 +32,9 @@ async def process_streak( ).first() if not stats: return False - if streak <= stats.daily_streak_best < next_streak: - return True - elif next_streak == 0 and stats.daily_streak_best >= streak: - return True - return False + return bool( + streak <= stats.daily_streak_best < next_streak or (next_streak == 0 and stats.daily_streak_best >= streak) + ) MEDALS = { diff --git a/app/achievements/hush_hush.py b/app/achievements/hush_hush.py index a887693..bc9c53f 100644 --- a/app/achievements/hush_hush.py +++ b/app/achievements/hush_hush.py @@ -68,9 +68,7 @@ async def to_the_core( if ("Nightcore" not in beatmap.beatmapset.title) and "Nightcore" not in beatmap.beatmapset.artist: return False mods_ = mod_to_save(score.mods) - if "DT" not in mods_ or "NC" not in mods_: - return False - return True + return not ("DT" not in mods_ or "NC" not in mods_) async def wysi( @@ -83,9 +81,7 @@ async def wysi( return False if str(round(score.accuracy, ndigits=4))[3:] != "727": return False - if "xi" not in beatmap.beatmapset.artist: - return False - return True + return "xi" in beatmap.beatmapset.artist async def prepared( @@ -97,9 +93,7 @@ async def prepared( if score.rank != Rank.X and score.rank != Rank.XH: return False mods_ = mod_to_save(score.mods) - if "NF" not in mods_: - return False - return True + return "NF" in mods_ async def reckless_adandon( @@ -117,9 
+111,7 @@ async def reckless_adandon( redis = get_redis() mods_ = score.mods.copy() attribute = await calculate_beatmap_attributes(beatmap.id, score.gamemode, mods_, redis, fetcher) - if attribute.star_rating < 3: - return False - return True + return not attribute.star_rating < 3 async def lights_out( @@ -413,11 +405,10 @@ async def by_the_skin_of_the_teeth( return False for mod in score.mods: - if mod.get("acronym") == "AC": - if "settings" in mod and "minimum_accuracy" in mod["settings"]: - target_accuracy = mod["settings"]["minimum_accuracy"] - if isinstance(target_accuracy, int | float): - return abs(score.accuracy - float(target_accuracy)) < 0.0001 + if mod.get("acronym") == "AC" and "settings" in mod and "minimum_accuracy" in mod["settings"]: + target_accuracy = mod["settings"]["minimum_accuracy"] + if isinstance(target_accuracy, int | float): + return abs(score.accuracy - float(target_accuracy)) < 0.0001 return False diff --git a/app/achievements/mods.py b/app/achievements/mods.py index d157a15..0f3c728 100644 --- a/app/achievements/mods.py +++ b/app/achievements/mods.py @@ -19,9 +19,7 @@ async def process_mod( return False if not beatmap.beatmap_status.has_leaderboard(): return False - if len(score.mods) != 1 or score.mods[0]["acronym"] != mod: - return False - return True + return not (len(score.mods) != 1 or score.mods[0]["acronym"] != mod) async def process_category_mod( diff --git a/app/achievements/osu_combo.py b/app/achievements/osu_combo.py index 25abbbd..7039bae 100644 --- a/app/achievements/osu_combo.py +++ b/app/achievements/osu_combo.py @@ -22,11 +22,7 @@ async def process_combo( return False if next_combo != 0 and combo >= next_combo: return False - if combo <= score.max_combo < next_combo: - return True - elif next_combo == 0 and score.max_combo >= combo: - return True - return False + return bool(combo <= score.max_combo < next_combo or (next_combo == 0 and score.max_combo >= combo)) MEDALS: Medals = { diff --git 
a/app/achievements/osu_playcount.py b/app/achievements/osu_playcount.py index 934e1c0..b5e1e9b 100644 --- a/app/achievements/osu_playcount.py +++ b/app/achievements/osu_playcount.py @@ -35,11 +35,7 @@ async def process_playcount( ).first() if not stats: return False - if pc <= stats.play_count < next_pc: - return True - elif next_pc == 0 and stats.play_count >= pc: - return True - return False + return bool(pc <= stats.play_count < next_pc or (next_pc == 0 and stats.play_count >= pc)) MEDALS: Medals = { diff --git a/app/achievements/skill.py b/app/achievements/skill.py index 66123cc..43993d8 100644 --- a/app/achievements/skill.py +++ b/app/achievements/skill.py @@ -47,9 +47,7 @@ async def process_skill( attribute = await calculate_beatmap_attributes(beatmap.id, score.gamemode, mods_, redis, fetcher) if attribute.star_rating < star or attribute.star_rating >= star + 1: return False - if type == "fc" and not score.is_perfect_combo: - return False - return True + return not (type == "fc" and not score.is_perfect_combo) MEDALS: Medals = { diff --git a/app/achievements/total_hits.py b/app/achievements/total_hits.py index 5f3d13d..93fb2c5 100644 --- a/app/achievements/total_hits.py +++ b/app/achievements/total_hits.py @@ -35,11 +35,7 @@ async def process_tth( ).first() if not stats: return False - if tth <= stats.total_hits < next_tth: - return True - elif next_tth == 0 and stats.play_count >= tth: - return True - return False + return bool(tth <= stats.total_hits < next_tth or (next_tth == 0 and stats.play_count >= tth)) MEDALS: Medals = { diff --git a/app/auth.py b/app/auth.py index 6e43f8b..8bef0a8 100644 --- a/app/auth.py +++ b/app/auth.py @@ -69,7 +69,7 @@ def verify_password_legacy(plain_password: str, bcrypt_hash: str) -> bool: 2. MD5哈希 -> bcrypt验证 """ # 1. 明文密码转 MD5 - pw_md5 = hashlib.md5(plain_password.encode()).hexdigest().encode() + pw_md5 = hashlib.md5(plain_password.encode()).hexdigest().encode() # noqa: S324 # 2. 
检查缓存 if bcrypt_hash in bcrypt_cache: @@ -103,7 +103,7 @@ def verify_password(plain_password: str, hashed_password: str) -> bool: def get_password_hash(password: str) -> str: """生成密码哈希 - 使用 osu! 的方式""" # 1. 明文密码 -> MD5 - pw_md5 = hashlib.md5(password.encode()).hexdigest().encode() + pw_md5 = hashlib.md5(password.encode()).hexdigest().encode() # noqa: S324 # 2. MD5 -> bcrypt pw_bcrypt = bcrypt.hashpw(pw_md5, bcrypt.gensalt()) return pw_bcrypt.decode() @@ -114,7 +114,7 @@ async def authenticate_user_legacy(db: AsyncSession, name: str, password: str) - 验证用户身份 - 使用类似 from_login 的逻辑 """ # 1. 明文密码转 MD5 - pw_md5 = hashlib.md5(password.encode()).hexdigest() + pw_md5 = hashlib.md5(password.encode()).hexdigest() # noqa: S324 # 2. 根据用户名查找用户 user = None @@ -325,12 +325,7 @@ def _generate_totp_account_label(user: User) -> str: 根据配置选择使用用户名或邮箱,并添加服务器信息使标签更具描述性 """ - if settings.totp_use_username_in_label: - # 使用用户名作为主要标识 - primary_identifier = user.username - else: - # 使用邮箱作为标识 - primary_identifier = user.email + primary_identifier = user.username if settings.totp_use_username_in_label else user.email # 如果配置了服务名称,添加到标签中以便在认证器中区分 if settings.totp_service_name: diff --git a/app/calculator.py b/app/calculator.py index a408fcf..7c9eb7c 100644 --- a/app/calculator.py +++ b/app/calculator.py @@ -419,9 +419,8 @@ def too_dense(hit_objects: list[HitObject], per_1s: int, per_10s: int) -> bool: if len(hit_objects) > i + per_1s: if hit_objects[i + per_1s].start_time - hit_objects[i].start_time < 1000: return True - elif len(hit_objects) > i + per_10s: - if hit_objects[i + per_10s].start_time - hit_objects[i].start_time < 10000: - return True + elif len(hit_objects) > i + per_10s and hit_objects[i + per_10s].start_time - hit_objects[i].start_time < 10000: + return True return False @@ -448,10 +447,7 @@ def slider_is_sus(hit_objects: list[HitObject]) -> bool: def is_2b(hit_objects: list[HitObject]) -> bool: - for i in range(0, len(hit_objects) - 1): - if hit_objects[i] == hit_objects[i + 
1].start_time: - return True - return False + return any(hit_objects[i] == hit_objects[i + 1].start_time for i in range(0, len(hit_objects) - 1)) def is_suspicious_beatmap(content: str) -> bool: diff --git a/app/config.py b/app/config.py index adb7794..b09c233 100644 --- a/app/config.py +++ b/app/config.py @@ -217,7 +217,7 @@ STORAGE_SETTINGS='{ # 服务器设置 host: Annotated[ str, - Field(default="0.0.0.0", description="服务器监听地址"), + Field(default="0.0.0.0", description="服务器监听地址"), # noqa: S104 "服务器设置", ] port: Annotated[ @@ -609,26 +609,26 @@ STORAGE_SETTINGS='{ ] @field_validator("fetcher_scopes", mode="before") + @classmethod def validate_fetcher_scopes(cls, v: Any) -> list[str]: if isinstance(v, str): return v.split(",") return v @field_validator("storage_settings", mode="after") + @classmethod def validate_storage_settings( cls, v: LocalStorageSettings | CloudflareR2Settings | AWSS3StorageSettings, info: ValidationInfo, ) -> LocalStorageSettings | CloudflareR2Settings | AWSS3StorageSettings: - if info.data.get("storage_service") == StorageServiceType.CLOUDFLARE_R2: - if not isinstance(v, CloudflareR2Settings): - raise ValueError("When storage_service is 'r2', storage_settings must be CloudflareR2Settings") - elif info.data.get("storage_service") == StorageServiceType.LOCAL: - if not isinstance(v, LocalStorageSettings): - raise ValueError("When storage_service is 'local', storage_settings must be LocalStorageSettings") - elif info.data.get("storage_service") == StorageServiceType.AWS_S3: - if not isinstance(v, AWSS3StorageSettings): - raise ValueError("When storage_service is 's3', storage_settings must be AWSS3StorageSettings") + service = info.data.get("storage_service") + if service == StorageServiceType.CLOUDFLARE_R2 and not isinstance(v, CloudflareR2Settings): + raise ValueError("When storage_service is 'r2', storage_settings must be CloudflareR2Settings") + if service == StorageServiceType.LOCAL and not isinstance(v, LocalStorageSettings): + raise 
ValueError("When storage_service is 'local', storage_settings must be LocalStorageSettings") + if service == StorageServiceType.AWS_S3 and not isinstance(v, AWSS3StorageSettings): + raise ValueError("When storage_service is 's3', storage_settings must be AWSS3StorageSettings") return v diff --git a/app/database/beatmap.py b/app/database/beatmap.py index ff920b1..029915f 100644 --- a/app/database/beatmap.py +++ b/app/database/beatmap.py @@ -71,10 +71,10 @@ class Beatmap(BeatmapBase, table=True): failtimes: FailTime | None = Relationship(back_populates="beatmap", sa_relationship_kwargs={"lazy": "joined"}) @classmethod - async def from_resp_no_save(cls, session: AsyncSession, resp: "BeatmapResp") -> "Beatmap": + async def from_resp_no_save(cls, _session: AsyncSession, resp: "BeatmapResp") -> "Beatmap": d = resp.model_dump() del d["beatmapset"] - beatmap = Beatmap.model_validate( + beatmap = cls.model_validate( { **d, "beatmapset_id": resp.beatmapset_id, @@ -90,8 +90,7 @@ class Beatmap(BeatmapBase, table=True): if not (await session.exec(select(exists()).where(Beatmap.id == resp.id))).first(): session.add(beatmap) await session.commit() - beatmap = (await session.exec(select(Beatmap).where(Beatmap.id == resp.id))).one() - return beatmap + return (await session.exec(select(Beatmap).where(Beatmap.id == resp.id))).one() @classmethod async def from_resp_batch(cls, session: AsyncSession, inp: list["BeatmapResp"], from_: int = 0) -> list["Beatmap"]: @@ -250,7 +249,7 @@ async def calculate_beatmap_attributes( redis: Redis, fetcher: "Fetcher", ): - key = f"beatmap:{beatmap_id}:{ruleset}:{hashlib.md5(str(mods_).encode()).hexdigest()}:attributes" + key = f"beatmap:{beatmap_id}:{ruleset}:{hashlib.sha256(str(mods_).encode()).hexdigest()}:attributes" if await redis.exists(key): return BeatmapAttributes.model_validate_json(await redis.get(key)) resp = await fetcher.get_or_fetch_beatmap_raw(redis, beatmap_id) diff --git a/app/database/beatmapset.py b/app/database/beatmapset.py index 
d2df25e..7829c8c 100644 --- a/app/database/beatmapset.py +++ b/app/database/beatmapset.py @@ -130,7 +130,7 @@ class Beatmapset(AsyncAttrs, BeatmapsetBase, table=True): favourites: list["FavouriteBeatmapset"] = Relationship(back_populates="beatmapset") @classmethod - async def from_resp_no_save(cls, session: AsyncSession, resp: "BeatmapsetResp", from_: int = 0) -> "Beatmapset": + async def from_resp_no_save(cls, resp: "BeatmapsetResp") -> "Beatmapset": d = resp.model_dump() if resp.nominations: d["nominations_required"] = resp.nominations.required @@ -158,10 +158,15 @@ class Beatmapset(AsyncAttrs, BeatmapsetBase, table=True): return beatmapset @classmethod - async def from_resp(cls, session: AsyncSession, resp: "BeatmapsetResp", from_: int = 0) -> "Beatmapset": + async def from_resp( + cls, + session: AsyncSession, + resp: "BeatmapsetResp", + from_: int = 0, + ) -> "Beatmapset": from .beatmap import Beatmap - beatmapset = await cls.from_resp_no_save(session, resp, from_=from_) + beatmapset = await cls.from_resp_no_save(resp) if not (await session.exec(select(exists()).where(Beatmapset.id == resp.id))).first(): session.add(beatmapset) await session.commit() diff --git a/app/database/chat.py b/app/database/chat.py index f0d1c47..b05c790 100644 --- a/app/database/chat.py +++ b/app/database/chat.py @@ -105,17 +105,11 @@ class ChatChannelResp(ChatChannelBase): ) ).first() - last_msg = await redis.get(f"chat:{channel.channel_id}:last_msg") - if last_msg and last_msg.isdigit(): - last_msg = int(last_msg) - else: - last_msg = None + last_msg_raw = await redis.get(f"chat:{channel.channel_id}:last_msg") + last_msg = int(last_msg_raw) if last_msg_raw and last_msg_raw.isdigit() else None - last_read_id = await redis.get(f"chat:{channel.channel_id}:last_read:{user.id}") - if last_read_id and last_read_id.isdigit(): - last_read_id = int(last_read_id) - else: - last_read_id = last_msg + last_read_id_raw = await redis.get(f"chat:{channel.channel_id}:last_read:{user.id}") + 
last_read_id = int(last_read_id_raw) if last_read_id_raw and last_read_id_raw.isdigit() else last_msg if silence is not None: attribute = ChatUserAttributes( diff --git a/app/database/score.py b/app/database/score.py index fd9668f..172e8a4 100644 --- a/app/database/score.py +++ b/app/database/score.py @@ -520,12 +520,11 @@ async def _score_where( wheres.append(col(BestScore.user).has(col(User.country_code) == user.country_code)) else: return None - elif type == LeaderboardType.TEAM: - if user: - team_membership = await user.awaitable_attrs.team_membership - if team_membership: - team_id = team_membership.team_id - wheres.append(col(BestScore.user).has(col(User.team_membership).has(TeamMember.team_id == team_id))) + elif type == LeaderboardType.TEAM and user: + team_membership = await user.awaitable_attrs.team_membership + if team_membership: + team_id = team_membership.team_id + wheres.append(col(BestScore.user).has(col(User.team_membership).has(TeamMember.team_id == team_id))) if mods: if user and user.is_supporter: wheres.append( diff --git a/app/database/user.py b/app/database/user.py index 053c1b0..fbc9eed 100644 --- a/app/database/user.py +++ b/app/database/user.py @@ -256,8 +256,6 @@ class UserResp(UserBase): session: AsyncSession, include: list[str] = [], ruleset: GameMode | None = None, - *, - token_id: int | None = None, ) -> "UserResp": from app.dependencies.database import get_redis @@ -310,16 +308,16 @@ class UserResp(UserBase): ).all() ] - if "team" in include: - if team_membership := await obj.awaitable_attrs.team_membership: - u.team = team_membership.team + if "team" in include and (team_membership := await obj.awaitable_attrs.team_membership): + u.team = team_membership.team if "account_history" in include: u.account_history = [UserAccountHistoryResp.from_db(ah) for ah in await obj.awaitable_attrs.account_history] - if "daily_challenge_user_stats": - if daily_challenge_stats := await obj.awaitable_attrs.daily_challenge_stats: - 
u.daily_challenge_user_stats = DailyChallengeStatsResp.from_db(daily_challenge_stats) + if "daily_challenge_user_stats" in include and ( + daily_challenge_stats := await obj.awaitable_attrs.daily_challenge_stats + ): + u.daily_challenge_user_stats = DailyChallengeStatsResp.from_db(daily_challenge_stats) if "statistics" in include: current_stattistics = None @@ -443,7 +441,7 @@ class MeResp(UserResp): from app.dependencies.database import get_redis from app.service.verification_service import LoginSessionService - u = await super().from_db(obj, session, ALL_INCLUDED, ruleset, token_id=token_id) + u = await super().from_db(obj, session, ALL_INCLUDED, ruleset) u.session_verified = ( not await LoginSessionService.check_is_need_verification(session, user_id=obj.id, token_id=token_id) if token_id diff --git a/app/dependencies/param.py b/app/dependencies/param.py index 174adde..9e640bd 100644 --- a/app/dependencies/param.py +++ b/app/dependencies/param.py @@ -7,7 +7,7 @@ from fastapi.exceptions import RequestValidationError from pydantic import BaseModel, ValidationError -def BodyOrForm[T: BaseModel](model: type[T]): +def BodyOrForm[T: BaseModel](model: type[T]): # noqa: N802 async def dependency( request: Request, ) -> T: diff --git a/app/dependencies/user.py b/app/dependencies/user.py index ff3ff53..7061bcb 100644 --- a/app/dependencies/user.py +++ b/app/dependencies/user.py @@ -119,10 +119,7 @@ async def get_client_user( if verify_method is None: # 智能选择验证方式(有TOTP优先TOTP) totp_key = await user.awaitable_attrs.totp_key - if totp_key is not None and api_version >= SUPPORT_TOTP_VERIFICATION_VER: - verify_method = "totp" - else: - verify_method = "mail" + verify_method = "totp" if totp_key is not None and api_version >= SUPPORT_TOTP_VERIFICATION_VER else "mail" # 设置选择的验证方法到Redis中,避免重复选择 if api_version >= 20250913: diff --git a/app/exceptions/__init__.py b/app/exceptions/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/app/fetcher/beatmapset.py 
b/app/fetcher/beatmapset.py index 8801fef..a67c6f6 100644 --- a/app/fetcher/beatmapset.py +++ b/app/fetcher/beatmapset.py @@ -116,7 +116,7 @@ class BeatmapsetFetcher(BaseFetcher): # 序列化为 JSON 并生成 MD5 哈希 cache_json = json.dumps(cache_data, sort_keys=True, separators=(",", ":")) - cache_hash = hashlib.md5(cache_json.encode()).hexdigest() + cache_hash = hashlib.md5(cache_json.encode(), usedforsecurity=False).hexdigest() logger.opt(colors=True).debug(f"[CacheKey] Query: {cache_data}, Hash: {cache_hash}") @@ -160,10 +160,10 @@ class BeatmapsetFetcher(BaseFetcher): cached_data = json.loads(cached_result) return SearchBeatmapsetsResp.model_validate(cached_data) except Exception as e: - logger.opt(colors=True).warning(f"Cache data invalid, fetching from API: {e}") + logger.warning(f"Cache data invalid, fetching from API: {e}") # 缓存未命中,从 API 获取数据 - logger.opt(colors=True).debug("Cache miss, fetching from API") + logger.debug("Cache miss, fetching from API") params = query.model_dump(exclude_none=True, exclude_unset=True, exclude_defaults=True) @@ -203,7 +203,7 @@ class BeatmapsetFetcher(BaseFetcher): try: await self.prefetch_next_pages(query, api_response["cursor"], redis_client, pages=1) except RateLimitError: - logger.opt(colors=True).info("Prefetch skipped due to rate limit") + logger.info("Prefetch skipped due to rate limit") bg_tasks.add_task(delayed_prefetch) @@ -227,14 +227,14 @@ class BeatmapsetFetcher(BaseFetcher): # 使用当前 cursor 请求下一页 next_query = query.model_copy() - logger.opt(colors=True).debug(f"Prefetching page {page + 1}") + logger.debug(f"Prefetching page {page + 1}") # 生成下一页的缓存键 next_cache_key = self._generate_cache_key(next_query, cursor) # 检查是否已经缓存 if await redis_client.exists(next_cache_key): - logger.opt(colors=True).debug(f"Page {page + 1} already cached") + logger.debug(f"Page {page + 1} already cached") # 尝试从缓存获取cursor继续预取 cached_data = await redis_client.get(next_cache_key) if cached_data: @@ -244,7 +244,7 @@ class BeatmapsetFetcher(BaseFetcher): 
cursor = data["cursor"] continue except Exception: - pass + logger.warning("Failed to parse cached data for cursor") break # 在预取页面之间添加延迟,避免突发请求 @@ -279,18 +279,18 @@ class BeatmapsetFetcher(BaseFetcher): ex=prefetch_ttl, ) - logger.opt(colors=True).debug(f"Prefetched page {page + 1} (TTL: {prefetch_ttl}s)") + logger.debug(f"Prefetched page {page + 1} (TTL: {prefetch_ttl}s)") except RateLimitError: - logger.opt(colors=True).info("Prefetch stopped due to rate limit") + logger.info("Prefetch stopped due to rate limit") except Exception as e: - logger.opt(colors=True).warning(f"Prefetch failed: {e}") + logger.warning(f"Prefetch failed: {e}") async def warmup_homepage_cache(self, redis_client: redis.Redis) -> None: """预热主页缓存""" homepage_queries = self._get_homepage_queries() - logger.opt(colors=True).info(f"Starting homepage cache warmup ({len(homepage_queries)} queries)") + logger.info(f"Starting homepage cache warmup ({len(homepage_queries)} queries)") for i, (query, cursor) in enumerate(homepage_queries): try: @@ -302,7 +302,7 @@ class BeatmapsetFetcher(BaseFetcher): # 检查是否已经缓存 if await redis_client.exists(cache_key): - logger.opt(colors=True).debug(f"Query {query.sort} already cached") + logger.debug(f"Query {query.sort} already cached") continue # 请求并缓存 @@ -325,15 +325,15 @@ class BeatmapsetFetcher(BaseFetcher): ex=cache_ttl, ) - logger.opt(colors=True).info(f"Warmed up cache for {query.sort} (TTL: {cache_ttl}s)") + logger.info(f"Warmed up cache for {query.sort} (TTL: {cache_ttl}s)") if api_response.get("cursor"): try: await self.prefetch_next_pages(query, api_response["cursor"], redis_client, pages=2) except RateLimitError: - logger.opt(colors=True).info(f"Warmup prefetch skipped for {query.sort} due to rate limit") + logger.info(f"Warmup prefetch skipped for {query.sort} due to rate limit") except RateLimitError: - logger.opt(colors=True).warning(f"Warmup skipped for {query.sort} due to rate limit") + logger.warning(f"Warmup skipped for {query.sort} due to rate 
limit") except Exception as e: - logger.opt(colors=True).error(f"Failed to warmup cache for {query.sort}: {e}") + logger.error(f"Failed to warmup cache for {query.sort}: {e}") diff --git a/app/helpers/__init__.py b/app/helpers/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/app/helpers/geoip_helper.py b/app/helpers/geoip_helper.py index 3d65371..12c7c3c 100644 --- a/app/helpers/geoip_helper.py +++ b/app/helpers/geoip_helper.py @@ -1,19 +1,39 @@ """ -GeoLite2 Helper Class +GeoLite2 Helper Class (asynchronous) """ from __future__ import annotations +import asyncio +from contextlib import suppress import os from pathlib import Path import shutil import tarfile import tempfile import time +from typing import Any, Required, TypedDict +from app.log import logger + +import aiofiles import httpx import maxminddb + +class GeoIPLookupResult(TypedDict, total=False): + ip: Required[str] + country_iso: str + country_name: str + city_name: str + latitude: str + longitude: str + time_zone: str + postal_code: str + asn: int | None + organization: str + + BASE_URL = "https://download.maxmind.com/app/geoip_download" EDITIONS = { "City": "GeoLite2-City", @@ -25,161 +45,184 @@ EDITIONS = { class GeoIPHelper: def __init__( self, - dest_dir="./geoip", - license_key=None, - editions=None, - max_age_days=8, - timeout=60.0, + dest_dir: str | Path = Path("./geoip"), + license_key: str | None = None, + editions: list[str] | None = None, + max_age_days: int = 8, + timeout: float = 60.0, ): - self.dest_dir = dest_dir + self.dest_dir = Path(dest_dir).expanduser() self.license_key = license_key or os.getenv("MAXMIND_LICENSE_KEY") - self.editions = editions or ["City", "ASN"] + self.editions = list(editions or ["City", "ASN"]) self.max_age_days = max_age_days self.timeout = timeout - self._readers = {} + self._readers: dict[str, maxminddb.Reader] = {} + self._update_lock = asyncio.Lock() @staticmethod - def _safe_extract(tar: tarfile.TarFile, path: str): - base = 
Path(path).resolve() - for m in tar.getmembers(): - target = (base / m.name).resolve() - if not str(target).startswith(str(base)): + def _safe_extract(tar: tarfile.TarFile, path: Path) -> None: + base = path.resolve() + for member in tar.getmembers(): + target = (base / member.name).resolve() + if not target.is_relative_to(base): # py312 raise RuntimeError("Unsafe path in tar file") - tar.extractall(path=path, filter="data") + tar.extractall(path=base, filter="data") - def _download_and_extract(self, edition_id: str) -> str: - """ - 下载并解压 mmdb 文件到 dest_dir,仅保留 .mmdb - - 跟随 302 重定向 - - 流式下载到临时文件 - - 临时目录退出后自动清理 - """ + @staticmethod + def _as_mapping(value: Any) -> dict[str, Any]: + return value if isinstance(value, dict) else {} + + @staticmethod + def _as_str(value: Any, default: str = "") -> str: + if isinstance(value, str): + return value + if value is None: + return default + return str(value) + + @staticmethod + def _as_int(value: Any) -> int | None: + return value if isinstance(value, int) else None + + @staticmethod + def _extract_tarball(src: Path, dest: Path) -> None: + with tarfile.open(src, "r:gz") as tar: + GeoIPHelper._safe_extract(tar, dest) + + @staticmethod + def _find_mmdb(root: Path) -> Path | None: + for candidate in root.rglob("*.mmdb"): + return candidate + return None + + def _latest_file_sync(self, edition_id: str) -> Path | None: + directory = self.dest_dir + if not directory.is_dir(): + return None + candidates = list(directory.glob(f"{edition_id}*.mmdb")) + if not candidates: + return None + return max(candidates, key=lambda p: p.stat().st_mtime) + + async def _latest_file(self, edition_id: str) -> Path | None: + return await asyncio.to_thread(self._latest_file_sync, edition_id) + + async def _download_and_extract(self, edition_id: str) -> Path: if not self.license_key: raise ValueError("MaxMind License Key is missing. 
Please configure it via env MAXMIND_LICENSE_KEY.") url = f"{BASE_URL}?edition_id={edition_id}&license_key={self.license_key}&suffix=tar.gz" + tmp_dir = Path(await asyncio.to_thread(tempfile.mkdtemp)) - with httpx.Client(follow_redirects=True, timeout=self.timeout) as client: - with client.stream("GET", url) as resp: + try: + tgz_path = tmp_dir / "db.tgz" + async with ( + httpx.AsyncClient(follow_redirects=True, timeout=self.timeout) as client, + client.stream("GET", url) as resp, + ): resp.raise_for_status() - with tempfile.TemporaryDirectory() as tmpd: - tgz_path = os.path.join(tmpd, "db.tgz") - # 流式写入 - with open(tgz_path, "wb") as f: - for chunk in resp.iter_bytes(): - if chunk: - f.write(chunk) + async with aiofiles.open(tgz_path, "wb") as download_file: + async for chunk in resp.aiter_bytes(): + if chunk: + await download_file.write(chunk) - # 解压并只移动 .mmdb - with tarfile.open(tgz_path, "r:gz") as tar: - # 先安全检查与解压 - self._safe_extract(tar, tmpd) + await asyncio.to_thread(self._extract_tarball, tgz_path, tmp_dir) + mmdb_path = await asyncio.to_thread(self._find_mmdb, tmp_dir) + if mmdb_path is None: + raise RuntimeError("未在压缩包中找到 .mmdb 文件") - # 递归找 .mmdb - mmdb_path = None - for root, _, files in os.walk(tmpd): - for fn in files: - if fn.endswith(".mmdb"): - mmdb_path = os.path.join(root, fn) - break - if mmdb_path: - break + await asyncio.to_thread(self.dest_dir.mkdir, parents=True, exist_ok=True) + dst = self.dest_dir / mmdb_path.name + await asyncio.to_thread(shutil.move, mmdb_path, dst) + return dst + finally: + await asyncio.to_thread(shutil.rmtree, tmp_dir, ignore_errors=True) - if not mmdb_path: - raise RuntimeError("未在压缩包中找到 .mmdb 文件") + async def update(self, force: bool = False) -> None: + async with self._update_lock: + for edition in self.editions: + edition_id = EDITIONS[edition] + path = await self._latest_file(edition_id) + need_download = force or path is None - os.makedirs(self.dest_dir, exist_ok=True) - dst = os.path.join(self.dest_dir, 
os.path.basename(mmdb_path)) - shutil.move(mmdb_path, dst) - return dst - - def _latest_file(self, edition_id: str): - if not os.path.isdir(self.dest_dir): - return None - files = [ - os.path.join(self.dest_dir, f) - for f in os.listdir(self.dest_dir) - if f.startswith(edition_id) and f.endswith(".mmdb") - ] - return max(files, key=os.path.getmtime) if files else None - - def update(self, force=False): - from app.log import logger - - for ed in self.editions: - eid = EDITIONS[ed] - path = self._latest_file(eid) - need = force or not path - - if path: - age_days = (time.time() - os.path.getmtime(path)) / 86400 - if age_days >= self.max_age_days: - need = True - logger.info( - f"{eid} database is {age_days:.1f} days old " - f"(max: {self.max_age_days}), will download new version" - ) + if path: + mtime = await asyncio.to_thread(path.stat) + age_days = (time.time() - mtime.st_mtime) / 86400 + if age_days >= self.max_age_days: + need_download = True + logger.info( + f"{edition_id} database is {age_days:.1f} days old " + f"(max: {self.max_age_days}), will download new version" + ) + else: + logger.info( + f"{edition_id} database is {age_days:.1f} days old, still fresh (max: {self.max_age_days})" + ) else: - logger.info(f"{eid} database is {age_days:.1f} days old, still fresh (max: {self.max_age_days})") - else: - logger.info(f"{eid} database not found, will download") + logger.info(f"{edition_id} database not found, will download") - if need: - logger.info(f"Downloading {eid} database...") - path = self._download_and_extract(eid) - logger.info(f"{eid} database downloaded successfully") - else: - logger.info(f"Using existing {eid} database") + if need_download: + logger.info(f"Downloading {edition_id} database...") + path = await self._download_and_extract(edition_id) + logger.info(f"{edition_id} database downloaded successfully") + else: + logger.info(f"Using existing {edition_id} database") - old = self._readers.get(ed) - if old: - try: - old.close() - except 
Exception: - pass - if path is not None: - self._readers[ed] = maxminddb.open_database(path) + old_reader = self._readers.get(edition) + if old_reader: + with suppress(Exception): + old_reader.close() + if path is not None: + self._readers[edition] = maxminddb.open_database(str(path)) - def lookup(self, ip: str): - res = {"ip": ip} - # City - city_r = self._readers.get("City") - if city_r: - data = city_r.get(ip) - if data: - country = data.get("country") or {} - res["country_iso"] = country.get("iso_code") or "" - res["country_name"] = (country.get("names") or {}).get("en", "") - city = data.get("city") or {} - res["city_name"] = (city.get("names") or {}).get("en", "") - loc = data.get("location") or {} - res["latitude"] = str(loc.get("latitude") or "") - res["longitude"] = str(loc.get("longitude") or "") - res["time_zone"] = str(loc.get("time_zone") or "") - postal = data.get("postal") or {} - if "code" in postal: - res["postal_code"] = postal["code"] - # ASN - asn_r = self._readers.get("ASN") - if asn_r: - data = asn_r.get(ip) - if data: - res["asn"] = data.get("autonomous_system_number") - res["organization"] = data.get("autonomous_system_organization") + def lookup(self, ip: str) -> GeoIPLookupResult: + res: GeoIPLookupResult = {"ip": ip} + city_reader = self._readers.get("City") + if city_reader: + data = city_reader.get(ip) + if isinstance(data, dict): + country = self._as_mapping(data.get("country")) + res["country_iso"] = self._as_str(country.get("iso_code")) + country_names = self._as_mapping(country.get("names")) + res["country_name"] = self._as_str(country_names.get("en")) + + city = self._as_mapping(data.get("city")) + city_names = self._as_mapping(city.get("names")) + res["city_name"] = self._as_str(city_names.get("en")) + + location = self._as_mapping(data.get("location")) + latitude = location.get("latitude") + longitude = location.get("longitude") + res["latitude"] = str(latitude) if latitude is not None else "" + res["longitude"] = str(longitude) 
if longitude is not None else "" + res["time_zone"] = self._as_str(location.get("time_zone")) + + postal = self._as_mapping(data.get("postal")) + postal_code = postal.get("code") + if postal_code is not None: + res["postal_code"] = self._as_str(postal_code) + + asn_reader = self._readers.get("ASN") + if asn_reader: + data = asn_reader.get(ip) + if isinstance(data, dict): + res["asn"] = self._as_int(data.get("autonomous_system_number")) + res["organization"] = self._as_str(data.get("autonomous_system_organization"), default="") return res - def close(self): - for r in self._readers.values(): - try: - r.close() - except Exception: - pass + def close(self) -> None: + for reader in self._readers.values(): + with suppress(Exception): + reader.close() self._readers = {} if __name__ == "__main__": - # 示例用法 - geo = GeoIPHelper(dest_dir="./geoip", license_key="") - geo.update() - print(geo.lookup("8.8.8.8")) - geo.close() + + async def _demo() -> None: + geo = GeoIPHelper(dest_dir="./geoip", license_key="") + await geo.update() + print(geo.lookup("8.8.8.8")) + geo.close() + + asyncio.run(_demo()) diff --git a/app/log.py b/app/log.py index ba0ef95..d2c5060 100644 --- a/app/log.py +++ b/app/log.py @@ -97,9 +97,7 @@ class InterceptHandler(logging.Handler): status_color = "green" elif 300 <= status < 400: status_color = "yellow" - elif 400 <= status < 500: - status_color = "red" - elif 500 <= status < 600: + elif 400 <= status < 500 or 500 <= status < 600: status_color = "red" return ( diff --git a/app/middleware/verify_session.py b/app/middleware/verify_session.py index 58ed754..dee6332 100644 --- a/app/middleware/verify_session.py +++ b/app/middleware/verify_session.py @@ -82,7 +82,7 @@ class VerifySessionMiddleware(BaseHTTPMiddleware): return await call_next(request) # 启动验证流程 - return await self._initiate_verification(request, session_state) + return await self._initiate_verification(session_state) def _should_skip_verification(self, request: Request) -> bool: 
"""检查是否应该跳过验证""" @@ -93,10 +93,7 @@ class VerifySessionMiddleware(BaseHTTPMiddleware): return True # 非API请求跳过 - if not path.startswith("/api/"): - return True - - return False + return bool(not path.startswith("/api/")) def _requires_verification(self, request: Request, user: User) -> bool: """检查是否需要验证""" @@ -177,7 +174,7 @@ class VerifySessionMiddleware(BaseHTTPMiddleware): logger.error(f"Error getting session state: {e}") return None - async def _initiate_verification(self, request: Request, state: SessionState) -> Response: + async def _initiate_verification(self, state: SessionState) -> Response: """启动验证流程""" try: method = await state.get_method() diff --git a/app/models/extended_auth.py b/app/models/extended_auth.py index b3fc831..35a3752 100644 --- a/app/models/extended_auth.py +++ b/app/models/extended_auth.py @@ -11,7 +11,7 @@ class ExtendedTokenResponse(BaseModel): """扩展的令牌响应,支持二次验证状态""" access_token: str | None = None - token_type: str = "Bearer" + token_type: str = "Bearer" # noqa: S105 expires_in: int | None = None refresh_token: str | None = None scope: str | None = None @@ -20,14 +20,3 @@ class ExtendedTokenResponse(BaseModel): requires_second_factor: bool = False verification_message: str | None = None user_id: int | None = None # 用于二次验证的用户ID - - -class SessionState(BaseModel): - """会话状态""" - - user_id: int - username: str - email: str - requires_verification: bool - session_token: str | None = None - verification_sent: bool = False diff --git a/app/models/notification.py b/app/models/notification.py index cc95e4c..ceef3b0 100644 --- a/app/models/notification.py +++ b/app/models/notification.py @@ -1,3 +1,4 @@ +# ruff: noqa: ARG002 from __future__ import annotations from abc import abstractmethod diff --git a/app/models/oauth.py b/app/models/oauth.py index f3db41f..ce4cabf 100644 --- a/app/models/oauth.py +++ b/app/models/oauth.py @@ -22,7 +22,7 @@ class TokenRequest(BaseModel): class TokenResponse(BaseModel): access_token: str - token_type: str = 
"Bearer" + token_type: str = "Bearer" # noqa: S105 expires_in: int refresh_token: str scope: str = "*" @@ -67,7 +67,7 @@ class RegistrationRequestErrors(BaseModel): class OAuth2ClientCredentialsBearer(OAuth2): def __init__( self, - tokenUrl: Annotated[ + tokenUrl: Annotated[ # noqa: N803 str, Doc( """ @@ -75,7 +75,7 @@ class OAuth2ClientCredentialsBearer(OAuth2): """ ), ], - refreshUrl: Annotated[ + refreshUrl: Annotated[ # noqa: N803 str | None, Doc( """ diff --git a/app/models/v1_user.py b/app/models/v1_user.py index 9868260..ea7d177 100644 --- a/app/models/v1_user.py +++ b/app/models/v1_user.py @@ -46,10 +46,10 @@ class PlayerStatsResponse(BaseModel): class PlayerEventItem(BaseModel): """玩家事件项目""" - userId: int + userId: int # noqa: N815 name: str - mapId: int | None = None - setId: int | None = None + mapId: int | None = None # noqa: N815 + setId: int | None = None # noqa: N815 artist: str | None = None title: str | None = None version: str | None = None @@ -88,7 +88,7 @@ class PlayerInfo(BaseModel): custom_badge_icon: str custom_badge_color: str userpage_content: str - recentFailed: int + recentFailed: int # noqa: N815 social_discord: str | None = None social_youtube: str | None = None social_twitter: str | None = None diff --git a/app/router/auth.py b/app/router/auth.py index ff163b3..1512642 100644 --- a/app/router/auth.py +++ b/app/router/auth.py @@ -126,21 +126,22 @@ async def register_user( try: # 获取客户端 IP 并查询地理位置 - country_code = "CN" # 默认国家代码 + country_code = None # 默认国家代码 try: # 查询 IP 地理位置 geo_info = geoip.lookup(client_ip) - if geo_info and geo_info.get("country_iso"): - country_code = geo_info["country_iso"] + if geo_info and (country_code := geo_info.get("country_iso")): logger.info(f"User {user_username} registering from {client_ip}, country: {country_code}") else: logger.warning(f"Could not determine country for IP {client_ip}") except Exception as e: logger.warning(f"GeoIP lookup failed for {client_ip}: {e}") + if country_code is None: + 
country_code = "CN" # 创建新用户 - # 确保 AUTO_INCREMENT 值从3开始(ID=1是BanchoBot,ID=2预留给ppy) + # 确保 AUTO_INCREMENT 值从3开始(ID=2是BanchoBot) result = await db.execute( text( "SELECT AUTO_INCREMENT FROM information_schema.TABLES " @@ -157,7 +158,7 @@ async def register_user( email=user_email, pw_bcrypt=get_password_hash(user_password), priv=1, # 普通用户权限 - country_code=country_code, # 根据 IP 地理位置设置国家 + country_code=country_code, join_date=utcnow(), last_visit=utcnow(), is_supporter=settings.enable_supporter_for_all_users, @@ -386,7 +387,7 @@ async def oauth_token( return TokenResponse( access_token=access_token, - token_type="Bearer", + token_type="Bearer", # noqa: S106 expires_in=settings.access_token_expire_minutes * 60, refresh_token=refresh_token_str, scope=scope, @@ -439,7 +440,7 @@ async def oauth_token( ) return TokenResponse( access_token=access_token, - token_type="Bearer", + token_type="Bearer", # noqa: S106 expires_in=settings.access_token_expire_minutes * 60, refresh_token=new_refresh_token, scope=scope, @@ -509,7 +510,7 @@ async def oauth_token( return TokenResponse( access_token=access_token, - token_type="Bearer", + token_type="Bearer", # noqa: S106 expires_in=settings.access_token_expire_minutes * 60, refresh_token=refresh_token_str, scope=" ".join(scopes), @@ -554,7 +555,7 @@ async def oauth_token( return TokenResponse( access_token=access_token, - token_type="Bearer", + token_type="Bearer", # noqa: S106 expires_in=settings.access_token_expire_minutes * 60, refresh_token=refresh_token_str, scope=" ".join(scopes), diff --git a/app/router/lio.py b/app/router/lio.py index 969b214..93e0088 100644 --- a/app/router/lio.py +++ b/app/router/lio.py @@ -130,7 +130,7 @@ def _coerce_playlist_item(item_data: dict[str, Any], default_order: int, host_us "allowed_mods": item_data.get("allowed_mods", []), "expired": bool(item_data.get("expired", False)), "playlist_order": item_data.get("playlist_order", default_order), - "played_at": item_data.get("played_at", None), + "played_at": 
item_data.get("played_at"), "freestyle": bool(item_data.get("freestyle", True)), "beatmap_checksum": item_data.get("beatmap_checksum", ""), "star_rating": item_data.get("star_rating", 0.0), diff --git a/app/router/notification/banchobot.py b/app/router/notification/banchobot.py index 7b01347..a491b7d 100644 --- a/app/router/notification/banchobot.py +++ b/app/router/notification/banchobot.py @@ -157,10 +157,7 @@ async def _help(user: User, args: list[str], _session: AsyncSession, channel: Ch @bot.command("roll") def _roll(user: User, args: list[str], _session: AsyncSession, channel: ChatChannel) -> str: - if len(args) > 0 and args[0].isdigit(): - r = random.randint(1, int(args[0])) - else: - r = random.randint(1, 100) + r = random.randint(1, int(args[0])) if len(args) > 0 and args[0].isdigit() else random.randint(1, 100) return f"{user.username} rolls {r} point(s)" @@ -179,10 +176,7 @@ async def _stats(user: User, args: list[str], session: AsyncSession, channel: Ch if gamemode is None: subquery = select(func.max(Score.id)).where(Score.user_id == target_user.id).scalar_subquery() last_score = (await session.exec(select(Score).where(Score.id == subquery))).first() - if last_score is not None: - gamemode = last_score.gamemode - else: - gamemode = target_user.playmode + gamemode = last_score.gamemode if last_score is not None else target_user.playmode statistics = ( await session.exec( diff --git a/app/router/notification/server.py b/app/router/notification/server.py index 9940801..0732124 100644 --- a/app/router/notification/server.py +++ b/app/router/notification/server.py @@ -313,10 +313,7 @@ async def chat_websocket( # 优先使用查询参数中的token,支持token或access_token参数名 auth_token = token or access_token if not auth_token and authorization: - if authorization.startswith("Bearer "): - auth_token = authorization[7:] - else: - auth_token = authorization + auth_token = authorization.removeprefix("Bearer ") if not auth_token: await websocket.close(code=1008, reason="Missing 
authentication token") diff --git a/app/router/redirect.py b/app/router/redirect.py index 7805fc4..bec9eca 100644 --- a/app/router/redirect.py +++ b/app/router/redirect.py @@ -10,7 +10,7 @@ from fastapi.responses import RedirectResponse redirect_router = APIRouter(include_in_schema=False) -@redirect_router.get("/users/{path:path}") +@redirect_router.get("/users/{path:path}") # noqa: FAST003 @redirect_router.get("/teams/{team_id}") @redirect_router.get("/u/{user_id}") @redirect_router.get("/b/{beatmap_id}") diff --git a/app/router/v1/beatmap.py b/app/router/v1/beatmap.py index b723713..6ca3775 100644 --- a/app/router/v1/beatmap.py +++ b/app/router/v1/beatmap.py @@ -168,10 +168,7 @@ async def get_beatmaps( elif beatmapset_id is not None: beatmapset = await Beatmapset.get_or_fetch(session, fetcher, beatmapset_id) await beatmapset.awaitable_attrs.beatmaps - if len(beatmapset.beatmaps) > limit: - beatmaps = beatmapset.beatmaps[:limit] - else: - beatmaps = beatmapset.beatmaps + beatmaps = beatmapset.beatmaps[:limit] if len(beatmapset.beatmaps) > limit else beatmapset.beatmaps elif user is not None: where = Beatmapset.user_id == user if type == "id" or user.isdigit() else Beatmapset.creator == user beatmapsets = (await session.exec(select(Beatmapset).where(where))).all() diff --git a/app/router/v2/beatmap.py b/app/router/v2/beatmap.py index 024a542..152e8f0 100644 --- a/app/router/v2/beatmap.py +++ b/app/router/v2/beatmap.py @@ -158,7 +158,10 @@ async def get_beatmap_attributes( if ruleset is None: beatmap_db = await Beatmap.get_or_fetch(db, fetcher, beatmap_id) ruleset = beatmap_db.mode - key = f"beatmap:{beatmap_id}:{ruleset}:{hashlib.md5(str(mods_).encode()).hexdigest()}:attributes" + key = ( + f"beatmap:{beatmap_id}:{ruleset}:" + f"{hashlib.md5(str(mods_).encode(), usedforsecurity=False).hexdigest()}:attributes" + ) if await redis.exists(key): return BeatmapAttributes.model_validate_json(await redis.get(key)) # pyright: ignore[reportArgumentType] try: diff --git 
a/app/router/v2/beatmapset.py b/app/router/v2/beatmapset.py index c4f2561..3cfcdb1 100644 --- a/app/router/v2/beatmapset.py +++ b/app/router/v2/beatmapset.py @@ -46,7 +46,6 @@ async def _save_to_db(sets: SearchBeatmapsetsResp): response_model=SearchBeatmapsetsResp, ) async def search_beatmapset( - db: Database, query: Annotated[SearchQueryModel, Query(...)], request: Request, background_tasks: BackgroundTasks, @@ -104,7 +103,7 @@ async def search_beatmapset( if cached_result: sets = SearchBeatmapsetsResp(**cached_result) # 处理资源代理 - processed_sets = await process_response_assets(sets, request) + processed_sets = await process_response_assets(sets) return processed_sets try: @@ -115,7 +114,7 @@ async def search_beatmapset( await cache_service.cache_search_result(query_hash, cursor_hash, sets.model_dump()) # 处理资源代理 - processed_sets = await process_response_assets(sets, request) + processed_sets = await process_response_assets(sets) return processed_sets except HTTPError as e: raise HTTPException(status_code=500, detail=str(e)) from e @@ -140,7 +139,7 @@ async def lookup_beatmapset( cached_resp = await cache_service.get_beatmap_lookup_from_cache(beatmap_id) if cached_resp: # 处理资源代理 - processed_resp = await process_response_assets(cached_resp, request) + processed_resp = await process_response_assets(cached_resp) return processed_resp try: @@ -151,7 +150,7 @@ async def lookup_beatmapset( await cache_service.cache_beatmap_lookup(beatmap_id, resp) # 处理资源代理 - processed_resp = await process_response_assets(resp, request) + processed_resp = await process_response_assets(resp) return processed_resp except HTTPError as exc: raise HTTPException(status_code=404, detail="Beatmap not found") from exc @@ -176,7 +175,7 @@ async def get_beatmapset( cached_resp = await cache_service.get_beatmapset_from_cache(beatmapset_id) if cached_resp: # 处理资源代理 - processed_resp = await process_response_assets(cached_resp, request) + processed_resp = await process_response_assets(cached_resp) return 
processed_resp try: @@ -187,7 +186,7 @@ async def get_beatmapset( await cache_service.cache_beatmapset(resp) # 处理资源代理 - processed_resp = await process_response_assets(resp, request) + processed_resp = await process_response_assets(resp) return processed_resp except HTTPError as exc: raise HTTPException(status_code=404, detail="Beatmapset not found") from exc diff --git a/app/router/v2/room.py b/app/router/v2/room.py index decf936..34fb6f7 100644 --- a/app/router/v2/room.py +++ b/app/router/v2/room.py @@ -166,7 +166,6 @@ async def get_room( db: Database, room_id: Annotated[int, Path(..., description="房间 ID")], current_user: Annotated[User, Security(get_current_user, scopes=["public"])], - redis: Redis, category: Annotated[ str, Query( diff --git a/app/router/v2/score.py b/app/router/v2/score.py index 2b47c78..12be45d 100644 --- a/app/router/v2/score.py +++ b/app/router/v2/score.py @@ -847,10 +847,7 @@ async def reorder_score_pin( detail = "After score not found" if after_score_id else "Before score not found" raise HTTPException(status_code=404, detail=detail) - if after_score_id: - target_order = reference_score.pinned_order + 1 - else: - target_order = reference_score.pinned_order + target_order = reference_score.pinned_order + 1 if after_score_id else reference_score.pinned_order current_order = score_record.pinned_order diff --git a/app/router/v2/session_verify.py b/app/router/v2/session_verify.py index 424b988..add3d70 100644 --- a/app/router/v2/session_verify.py +++ b/app/router/v2/session_verify.py @@ -40,7 +40,7 @@ class SessionReissueResponse(BaseModel): message: str -class VerifyFailed(Exception): +class VerifyFailedError(Exception): def __init__(self, message: str, reason: str | None = None, should_reissue: bool = False): super().__init__(message) self.reason = reason @@ -93,10 +93,7 @@ async def verify_session( # 智能选择验证方法(参考osu-web实现) # API版本较老或用户未设置TOTP时强制使用邮件验证 # print(api_version, totp_key) - if api_version < 20240101 or totp_key is None: - 
verify_method = "mail" - else: - verify_method = "totp" + verify_method = "mail" if api_version < 20240101 or totp_key is None else "totp" await LoginSessionService.set_login_method(user_id, token_id, verify_method, redis) login_method = verify_method @@ -109,7 +106,7 @@ async def verify_session( db, redis, user_id, current_user.username, current_user.email, ip_address, user_agent ) verify_method = "mail" - raise VerifyFailed("用户TOTP已被删除,已切换到邮件验证") + raise VerifyFailedError("用户TOTP已被删除,已切换到邮件验证") # 如果未开启邮箱验证,则直接认为认证通过 # 正常不会进入到这里 @@ -120,16 +117,16 @@ async def verify_session( else: # 记录详细的验证失败原因(参考osu-web的错误处理) if len(verification_key) != 6: - raise VerifyFailed("TOTP验证码长度错误,应为6位数字", reason="incorrect_length") + raise VerifyFailedError("TOTP验证码长度错误,应为6位数字", reason="incorrect_length") elif not verification_key.isdigit(): - raise VerifyFailed("TOTP验证码格式错误,应为纯数字", reason="incorrect_format") + raise VerifyFailedError("TOTP验证码格式错误,应为纯数字", reason="incorrect_format") else: # 可能是密钥错误或者重放攻击 - raise VerifyFailed("TOTP 验证失败,请检查验证码是否正确且未过期", reason="incorrect_key") + raise VerifyFailedError("TOTP 验证失败,请检查验证码是否正确且未过期", reason="incorrect_key") else: success, message = await EmailVerificationService.verify_email_code(db, redis, user_id, verification_key) if not success: - raise VerifyFailed(f"邮件验证失败: {message}") + raise VerifyFailedError(f"邮件验证失败: {message}") await LoginLogService.record_login( db=db, @@ -144,7 +141,7 @@ async def verify_session( await db.commit() return Response(status_code=status.HTTP_204_NO_CONTENT) - except VerifyFailed as e: + except VerifyFailedError as e: await LoginLogService.record_failed_login( db=db, request=request, @@ -171,7 +168,9 @@ async def verify_session( ) error_response["reissued"] = True except Exception: - pass # 忽略重发邮件失败的错误 + log("Verification").exception( + f"Failed to resend verification email to user {current_user.id} (token: {token_id})" + ) return JSONResponse(status_code=status.HTTP_401_UNAUTHORIZED, content=error_response) diff 
--git a/app/router/v2/tags.py b/app/router/v2/tags.py index 644cd77..810656d 100644 --- a/app/router/v2/tags.py +++ b/app/router/v2/tags.py @@ -44,9 +44,7 @@ async def check_user_can_vote(user: User, beatmap_id: int, session: AsyncSession .where(col(Score.beatmap).has(col(Beatmap.mode) == Score.gamemode)) ) ).first() - if user_beatmap_score is None: - return False - return True + return user_beatmap_score is not None @router.put( @@ -75,10 +73,9 @@ async def vote_beatmap_tags( .where(BeatmapTagVote.user_id == current_user.id) ) ).first() - if previous_votes is None: - if check_user_can_vote(current_user, beatmap_id, session): - new_vote = BeatmapTagVote(tag_id=tag_id, beatmap_id=beatmap_id, user_id=current_user.id) - session.add(new_vote) + if previous_votes is None and check_user_can_vote(current_user, beatmap_id, session): + new_vote = BeatmapTagVote(tag_id=tag_id, beatmap_id=beatmap_id, user_id=current_user.id) + session.add(new_vote) await session.commit() except ValueError: raise HTTPException(400, "Tag is not found") diff --git a/app/router/v2/user.py b/app/router/v2/user.py index 98a0b93..e4e41ea 100644 --- a/app/router/v2/user.py +++ b/app/router/v2/user.py @@ -91,7 +91,7 @@ async def get_users( # 处理资源代理 response = BatchUserResponse(users=cached_users) - processed_response = await process_response_assets(response, request) + processed_response = await process_response_assets(response) return processed_response else: searched_users = (await session.exec(select(User).limit(50))).all() @@ -109,7 +109,7 @@ async def get_users( # 处理资源代理 response = BatchUserResponse(users=users) - processed_response = await process_response_assets(response, request) + processed_response = await process_response_assets(response) return processed_response @@ -240,7 +240,7 @@ async def get_user_info( cached_user = await cache_service.get_user_from_cache(user_id_int) if cached_user: # 处理资源代理 - processed_user = await process_response_assets(cached_user, request) + processed_user = 
await process_response_assets(cached_user) return processed_user searched_user = ( @@ -263,7 +263,7 @@ async def get_user_info( background_task.add_task(cache_service.cache_user, user_resp) # 处理资源代理 - processed_user = await process_response_assets(user_resp, request) + processed_user = await process_response_assets(user_resp) return processed_user @@ -381,7 +381,7 @@ async def get_user_scores( user_id, type, include_fails, mode, limit, offset, is_legacy_api ) if cached_scores is not None: - processed_scores = await process_response_assets(cached_scores, request) + processed_scores = await process_response_assets(cached_scores) return processed_scores db_user = await session.get(User, user_id) @@ -438,5 +438,5 @@ async def get_user_scores( ) # 处理资源代理 - processed_scores = await process_response_assets(score_responses, request) + processed_scores = await process_response_assets(score_responses) return processed_scores diff --git a/app/service/asset_proxy_helper.py b/app/service/asset_proxy_helper.py index c654821..c41e77c 100644 --- a/app/service/asset_proxy_helper.py +++ b/app/service/asset_proxy_helper.py @@ -12,7 +12,7 @@ from app.service.asset_proxy_service import get_asset_proxy_service from fastapi import Request -async def process_response_assets(data: Any, request: Request) -> Any: +async def process_response_assets(data: Any) -> Any: """ 根据配置处理响应数据中的资源URL @@ -72,7 +72,7 @@ def asset_proxy_response(func): # 如果有request对象且启用了资源代理,则处理响应 if request and settings.enable_asset_proxy and should_process_asset_proxy(request.url.path): - result = await process_response_assets(result, request) + result = await process_response_assets(result) return result diff --git a/app/service/beatmap_cache_service.py b/app/service/beatmap_cache_service.py index 3a81686..1a76195 100644 --- a/app/service/beatmap_cache_service.py +++ b/app/service/beatmap_cache_service.py @@ -113,6 +113,7 @@ class BeatmapCacheService: if size: total_size += size except Exception: + logger.debug(f"Failed 
to get size for key {key}") continue return { diff --git a/app/service/beatmapset_cache_service.py b/app/service/beatmapset_cache_service.py index df23f20..f255c8c 100644 --- a/app/service/beatmapset_cache_service.py +++ b/app/service/beatmapset_cache_service.py @@ -36,11 +36,8 @@ def safe_json_dumps(data) -> str: def generate_hash(data) -> str: """生成数据的MD5哈希值""" - if isinstance(data, str): - content = data - else: - content = safe_json_dumps(data) - return hashlib.md5(content.encode()).hexdigest() + content = data if isinstance(data, str) else safe_json_dumps(data) + return hashlib.md5(content.encode(), usedforsecurity=False).hexdigest() class BeatmapsetCacheService: diff --git a/app/service/beatmapset_update_service.py b/app/service/beatmapset_update_service.py index 8852146..0cc8704 100644 --- a/app/service/beatmapset_update_service.py +++ b/app/service/beatmapset_update_service.py @@ -110,9 +110,7 @@ class ProcessingBeatmapset: changed_beatmaps = [] for bm in self.beatmapset.beatmaps: saved = next((s for s in self.record.beatmaps if s["beatmap_id"] == bm.id), None) - if not saved: - changed_beatmaps.append(ChangedBeatmap(bm.id, BeatmapChangeType.MAP_ADDED)) - elif saved["is_deleted"]: + if not saved or saved["is_deleted"]: changed_beatmaps.append(ChangedBeatmap(bm.id, BeatmapChangeType.MAP_ADDED)) elif saved["md5"] != bm.checksum: changed_beatmaps.append(ChangedBeatmap(bm.id, BeatmapChangeType.MAP_UPDATED)) @@ -285,7 +283,7 @@ class BeatmapsetUpdateService: async def _process_changed_beatmapset(self, beatmapset: BeatmapsetResp): async with with_db() as session: db_beatmapset = await session.get(Beatmapset, beatmapset.id) - new_beatmapset = await Beatmapset.from_resp_no_save(session, beatmapset) + new_beatmapset = await Beatmapset.from_resp_no_save(beatmapset) if db_beatmapset: await session.merge(new_beatmapset) await session.commit() @@ -356,5 +354,7 @@ def init_beatmapset_update_service(fetcher: "Fetcher") -> BeatmapsetUpdateServic def 
get_beatmapset_update_service() -> BeatmapsetUpdateService: + if service is None: + raise ValueError("BeatmapsetUpdateService is not initialized") assert service is not None, "BeatmapsetUpdateService is not initialized" return service diff --git a/app/service/login_log_service.py b/app/service/login_log_service.py index 6fa2f1a..0570493 100644 --- a/app/service/login_log_service.py +++ b/app/service/login_log_service.py @@ -128,7 +128,11 @@ class LoginLogService: login_success=False, login_method=login_method, user_agent=user_agent, - notes=f"Failed login attempt: {attempted_username}" if attempted_username else "Failed login attempt", + notes=( + f"Failed login attempt on user {attempted_username}: {notes}" + if attempted_username + else "Failed login attempt" + ), ) diff --git a/app/service/password_reset_service.py b/app/service/password_reset_service.py index 5d831d6..429840f 100644 --- a/app/service/password_reset_service.py +++ b/app/service/password_reset_service.py @@ -120,7 +120,7 @@ class PasswordResetService: await redis.delete(reset_code_key) await redis.delete(rate_limit_key) except Exception: - pass + logger.warning("Failed to clean up Redis data after error") logger.exception("Redis operation failed") return False, "服务暂时不可用,请稍后重试" diff --git a/app/service/ranking_cache_service.py b/app/service/ranking_cache_service.py index 8b4f3cf..0d9dd37 100644 --- a/app/service/ranking_cache_service.py +++ b/app/service/ranking_cache_service.py @@ -593,10 +593,7 @@ class RankingCacheService: async def invalidate_country_cache(self, ruleset: GameMode | None = None) -> None: """使地区排行榜缓存失效""" try: - if ruleset: - pattern = f"country_ranking:{ruleset}:*" - else: - pattern = "country_ranking:*" + pattern = f"country_ranking:{ruleset}:*" if ruleset else "country_ranking:*" keys = await self.redis.keys(pattern) if keys: @@ -608,10 +605,7 @@ class RankingCacheService: async def invalidate_team_cache(self, ruleset: GameMode | None = None) -> None: """使战队排行榜缓存失效""" try: - 
if ruleset: - pattern = f"team_ranking:{ruleset}:*" - else: - pattern = "team_ranking:*" + pattern = f"team_ranking:{ruleset}:*" if ruleset else "team_ranking:*" keys = await self.redis.keys(pattern) if keys: @@ -637,6 +631,7 @@ class RankingCacheService: if size: total_size += size except Exception: + logger.warning(f"Failed to get memory usage for key {key}") continue return { diff --git a/app/service/subscribers/__init__.py b/app/service/subscribers/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/app/service/subscribers/chat.py b/app/service/subscribers/chat.py index 9512f9c..9241a7f 100644 --- a/app/service/subscribers/chat.py +++ b/app/service/subscribers/chat.py @@ -35,19 +35,19 @@ class ChatSubscriber(RedisSubscriber): self.add_handler(ON_NOTIFICATION, self.on_notification) self.start() - async def on_join_room(self, c: str, s: str): + async def on_join_room(self, c: str, s: str): # noqa: ARG002 channel_id, user_id = s.split(":") if self.chat_server is None: return await self.chat_server.join_room_channel(int(channel_id), int(user_id)) - async def on_leave_room(self, c: str, s: str): + async def on_leave_room(self, c: str, s: str): # noqa: ARG002 channel_id, user_id = s.split(":") if self.chat_server is None: return await self.chat_server.leave_room_channel(int(channel_id), int(user_id)) - async def on_notification(self, c: str, s: str): + async def on_notification(self, c: str, s: str): # noqa: ARG002 try: detail = TypeAdapter(NotificationDetails).validate_json(s) except ValueError: diff --git a/app/service/user_cache_service.py b/app/service/user_cache_service.py index ae0022e..4a40229 100644 --- a/app/service/user_cache_service.py +++ b/app/service/user_cache_service.py @@ -357,6 +357,7 @@ class UserCacheService: if size: total_size += size except Exception: + logger.warning(f"Failed to get memory usage for key {key}") continue return { diff --git a/app/service/verification_service.py b/app/service/verification_service.py index 
d1f2ee4..f7cc0ea 100644 --- a/app/service/verification_service.py +++ b/app/service/verification_service.py @@ -288,10 +288,6 @@ This email was sent automatically, please do not reply. redis: Redis, user_id: int, code: str, - ip_address: str | None = None, - user_agent: str | None = None, - client_id: int | None = None, - country_code: str | None = None, ) -> tuple[bool, str]: """验证邮箱验证码""" try: diff --git a/app/tasks/cache.py b/app/tasks/cache.py index 4a684f6..21934c9 100644 --- a/app/tasks/cache.py +++ b/app/tasks/cache.py @@ -41,7 +41,7 @@ async def warmup_cache() -> None: logger.info("Beatmap cache warmup completed successfully") except Exception as e: - logger.error("Beatmap cache warmup failed: %s", e) + logger.error(f"Beatmap cache warmup failed: {e}") async def refresh_ranking_cache() -> None: @@ -59,7 +59,7 @@ async def refresh_ranking_cache() -> None: logger.info("Ranking cache refresh completed successfully") except Exception as e: - logger.error("Ranking cache refresh failed: %s", e) + logger.error(f"Ranking cache refresh failed: {e}") async def schedule_user_cache_preload_task() -> None: @@ -93,14 +93,14 @@ async def schedule_user_cache_preload_task() -> None: if active_user_ids: user_ids = [row[0] for row in active_user_ids] await cache_service.preload_user_cache(session, user_ids) - logger.info("Preloaded cache for %s active users", len(user_ids)) + logger.info(f"Preloaded cache for {len(user_ids)} active users") else: logger.info("No active users found for cache preload") logger.info("User cache preload task completed successfully") except Exception as e: - logger.error("User cache preload task failed: %s", e) + logger.error(f"User cache preload task failed: {e}") async def schedule_user_cache_warmup_task() -> None: @@ -131,18 +131,18 @@ async def schedule_user_cache_warmup_task() -> None: if top_users: user_ids = list(top_users) await cache_service.preload_user_cache(session, user_ids) - logger.info("Warmed cache for top 100 users in %s", mode) + 
logger.info(f"Warmed cache for top 100 users in {mode}") await asyncio.sleep(1) except Exception as e: - logger.error("Failed to warm cache for %s: %s", mode, e) + logger.error(f"Failed to warm cache for {mode}: {e}") continue logger.info("User cache warmup task completed successfully") except Exception as e: - logger.error("User cache warmup task failed: %s", e) + logger.error(f"User cache warmup task failed: {e}") async def schedule_user_cache_cleanup_task() -> None: @@ -155,11 +155,11 @@ async def schedule_user_cache_cleanup_task() -> None: cache_service = get_user_cache_service(redis) stats = await cache_service.get_cache_stats() - logger.info("User cache stats: %s", stats) + logger.info(f"User cache stats: {stats}") logger.info("User cache cleanup task completed successfully") except Exception as e: - logger.error("User cache cleanup task failed: %s", e) + logger.error(f"User cache cleanup task failed: {e}") async def warmup_user_cache() -> None: @@ -167,7 +167,7 @@ async def warmup_user_cache() -> None: try: await schedule_user_cache_warmup_task() except Exception as e: - logger.error("User cache warmup failed: %s", e) + logger.error(f"User cache warmup failed: {e}") async def preload_user_cache() -> None: @@ -175,7 +175,7 @@ async def preload_user_cache() -> None: try: await schedule_user_cache_preload_task() except Exception as e: - logger.error("User cache preload failed: %s", e) + logger.error(f"User cache preload failed: {e}") async def cleanup_user_cache() -> None: @@ -183,7 +183,7 @@ async def cleanup_user_cache() -> None: try: await schedule_user_cache_cleanup_task() except Exception as e: - logger.error("User cache cleanup failed: %s", e) + logger.error(f"User cache cleanup failed: {e}") def register_cache_jobs() -> None: diff --git a/app/tasks/geoip.py b/app/tasks/geoip.py index 0d22ed8..2868346 100644 --- a/app/tasks/geoip.py +++ b/app/tasks/geoip.py @@ -5,8 +5,6 @@ Periodically update the MaxMind GeoIP database from __future__ import annotations 
-import asyncio - from app.config import settings from app.dependencies.geoip import get_geoip_helper from app.dependencies.scheduler import get_scheduler @@ -28,14 +26,10 @@ async def update_geoip_database(): try: logger.info("Starting scheduled GeoIP database update...") geoip = get_geoip_helper() - - # Run the synchronous update method in a background thread - loop = asyncio.get_event_loop() - await loop.run_in_executor(None, lambda: geoip.update(force=False)) - + await geoip.update(force=False) logger.info("Scheduled GeoIP database update completed successfully") - except Exception as e: - logger.error(f"Scheduled GeoIP database update failed: {e}") + except Exception as exc: + logger.error(f"Scheduled GeoIP database update failed: {exc}") async def init_geoip(): @@ -45,13 +39,8 @@ async def init_geoip(): try: geoip = get_geoip_helper() logger.info("Initializing GeoIP database...") - - # Run the synchronous update method in a background thread - # force=False means only download if files don't exist or are expired - loop = asyncio.get_event_loop() - await loop.run_in_executor(None, lambda: geoip.update(force=False)) - + await geoip.update(force=False) logger.info("GeoIP database initialization completed") - except Exception as e: - logger.error(f"GeoIP database initialization failed: {e}") + except Exception as exc: + logger.error(f"GeoIP database initialization failed: {exc}") # Do not raise an exception to avoid blocking application startup diff --git a/app/tasks/osu_rx_statistics.py b/app/tasks/osu_rx_statistics.py index 732d727..9b2a796 100644 --- a/app/tasks/osu_rx_statistics.py +++ b/app/tasks/osu_rx_statistics.py @@ -16,7 +16,7 @@ async def create_rx_statistics(): async with with_db() as session: users = (await session.exec(select(User.id))).all() total_users = len(users) - logger.info("Ensuring RX/AP statistics exist for %s users", total_users) + logger.info(f"Ensuring RX/AP statistics exist for {total_users} users") rx_created = 0 ap_created = 0 for i 
in users: @@ -57,7 +57,5 @@ async def create_rx_statistics(): await session.commit() if rx_created or ap_created: logger.success( - "Created %s RX statistics rows and %s AP statistics rows during backfill", - rx_created, - ap_created, + f"Created {rx_created} RX statistics rows and {ap_created} AP statistics rows during backfill" ) diff --git a/app/utils.py b/app/utils.py index 9a610e0..69b3dd7 100644 --- a/app/utils.py +++ b/app/utils.py @@ -258,10 +258,7 @@ class BackgroundTasks: self.tasks = set(tasks) if tasks else set() def add_task(self, func: Callable[P, Any], *args: P.args, **kwargs: P.kwargs) -> None: - if is_async_callable(func): - coro = func(*args, **kwargs) - else: - coro = run_in_threadpool(func, *args, **kwargs) + coro = func(*args, **kwargs) if is_async_callable(func) else run_in_threadpool(func, *args, **kwargs) task = asyncio.create_task(coro) self.tasks.add(task) task.add_done_callback(self.tasks.discard) diff --git a/main.py b/main.py index 282fd18..27089b1 100644 --- a/main.py +++ b/main.py @@ -1,6 +1,7 @@ from __future__ import annotations from contextlib import asynccontextmanager +import json from pathlib import Path from app.config import settings @@ -50,7 +51,7 @@ import sentry_sdk @asynccontextmanager -async def lifespan(app: FastAPI): +async def lifespan(app: FastAPI): # noqa: ARG001 # on startup init_mods() init_ranked_mods() @@ -223,26 +224,26 @@ async def health_check(): @app.exception_handler(RequestValidationError) -async def validation_exception_handler(request: Request, exc: RequestValidationError): +async def validation_exception_handler(request: Request, exc: RequestValidationError): # noqa: ARG001 return JSONResponse( status_code=422, content={ - "error": exc.errors(), + "error": json.dumps(exc.errors()), }, ) @app.exception_handler(HTTPException) -async def http_exception_handler(requst: Request, exc: HTTPException): +async def http_exception_handler(request: Request, exc: HTTPException): # noqa: ARG001 return 
JSONResponse(status_code=exc.status_code, content={"error": exc.detail}) -if settings.secret_key == "your_jwt_secret_here": +if settings.secret_key == "your_jwt_secret_here": # noqa: S105 system_logger("Security").opt(colors=True).warning( "jwt_secret_key is unset. Your server is unsafe. " "Use this command to generate: openssl rand -hex 32." ) -if settings.osu_web_client_secret == "your_osu_web_client_secret_here": +if settings.osu_web_client_secret == "your_osu_web_client_secret_here": # noqa: S105 system_logger("Security").opt(colors=True).warning( "osu_web_client_secret is unset. Your server is unsafe. " "Use this command to generate: openssl rand -hex 40." diff --git a/migrations/versions/2025-08-22_d103d442dc24_add_password_reset_table.py b/migrations/versions/2025-08-22_d103d442dc24_add_password_reset_table.py index 9e15a12..aeba732 100644 --- a/migrations/versions/2025-08-22_d103d442dc24_add_password_reset_table.py +++ b/migrations/versions/2025-08-22_d103d442dc24_add_password_reset_table.py @@ -1,3 +1,4 @@ +# ruff: noqa """add_password_reset_table Revision ID: d103d442dc24 diff --git a/pyproject.toml b/pyproject.toml index 9862e00..2f99cda 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -55,12 +55,20 @@ select = [ "ASYNC", # flake8-async "C4", # flake8-comprehensions "T10", # flake8-debugger - # "T20", # flake8-print "PYI", # flake8-pyi "PT", # flake8-pytest-style "Q", # flake8-quotes "TID", # flake8-tidy-imports "RUF", # Ruff-specific rules + "FAST", # FastAPI + "YTT", # flake8-2020 + "S", # flake8-bandit + "INP", # flake8-no-pep420 + "SIM", # flake8-simplify + "ARG", # flake8-unused-arguments + "PTH", # flake8-use-pathlib + "N", # pep8-naming + "FURB" # refurb ] ignore = [ "E402", # module-import-not-at-top-of-file @@ -68,10 +76,17 @@ ignore = [ "RUF001", # ambiguous-unicode-character-string "RUF002", # ambiguous-unicode-character-docstring "RUF003", # ambiguous-unicode-character-comment + "S101", # assert + "S311", # 
suspicious-non-cryptographic-random-usage ] [tool.ruff.lint.extend-per-file-ignores] "app/database/**/*.py" = ["I002"] +"tools/*.py" = ["PTH", "INP001"] +"migrations/**/*.py" = ["INP001"] +".github/**/*.py" = ["INP001"] +"app/achievements/*.py" = ["INP001", "ARG"] +"app/router/**/*.py" = ["ARG001"] [tool.ruff.lint.isort] force-sort-within-sections = true diff --git a/tools/fix_user_rank_event.py b/tools/fix_user_rank_event.py index 45b098a..67eab51 100644 --- a/tools/fix_user_rank_event.py +++ b/tools/fix_user_rank_event.py @@ -163,13 +163,19 @@ async def main(): # Show specific changes changes = [] - if "scorerank" in original_payload and "scorerank" in fixed_payload: - if original_payload["scorerank"] != fixed_payload["scorerank"]: - changes.append(f"scorerank: {original_payload['scorerank']} → {fixed_payload['scorerank']}") + if ( + "scorerank" in original_payload + and "scorerank" in fixed_payload + and original_payload["scorerank"] != fixed_payload["scorerank"] + ): + changes.append(f"scorerank: {original_payload['scorerank']} → {fixed_payload['scorerank']}") - if "mode" in original_payload and "mode" in fixed_payload: - if original_payload["mode"] != fixed_payload["mode"]: - changes.append(f"mode: {original_payload['mode']} → {fixed_payload['mode']}") + if ( + "mode" in original_payload + and "mode" in fixed_payload + and original_payload["mode"] != fixed_payload["mode"] + ): + changes.append(f"mode: {original_payload['mode']} → {fixed_payload['mode']}") if changes: print(f" Changes: {', '.join(changes)}") From 046f89440708da3bf62aa82a2cc5cea6872d6829 Mon Sep 17 00:00:00 2001 From: MingxuanGame Date: Fri, 3 Oct 2025 17:12:28 +0000 Subject: [PATCH 10/26] refactor(assets_proxy): use decorators to simplify code --- app/helpers/asset_proxy_helper.py | 108 +++++++++++++++++++++++++++ app/models/beatmap.py | 3 + app/router/__init__.py | 2 - app/router/auth.py | 2 - app/router/fetcher.py | 2 - app/router/file.py | 2 - app/router/lio.py | 2 - 
app/router/notification/__init__.py | 2 - app/router/notification/banchobot.py | 2 - app/router/notification/channel.py | 2 - app/router/notification/message.py | 2 - app/router/notification/server.py | 2 - app/router/private/__init__.py | 2 - app/router/private/admin.py | 2 - app/router/private/audio_proxy.py | 2 - app/router/private/avatar.py | 2 - app/router/private/beatmapset.py | 2 - app/router/private/cover.py | 2 - app/router/private/oauth.py | 2 - app/router/private/relationship.py | 2 - app/router/private/router.py | 2 - app/router/private/score.py | 2 - app/router/private/team.py | 2 - app/router/private/totp.py | 2 - app/router/private/username.py | 2 - app/router/redirect.py | 2 - app/router/v1/__init__.py | 2 - app/router/v1/beatmap.py | 2 - app/router/v1/public_router.py | 2 - app/router/v1/public_user.py | 2 - app/router/v1/replay.py | 2 - app/router/v1/router.py | 2 - app/router/v1/score.py | 2 - app/router/v1/user.py | 2 - app/router/v2/__init__.py | 2 - app/router/v2/beatmap.py | 2 - app/router/v2/beatmapset.py | 36 +++------ app/router/v2/cache.py | 2 - app/router/v2/me.py | 2 - app/router/v2/misc.py | 2 - app/router/v2/ranking.py | 2 - app/router/v2/relationship.py | 2 - app/router/v2/room.py | 2 - app/router/v2/router.py | 2 - app/router/v2/score.py | 2 - app/router/v2/session_verify.py | 2 - app/router/v2/tags.py | 2 - app/router/v2/user.py | 51 ++++++------- app/service/asset_proxy_helper.py | 79 -------------------- app/service/asset_proxy_service.py | 83 -------------------- app/service/ranking_cache_service.py | 9 +-- app/service/user_cache_service.py | 5 +- pyproject.toml | 2 +- 53 files changed, 151 insertions(+), 313 deletions(-) create mode 100644 app/helpers/asset_proxy_helper.py delete mode 100644 app/service/asset_proxy_helper.py delete mode 100644 app/service/asset_proxy_service.py diff --git a/app/helpers/asset_proxy_helper.py b/app/helpers/asset_proxy_helper.py new file mode 100644 index 0000000..87c68ab --- /dev/null +++ 
b/app/helpers/asset_proxy_helper.py @@ -0,0 +1,108 @@ +"""资源代理辅助方法与路由装饰器。""" + +from __future__ import annotations + +from collections.abc import Awaitable, Callable +from functools import wraps +import re +from typing import Any + +from app.config import settings + +from fastapi import Response +from pydantic import BaseModel + +Handler = Callable[..., Awaitable[Any]] + + +def _replace_asset_urls_in_string(value: str) -> str: + result = value + custom_domain = settings.custom_asset_domain + asset_prefix = settings.asset_proxy_prefix + avatar_prefix = settings.avatar_proxy_prefix + beatmap_prefix = settings.beatmap_proxy_prefix + audio_proxy_base_url = f"{settings.server_url}api/private/audio/beatmapset" + + result = re.sub( + r"^https://assets\.ppy\.sh/", + f"https://{asset_prefix}.{custom_domain}/", + result, + ) + + result = re.sub( + r"^https://b\.ppy\.sh/preview/(\d+)\\.mp3", + rf"{audio_proxy_base_url}/\1", + result, + ) + + result = re.sub( + r"^//b\.ppy\.sh/preview/(\d+)\\.mp3", + rf"{audio_proxy_base_url}/\1", + result, + ) + + result = re.sub( + r"^https://a\.ppy\.sh/", + f"https://{avatar_prefix}.{custom_domain}/", + result, + ) + + result = re.sub( + r"https://b\.ppy\.sh/", + f"https://{beatmap_prefix}.{custom_domain}/", + result, + ) + return result + + +def _replace_asset_urls_in_data(data: Any) -> Any: + if isinstance(data, str): + return _replace_asset_urls_in_string(data) + if isinstance(data, list): + return [_replace_asset_urls_in_data(item) for item in data] + if isinstance(data, tuple): + return tuple(_replace_asset_urls_in_data(item) for item in data) + if isinstance(data, dict): + return {key: _replace_asset_urls_in_data(value) for key, value in data.items()} + return data + + +async def replace_asset_urls(data: Any) -> Any: + """替换数据中的 osu! 
资源 URL。""" + + if not settings.enable_asset_proxy: + return data + + if hasattr(data, "model_dump"): + raw = data.model_dump() + processed = _replace_asset_urls_in_data(raw) + try: + return data.__class__(**processed) + except Exception: + return processed + + if isinstance(data, (dict, list, tuple, str)): + return _replace_asset_urls_in_data(data) + + return data + + +def asset_proxy_response(func: Handler) -> Handler: + """装饰器:在返回响应前替换资源 URL。""" + + @wraps(func) + async def wrapper(*args, **kwargs): + result = await func(*args, **kwargs) + + if not settings.enable_asset_proxy: + return result + + if isinstance(result, Response): + return result + + if isinstance(result, BaseModel): + result = result.model_dump() + + return _replace_asset_urls_in_data(result) + + return wrapper # type: ignore[return-value] diff --git a/app/models/beatmap.py b/app/models/beatmap.py index 718041f..c4e71b1 100644 --- a/app/models/beatmap.py +++ b/app/models/beatmap.py @@ -204,3 +204,6 @@ class SearchQueryModel(BaseModel): default=None, description="游标字符串,用于分页", ) + + +SearchQueryModel.model_rebuild() diff --git a/app/router/__init__.py b/app/router/__init__.py index fc5d2a7..814d19b 100644 --- a/app/router/__init__.py +++ b/app/router/__init__.py @@ -1,5 +1,3 @@ -from __future__ import annotations - from .auth import router as auth_router from .fetcher import fetcher_router as fetcher_router from .file import file_router as file_router diff --git a/app/router/auth.py b/app/router/auth.py index 1512642..b51028d 100644 --- a/app/router/auth.py +++ b/app/router/auth.py @@ -1,5 +1,3 @@ -from __future__ import annotations - from datetime import timedelta import re from typing import Annotated, Literal diff --git a/app/router/fetcher.py b/app/router/fetcher.py index 887eabf..23bf8e6 100644 --- a/app/router/fetcher.py +++ b/app/router/fetcher.py @@ -1,5 +1,3 @@ -from __future__ import annotations - from app.dependencies.fetcher import Fetcher from fastapi import APIRouter diff --git 
a/app/router/file.py b/app/router/file.py index 14263f9..184baff 100644 --- a/app/router/file.py +++ b/app/router/file.py @@ -1,5 +1,3 @@ -from __future__ import annotations - from app.dependencies.storage import StorageService as StorageServiceDep from app.storage import LocalStorageService diff --git a/app/router/lio.py b/app/router/lio.py index 93e0088..096a883 100644 --- a/app/router/lio.py +++ b/app/router/lio.py @@ -1,7 +1,5 @@ """LIO (Legacy IO) router for osu-server-spectator compatibility.""" -from __future__ import annotations - import base64 import json from typing import Any diff --git a/app/router/notification/__init__.py b/app/router/notification/__init__.py index 176fbdb..b9b0658 100644 --- a/app/router/notification/__init__.py +++ b/app/router/notification/__init__.py @@ -1,5 +1,3 @@ -from __future__ import annotations - from app.config import settings from app.database.notification import Notification, UserNotification from app.database.user import User diff --git a/app/router/notification/banchobot.py b/app/router/notification/banchobot.py index a491b7d..39a01c5 100644 --- a/app/router/notification/banchobot.py +++ b/app/router/notification/banchobot.py @@ -1,5 +1,3 @@ -from __future__ import annotations - import asyncio from collections.abc import Awaitable, Callable from math import ceil diff --git a/app/router/notification/channel.py b/app/router/notification/channel.py index 62861ec..324c0ff 100644 --- a/app/router/notification/channel.py +++ b/app/router/notification/channel.py @@ -1,5 +1,3 @@ -from __future__ import annotations - from typing import Annotated, Any, Literal, Self from app.database.chat import ( diff --git a/app/router/notification/message.py b/app/router/notification/message.py index 8dec1cb..34ebc48 100644 --- a/app/router/notification/message.py +++ b/app/router/notification/message.py @@ -1,5 +1,3 @@ -from __future__ import annotations - from typing import Annotated from app.database import ChatMessageResp diff --git 
a/app/router/notification/server.py b/app/router/notification/server.py index 0732124..021a690 100644 --- a/app/router/notification/server.py +++ b/app/router/notification/server.py @@ -1,5 +1,3 @@ -from __future__ import annotations - import asyncio from typing import Annotated, overload diff --git a/app/router/private/__init__.py b/app/router/private/__init__.py index f1715a1..7664822 100644 --- a/app/router/private/__init__.py +++ b/app/router/private/__init__.py @@ -1,5 +1,3 @@ -from __future__ import annotations - from app.config import settings from . import admin, audio_proxy, avatar, beatmapset, cover, oauth, relationship, score, team, username # noqa: F401 diff --git a/app/router/private/admin.py b/app/router/private/admin.py index 57dcf98..c9bf1d4 100644 --- a/app/router/private/admin.py +++ b/app/router/private/admin.py @@ -1,5 +1,3 @@ -from __future__ import annotations - from typing import Annotated from app.database.auth import OAuthToken diff --git a/app/router/private/audio_proxy.py b/app/router/private/audio_proxy.py index ca8cda4..da5d946 100644 --- a/app/router/private/audio_proxy.py +++ b/app/router/private/audio_proxy.py @@ -3,8 +3,6 @@ 提供从osu!官方获取beatmapset音频预览的代理服务 """ -from __future__ import annotations - from typing import Annotated from app.dependencies.database import get_redis, get_redis_binary diff --git a/app/router/private/avatar.py b/app/router/private/avatar.py index 0af8694..dfe8166 100644 --- a/app/router/private/avatar.py +++ b/app/router/private/avatar.py @@ -1,5 +1,3 @@ -from __future__ import annotations - import hashlib from typing import Annotated diff --git a/app/router/private/beatmapset.py b/app/router/private/beatmapset.py index 5b80841..f206562 100644 --- a/app/router/private/beatmapset.py +++ b/app/router/private/beatmapset.py @@ -1,5 +1,3 @@ -from __future__ import annotations - from typing import Annotated from app.database.beatmap import Beatmap diff --git a/app/router/private/cover.py b/app/router/private/cover.py 
index 71992e0..b0a5b53 100644 --- a/app/router/private/cover.py +++ b/app/router/private/cover.py @@ -1,5 +1,3 @@ -from __future__ import annotations - import hashlib from typing import Annotated diff --git a/app/router/private/oauth.py b/app/router/private/oauth.py index 2af00dc..8c4c664 100644 --- a/app/router/private/oauth.py +++ b/app/router/private/oauth.py @@ -1,5 +1,3 @@ -from __future__ import annotations - import secrets from typing import Annotated diff --git a/app/router/private/relationship.py b/app/router/private/relationship.py index 1f882cb..7b051a8 100644 --- a/app/router/private/relationship.py +++ b/app/router/private/relationship.py @@ -1,5 +1,3 @@ -from __future__ import annotations - from typing import Annotated from app.database import Relationship diff --git a/app/router/private/router.py b/app/router/private/router.py index 6882bc9..6f5fac4 100644 --- a/app/router/private/router.py +++ b/app/router/private/router.py @@ -1,5 +1,3 @@ -from __future__ import annotations - from app.dependencies.rate_limit import LIMITERS from fastapi import APIRouter diff --git a/app/router/private/score.py b/app/router/private/score.py index e640121..c5a6a97 100644 --- a/app/router/private/score.py +++ b/app/router/private/score.py @@ -1,5 +1,3 @@ -from __future__ import annotations - from app.database.score import Score from app.dependencies.database import Database, Redis from app.dependencies.storage import StorageService diff --git a/app/router/private/team.py b/app/router/private/team.py index b60461c..2cf6cae 100644 --- a/app/router/private/team.py +++ b/app/router/private/team.py @@ -1,5 +1,3 @@ -from __future__ import annotations - import hashlib from typing import Annotated diff --git a/app/router/private/totp.py b/app/router/private/totp.py index 06406aa..780c1ec 100644 --- a/app/router/private/totp.py +++ b/app/router/private/totp.py @@ -1,5 +1,3 @@ -from __future__ import annotations - from typing import Annotated from app.auth import ( diff --git 
a/app/router/private/username.py b/app/router/private/username.py index 18eb219..10c5411 100644 --- a/app/router/private/username.py +++ b/app/router/private/username.py @@ -1,5 +1,3 @@ -from __future__ import annotations - from typing import Annotated from app.auth import validate_username diff --git a/app/router/redirect.py b/app/router/redirect.py index bec9eca..f90a5d4 100644 --- a/app/router/redirect.py +++ b/app/router/redirect.py @@ -1,5 +1,3 @@ -from __future__ import annotations - import urllib.parse from app.config import settings diff --git a/app/router/v1/__init__.py b/app/router/v1/__init__.py index 4a8ca41..f2ae0aa 100644 --- a/app/router/v1/__init__.py +++ b/app/router/v1/__init__.py @@ -1,5 +1,3 @@ -from __future__ import annotations - from . import beatmap, public_user, replay, score, user # noqa: F401 from .public_router import public_router as api_v1_public_router from .router import router as api_v1_router diff --git a/app/router/v1/beatmap.py b/app/router/v1/beatmap.py index 6ca3775..1df2048 100644 --- a/app/router/v1/beatmap.py +++ b/app/router/v1/beatmap.py @@ -1,5 +1,3 @@ -from __future__ import annotations - from datetime import datetime from typing import Annotated, Literal diff --git a/app/router/v1/public_router.py b/app/router/v1/public_router.py index 4d2c240..596beec 100644 --- a/app/router/v1/public_router.py +++ b/app/router/v1/public_router.py @@ -1,5 +1,3 @@ -from __future__ import annotations - from datetime import datetime from enum import Enum diff --git a/app/router/v1/public_user.py b/app/router/v1/public_user.py index dadbfb9..6ca98d8 100644 --- a/app/router/v1/public_user.py +++ b/app/router/v1/public_user.py @@ -1,5 +1,3 @@ -from __future__ import annotations - from typing import Annotated, Literal from app.database.statistics import UserStatistics diff --git a/app/router/v1/replay.py b/app/router/v1/replay.py index b0ffc99..0b16b3a 100644 --- a/app/router/v1/replay.py +++ b/app/router/v1/replay.py @@ -1,5 +1,3 @@ -from 
__future__ import annotations - import base64 from datetime import date from typing import Annotated, Literal diff --git a/app/router/v1/router.py b/app/router/v1/router.py index f46ea2b..612d222 100644 --- a/app/router/v1/router.py +++ b/app/router/v1/router.py @@ -1,5 +1,3 @@ -from __future__ import annotations - from datetime import datetime from enum import Enum diff --git a/app/router/v1/score.py b/app/router/v1/score.py index 4ac9b42..f880846 100644 --- a/app/router/v1/score.py +++ b/app/router/v1/score.py @@ -1,5 +1,3 @@ -from __future__ import annotations - from datetime import datetime, timedelta from typing import Annotated, Literal diff --git a/app/router/v1/user.py b/app/router/v1/user.py index 52ee19a..f34ae76 100644 --- a/app/router/v1/user.py +++ b/app/router/v1/user.py @@ -1,5 +1,3 @@ -from __future__ import annotations - from datetime import datetime from typing import Annotated, Literal diff --git a/app/router/v2/__init__.py b/app/router/v2/__init__.py index 0bd06cd..e12f56b 100644 --- a/app/router/v2/__init__.py +++ b/app/router/v2/__init__.py @@ -1,5 +1,3 @@ -from __future__ import annotations - from . 
import ( # noqa: F401 beatmap, beatmapset, diff --git a/app/router/v2/beatmap.py b/app/router/v2/beatmap.py index 152e8f0..e389865 100644 --- a/app/router/v2/beatmap.py +++ b/app/router/v2/beatmap.py @@ -1,5 +1,3 @@ -from __future__ import annotations - import asyncio import hashlib import json diff --git a/app/router/v2/beatmapset.py b/app/router/v2/beatmapset.py index 3cfcdb1..d61b5f8 100644 --- a/app/router/v2/beatmapset.py +++ b/app/router/v2/beatmapset.py @@ -1,5 +1,3 @@ -from __future__ import annotations - import re from typing import Annotated, Literal from urllib.parse import parse_qs @@ -12,8 +10,8 @@ from app.dependencies.database import Database, Redis, with_db from app.dependencies.fetcher import Fetcher from app.dependencies.geoip import IPAddress, get_geoip_helper from app.dependencies.user import ClientUser, get_current_user +from app.helpers.asset_proxy_helper import asset_proxy_response from app.models.beatmap import SearchQueryModel -from app.service.asset_proxy_helper import process_response_assets from app.service.beatmapset_cache_service import generate_hash from .router import router @@ -45,8 +43,9 @@ async def _save_to_db(sets: SearchBeatmapsetsResp): tags=["谱面集"], response_model=SearchBeatmapsetsResp, ) +@asset_proxy_response async def search_beatmapset( - query: Annotated[SearchQueryModel, Query(...)], + query: Annotated[SearchQueryModel, Query()], request: Request, background_tasks: BackgroundTasks, current_user: Annotated[User, Security(get_current_user, scopes=["public"])], @@ -102,9 +101,7 @@ async def search_beatmapset( cached_result = await cache_service.get_search_from_cache(query_hash, cursor_hash) if cached_result: sets = SearchBeatmapsetsResp(**cached_result) - # 处理资源代理 - processed_sets = await process_response_assets(sets) - return processed_sets + return sets try: sets = await fetcher.search_beatmapset(query, cursor, redis) @@ -112,10 +109,7 @@ async def search_beatmapset( # 缓存搜索结果 await 
cache_service.cache_search_result(query_hash, cursor_hash, sets.model_dump()) - - # 处理资源代理 - processed_sets = await process_response_assets(sets) - return processed_sets + return sets except HTTPError as e: raise HTTPException(status_code=500, detail=str(e)) from e @@ -127,6 +121,7 @@ async def search_beatmapset( response_model=BeatmapsetResp, description=("通过谱面 ID 查询所属谱面集。"), ) +@asset_proxy_response async def lookup_beatmapset( db: Database, request: Request, @@ -138,9 +133,7 @@ async def lookup_beatmapset( # 先尝试从缓存获取 cached_resp = await cache_service.get_beatmap_lookup_from_cache(beatmap_id) if cached_resp: - # 处理资源代理 - processed_resp = await process_response_assets(cached_resp) - return processed_resp + return cached_resp try: beatmap = await Beatmap.get_or_fetch(db, fetcher, bid=beatmap_id) @@ -148,10 +141,7 @@ async def lookup_beatmapset( # 缓存结果 await cache_service.cache_beatmap_lookup(beatmap_id, resp) - - # 处理资源代理 - processed_resp = await process_response_assets(resp) - return processed_resp + return resp except HTTPError as exc: raise HTTPException(status_code=404, detail="Beatmap not found") from exc @@ -163,6 +153,7 @@ async def lookup_beatmapset( response_model=BeatmapsetResp, description="获取单个谱面集详情。", ) +@asset_proxy_response async def get_beatmapset( db: Database, request: Request, @@ -174,9 +165,7 @@ async def get_beatmapset( # 先尝试从缓存获取 cached_resp = await cache_service.get_beatmapset_from_cache(beatmapset_id) if cached_resp: - # 处理资源代理 - processed_resp = await process_response_assets(cached_resp) - return processed_resp + return cached_resp try: beatmapset = await Beatmapset.get_or_fetch(db, fetcher, beatmapset_id) @@ -184,10 +173,7 @@ async def get_beatmapset( # 缓存结果 await cache_service.cache_beatmapset(resp) - - # 处理资源代理 - processed_resp = await process_response_assets(resp) - return processed_resp + return resp except HTTPError as exc: raise HTTPException(status_code=404, detail="Beatmapset not found") from exc diff --git a/app/router/v2/cache.py 
b/app/router/v2/cache.py index 0b1a396..a75b593 100644 --- a/app/router/v2/cache.py +++ b/app/router/v2/cache.py @@ -3,8 +3,6 @@ 提供缓存统计、清理和预热功能 """ -from __future__ import annotations - from app.dependencies.database import Redis from app.service.user_cache_service import get_user_cache_service diff --git a/app/router/v2/me.py b/app/router/v2/me.py index 5304e9d..ed89704 100644 --- a/app/router/v2/me.py +++ b/app/router/v2/me.py @@ -1,5 +1,3 @@ -from __future__ import annotations - from typing import Annotated from app.database import MeResp, User diff --git a/app/router/v2/misc.py b/app/router/v2/misc.py index bd67695..5c8e18b 100644 --- a/app/router/v2/misc.py +++ b/app/router/v2/misc.py @@ -1,5 +1,3 @@ -from __future__ import annotations - from datetime import UTC, datetime from app.config import settings diff --git a/app/router/v2/ranking.py b/app/router/v2/ranking.py index f2c6236..cde6c43 100644 --- a/app/router/v2/ranking.py +++ b/app/router/v2/ranking.py @@ -1,5 +1,3 @@ -from __future__ import annotations - from typing import Annotated, Literal from app.config import settings diff --git a/app/router/v2/relationship.py b/app/router/v2/relationship.py index 4851e2a..89fd223 100644 --- a/app/router/v2/relationship.py +++ b/app/router/v2/relationship.py @@ -1,5 +1,3 @@ -from __future__ import annotations - from typing import Annotated from app.database import Relationship, RelationshipResp, RelationshipType, User diff --git a/app/router/v2/room.py b/app/router/v2/room.py index 34fb6f7..66241e4 100644 --- a/app/router/v2/room.py +++ b/app/router/v2/room.py @@ -1,5 +1,3 @@ -from __future__ import annotations - from datetime import UTC from typing import Annotated, Literal diff --git a/app/router/v2/router.py b/app/router/v2/router.py index 5ef8c3f..9b39315 100644 --- a/app/router/v2/router.py +++ b/app/router/v2/router.py @@ -1,5 +1,3 @@ -from __future__ import annotations - from app.dependencies.rate_limit import LIMITERS from fastapi import APIRouter diff --git 
a/app/router/v2/score.py b/app/router/v2/score.py index 12be45d..a433da8 100644 --- a/app/router/v2/score.py +++ b/app/router/v2/score.py @@ -1,5 +1,3 @@ -from __future__ import annotations - from datetime import UTC, date import time from typing import Annotated diff --git a/app/router/v2/session_verify.py b/app/router/v2/session_verify.py index add3d70..cf2808c 100644 --- a/app/router/v2/session_verify.py +++ b/app/router/v2/session_verify.py @@ -2,8 +2,6 @@ 会话验证路由 - 实现类似 osu! 的邮件验证流程 (API v2) """ -from __future__ import annotations - from typing import Annotated, Literal from app.auth import check_totp_backup_code, verify_totp_key_with_replay_protection diff --git a/app/router/v2/tags.py b/app/router/v2/tags.py index 810656d..d64d4aa 100644 --- a/app/router/v2/tags.py +++ b/app/router/v2/tags.py @@ -1,5 +1,3 @@ -from __future__ import annotations - from typing import Annotated from app.database.beatmap import Beatmap diff --git a/app/router/v2/user.py b/app/router/v2/user.py index e4e41ea..4ffc976 100644 --- a/app/router/v2/user.py +++ b/app/router/v2/user.py @@ -1,5 +1,3 @@ -from __future__ import annotations - from datetime import timedelta from typing import Annotated, Literal @@ -19,10 +17,10 @@ from app.database.user import SEARCH_INCLUDED from app.dependencies.api_version import APIVersion from app.dependencies.database import Database, get_redis from app.dependencies.user import get_current_user +from app.helpers.asset_proxy_helper import asset_proxy_response from app.log import log from app.models.score import GameMode from app.models.user import BeatmapsetType -from app.service.asset_proxy_helper import process_response_assets from app.service.user_cache_service import get_user_cache_service from app.utils import utcnow @@ -47,6 +45,7 @@ class BatchUserResponse(BaseModel): ) @router.get("/users/lookup", response_model=BatchUserResponse, include_in_schema=False) @router.get("/users/lookup/", response_model=BatchUserResponse, include_in_schema=False) 
+@asset_proxy_response async def get_users( session: Database, request: Request, @@ -89,28 +88,25 @@ async def get_users( # 异步缓存,不阻塞响应 background_task.add_task(cache_service.cache_user, user_resp) - # 处理资源代理 response = BatchUserResponse(users=cached_users) - processed_response = await process_response_assets(response) - return processed_response + return response else: searched_users = (await session.exec(select(User).limit(50))).all() users = [] for searched_user in searched_users: - if searched_user.id != BANCHOBOT_ID: - user_resp = await UserResp.from_db( - searched_user, - session, - include=SEARCH_INCLUDED, - ) - users.append(user_resp) - # 异步缓存 - background_task.add_task(cache_service.cache_user, user_resp) + if searched_user.id == BANCHOBOT_ID: + continue + user_resp = await UserResp.from_db( + searched_user, + session, + include=SEARCH_INCLUDED, + ) + users.append(user_resp) + # 异步缓存 + background_task.add_task(cache_service.cache_user, user_resp) - # 处理资源代理 response = BatchUserResponse(users=users) - processed_response = await process_response_assets(response) - return processed_response + return response @router.get( @@ -176,6 +172,7 @@ async def get_user_kudosu( description="通过用户 ID 或用户名获取单个用户的详细信息,并指定特定 ruleset。", tags=["用户"], ) +@asset_proxy_response async def get_user_info_ruleset( session: Database, background_task: BackgroundTasks, @@ -224,6 +221,7 @@ async def get_user_info_ruleset( description="通过用户 ID 或用户名获取单个用户的详细信息。", tags=["用户"], ) +@asset_proxy_response async def get_user_info( background_task: BackgroundTasks, session: Database, @@ -239,9 +237,7 @@ async def get_user_info( user_id_int = int(user_id) cached_user = await cache_service.get_user_from_cache(user_id_int) if cached_user: - # 处理资源代理 - processed_user = await process_response_assets(cached_user) - return processed_user + return cached_user searched_user = ( await session.exec( @@ -262,9 +258,7 @@ async def get_user_info( # 异步缓存结果 background_task.add_task(cache_service.cache_user, 
user_resp) - # 处理资源代理 - processed_user = await process_response_assets(user_resp) - return processed_user + return user_resp @router.get( @@ -274,6 +268,7 @@ async def get_user_info( description="获取指定用户特定类型的谱面集列表,如最常游玩、收藏等。", tags=["用户"], ) +@asset_proxy_response async def get_user_beatmapsets( session: Database, background_task: BackgroundTasks, @@ -354,6 +349,7 @@ async def get_user_beatmapsets( ), tags=["用户"], ) +@asset_proxy_response async def get_user_scores( request: Request, session: Database, @@ -381,8 +377,7 @@ async def get_user_scores( user_id, type, include_fails, mode, limit, offset, is_legacy_api ) if cached_scores is not None: - processed_scores = await process_response_assets(cached_scores) - return processed_scores + return cached_scores db_user = await session.get(User, user_id) if not db_user or db_user.id == BANCHOBOT_ID: @@ -437,6 +432,4 @@ async def get_user_scores( is_legacy_api, ) - # 处理资源代理 - processed_scores = await process_response_assets(score_responses) - return processed_scores + return score_responses diff --git a/app/service/asset_proxy_helper.py b/app/service/asset_proxy_helper.py deleted file mode 100644 index c41e77c..0000000 --- a/app/service/asset_proxy_helper.py +++ /dev/null @@ -1,79 +0,0 @@ -""" -资源代理辅助函数和中间件 -""" - -from __future__ import annotations - -from typing import Any - -from app.config import settings -from app.service.asset_proxy_service import get_asset_proxy_service - -from fastapi import Request - - -async def process_response_assets(data: Any) -> Any: - """ - 根据配置处理响应数据中的资源URL - - Args: - data: API响应数据 - request: FastAPI请求对象 - - Returns: - 处理后的数据 - """ - if not settings.enable_asset_proxy: - return data - - asset_service = get_asset_proxy_service() - - # 仅URL替换模式 - return await asset_service.replace_asset_urls(data) - - -def should_process_asset_proxy(path: str) -> bool: - """ - 判断路径是否需要处理资源代理 - """ - # 只对特定的API端点处理资源代理 - asset_proxy_endpoints = [ - "/api/v1/users/", - "/api/v2/users/", - "/api/v1/me/", - 
"/api/v2/me/", - "/api/v2/beatmapsets/search", - "/api/v2/beatmapsets/lookup", - "/api/v2/beatmaps/", - "/api/v1/beatmaps/", - "/api/v2/beatmapsets/", - # 可以根据需要添加更多端点 - ] - - return any(path.startswith(endpoint) for endpoint in asset_proxy_endpoints) - - -# 响应处理装饰器 -def asset_proxy_response(func): - """ - 装饰器:自动处理响应中的资源URL - """ - - async def wrapper(*args, **kwargs): - # 获取request对象 - request = None - for arg in args: - if isinstance(arg, Request): - request = arg - break - - # 执行原函数 - result = await func(*args, **kwargs) - - # 如果有request对象且启用了资源代理,则处理响应 - if request and settings.enable_asset_proxy and should_process_asset_proxy(request.url.path): - result = await process_response_assets(result) - - return result - - return wrapper diff --git a/app/service/asset_proxy_service.py b/app/service/asset_proxy_service.py deleted file mode 100644 index 72fa54a..0000000 --- a/app/service/asset_proxy_service.py +++ /dev/null @@ -1,83 +0,0 @@ -""" -资源文件代理服务 -提供URL替换方案:将osu!官方资源URL替换为自定义域名 -""" - -from __future__ import annotations - -import re -from typing import Any - -from app.config import settings - - -class AssetProxyService: - """资源代理服务 - 仅URL替换模式""" - - def __init__(self): - # 从配置获取自定义assets域名和前缀 - self.custom_asset_domain = settings.custom_asset_domain - self.asset_proxy_prefix = settings.asset_proxy_prefix - self.avatar_proxy_prefix = settings.avatar_proxy_prefix - self.beatmap_proxy_prefix = settings.beatmap_proxy_prefix - # 音频代理接口URL - self.audio_proxy_base_url = f"{settings.server_url}api/private/audio/beatmapset" - - async def replace_asset_urls(self, data: Any) -> Any: - """ - 递归替换数据中的osu!资源URL为自定义域名 - """ - # 处理Pydantic模型 - if hasattr(data, "model_dump"): - # 转换为字典,处理后再转换回模型 - data_dict = data.model_dump() - processed_dict = await self.replace_asset_urls(data_dict) - # 尝试从字典重新创建模型 - try: - return data.__class__(**processed_dict) - except Exception: - # 如果重新创建失败,返回字典 - return processed_dict - elif isinstance(data, dict): - result = {} - for key, value in 
data.items(): - result[key] = await self.replace_asset_urls(value) - return result - elif isinstance(data, list): - return [await self.replace_asset_urls(item) for item in data] - elif isinstance(data, str): - # 替换各种osu!资源域名 - result = data - - # 替换 assets.ppy.sh (用户头像、封面、奖章等) - result = re.sub( - r"https://assets\.ppy\.sh/", f"https://{self.asset_proxy_prefix}.{self.custom_asset_domain}/", result - ) - - # 替换 b.ppy.sh 预览音频为我们的音频代理接口 - # 匹配 https://b.ppy.sh/preview/{beatmapset_id}.mp3 格式 - result = re.sub(r"https://b\.ppy\.sh/preview/(\d+)\.mp3", rf"{self.audio_proxy_base_url}/\1", result) - - # 匹配 //b.ppy.sh/preview/{beatmapset_id}.mp3 格式 - result = re.sub(r"//b\.ppy\.sh/preview/(\d+)\.mp3", rf"{self.audio_proxy_base_url}/\1", result) - - # 替换 a.ppy.sh 头像 - result = re.sub( - r"https://a\.ppy\.sh/", f"https://{self.avatar_proxy_prefix}.{self.custom_asset_domain}/", result - ) - - return result - else: - return data - - -# 全局实例 -_asset_proxy_service: AssetProxyService | None = None - - -def get_asset_proxy_service() -> AssetProxyService: - """获取资源代理服务实例""" - global _asset_proxy_service - if _asset_proxy_service is None: - _asset_proxy_service = AssetProxyService() - return _asset_proxy_service diff --git a/app/service/ranking_cache_service.py b/app/service/ranking_cache_service.py index 0d9dd37..149fdab 100644 --- a/app/service/ranking_cache_service.py +++ b/app/service/ranking_cache_service.py @@ -12,9 +12,9 @@ from typing import TYPE_CHECKING, Literal from app.config import settings from app.database.statistics import UserStatistics, UserStatisticsResp +from app.helpers.asset_proxy_helper import replace_asset_urls from app.log import logger from app.models.score import GameMode -from app.service.asset_proxy_service import get_asset_proxy_service from app.utils import utcnow from redis.asyncio import Redis @@ -357,16 +357,15 @@ class RankingCacheService: for statistics in statistics_data: user_stats_resp = await UserStatisticsResp.from_db(statistics, session, 
None, include) + user_dict = user_stats_resp.model_dump() + # 应用资源代理处理 if settings.enable_asset_proxy: try: - asset_proxy_service = get_asset_proxy_service() - user_stats_resp = await asset_proxy_service.replace_asset_urls(user_stats_resp) + user_dict = await replace_asset_urls(user_dict) except Exception as e: logger.warning(f"Asset proxy processing failed for ranking cache: {e}") - # 将 UserStatisticsResp 转换为字典,处理所有序列化问题 - user_dict = json.loads(user_stats_resp.model_dump_json()) ranking_data.append(user_dict) # 缓存这一页的数据 diff --git a/app/service/user_cache_service.py b/app/service/user_cache_service.py index 4a40229..4aef7e6 100644 --- a/app/service/user_cache_service.py +++ b/app/service/user_cache_service.py @@ -15,9 +15,9 @@ from app.database import User, UserResp from app.database.score import LegacyScoreResp, ScoreResp from app.database.user import SEARCH_INCLUDED from app.dependencies.database import with_db +from app.helpers.asset_proxy_helper import replace_asset_urls from app.log import logger from app.models.score import GameMode -from app.service.asset_proxy_service import get_asset_proxy_service from redis.asyncio import Redis from sqlmodel import col, select @@ -318,8 +318,7 @@ class UserCacheService: # 应用资源代理处理 if settings.enable_asset_proxy: try: - asset_proxy_service = get_asset_proxy_service() - user_resp = await asset_proxy_service.replace_asset_urls(user_resp) + user_resp = await replace_asset_urls(user_resp) except Exception as e: logger.warning(f"Asset proxy processing failed for user cache {user.id}: {e}") diff --git a/pyproject.toml b/pyproject.toml index 2f99cda..4170f81 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -86,7 +86,7 @@ ignore = [ "migrations/**/*.py" = ["INP001"] ".github/**/*.py" = ["INP001"] "app/achievements/*.py" = ["INP001", "ARG"] -"app/router/**/*.py" = ["ARG001"] +"app/router/**/*.py" = ["ARG001", "I002"] [tool.ruff.lint.isort] force-sort-within-sections = true From dfd656f2fb341088b5f2fdf1afa8698d124ad8ea Mon Sep 
17 00:00:00 2001 From: MingxuanGame Date: Fri, 3 Oct 2025 17:15:41 +0000 Subject: [PATCH 11/26] style(project): remove `from __future__ import annotations` --- .github/scripts/generate_config_doc.py | 2 - app/achievements/daily_challenge.py | 2 - app/achievements/hush_hush.py | 2 - app/achievements/mods.py | 2 - app/achievements/osu_combo.py | 2 - app/achievements/osu_playcount.py | 2 - app/achievements/skill.py | 2 - app/achievements/total_hits.py | 2 - app/auth.py | 2 - app/calculator.py | 2 - app/config.py | 1 - app/const.py | 2 - app/database/beatmap_tags.py | 2 - app/database/beatmapset_ratings.py | 2 - app/database/password_reset.py | 2 - app/dependencies/api_version.py | 2 - app/dependencies/beatmap_download.py | 2 - app/dependencies/beatmapset_cache.py | 2 - app/dependencies/database.py | 2 - app/dependencies/fetcher.py | 2 - app/dependencies/geoip.py | 2 - app/dependencies/param.py | 2 - app/dependencies/rate_limit.py | 2 - app/dependencies/scheduler.py | 2 - app/dependencies/storage.py | 2 - app/dependencies/user.py | 2 - app/dependencies/user_agent.py | 2 - app/exceptions/userpage.py | 2 - app/fetcher/__init__.py | 2 - app/fetcher/_base.py | 2 - app/fetcher/beatmap.py | 2 - app/fetcher/beatmap_raw.py | 2 - app/fetcher/beatmapset.py | 2 - app/helpers/asset_proxy_helper.py | 2 - app/helpers/geoip_helper.py | 2 - app/helpers/rate_limiter.py | 2 - app/log.py | 2 - app/middleware/__init__.py | 2 - app/middleware/setup.py | 2 - app/middleware/verify_session.py | 182 +++++++++--------- app/models/achievement.py | 2 - app/models/beatmap.py | 2 - app/models/chat.py | 2 - app/models/extended_auth.py | 2 - app/models/model.py | 2 - app/models/mods.py | 2 - app/models/notification.py | 3 +- app/models/oauth.py | 2 +- app/models/playlist.py | 2 - app/models/room.py | 2 - app/models/score.py | 2 - app/models/stats.py | 2 - app/models/tags.py | 2 - app/models/totp.py | 2 - app/models/user.py | 2 - app/models/userpage.py | 2 - app/models/v1_user.py | 2 - app/path.py | 2 
- app/service/__init__.py | 2 - app/service/audio_proxy_service.py | 2 - app/service/bbcode_service.py | 2 - app/service/beatmap_cache_service.py | 2 - app/service/beatmap_download_service.py | 2 - app/service/beatmapset_cache_service.py | 2 - app/service/beatmapset_update_service.py | 2 - app/service/database_cleanup_service.py | 2 - app/service/email_queue.py | 2 - app/service/email_service.py | 2 - app/service/login_log_service.py | 2 - app/service/password_reset_service.py | 2 - app/service/ranking_cache_service.py | 2 - app/service/redis_message_system.py | 2 - app/service/room.py | 2 - app/service/subscribers/base.py | 2 - app/service/subscribers/chat.py | 2 - app/service/user_cache_service.py | 2 - app/service/verification_service.py | 2 - app/tasks/__init__.py | 2 +- app/tasks/beatmapset_update.py | 2 - app/tasks/cache.py | 2 - app/tasks/calculate_all_user_rank.py | 2 - app/tasks/create_banchobot.py | 2 - app/tasks/daily_challenge.py | 2 - app/tasks/database_cleanup.py | 2 - app/tasks/geoip.py | 2 - app/tasks/load_achievements.py | 2 - app/tasks/osu_rx_statistics.py | 2 - app/tasks/recalculate_banned_beatmap.py | 2 - app/tasks/recalculate_failed_score.py | 2 - app/utils.py | 2 - main.py | 2 - migrations/env.py | 2 - ...0_19cdc9ce4dcb_gamemode_add_osurx_osupp.py | 2 - ...10_319e5f841dcf_score_support_pin_score.py | 2 - .../versions/2025-08-10_fdb3822a30ba_init.py | 2 - ...a8669ba11e96_auth_support_custom_client.py | 2 - ...13f905_count_add_replays_watched_counts.py | 2 - ...2025-08-12_198227d190b8_user_add_events.py | 2 - ...3a_auth_add_name_description_for_oauth_.py | 2 - ...25-08-12_b6a304d96a2d_user_support_rank.py | 2 - ...-08-13_59c9a0827de0_beatmap_add_indexes.py | 2 - ...1ac7ca01d5_score_add_maximum_statistics.py | 2 - ...d764a5_statistics_remove_level_progress.py | 2 - ...laylist_best_scores_remove_foreign_key_.py | 2 - ...f0a5674_beatmap_make_max_combo_nullable.py | 2 - ...onvert_event_event_payload_from_str_to_.py | 2 - 
...-14_7e9d5e012d37_auth_add_v1_keys_table.py | 2 - ...1a2188e691_score_add_rx_for_taiko_catch.py | 2 - ..._9f6b27e8ea51_add_table_banned_beatmaps.py | 2 - .../2025-08-15_dd33d89aa2c2_chat_add_chat.py | 2 - ...-08-16_df9f725a077c_room_add_channel_id.py | 2 - ...d04d3f4dc_fix_user_login_log_table_name.py | 2 - ...08-18_2fcfc28846c1_beatmap_add_failtime.py | 2 - ...8_3eef4794ded1_add_user_login_log_table.py | 2 - ...6c43d8601_notification_add_notification.py | 2 - ...49e18ca_achievement_remove_primary_key_.py | 2 - ...6348cdfd2_add_email_verification_tables.py | 2 - ...b22_daily_challenge_add_last_day_streak.py | 2 - ..._increase_the_length_limit_of_the_user_.py | 2 - ...5e7dc8d5905_team_add_team_request_table.py | 2 - ...2_d103d442dc24_add_password_reset_table.py | 2 - ...3_feat_db_add_password_column_to_rooms_.py | 2 - ...-08-24_34a563187e47_score_add_processed.py | 2 - .../2025-08-24_3f890a76f036_add_id_data.py | 2 - ...user_change_collation_for_username_and_.py | 2 - ...-08-28_24a32515292d_add_beatmap_ratings.py | 2 - ...2025-08-29_ebaa317ad928_add_beatmap_tag.py | 2 - ...5-09-20_15e3a9a05b67_auth_add_totp_keys.py | 2 - ...ogin_sessions_remove_session_token_add_.py | 2 - ...eat_db_add_session_verification_fields_.py | 2 - ...7561edf_score_save_ranked_into_database.py | 2 - ...885978490dc_sync_add_beatmap_sync_table.py | 2 - ...-10-01_b1ac2154bd0d_sync_add_updated_at.py | 2 - ...9b8f3f863_session_support_multi_session.py | 2 - ...250c5_auth_add_refresh_token_expires_at.py | 2 - ...f_session_add_device_id_to_loginsession.py | 2 - pyproject.toml | 4 +- 137 files changed, 94 insertions(+), 362 deletions(-) diff --git a/.github/scripts/generate_config_doc.py b/.github/scripts/generate_config_doc.py index 0ec74c4..5c53867 100644 --- a/.github/scripts/generate_config_doc.py +++ b/.github/scripts/generate_config_doc.py @@ -1,5 +1,3 @@ -from __future__ import annotations - import datetime from enum import Enum import importlib.util diff --git 
a/app/achievements/daily_challenge.py b/app/achievements/daily_challenge.py index fcca594..846c1e2 100644 --- a/app/achievements/daily_challenge.py +++ b/app/achievements/daily_challenge.py @@ -1,5 +1,3 @@ -from __future__ import annotations - from functools import partial from app.database.daily_challenge import DailyChallengeStats diff --git a/app/achievements/hush_hush.py b/app/achievements/hush_hush.py index bc9c53f..178b033 100644 --- a/app/achievements/hush_hush.py +++ b/app/achievements/hush_hush.py @@ -1,5 +1,3 @@ -from __future__ import annotations - from datetime import datetime from app.database.beatmap import calculate_beatmap_attributes diff --git a/app/achievements/mods.py b/app/achievements/mods.py index 0f3c728..b6a565c 100644 --- a/app/achievements/mods.py +++ b/app/achievements/mods.py @@ -1,5 +1,3 @@ -from __future__ import annotations - from functools import partial from app.database.score import Beatmap, Score diff --git a/app/achievements/osu_combo.py b/app/achievements/osu_combo.py index 7039bae..a99b48f 100644 --- a/app/achievements/osu_combo.py +++ b/app/achievements/osu_combo.py @@ -1,5 +1,3 @@ -from __future__ import annotations - from functools import partial from app.database.score import Beatmap, Score diff --git a/app/achievements/osu_playcount.py b/app/achievements/osu_playcount.py index b5e1e9b..8d5a250 100644 --- a/app/achievements/osu_playcount.py +++ b/app/achievements/osu_playcount.py @@ -1,5 +1,3 @@ -from __future__ import annotations - from functools import partial from app.database import UserStatistics diff --git a/app/achievements/skill.py b/app/achievements/skill.py index 43993d8..b169541 100644 --- a/app/achievements/skill.py +++ b/app/achievements/skill.py @@ -1,5 +1,3 @@ -from __future__ import annotations - from functools import partial from typing import Literal, cast diff --git a/app/achievements/total_hits.py b/app/achievements/total_hits.py index 93fb2c5..a2214bc 100644 --- a/app/achievements/total_hits.py +++ 
b/app/achievements/total_hits.py @@ -1,5 +1,3 @@ -from __future__ import annotations - from functools import partial from app.database.score import Beatmap, Score diff --git a/app/auth.py b/app/auth.py index 8bef0a8..7ebc3a2 100644 --- a/app/auth.py +++ b/app/auth.py @@ -1,5 +1,3 @@ -from __future__ import annotations - from datetime import timedelta import hashlib import re diff --git a/app/calculator.py b/app/calculator.py index 7c9eb7c..ffcabec 100644 --- a/app/calculator.py +++ b/app/calculator.py @@ -1,5 +1,3 @@ -from __future__ import annotations - import asyncio from copy import deepcopy from enum import Enum diff --git a/app/config.py b/app/config.py index b09c233..53949aa 100644 --- a/app/config.py +++ b/app/config.py @@ -1,4 +1,3 @@ -# ruff: noqa: I002 from enum import Enum from typing import Annotated, Any diff --git a/app/const.py b/app/const.py index 439bc83..fd5f421 100644 --- a/app/const.py +++ b/app/const.py @@ -1,5 +1,3 @@ -from __future__ import annotations - BANCHOBOT_ID = 2 BACKUP_CODE_LENGTH = 10 diff --git a/app/database/beatmap_tags.py b/app/database/beatmap_tags.py index 69d5a31..f2b97de 100644 --- a/app/database/beatmap_tags.py +++ b/app/database/beatmap_tags.py @@ -1,5 +1,3 @@ -from __future__ import annotations - from sqlmodel import Field, SQLModel diff --git a/app/database/beatmapset_ratings.py b/app/database/beatmapset_ratings.py index 48ab0c1..8b63d88 100644 --- a/app/database/beatmapset_ratings.py +++ b/app/database/beatmapset_ratings.py @@ -1,5 +1,3 @@ -from __future__ import annotations - from app.database.beatmapset import Beatmapset from app.database.user import User diff --git a/app/database/password_reset.py b/app/database/password_reset.py index 2e92f0a..2aa40eb 100644 --- a/app/database/password_reset.py +++ b/app/database/password_reset.py @@ -2,8 +2,6 @@ 密码重置相关数据库模型 """ -from __future__ import annotations - from datetime import datetime from app.utils import utcnow diff --git a/app/dependencies/api_version.py 
b/app/dependencies/api_version.py index af8489a..75102fb 100644 --- a/app/dependencies/api_version.py +++ b/app/dependencies/api_version.py @@ -1,5 +1,3 @@ -from __future__ import annotations - from typing import Annotated from fastapi import Depends, Header diff --git a/app/dependencies/beatmap_download.py b/app/dependencies/beatmap_download.py index 818dc7e..b395181 100644 --- a/app/dependencies/beatmap_download.py +++ b/app/dependencies/beatmap_download.py @@ -1,5 +1,3 @@ -from __future__ import annotations - from typing import Annotated from app.service.beatmap_download_service import BeatmapDownloadService, download_service diff --git a/app/dependencies/beatmapset_cache.py b/app/dependencies/beatmapset_cache.py index df177e2..3c6af72 100644 --- a/app/dependencies/beatmapset_cache.py +++ b/app/dependencies/beatmapset_cache.py @@ -1,5 +1,3 @@ -from __future__ import annotations - from typing import Annotated from app.dependencies.database import Redis diff --git a/app/dependencies/database.py b/app/dependencies/database.py index 1e0a29a..49c377c 100644 --- a/app/dependencies/database.py +++ b/app/dependencies/database.py @@ -1,5 +1,3 @@ -from __future__ import annotations - from collections.abc import AsyncIterator, Callable from contextlib import asynccontextmanager from contextvars import ContextVar diff --git a/app/dependencies/fetcher.py b/app/dependencies/fetcher.py index be75cc3..9aecbb4 100644 --- a/app/dependencies/fetcher.py +++ b/app/dependencies/fetcher.py @@ -1,5 +1,3 @@ -from __future__ import annotations - from typing import Annotated from app.config import settings diff --git a/app/dependencies/geoip.py b/app/dependencies/geoip.py index 089b90c..c3fd887 100644 --- a/app/dependencies/geoip.py +++ b/app/dependencies/geoip.py @@ -2,8 +2,6 @@ GeoIP dependency for FastAPI """ -from __future__ import annotations - from functools import lru_cache import ipaddress from typing import Annotated diff --git a/app/dependencies/param.py 
b/app/dependencies/param.py index 9e640bd..4e44f10 100644 --- a/app/dependencies/param.py +++ b/app/dependencies/param.py @@ -1,5 +1,3 @@ -from __future__ import annotations - from typing import Any from fastapi import Request diff --git a/app/dependencies/rate_limit.py b/app/dependencies/rate_limit.py index 45e374e..4733c5a 100644 --- a/app/dependencies/rate_limit.py +++ b/app/dependencies/rate_limit.py @@ -1,5 +1,3 @@ -from __future__ import annotations - from app.config import settings from fastapi import Depends diff --git a/app/dependencies/scheduler.py b/app/dependencies/scheduler.py index 2bcee52..7f6d0a2 100644 --- a/app/dependencies/scheduler.py +++ b/app/dependencies/scheduler.py @@ -1,5 +1,3 @@ -from __future__ import annotations - from datetime import UTC from typing import cast diff --git a/app/dependencies/storage.py b/app/dependencies/storage.py index 413e5b0..bcbe3e4 100644 --- a/app/dependencies/storage.py +++ b/app/dependencies/storage.py @@ -1,5 +1,3 @@ -from __future__ import annotations - from typing import Annotated, cast from app.config import ( diff --git a/app/dependencies/user.py b/app/dependencies/user.py index 7061bcb..fe8aa01 100644 --- a/app/dependencies/user.py +++ b/app/dependencies/user.py @@ -1,5 +1,3 @@ -from __future__ import annotations - from typing import Annotated from app.auth import get_token_by_access_token diff --git a/app/dependencies/user_agent.py b/app/dependencies/user_agent.py index 6f776b1..763807b 100644 --- a/app/dependencies/user_agent.py +++ b/app/dependencies/user_agent.py @@ -1,5 +1,3 @@ -from __future__ import annotations - from typing import Annotated from app.models.model import UserAgentInfo as UserAgentInfoModel diff --git a/app/exceptions/userpage.py b/app/exceptions/userpage.py index f7ee16d..a448673 100644 --- a/app/exceptions/userpage.py +++ b/app/exceptions/userpage.py @@ -2,8 +2,6 @@ 用户页面相关的异常类 """ -from __future__ import annotations - class UserpageError(Exception): """用户页面处理错误基类""" diff --git 
a/app/fetcher/__init__.py b/app/fetcher/__init__.py index a2390f8..a059473 100644 --- a/app/fetcher/__init__.py +++ b/app/fetcher/__init__.py @@ -1,5 +1,3 @@ -from __future__ import annotations - from .beatmap import BeatmapFetcher from .beatmap_raw import BeatmapRawFetcher from .beatmapset import BeatmapsetFetcher diff --git a/app/fetcher/_base.py b/app/fetcher/_base.py index cf41be0..4f9f7e5 100644 --- a/app/fetcher/_base.py +++ b/app/fetcher/_base.py @@ -1,5 +1,3 @@ -from __future__ import annotations - import asyncio import time from urllib.parse import quote diff --git a/app/fetcher/beatmap.py b/app/fetcher/beatmap.py index 3909572..272f41f 100644 --- a/app/fetcher/beatmap.py +++ b/app/fetcher/beatmap.py @@ -1,5 +1,3 @@ -from __future__ import annotations - from app.database.beatmap import BeatmapResp from app.log import fetcher_logger diff --git a/app/fetcher/beatmap_raw.py b/app/fetcher/beatmap_raw.py index bdf1f90..382f778 100644 --- a/app/fetcher/beatmap_raw.py +++ b/app/fetcher/beatmap_raw.py @@ -1,5 +1,3 @@ -from __future__ import annotations - from app.log import fetcher_logger from ._base import BaseFetcher diff --git a/app/fetcher/beatmapset.py b/app/fetcher/beatmapset.py index a67c6f6..b228930 100644 --- a/app/fetcher/beatmapset.py +++ b/app/fetcher/beatmapset.py @@ -1,5 +1,3 @@ -from __future__ import annotations - import asyncio import base64 import hashlib diff --git a/app/helpers/asset_proxy_helper.py b/app/helpers/asset_proxy_helper.py index 87c68ab..13673ad 100644 --- a/app/helpers/asset_proxy_helper.py +++ b/app/helpers/asset_proxy_helper.py @@ -1,7 +1,5 @@ """资源代理辅助方法与路由装饰器。""" -from __future__ import annotations - from collections.abc import Awaitable, Callable from functools import wraps import re diff --git a/app/helpers/geoip_helper.py b/app/helpers/geoip_helper.py index 12c7c3c..741e4af 100644 --- a/app/helpers/geoip_helper.py +++ b/app/helpers/geoip_helper.py @@ -2,8 +2,6 @@ GeoLite2 Helper Class (asynchronous) """ -from __future__ 
import annotations - import asyncio from contextlib import suppress import os diff --git a/app/helpers/rate_limiter.py b/app/helpers/rate_limiter.py index 0002e5c..c80088a 100644 --- a/app/helpers/rate_limiter.py +++ b/app/helpers/rate_limiter.py @@ -6,8 +6,6 @@ Rate limiter for osu! API requests to avoid abuse detection. - 建议:每分钟不超过 60 次请求以避免滥用检测 """ -from __future__ import annotations - import asyncio from collections import deque import time diff --git a/app/log.py b/app/log.py index d2c5060..981c154 100644 --- a/app/log.py +++ b/app/log.py @@ -1,5 +1,3 @@ -from __future__ import annotations - import http import inspect import logging diff --git a/app/middleware/__init__.py b/app/middleware/__init__.py index d24fa14..98b7ca2 100644 --- a/app/middleware/__init__.py +++ b/app/middleware/__init__.py @@ -1,5 +1,3 @@ -from __future__ import annotations - from .verify_session import SessionState, VerifySessionMiddleware __all__ = ["SessionState", "VerifySessionMiddleware"] diff --git a/app/middleware/setup.py b/app/middleware/setup.py index 978ccf3..f822ece 100644 --- a/app/middleware/setup.py +++ b/app/middleware/setup.py @@ -1,5 +1,3 @@ -from __future__ import annotations - from app.config import settings from app.middleware.verify_session import VerifySessionMiddleware diff --git a/app/middleware/verify_session.py b/app/middleware/verify_session.py index dee6332..626a5e5 100644 --- a/app/middleware/verify_session.py +++ b/app/middleware/verify_session.py @@ -4,8 +4,6 @@ FastAPI会话验证中间件 基于osu-web的会话验证系统,适配FastAPI框架 """ -from __future__ import annotations - from collections.abc import Callable from typing import ClassVar @@ -28,6 +26,96 @@ from starlette.middleware.base import BaseHTTPMiddleware logger = log("Middleware") +class SessionState: + """会话状态类 + + 简化版本的会话状态管理 + """ + + def __init__(self, session: LoginSession, user: User, redis: Redis, db: AsyncSession, api_version: int = 0) -> None: + self.session = session + self.user = user + self.redis = redis + self.db 
= db + self.api_version = api_version + self._verification_method: str | None = None + + def is_verified(self) -> bool: + """检查会话是否已验证""" + return self.session.is_verified + + async def get_method(self) -> str: + """获取验证方法""" + if self._verification_method is None: + # 从Redis获取已设置的方法 + token_id = self.session.token_id + if token_id is not None: + self._verification_method = await LoginSessionService.get_login_method( + self.user.id, token_id, self.redis + ) + + if self._verification_method is None: + if self.api_version < SUPPORT_TOTP_VERIFICATION_VER: + self._verification_method = "mail" + return self._verification_method + + await self.user.awaitable_attrs.totp_key + totp_key = self.user.totp_key + self._verification_method = "totp" if totp_key else "mail" + + token_id = self.session.token_id + if token_id is not None: + await LoginSessionService.set_login_method( + self.user.id, token_id, self._verification_method, self.redis + ) + + return self._verification_method + + async def mark_verified(self) -> None: + """标记会话为已验证""" + try: + token_id = self.session.token_id + if token_id is not None: + await LoginSessionService.mark_session_verified( + self.db, + self.redis, + self.user.id, + token_id, + self.session.ip_address, + extract_user_agent(self.session.user_agent), + self.session.web_uuid, + ) + except Exception as e: + logger.error(f"Error marking verified: {e}") + + async def issue_mail_if_needed(self) -> None: + """如果需要,发送验证邮件""" + try: + if await self.get_method() == "mail": + from app.service.verification_service import EmailVerificationService + + # 这里可以触发邮件发送 + await EmailVerificationService.send_verification_email( + self.db, self.redis, self.user.id, self.user.username, self.user.email, None, None + ) + except Exception as e: + logger.error(f"Error issuing mail: {e}") + + def get_key(self) -> str: + """获取会话密钥""" + return str(self.session.id) if self.session.id else "" + + @property + def key_for_event(self) -> str: + """获取用于事件广播的会话密钥""" + return 
LoginSessionService.get_key_for_event(self.get_key()) + + @property + def user_id(self) -> int: + """获取用户ID""" + return self.user.id + + class VerifySessionMiddleware(BaseHTTPMiddleware): """会话验证中间件 @@ -192,93 +280,3 @@ class VerifySessionMiddleware(BaseHTTPMiddleware): return JSONResponse( status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, content={"error": "Verification initiation failed"} ) - - -class SessionState: - """会话状态类 - - 简化版本的会话状态管理 - """ - - def __init__(self, session: LoginSession, user: User, redis: Redis, db: AsyncSession, api_version: int = 0) -> None: - self.session = session - self.user = user - self.redis = redis - self.db = db - self.api_version = api_version - self._verification_method: str | None = None - - def is_verified(self) -> bool: - """检查会话是否已验证""" - return self.session.is_verified - - async def get_method(self) -> str: - """获取验证方法""" - if self._verification_method is None: - # 从Redis获取已设置的方法 - token_id = self.session.token_id - if token_id is not None: - self._verification_method = await LoginSessionService.get_login_method( - self.user.id, token_id, self.redis - ) - - if self._verification_method is None: - if self.api_version < SUPPORT_TOTP_VERIFICATION_VER: - self._verification_method = "mail" - return self._verification_method - - await self.user.awaitable_attrs.totp_key - totp_key = self.user.totp_key - self._verification_method = "totp" if totp_key else "mail" - - token_id = self.session.token_id - if token_id is not None: - await LoginSessionService.set_login_method( - self.user.id, token_id, self._verification_method, self.redis - ) - - return self._verification_method - - async def mark_verified(self) -> None: - """标记会话为已验证""" - try: - token_id = self.session.token_id - if token_id is not None: - await LoginSessionService.mark_session_verified( - self.db, - self.redis, - self.user.id, - token_id, - self.session.ip_address, - extract_user_agent(self.session.user_agent), - self.session.web_uuid, - ) - except Exception as e: - 
logger.error(f"Error marking verified: {e}") - - async def issue_mail_if_needed(self) -> None: - """如果需要,发送验证邮件""" - try: - if await self.get_method() == "mail": - from app.service.verification_service import EmailVerificationService - - # 这里可以触发邮件发送 - await EmailVerificationService.send_verification_email( - self.db, self.redis, self.user.id, self.user.username, self.user.email, None, None - ) - except Exception as e: - logger.error(f"Error issuing mail: {e}") - - def get_key(self) -> str: - """获取会话密钥""" - return str(self.session.id) if self.session.id else "" - - @property - def key_for_event(self) -> str: - """获取用于事件广播的会话密钥""" - return LoginSessionService.get_key_for_event(self.get_key()) - - @property - def user_id(self) -> int: - """获取用户ID""" - return self.user.id diff --git a/app/models/achievement.py b/app/models/achievement.py index 7b37155..98ee6d6 100644 --- a/app/models/achievement.py +++ b/app/models/achievement.py @@ -1,5 +1,3 @@ -from __future__ import annotations - from collections.abc import Awaitable, Callable from typing import TYPE_CHECKING, NamedTuple diff --git a/app/models/beatmap.py b/app/models/beatmap.py index c4e71b1..068a97f 100644 --- a/app/models/beatmap.py +++ b/app/models/beatmap.py @@ -1,5 +1,3 @@ -from __future__ import annotations - from enum import IntEnum from typing import Annotated, Any, Literal diff --git a/app/models/chat.py b/app/models/chat.py index 116342f..0e2b9da 100644 --- a/app/models/chat.py +++ b/app/models/chat.py @@ -1,5 +1,3 @@ -from __future__ import annotations - from typing import Any from pydantic import BaseModel diff --git a/app/models/extended_auth.py b/app/models/extended_auth.py index 35a3752..c1b9d56 100644 --- a/app/models/extended_auth.py +++ b/app/models/extended_auth.py @@ -2,8 +2,6 @@ 扩展的 OAuth 响应模型,支持二次验证 """ -from __future__ import annotations - from pydantic import BaseModel diff --git a/app/models/model.py b/app/models/model.py index 3224c99..4c28048 100644 --- a/app/models/model.py +++ 
b/app/models/model.py @@ -1,5 +1,3 @@ -from __future__ import annotations - from dataclasses import dataclass from datetime import UTC, datetime diff --git a/app/models/mods.py b/app/models/mods.py index a928cab..7bc5848 100644 --- a/app/models/mods.py +++ b/app/models/mods.py @@ -1,5 +1,3 @@ -from __future__ import annotations - import hashlib import json from typing import Any, Literal, NotRequired, TypedDict diff --git a/app/models/notification.py b/app/models/notification.py index ceef3b0..96c1929 100644 --- a/app/models/notification.py +++ b/app/models/notification.py @@ -1,5 +1,4 @@ # ruff: noqa: ARG002 -from __future__ import annotations from abc import abstractmethod from enum import Enum @@ -161,7 +160,7 @@ class ChannelMessageTeam(ChannelMessageBase): cls, message: "ChatMessage", user: "User", - ) -> ChannelMessageTeam: + ) -> Self: from app.database import ChannelType return super().init(message, user, [], ChannelType.TEAM) diff --git a/app/models/oauth.py b/app/models/oauth.py index ce4cabf..a2bffaf 100644 --- a/app/models/oauth.py +++ b/app/models/oauth.py @@ -1,4 +1,4 @@ -# OAuth 相关模型 # noqa: I002 +# OAuth 相关模型 from typing import Annotated, Any, cast from typing_extensions import Doc diff --git a/app/models/playlist.py b/app/models/playlist.py index d938f4b..d6432a9 100644 --- a/app/models/playlist.py +++ b/app/models/playlist.py @@ -1,5 +1,3 @@ -from __future__ import annotations - from datetime import datetime from app.models.mods import APIMod diff --git a/app/models/room.py b/app/models/room.py index 3cba32f..9b332a2 100644 --- a/app/models/room.py +++ b/app/models/room.py @@ -1,5 +1,3 @@ -from __future__ import annotations - from enum import Enum from pydantic import BaseModel diff --git a/app/models/score.py b/app/models/score.py index 410e3a1..ed8811e 100644 --- a/app/models/score.py +++ b/app/models/score.py @@ -1,5 +1,3 @@ -from __future__ import annotations - from enum import Enum from typing import TYPE_CHECKING, Literal, TypedDict, cast 
diff --git a/app/models/stats.py b/app/models/stats.py index ee79ba2..bad20e0 100644 --- a/app/models/stats.py +++ b/app/models/stats.py @@ -1,5 +1,3 @@ -from __future__ import annotations - from datetime import datetime from pydantic import BaseModel diff --git a/app/models/tags.py b/app/models/tags.py index abe728f..ad99687 100644 --- a/app/models/tags.py +++ b/app/models/tags.py @@ -1,5 +1,3 @@ -from __future__ import annotations - import json from app.log import log diff --git a/app/models/totp.py b/app/models/totp.py index d07ec29..318b820 100644 --- a/app/models/totp.py +++ b/app/models/totp.py @@ -1,5 +1,3 @@ -from __future__ import annotations - from enum import Enum from typing import TypedDict diff --git a/app/models/user.py b/app/models/user.py index a564238..a12f7b6 100644 --- a/app/models/user.py +++ b/app/models/user.py @@ -1,5 +1,3 @@ -from __future__ import annotations - from datetime import datetime from enum import Enum from typing import NotRequired, TypedDict diff --git a/app/models/userpage.py b/app/models/userpage.py index d7815a1..aeaca94 100644 --- a/app/models/userpage.py +++ b/app/models/userpage.py @@ -2,8 +2,6 @@ 用户页面编辑相关的API模型 """ -from __future__ import annotations - from pydantic import BaseModel, Field, field_validator diff --git a/app/models/v1_user.py b/app/models/v1_user.py index ea7d177..193e8b1 100644 --- a/app/models/v1_user.py +++ b/app/models/v1_user.py @@ -1,7 +1,5 @@ """V1 API 用户相关模型""" -from __future__ import annotations - from pydantic import BaseModel, Field diff --git a/app/path.py b/app/path.py index 63793b2..d21d382 100644 --- a/app/path.py +++ b/app/path.py @@ -1,5 +1,3 @@ -from __future__ import annotations - from pathlib import Path STATIC_DIR = Path(__file__).parent.parent / "static" diff --git a/app/service/__init__.py b/app/service/__init__.py index 8ddddd9..66da036 100644 --- a/app/service/__init__.py +++ b/app/service/__init__.py @@ -1,5 +1,3 @@ -from __future__ import annotations - from .room import 
create_playlist_room, create_playlist_room_from_api __all__ = [ diff --git a/app/service/audio_proxy_service.py b/app/service/audio_proxy_service.py index 5ff77d9..7018287 100644 --- a/app/service/audio_proxy_service.py +++ b/app/service/audio_proxy_service.py @@ -3,8 +3,6 @@ 提供从osu!官方获取beatmapset音频预览并缓存的功能 """ -from __future__ import annotations - from app.log import logger from fastapi import HTTPException diff --git a/app/service/bbcode_service.py b/app/service/bbcode_service.py index 4e6a9dc..bbacd1e 100644 --- a/app/service/bbcode_service.py +++ b/app/service/bbcode_service.py @@ -4,8 +4,6 @@ BBCode处理服务 支持所有 osu! 官方 BBCode 标签 """ -from __future__ import annotations - import html import re from typing import ClassVar diff --git a/app/service/beatmap_cache_service.py b/app/service/beatmap_cache_service.py index 1a76195..04f3ac2 100644 --- a/app/service/beatmap_cache_service.py +++ b/app/service/beatmap_cache_service.py @@ -3,8 +3,6 @@ Beatmap缓存预取服务 用于提前缓存热门beatmap,减少成绩计算时的获取延迟 """ -from __future__ import annotations - import asyncio from datetime import timedelta from typing import TYPE_CHECKING diff --git a/app/service/beatmap_download_service.py b/app/service/beatmap_download_service.py index 4d82244..5dca318 100644 --- a/app/service/beatmap_download_service.py +++ b/app/service/beatmap_download_service.py @@ -1,5 +1,3 @@ -from __future__ import annotations - import asyncio from dataclasses import dataclass from datetime import datetime diff --git a/app/service/beatmapset_cache_service.py b/app/service/beatmapset_cache_service.py index f255c8c..75fe3fe 100644 --- a/app/service/beatmapset_cache_service.py +++ b/app/service/beatmapset_cache_service.py @@ -3,8 +3,6 @@ Beatmapset缓存服务 用于缓存beatmapset数据,减少数据库查询频率 """ -from __future__ import annotations - from datetime import datetime import hashlib import json diff --git a/app/service/beatmapset_update_service.py b/app/service/beatmapset_update_service.py index 0cc8704..555919f 100644 --- 
a/app/service/beatmapset_update_service.py +++ b/app/service/beatmapset_update_service.py @@ -1,5 +1,3 @@ -from __future__ import annotations - from datetime import timedelta from enum import Enum import math diff --git a/app/service/database_cleanup_service.py b/app/service/database_cleanup_service.py index 6d80819..01aa1e8 100644 --- a/app/service/database_cleanup_service.py +++ b/app/service/database_cleanup_service.py @@ -2,8 +2,6 @@ 数据库清理服务 - 清理过期的验证码和会话 """ -from __future__ import annotations - from datetime import timedelta from app.database.auth import OAuthToken diff --git a/app/service/email_queue.py b/app/service/email_queue.py index be946fa..93f002b 100644 --- a/app/service/email_queue.py +++ b/app/service/email_queue.py @@ -3,8 +3,6 @@ 用于异步发送邮件 """ -from __future__ import annotations - import asyncio import concurrent.futures from datetime import datetime diff --git a/app/service/email_service.py b/app/service/email_service.py index 8e8314f..6215767 100644 --- a/app/service/email_service.py +++ b/app/service/email_service.py @@ -2,8 +2,6 @@ 邮件验证服务 """ -from __future__ import annotations - from email.mime.multipart import MIMEMultipart from email.mime.text import MIMEText import secrets diff --git a/app/service/login_log_service.py b/app/service/login_log_service.py index 0570493..f6b3d14 100644 --- a/app/service/login_log_service.py +++ b/app/service/login_log_service.py @@ -2,8 +2,6 @@ 用户登录记录服务 """ -from __future__ import annotations - import asyncio from app.database.user_login_log import UserLoginLog diff --git a/app/service/password_reset_service.py b/app/service/password_reset_service.py index 429840f..5a62bcd 100644 --- a/app/service/password_reset_service.py +++ b/app/service/password_reset_service.py @@ -2,8 +2,6 @@ 密码重置服务 """ -from __future__ import annotations - from datetime import datetime import json import secrets diff --git a/app/service/ranking_cache_service.py b/app/service/ranking_cache_service.py index 149fdab..11dd9da 100644 --- 
a/app/service/ranking_cache_service.py +++ b/app/service/ranking_cache_service.py @@ -3,8 +3,6 @@ 用于缓存用户排行榜数据,减轻数据库压力 """ -from __future__ import annotations - import asyncio from datetime import datetime import json diff --git a/app/service/redis_message_system.py b/app/service/redis_message_system.py index 2ca6d99..6bf4540 100644 --- a/app/service/redis_message_system.py +++ b/app/service/redis_message_system.py @@ -5,8 +5,6 @@ - 支持消息状态同步和故障恢复 """ -from __future__ import annotations - import asyncio from concurrent.futures import ThreadPoolExecutor from datetime import datetime diff --git a/app/service/room.py b/app/service/room.py index 03320ef..cec7029 100644 --- a/app/service/room.py +++ b/app/service/room.py @@ -1,5 +1,3 @@ -from __future__ import annotations - from datetime import timedelta from app.database.beatmap import Beatmap diff --git a/app/service/subscribers/base.py b/app/service/subscribers/base.py index 89a9e9a..080fa87 100644 --- a/app/service/subscribers/base.py +++ b/app/service/subscribers/base.py @@ -1,5 +1,3 @@ -from __future__ import annotations - import asyncio from collections.abc import Awaitable, Callable from fnmatch import fnmatch diff --git a/app/service/subscribers/chat.py b/app/service/subscribers/chat.py index 9241a7f..449d65e 100644 --- a/app/service/subscribers/chat.py +++ b/app/service/subscribers/chat.py @@ -1,5 +1,3 @@ -from __future__ import annotations - from typing import TYPE_CHECKING from app.log import log diff --git a/app/service/user_cache_service.py b/app/service/user_cache_service.py index 4aef7e6..72ea979 100644 --- a/app/service/user_cache_service.py +++ b/app/service/user_cache_service.py @@ -3,8 +3,6 @@ 用于缓存用户信息,提供热缓存和实时刷新功能 """ -from __future__ import annotations - from datetime import datetime import json from typing import TYPE_CHECKING, Any diff --git a/app/service/verification_service.py b/app/service/verification_service.py index f7cc0ea..134f8e5 100644 --- a/app/service/verification_service.py +++ 
b/app/service/verification_service.py @@ -2,8 +2,6 @@ 邮件验证管理服务 """ -from __future__ import annotations - from datetime import timedelta import secrets import string diff --git a/app/tasks/__init__.py b/app/tasks/__init__.py index 6b3332a..0d520ae 100644 --- a/app/tasks/__init__.py +++ b/app/tasks/__init__.py @@ -1,5 +1,5 @@ # ruff: noqa: F401 -from __future__ import annotations + from . import ( beatmapset_update, diff --git a/app/tasks/beatmapset_update.py b/app/tasks/beatmapset_update.py index e04e860..ebda7bb 100644 --- a/app/tasks/beatmapset_update.py +++ b/app/tasks/beatmapset_update.py @@ -1,5 +1,3 @@ -from __future__ import annotations - from datetime import datetime, timedelta from app.dependencies.scheduler import get_scheduler diff --git a/app/tasks/cache.py b/app/tasks/cache.py index 21934c9..955a686 100644 --- a/app/tasks/cache.py +++ b/app/tasks/cache.py @@ -1,7 +1,5 @@ """缓存相关的 APScheduler 任务入口。""" -from __future__ import annotations - import asyncio from datetime import UTC, timedelta from typing import Final diff --git a/app/tasks/calculate_all_user_rank.py b/app/tasks/calculate_all_user_rank.py index f742d25..31f2029 100644 --- a/app/tasks/calculate_all_user_rank.py +++ b/app/tasks/calculate_all_user_rank.py @@ -1,5 +1,3 @@ -from __future__ import annotations - from datetime import timedelta from app.database import RankHistory, UserStatistics diff --git a/app/tasks/create_banchobot.py b/app/tasks/create_banchobot.py index 6148ff1..8adfc29 100644 --- a/app/tasks/create_banchobot.py +++ b/app/tasks/create_banchobot.py @@ -1,5 +1,3 @@ -from __future__ import annotations - from app.const import BANCHOBOT_ID from app.database.statistics import UserStatistics from app.database.user import User diff --git a/app/tasks/daily_challenge.py b/app/tasks/daily_challenge.py index 8dd5e1c..cab2da5 100644 --- a/app/tasks/daily_challenge.py +++ b/app/tasks/daily_challenge.py @@ -1,5 +1,3 @@ -from __future__ import annotations - from datetime import UTC, timedelta 
import json from math import ceil diff --git a/app/tasks/database_cleanup.py b/app/tasks/database_cleanup.py index 264e5c1..9cb7a0d 100644 --- a/app/tasks/database_cleanup.py +++ b/app/tasks/database_cleanup.py @@ -1,5 +1,3 @@ -from __future__ import annotations - from app.dependencies.database import with_db from app.dependencies.scheduler import get_scheduler from app.log import logger diff --git a/app/tasks/geoip.py b/app/tasks/geoip.py index 2868346..e422592 100644 --- a/app/tasks/geoip.py +++ b/app/tasks/geoip.py @@ -3,8 +3,6 @@ Scheduled Update Service Periodically update the MaxMind GeoIP database """ -from __future__ import annotations - from app.config import settings from app.dependencies.geoip import get_geoip_helper from app.dependencies.scheduler import get_scheduler diff --git a/app/tasks/load_achievements.py b/app/tasks/load_achievements.py index e491274..718d912 100644 --- a/app/tasks/load_achievements.py +++ b/app/tasks/load_achievements.py @@ -1,5 +1,3 @@ -from __future__ import annotations - import importlib from app.log import logger diff --git a/app/tasks/osu_rx_statistics.py b/app/tasks/osu_rx_statistics.py index 9b2a796..e21a345 100644 --- a/app/tasks/osu_rx_statistics.py +++ b/app/tasks/osu_rx_statistics.py @@ -1,5 +1,3 @@ -from __future__ import annotations - from app.config import settings from app.const import BANCHOBOT_ID from app.database.statistics import UserStatistics diff --git a/app/tasks/recalculate_banned_beatmap.py b/app/tasks/recalculate_banned_beatmap.py index 0ed4d78..b88e6df 100644 --- a/app/tasks/recalculate_banned_beatmap.py +++ b/app/tasks/recalculate_banned_beatmap.py @@ -1,5 +1,3 @@ -from __future__ import annotations - import asyncio import json diff --git a/app/tasks/recalculate_failed_score.py b/app/tasks/recalculate_failed_score.py index 13b59a2..4da2966 100644 --- a/app/tasks/recalculate_failed_score.py +++ b/app/tasks/recalculate_failed_score.py @@ -1,5 +1,3 @@ -from __future__ import annotations - from 
app.calculator import pre_fetch_and_calculate_pp from app.database.score import Score, calculate_user_pp from app.database.statistics import UserStatistics diff --git a/app/utils.py b/app/utils.py index 69b3dd7..fce1e87 100644 --- a/app/utils.py +++ b/app/utils.py @@ -1,5 +1,3 @@ -from __future__ import annotations - import asyncio from collections.abc import Awaitable, Callable, Sequence from datetime import UTC, datetime diff --git a/main.py b/main.py index 27089b1..f9f8af7 100644 --- a/main.py +++ b/main.py @@ -1,5 +1,3 @@ -from __future__ import annotations - from contextlib import asynccontextmanager import json from pathlib import Path diff --git a/migrations/env.py b/migrations/env.py index 7fbcb1b..0b5f62f 100644 --- a/migrations/env.py +++ b/migrations/env.py @@ -1,5 +1,3 @@ -from __future__ import annotations - import asyncio from logging.config import fileConfig diff --git a/migrations/versions/2025-08-10_19cdc9ce4dcb_gamemode_add_osurx_osupp.py b/migrations/versions/2025-08-10_19cdc9ce4dcb_gamemode_add_osurx_osupp.py index e06d44b..8b23910 100644 --- a/migrations/versions/2025-08-10_19cdc9ce4dcb_gamemode_add_osurx_osupp.py +++ b/migrations/versions/2025-08-10_19cdc9ce4dcb_gamemode_add_osurx_osupp.py @@ -6,8 +6,6 @@ Create Date: 2025-08-10 06:10:08.093591 """ -from __future__ import annotations - from collections.abc import Sequence from alembic import op diff --git a/migrations/versions/2025-08-10_319e5f841dcf_score_support_pin_score.py b/migrations/versions/2025-08-10_319e5f841dcf_score_support_pin_score.py index ceacdec..54ce106 100644 --- a/migrations/versions/2025-08-10_319e5f841dcf_score_support_pin_score.py +++ b/migrations/versions/2025-08-10_319e5f841dcf_score_support_pin_score.py @@ -6,8 +6,6 @@ Create Date: 2025-08-10 14:07:51.749025 """ -from __future__ import annotations - from collections.abc import Sequence from alembic import op diff --git a/migrations/versions/2025-08-10_fdb3822a30ba_init.py 
b/migrations/versions/2025-08-10_fdb3822a30ba_init.py index e15b293..8f026e5 100644 --- a/migrations/versions/2025-08-10_fdb3822a30ba_init.py +++ b/migrations/versions/2025-08-10_fdb3822a30ba_init.py @@ -6,8 +6,6 @@ Create Date: 2025-08-10 04:30:58.443568 """ -from __future__ import annotations - from collections.abc import Sequence from alembic import op diff --git a/migrations/versions/2025-08-11_a8669ba11e96_auth_support_custom_client.py b/migrations/versions/2025-08-11_a8669ba11e96_auth_support_custom_client.py index 91b0367..a1efc47 100644 --- a/migrations/versions/2025-08-11_a8669ba11e96_auth_support_custom_client.py +++ b/migrations/versions/2025-08-11_a8669ba11e96_auth_support_custom_client.py @@ -6,8 +6,6 @@ Create Date: 2025-08-11 11:47:11.004301 """ -from __future__ import annotations - from collections.abc import Sequence from alembic import op diff --git a/migrations/versions/2025-08-11_aa582c13f905_count_add_replays_watched_counts.py b/migrations/versions/2025-08-11_aa582c13f905_count_add_replays_watched_counts.py index 33c727c..d741b8c 100644 --- a/migrations/versions/2025-08-11_aa582c13f905_count_add_replays_watched_counts.py +++ b/migrations/versions/2025-08-11_aa582c13f905_count_add_replays_watched_counts.py @@ -6,8 +6,6 @@ Create Date: 2025-08-11 08:03:33.739398 """ -from __future__ import annotations - from collections.abc import Sequence from alembic import op diff --git a/migrations/versions/2025-08-12_198227d190b8_user_add_events.py b/migrations/versions/2025-08-12_198227d190b8_user_add_events.py index 6f3d619..fc470f8 100644 --- a/migrations/versions/2025-08-12_198227d190b8_user_add_events.py +++ b/migrations/versions/2025-08-12_198227d190b8_user_add_events.py @@ -6,8 +6,6 @@ Create Date: 2025-08-12 15:12:49.860825 """ -from __future__ import annotations - from collections.abc import Sequence from alembic import op diff --git a/migrations/versions/2025-08-12_749bb2c2c33a_auth_add_name_description_for_oauth_.py 
b/migrations/versions/2025-08-12_749bb2c2c33a_auth_add_name_description_for_oauth_.py index ecb6d54..4855e21 100644 --- a/migrations/versions/2025-08-12_749bb2c2c33a_auth_add_name_description_for_oauth_.py +++ b/migrations/versions/2025-08-12_749bb2c2c33a_auth_add_name_description_for_oauth_.py @@ -6,8 +6,6 @@ Create Date: 2025-08-12 09:29:12.085060 """ -from __future__ import annotations - from collections.abc import Sequence from alembic import op diff --git a/migrations/versions/2025-08-12_b6a304d96a2d_user_support_rank.py b/migrations/versions/2025-08-12_b6a304d96a2d_user_support_rank.py index 347e238..20c20c8 100644 --- a/migrations/versions/2025-08-12_b6a304d96a2d_user_support_rank.py +++ b/migrations/versions/2025-08-12_b6a304d96a2d_user_support_rank.py @@ -6,8 +6,6 @@ Create Date: 2025-08-12 13:31:45.315844 """ -from __future__ import annotations - from collections.abc import Sequence from alembic import op diff --git a/migrations/versions/2025-08-13_59c9a0827de0_beatmap_add_indexes.py b/migrations/versions/2025-08-13_59c9a0827de0_beatmap_add_indexes.py index 9fab9c4..a98d7a6 100644 --- a/migrations/versions/2025-08-13_59c9a0827de0_beatmap_add_indexes.py +++ b/migrations/versions/2025-08-13_59c9a0827de0_beatmap_add_indexes.py @@ -6,8 +6,6 @@ Create Date: 2025-08-13 07:07:52.506510 """ -from __future__ import annotations - from collections.abc import Sequence from alembic import op diff --git a/migrations/versions/2025-08-13_881ac7ca01d5_score_add_maximum_statistics.py b/migrations/versions/2025-08-13_881ac7ca01d5_score_add_maximum_statistics.py index 125c6da..b71d240 100644 --- a/migrations/versions/2025-08-13_881ac7ca01d5_score_add_maximum_statistics.py +++ b/migrations/versions/2025-08-13_881ac7ca01d5_score_add_maximum_statistics.py @@ -6,8 +6,6 @@ Create Date: 2025-08-13 03:54:12.283468 """ -from __future__ import annotations - from collections.abc import Sequence from alembic import op diff --git 
a/migrations/versions/2025-08-13_8bab62d764a5_statistics_remove_level_progress.py b/migrations/versions/2025-08-13_8bab62d764a5_statistics_remove_level_progress.py index d799722..0fae42c 100644 --- a/migrations/versions/2025-08-13_8bab62d764a5_statistics_remove_level_progress.py +++ b/migrations/versions/2025-08-13_8bab62d764a5_statistics_remove_level_progress.py @@ -6,8 +6,6 @@ Create Date: 2025-08-13 10:34:03.430039 """ -from __future__ import annotations - from collections.abc import Sequence from alembic import op diff --git a/migrations/versions/2025-08-13_9aa4f7c06824_playlist_best_scores_remove_foreign_key_.py b/migrations/versions/2025-08-13_9aa4f7c06824_playlist_best_scores_remove_foreign_key_.py index 37cb1e0..d8abaf0 100644 --- a/migrations/versions/2025-08-13_9aa4f7c06824_playlist_best_scores_remove_foreign_key_.py +++ b/migrations/versions/2025-08-13_9aa4f7c06824_playlist_best_scores_remove_foreign_key_.py @@ -6,8 +6,6 @@ Create Date: 2025-08-13 15:17:53.921545 """ -from __future__ import annotations - from collections.abc import Sequence from alembic import op diff --git a/migrations/versions/2025-08-13_ce29ef0a5674_beatmap_make_max_combo_nullable.py b/migrations/versions/2025-08-13_ce29ef0a5674_beatmap_make_max_combo_nullable.py index 87b6613..823dbb5 100644 --- a/migrations/versions/2025-08-13_ce29ef0a5674_beatmap_make_max_combo_nullable.py +++ b/migrations/versions/2025-08-13_ce29ef0a5674_beatmap_make_max_combo_nullable.py @@ -6,8 +6,6 @@ Create Date: 2025-08-13 15:38:59.797780 """ -from __future__ import annotations - from collections.abc import Sequence from alembic import op diff --git a/migrations/versions/2025-08-13_f785165a5c0b_convert_event_event_payload_from_str_to_.py b/migrations/versions/2025-08-13_f785165a5c0b_convert_event_event_payload_from_str_to_.py index a70efbb..0520fae 100644 --- a/migrations/versions/2025-08-13_f785165a5c0b_convert_event_event_payload_from_str_to_.py +++ 
b/migrations/versions/2025-08-13_f785165a5c0b_convert_event_event_payload_from_str_to_.py @@ -6,8 +6,6 @@ Create Date: 2025-08-13 06:02:11.911557 """ -from __future__ import annotations - from collections.abc import Sequence from alembic import op diff --git a/migrations/versions/2025-08-14_7e9d5e012d37_auth_add_v1_keys_table.py b/migrations/versions/2025-08-14_7e9d5e012d37_auth_add_v1_keys_table.py index e5bdc88..4e4b25d 100644 --- a/migrations/versions/2025-08-14_7e9d5e012d37_auth_add_v1_keys_table.py +++ b/migrations/versions/2025-08-14_7e9d5e012d37_auth_add_v1_keys_table.py @@ -6,8 +6,6 @@ Create Date: 2025-08-14 08:39:51.725121 """ -from __future__ import annotations - from collections.abc import Sequence from alembic import op diff --git a/migrations/versions/2025-08-15_951a2188e691_score_add_rx_for_taiko_catch.py b/migrations/versions/2025-08-15_951a2188e691_score_add_rx_for_taiko_catch.py index e165b0f..bf5d140 100644 --- a/migrations/versions/2025-08-15_951a2188e691_score_add_rx_for_taiko_catch.py +++ b/migrations/versions/2025-08-15_951a2188e691_score_add_rx_for_taiko_catch.py @@ -6,8 +6,6 @@ Create Date: 2025-08-15 04:38:07.595003 """ -from __future__ import annotations - from collections.abc import Sequence from alembic import op diff --git a/migrations/versions/2025-08-15_9f6b27e8ea51_add_table_banned_beatmaps.py b/migrations/versions/2025-08-15_9f6b27e8ea51_add_table_banned_beatmaps.py index 9568433..a7277dc 100644 --- a/migrations/versions/2025-08-15_9f6b27e8ea51_add_table_banned_beatmaps.py +++ b/migrations/versions/2025-08-15_9f6b27e8ea51_add_table_banned_beatmaps.py @@ -6,8 +6,6 @@ Create Date: 2025-08-15 07:23:25.645360 """ -from __future__ import annotations - from collections.abc import Sequence from alembic import op diff --git a/migrations/versions/2025-08-15_dd33d89aa2c2_chat_add_chat.py b/migrations/versions/2025-08-15_dd33d89aa2c2_chat_add_chat.py index 973c380..ee44fe1 100644 --- 
a/migrations/versions/2025-08-15_dd33d89aa2c2_chat_add_chat.py +++ b/migrations/versions/2025-08-15_dd33d89aa2c2_chat_add_chat.py @@ -6,8 +6,6 @@ Create Date: 2025-08-15 14:22:34.775877 """ -from __future__ import annotations - from collections.abc import Sequence from alembic import op diff --git a/migrations/versions/2025-08-16_df9f725a077c_room_add_channel_id.py b/migrations/versions/2025-08-16_df9f725a077c_room_add_channel_id.py index 1125cba..b3a4bf0 100644 --- a/migrations/versions/2025-08-16_df9f725a077c_room_add_channel_id.py +++ b/migrations/versions/2025-08-16_df9f725a077c_room_add_channel_id.py @@ -6,8 +6,6 @@ Create Date: 2025-08-16 08:05:28.748265 """ -from __future__ import annotations - from collections.abc import Sequence from alembic import op diff --git a/migrations/versions/2025-08-18_2dcd04d3f4dc_fix_user_login_log_table_name.py b/migrations/versions/2025-08-18_2dcd04d3f4dc_fix_user_login_log_table_name.py index 52cc20e..9802aae 100644 --- a/migrations/versions/2025-08-18_2dcd04d3f4dc_fix_user_login_log_table_name.py +++ b/migrations/versions/2025-08-18_2dcd04d3f4dc_fix_user_login_log_table_name.py @@ -6,8 +6,6 @@ Create Date: 2025-08-18 00:07:06.886879 """ -from __future__ import annotations - from collections.abc import Sequence from alembic import op diff --git a/migrations/versions/2025-08-18_2fcfc28846c1_beatmap_add_failtime.py b/migrations/versions/2025-08-18_2fcfc28846c1_beatmap_add_failtime.py index 112d93d..3936d34 100644 --- a/migrations/versions/2025-08-18_2fcfc28846c1_beatmap_add_failtime.py +++ b/migrations/versions/2025-08-18_2fcfc28846c1_beatmap_add_failtime.py @@ -6,8 +6,6 @@ Create Date: 2025-08-18 06:06:30.929740 """ -from __future__ import annotations - from collections.abc import Sequence from alembic import op diff --git a/migrations/versions/2025-08-18_3eef4794ded1_add_user_login_log_table.py b/migrations/versions/2025-08-18_3eef4794ded1_add_user_login_log_table.py index 2228548..bd901b8 100644 --- 
a/migrations/versions/2025-08-18_3eef4794ded1_add_user_login_log_table.py +++ b/migrations/versions/2025-08-18_3eef4794ded1_add_user_login_log_table.py @@ -6,8 +6,6 @@ Create Date: 2025-08-18 00:00:11.369944 """ -from __future__ import annotations - from collections.abc import Sequence from alembic import op diff --git a/migrations/versions/2025-08-21_4f46c43d8601_notification_add_notification.py b/migrations/versions/2025-08-21_4f46c43d8601_notification_add_notification.py index f718c47..07f9a78 100644 --- a/migrations/versions/2025-08-21_4f46c43d8601_notification_add_notification.py +++ b/migrations/versions/2025-08-21_4f46c43d8601_notification_add_notification.py @@ -6,8 +6,6 @@ Create Date: 2025-08-21 07:03:45.813547 """ -from __future__ import annotations - from collections.abc import Sequence from alembic import op diff --git a/migrations/versions/2025-08-21_e96a649e18ca_achievement_remove_primary_key_.py b/migrations/versions/2025-08-21_e96a649e18ca_achievement_remove_primary_key_.py index bd02c7e..cba69ba 100644 --- a/migrations/versions/2025-08-21_e96a649e18ca_achievement_remove_primary_key_.py +++ b/migrations/versions/2025-08-21_e96a649e18ca_achievement_remove_primary_key_.py @@ -6,8 +6,6 @@ Create Date: 2025-08-21 08:03:00.670670 """ -from __future__ import annotations - from collections.abc import Sequence from alembic import op diff --git a/migrations/versions/2025-08-22_0f96348cdfd2_add_email_verification_tables.py b/migrations/versions/2025-08-22_0f96348cdfd2_add_email_verification_tables.py index c71f03a..daeb861 100644 --- a/migrations/versions/2025-08-22_0f96348cdfd2_add_email_verification_tables.py +++ b/migrations/versions/2025-08-22_0f96348cdfd2_add_email_verification_tables.py @@ -6,8 +6,6 @@ Create Date: 2025-08-22 07:26:59.129564 """ -from __future__ import annotations - from collections.abc import Sequence from alembic import op diff --git a/migrations/versions/2025-08-22_178873984b22_daily_challenge_add_last_day_streak.py 
b/migrations/versions/2025-08-22_178873984b22_daily_challenge_add_last_day_streak.py index f488ded..9ad8caf 100644 --- a/migrations/versions/2025-08-22_178873984b22_daily_challenge_add_last_day_streak.py +++ b/migrations/versions/2025-08-22_178873984b22_daily_challenge_add_last_day_streak.py @@ -6,8 +6,6 @@ Create Date: 2025-08-22 13:34:31.282236 """ -from __future__ import annotations - from collections.abc import Sequence from alembic import op diff --git a/migrations/versions/2025-08-22_5b76689f6e4b_increase_the_length_limit_of_the_user_.py b/migrations/versions/2025-08-22_5b76689f6e4b_increase_the_length_limit_of_the_user_.py index fdcec41..f520ff6 100644 --- a/migrations/versions/2025-08-22_5b76689f6e4b_increase_the_length_limit_of_the_user_.py +++ b/migrations/versions/2025-08-22_5b76689f6e4b_increase_the_length_limit_of_the_user_.py @@ -6,8 +6,6 @@ Create Date: 2025-08-22 15:14:59.242274 """ -from __future__ import annotations - from collections.abc import Sequence from alembic import op diff --git a/migrations/versions/2025-08-22_65e7dc8d5905_team_add_team_request_table.py b/migrations/versions/2025-08-22_65e7dc8d5905_team_add_team_request_table.py index fda8536..0beee51 100644 --- a/migrations/versions/2025-08-22_65e7dc8d5905_team_add_team_request_table.py +++ b/migrations/versions/2025-08-22_65e7dc8d5905_team_add_team_request_table.py @@ -6,8 +6,6 @@ Create Date: 2025-08-22 03:47:57.870398 """ -from __future__ import annotations - from collections.abc import Sequence from alembic import op diff --git a/migrations/versions/2025-08-22_d103d442dc24_add_password_reset_table.py b/migrations/versions/2025-08-22_d103d442dc24_add_password_reset_table.py index aeba732..e2ffa5b 100644 --- a/migrations/versions/2025-08-22_d103d442dc24_add_password_reset_table.py +++ b/migrations/versions/2025-08-22_d103d442dc24_add_password_reset_table.py @@ -7,8 +7,6 @@ Create Date: 2025-08-22 08:27:58.468119 """ -from __future__ import annotations - from collections.abc import 
Sequence from alembic import op diff --git a/migrations/versions/2025-08-23_57bacf936413_feat_db_add_password_column_to_rooms_.py b/migrations/versions/2025-08-23_57bacf936413_feat_db_add_password_column_to_rooms_.py index d778ab8..d235d9b 100644 --- a/migrations/versions/2025-08-23_57bacf936413_feat_db_add_password_column_to_rooms_.py +++ b/migrations/versions/2025-08-23_57bacf936413_feat_db_add_password_column_to_rooms_.py @@ -6,8 +6,6 @@ Create Date: 2025-08-23 18:45:03.009632 """ -from __future__ import annotations - from collections.abc import Sequence from alembic import op diff --git a/migrations/versions/2025-08-24_34a563187e47_score_add_processed.py b/migrations/versions/2025-08-24_34a563187e47_score_add_processed.py index cf7e961..ef3cf4d 100644 --- a/migrations/versions/2025-08-24_34a563187e47_score_add_processed.py +++ b/migrations/versions/2025-08-24_34a563187e47_score_add_processed.py @@ -6,8 +6,6 @@ Create Date: 2025-08-24 15:06:37.226068 """ -from __future__ import annotations - from collections.abc import Sequence from alembic import op diff --git a/migrations/versions/2025-08-24_3f890a76f036_add_id_data.py b/migrations/versions/2025-08-24_3f890a76f036_add_id_data.py index 614039d..6fe74ee 100644 --- a/migrations/versions/2025-08-24_3f890a76f036_add_id_data.py +++ b/migrations/versions/2025-08-24_3f890a76f036_add_id_data.py @@ -6,8 +6,6 @@ Create Date: 2025-08-24 04:00:02.063347 """ -from __future__ import annotations - from collections.abc import Sequence from alembic import op diff --git a/migrations/versions/2025-08-26_af88493881eb_user_change_collation_for_username_and_.py b/migrations/versions/2025-08-26_af88493881eb_user_change_collation_for_username_and_.py index 6bdaece..7d0c8bf 100644 --- a/migrations/versions/2025-08-26_af88493881eb_user_change_collation_for_username_and_.py +++ b/migrations/versions/2025-08-26_af88493881eb_user_change_collation_for_username_and_.py @@ -6,8 +6,6 @@ Create Date: 2025-08-26 11:31:07.183273 """ -from 
__future__ import annotations - from collections.abc import Sequence from alembic import op diff --git a/migrations/versions/2025-08-28_24a32515292d_add_beatmap_ratings.py b/migrations/versions/2025-08-28_24a32515292d_add_beatmap_ratings.py index e47e9a8..efa5753 100644 --- a/migrations/versions/2025-08-28_24a32515292d_add_beatmap_ratings.py +++ b/migrations/versions/2025-08-28_24a32515292d_add_beatmap_ratings.py @@ -6,8 +6,6 @@ Create Date: 2025-08-28 11:36:17.874090 """ -from __future__ import annotations - from collections.abc import Sequence from alembic import op diff --git a/migrations/versions/2025-08-29_ebaa317ad928_add_beatmap_tag.py b/migrations/versions/2025-08-29_ebaa317ad928_add_beatmap_tag.py index 4d0d5bb..489b981 100644 --- a/migrations/versions/2025-08-29_ebaa317ad928_add_beatmap_tag.py +++ b/migrations/versions/2025-08-29_ebaa317ad928_add_beatmap_tag.py @@ -7,8 +7,6 @@ Create Date: 2025-08-29 12:29:23.267557 """ -from __future__ import annotations - from collections.abc import Sequence from alembic import op diff --git a/migrations/versions/2025-09-20_15e3a9a05b67_auth_add_totp_keys.py b/migrations/versions/2025-09-20_15e3a9a05b67_auth_add_totp_keys.py index 1eb0eee..2860b53 100644 --- a/migrations/versions/2025-09-20_15e3a9a05b67_auth_add_totp_keys.py +++ b/migrations/versions/2025-09-20_15e3a9a05b67_auth_add_totp_keys.py @@ -6,8 +6,6 @@ Create Date: 2025-09-20 11:27:58.485299 """ -from __future__ import annotations - from collections.abc import Sequence from alembic import op diff --git a/migrations/versions/2025-09-21_fe8e9f3da298_login_sessions_remove_session_token_add_.py b/migrations/versions/2025-09-21_fe8e9f3da298_login_sessions_remove_session_token_add_.py index 4f4d0c9..16f6c1c 100644 --- a/migrations/versions/2025-09-21_fe8e9f3da298_login_sessions_remove_session_token_add_.py +++ b/migrations/versions/2025-09-21_fe8e9f3da298_login_sessions_remove_session_token_add_.py @@ -6,8 +6,6 @@ Create Date: 2025-09-21 02:30:58.233846 """ -from 
__future__ import annotations - from collections.abc import Sequence from alembic import op diff --git a/migrations/versions/2025-09-24_9419272e4c85_feat_db_add_session_verification_fields_.py b/migrations/versions/2025-09-24_9419272e4c85_feat_db_add_session_verification_fields_.py index c1f06f0..ca0a7e1 100644 --- a/migrations/versions/2025-09-24_9419272e4c85_feat_db_add_session_verification_fields_.py +++ b/migrations/versions/2025-09-24_9419272e4c85_feat_db_add_session_verification_fields_.py @@ -6,8 +6,6 @@ Create Date: 2025-09-24 00:46:57.367742 """ -from __future__ import annotations - from collections.abc import Sequence from alembic import op diff --git a/migrations/versions/2025-09-30_dc2087561edf_score_save_ranked_into_database.py b/migrations/versions/2025-09-30_dc2087561edf_score_save_ranked_into_database.py index cba3022..f2eec69 100644 --- a/migrations/versions/2025-09-30_dc2087561edf_score_save_ranked_into_database.py +++ b/migrations/versions/2025-09-30_dc2087561edf_score_save_ranked_into_database.py @@ -6,8 +6,6 @@ Create Date: 2025-09-30 10:44:25.286498 """ -from __future__ import annotations - from collections.abc import Sequence from alembic import op diff --git a/migrations/versions/2025-10-01_2885978490dc_sync_add_beatmap_sync_table.py b/migrations/versions/2025-10-01_2885978490dc_sync_add_beatmap_sync_table.py index 3e0ea13..9327e64 100644 --- a/migrations/versions/2025-10-01_2885978490dc_sync_add_beatmap_sync_table.py +++ b/migrations/versions/2025-10-01_2885978490dc_sync_add_beatmap_sync_table.py @@ -6,8 +6,6 @@ Create Date: 2025-10-01 12:19:50.485318 """ -from __future__ import annotations - from collections.abc import Sequence from alembic import op diff --git a/migrations/versions/2025-10-01_b1ac2154bd0d_sync_add_updated_at.py b/migrations/versions/2025-10-01_b1ac2154bd0d_sync_add_updated_at.py index 679a8bb..6bbdff9 100644 --- a/migrations/versions/2025-10-01_b1ac2154bd0d_sync_add_updated_at.py +++ 
b/migrations/versions/2025-10-01_b1ac2154bd0d_sync_add_updated_at.py @@ -6,8 +6,6 @@ Create Date: 2025-10-01 14:56:08.539694 """ -from __future__ import annotations - from collections.abc import Sequence from alembic import op diff --git a/migrations/versions/2025-10-02_72a9b8f3f863_session_support_multi_session.py b/migrations/versions/2025-10-02_72a9b8f3f863_session_support_multi_session.py index 6e47be4..b7e33e1 100644 --- a/migrations/versions/2025-10-02_72a9b8f3f863_session_support_multi_session.py +++ b/migrations/versions/2025-10-02_72a9b8f3f863_session_support_multi_session.py @@ -6,8 +6,6 @@ Create Date: 2025-10-02 07:17:19.297498 """ -from __future__ import annotations - from collections.abc import Sequence from alembic import op diff --git a/migrations/versions/2025-10-02_7fe1319250c5_auth_add_refresh_token_expires_at.py b/migrations/versions/2025-10-02_7fe1319250c5_auth_add_refresh_token_expires_at.py index 07f8bc9..0304981 100644 --- a/migrations/versions/2025-10-02_7fe1319250c5_auth_add_refresh_token_expires_at.py +++ b/migrations/versions/2025-10-02_7fe1319250c5_auth_add_refresh_token_expires_at.py @@ -6,8 +6,6 @@ Create Date: 2025-10-02 10:50:21.169065 """ -from __future__ import annotations - from collections.abc import Sequence from alembic import op diff --git a/migrations/versions/2025-10-02_9556cd2ec11f_session_add_device_id_to_loginsession.py b/migrations/versions/2025-10-02_9556cd2ec11f_session_add_device_id_to_loginsession.py index bb258a7..04ee5f3 100644 --- a/migrations/versions/2025-10-02_9556cd2ec11f_session_add_device_id_to_loginsession.py +++ b/migrations/versions/2025-10-02_9556cd2ec11f_session_add_device_id_to_loginsession.py @@ -6,8 +6,6 @@ Create Date: 2025-10-02 11:03:09.803140 """ -from __future__ import annotations - from collections.abc import Sequence from alembic import op diff --git a/pyproject.toml b/pyproject.toml index 4170f81..57a8b41 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -81,12 +81,11 @@ ignore = [ ] 
[tool.ruff.lint.extend-per-file-ignores] -"app/database/**/*.py" = ["I002"] "tools/*.py" = ["PTH", "INP001"] "migrations/**/*.py" = ["INP001"] ".github/**/*.py" = ["INP001"] "app/achievements/*.py" = ["INP001", "ARG"] -"app/router/**/*.py" = ["ARG001", "I002"] +"app/router/**/*.py" = ["ARG001"] [tool.ruff.lint.isort] force-sort-within-sections = true @@ -94,7 +93,6 @@ force-wrap-aliases = true combine-as-imports = true order-by-type = true relative-imports-order = "closest-to-furthest" -required-imports = ["from __future__ import annotations"] extra-standard-library = ["typing_extensions"] section-order = ["future", "standard-library", "first-party", "local-folder", "third-party"] From 2bfde24b8406a6cdc746c51d4d2af3cece46b4f5 Mon Sep 17 00:00:00 2001 From: MingxuanGame Date: Fri, 3 Oct 2025 17:27:47 +0000 Subject: [PATCH 12/26] fix(log): fix typing & exception logs --- app/log.py | 19 ++++++++++--------- app/router/redirect.py | 1 - main.py | 4 ++-- 3 files changed, 12 insertions(+), 12 deletions(-) diff --git a/app/log.py b/app/log.py index 981c154..5fad3e9 100644 --- a/app/log.py +++ b/app/log.py @@ -138,27 +138,27 @@ def get_caller_class_name(module_prefix: str = "", just_last_part: bool = True) return None -def service_logger(name: str) -> Logger: +def service_logger(name: str) -> "Logger": return logger.bind(service=name) -def fetcher_logger(name: str) -> Logger: +def fetcher_logger(name: str) -> "Logger": return logger.bind(fetcher=name) -def task_logger(name: str) -> Logger: +def task_logger(name: str) -> "Logger": return logger.bind(task=name) -def system_logger(name: str) -> Logger: +def system_logger(name: str) -> "Logger": return logger.bind(system=name) -def uvicorn_logger() -> Logger: +def uvicorn_logger() -> "Logger": return logger.bind(uvicorn="Uvicorn") -def log(name: str) -> Logger: +def log(name: str) -> "Logger": return logger.bind(real_name=name) @@ -196,9 +196,10 @@ def dynamic_format(record): real_name = record["extra"].get("real_name", "") or 
record["name"] name = f"{real_name}" - return ( - f"{{time:YYYY-MM-DD HH:mm:ss}} [{{level}}] | {name} | {{message}}{{exception}}\n" - ) + format = f"{{time:YYYY-MM-DD HH:mm:ss}} [{{level}}] | {name} | {{message}}\n" + if record["exception"]: + format += "{exception}\n" + return format logger.remove() diff --git a/app/router/redirect.py b/app/router/redirect.py index f90a5d4..a236407 100644 --- a/app/router/redirect.py +++ b/app/router/redirect.py @@ -25,7 +25,6 @@ async def redirect(request: Request): redirect_url = urllib.parse.urljoin(str(settings.frontend_url), target_path) if query_string: redirect_url = f"{redirect_url}?{query_string}" - return RedirectResponse( redirect_url, status_code=301, diff --git a/main.py b/main.py index f9f8af7..6646afb 100644 --- a/main.py +++ b/main.py @@ -7,7 +7,7 @@ from app.database import User from app.dependencies.database import Database, engine, get_redis, redis_client from app.dependencies.fetcher import get_fetcher from app.dependencies.scheduler import start_scheduler, stop_scheduler -from app.log import logger, system_logger +from app.log import system_logger from app.middleware.verify_session import VerifySessionMiddleware from app.models.mods import init_mods, init_ranked_mods from app.router import ( @@ -71,7 +71,7 @@ async def lifespan(app: FastAPI): # noqa: ARG001 # 显示资源代理状态 if settings.enable_asset_proxy: - logger.info(f"Asset Proxy enabled - Domain: {settings.custom_asset_domain}") + system_logger("AssetProxy").info(f"Asset Proxy enabled - Domain: {settings.custom_asset_domain}") # on shutdown yield From 7c18fc5fb61edbfc9e37a4ff4173ce66e9edc4de Mon Sep 17 00:00:00 2001 From: MingxuanGame Date: Sat, 4 Oct 2025 04:57:24 +0000 Subject: [PATCH 13/26] refactor(userpage): move APIs into g0v0 private API --- app/exceptions/__init__.py | 0 app/exceptions/userpage.py | 47 ------------ app/models/userpage.py | 44 +++++++++++ app/router/private/__init__.py | 2 +- app/router/private/user.py | 130 
+++++++++++++++++++++++++++++++++ app/router/private/username.py | 58 --------------- app/router/v2/me.py | 109 +++------------------------ app/service/bbcode_service.py | 2 +- 8 files changed, 186 insertions(+), 206 deletions(-) delete mode 100644 app/exceptions/__init__.py delete mode 100644 app/exceptions/userpage.py create mode 100644 app/router/private/user.py delete mode 100644 app/router/private/username.py diff --git a/app/exceptions/__init__.py b/app/exceptions/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/app/exceptions/userpage.py b/app/exceptions/userpage.py deleted file mode 100644 index a448673..0000000 --- a/app/exceptions/userpage.py +++ /dev/null @@ -1,47 +0,0 @@ -""" -用户页面相关的异常类 -""" - - -class UserpageError(Exception): - """用户页面处理错误基类""" - - def __init__(self, message: str, code: str = "userpage_error"): - self.message = message - self.code = code - super().__init__(message) - - -class ContentTooLongError(UserpageError): - """内容过长错误""" - - def __init__(self, current_length: int, max_length: int): - message = f"Content too long. Maximum {max_length} characters allowed, got {current_length}." - super().__init__(message, "content_too_long") - self.current_length = current_length - self.max_length = max_length - - -class ContentEmptyError(UserpageError): - """内容为空错误""" - - def __init__(self): - super().__init__("Content cannot be empty.", "content_empty") - - -class BBCodeValidationError(UserpageError): - """BBCode验证错误""" - - def __init__(self, errors: list[str]): - message = f"BBCode validation failed: {'; '.join(errors)}" - super().__init__(message, "bbcode_validation_error") - self.errors = errors - - -class ForbiddenTagError(UserpageError): - """禁止标签错误""" - - def __init__(self, tag: str): - message = f"Forbidden tag '{tag}' is not allowed." 
- super().__init__(message, "forbidden_tag") - self.tag = tag diff --git a/app/models/userpage.py b/app/models/userpage.py index aeaca94..d03b085 100644 --- a/app/models/userpage.py +++ b/app/models/userpage.py @@ -54,3 +54,47 @@ class ValidateBBCodeResponse(BaseModel): valid: bool = Field(description="BBCode是否有效") errors: list[str] = Field(default_factory=list, description="错误列表") preview: dict[str, str] = Field(description="预览内容") + + +class UserpageError(Exception): + """用户页面处理错误基类""" + + def __init__(self, message: str, code: str = "userpage_error"): + self.message = message + self.code = code + super().__init__(message) + + +class ContentTooLongError(UserpageError): + """内容过长错误""" + + def __init__(self, current_length: int, max_length: int): + message = f"Content too long. Maximum {max_length} characters allowed, got {current_length}." + super().__init__(message, "content_too_long") + self.current_length = current_length + self.max_length = max_length + + +class ContentEmptyError(UserpageError): + """内容为空错误""" + + def __init__(self): + super().__init__("Content cannot be empty.", "content_empty") + + +class BBCodeValidationError(UserpageError): + """BBCode验证错误""" + + def __init__(self, errors: list[str]): + message = f"BBCode validation failed: {'; '.join(errors)}" + super().__init__(message, "bbcode_validation_error") + self.errors = errors + + +class ForbiddenTagError(UserpageError): + """禁止标签错误""" + + def __init__(self, tag: str): + message = f"Forbidden tag '{tag}' is not allowed." + super().__init__(message, "forbidden_tag") + self.tag = tag diff --git a/app/router/private/__init__.py b/app/router/private/__init__.py index 7664822..9a6f91d 100644 --- a/app/router/private/__init__.py +++ b/app/router/private/__init__.py @@ -1,6 +1,6 @@ from app.config import settings -from . import admin, audio_proxy, avatar, beatmapset, cover, oauth, relationship, score, team, username # noqa: F401 +from . 
import admin, audio_proxy, avatar, beatmapset, cover, oauth, relationship, score, team, user # noqa: F401 from .router import router as private_router if settings.enable_totp_verification: diff --git a/app/router/private/user.py b/app/router/private/user.py new file mode 100644 index 0000000..193fc90 --- /dev/null +++ b/app/router/private/user.py @@ -0,0 +1,130 @@ +from typing import Annotated + +from app.auth import validate_username +from app.config import settings +from app.database import User +from app.database.events import Event, EventType +from app.dependencies.database import Database +from app.dependencies.user import ClientUser +from app.models.user import Page +from app.models.userpage import ( + UpdateUserpageRequest, + UpdateUserpageResponse, + UserpageError, + ValidateBBCodeRequest, + ValidateBBCodeResponse, +) +from app.service.bbcode_service import bbcode_service +from app.utils import utcnow + +from .router import router + +from fastapi import Body, HTTPException +from sqlmodel import exists, select + + +@router.post("/rename", name="修改用户名", tags=["用户", "g0v0 API"]) +async def user_rename( + session: Database, + new_name: Annotated[str, Body(..., description="新的用户名")], + current_user: ClientUser, +): + """修改用户名 + + 为指定用户修改用户名,并将原用户名添加到历史用户名列表中 + + 错误情况: + - 404: 找不到指定用户 + - 409: 新用户名已被占用 + + 返回: + - 成功: None + """ + samename_user = (await session.exec(select(exists()).where(User.username == new_name))).first() + if samename_user: + raise HTTPException(409, "Username Exisits") + errors = validate_username(new_name) + if errors: + raise HTTPException(403, "\n".join(errors)) + previous_username = [] + previous_username.extend(current_user.previous_usernames) + previous_username.append(current_user.username) + current_user.username = new_name + current_user.previous_usernames = previous_username + rename_event = Event( + created_at=utcnow(), + type=EventType.USERNAME_CHANGE, + user_id=current_user.id, + user=current_user, + ) + 
rename_event.event_payload["user"] = { + "username": new_name, + "url": settings.web_url + "users/" + str(current_user.id), + "previous_username": current_user.previous_usernames[-1], + } + session.add(rename_event) + await session.commit() + return None + + +@router.put( + "/user/page", + response_model=UpdateUserpageResponse, + name="更新用户页面", + description="更新指定用户的个人页面内容(支持BBCode)。匹配官方osu-web API格式。", + tags=["用户", "g0v0 API"], +) +async def update_userpage( + request: UpdateUserpageRequest, + session: Database, + current_user: ClientUser, +): + """更新用户页面内容""" + + try: + # 处理BBCode内容 + processed_page = bbcode_service.process_userpage_content(request.body) + + # 更新数据库 - 直接更新用户对象 + current_user.page = Page(html=processed_page["html"], raw=processed_page["raw"]) + session.add(current_user) + await session.commit() + await session.refresh(current_user) + + # 返回官方格式的响应:只包含html + return UpdateUserpageResponse(html=processed_page["html"]) + + except UserpageError as e: + # 使用官方格式的错误响应:{'error': message} + raise HTTPException(status_code=422, detail={"error": e.message}) + except Exception: + raise HTTPException(status_code=500, detail={"error": "Failed to update user page"}) + + +@router.post( + "/user/validate-bbcode", + response_model=ValidateBBCodeResponse, + name="验证BBCode", + description="验证BBCode语法并返回预览。", + tags=["用户", "g0v0 API"], +) +async def validate_bbcode( + request: ValidateBBCodeRequest, +): + """验证BBCode语法""" + try: + # 验证BBCode语法 + errors = bbcode_service.validate_bbcode(request.content) + + # 生成预览(如果没有严重错误) + if len(errors) == 0: + preview = bbcode_service.process_userpage_content(request.content) + else: + preview = {"raw": request.content, "html": ""} + + return ValidateBBCodeResponse(valid=len(errors) == 0, errors=errors, preview=preview) + + except UserpageError as e: + return ValidateBBCodeResponse(valid=False, errors=[e.message], preview={"raw": request.content, "html": ""}) + except Exception: + raise HTTPException(status_code=500, 
detail={"error": "Failed to validate BBCode"}) diff --git a/app/router/private/username.py b/app/router/private/username.py deleted file mode 100644 index 10c5411..0000000 --- a/app/router/private/username.py +++ /dev/null @@ -1,58 +0,0 @@ -from typing import Annotated - -from app.auth import validate_username -from app.config import settings -from app.database.events import Event, EventType -from app.database.user import User -from app.dependencies.database import Database -from app.dependencies.user import ClientUser -from app.utils import utcnow - -from .router import router - -from fastapi import Body, HTTPException -from sqlmodel import exists, select - - -@router.post("/rename", name="修改用户名", tags=["用户", "g0v0 API"]) -async def user_rename( - session: Database, - new_name: Annotated[str, Body(..., description="新的用户名")], - current_user: ClientUser, -): - """修改用户名 - - 为指定用户修改用户名,并将原用户名添加到历史用户名列表中 - - 错误情况: - - 404: 找不到指定用户 - - 409: 新用户名已被占用 - - 返回: - - 成功: None - """ - samename_user = (await session.exec(select(exists()).where(User.username == new_name))).first() - if samename_user: - raise HTTPException(409, "Username Exisits") - errors = validate_username(new_name) - if errors: - raise HTTPException(403, "\n".join(errors)) - previous_username = [] - previous_username.extend(current_user.previous_usernames) - previous_username.append(current_user.username) - current_user.username = new_name - current_user.previous_usernames = previous_username - rename_event = Event( - created_at=utcnow(), - type=EventType.USERNAME_CHANGE, - user_id=current_user.id, - user=current_user, - ) - rename_event.event_payload["user"] = { - "username": new_name, - "url": settings.web_url + "users/" + str(current_user.id), - "previous_username": current_user.previous_usernames[-1], - } - session.add(rename_event) - await session.commit() - return None diff --git a/app/router/v2/me.py b/app/router/v2/me.py index ed89704..026becf 100644 --- a/app/router/v2/me.py +++ b/app/router/v2/me.py 
@@ -1,22 +1,14 @@ from typing import Annotated -from app.database import MeResp, User +from app.database import MeResp from app.dependencies.database import Database -from app.dependencies.user import UserAndToken, get_current_user, get_current_user_and_token -from app.exceptions.userpage import UserpageError +from app.dependencies.user import UserAndToken, get_current_user_and_token from app.models.score import GameMode -from app.models.user import Page -from app.models.userpage import ( - UpdateUserpageRequest, - UpdateUserpageResponse, - ValidateBBCodeRequest, - ValidateBBCodeResponse, -) -from app.service.bbcode_service import bbcode_service from .router import router -from fastapi import HTTPException, Path, Security +from fastapi import Path, Security +from fastapi.responses import RedirectResponse @router.get( @@ -50,92 +42,11 @@ async def get_user_info_default( return user_resp -# @router.get( -# "/users/{user_id}/page", -# response_model=UserpageResponse, -# name="获取用户页面", -# description="获取指定用户的个人页面内容。匹配官方osu-web API格式。", -# tags=["用户"], -# ) -# async def get_userpage( -# session: Database, -# user_id: int = Path(description="用户ID"), -# ): -# """获取用户页面内容""" -# # 查找用户 -# user = await session.get(User, user_id) -# if not user: -# raise HTTPException(status_code=404, detail={"error": "User not found"}) - -# # 返回页面内容 -# if user.page: -# return UserpageResponse(html=user.page.get("html", ""), raw=user.page.get("raw", "")) -# else: -# return UserpageResponse(html="", raw="") +@router.put("/users/{user_id}/page", include_in_schema=False) +async def update_userpage(): + return RedirectResponse(url="/api/private/user/page", status_code=307) -@router.put( - "/users/{user_id}/page", - response_model=UpdateUserpageResponse, - name="更新用户页面", - description="更新指定用户的个人页面内容(支持BBCode)。匹配官方osu-web API格式。", - tags=["用户"], -) -async def update_userpage( - request: UpdateUserpageRequest, - session: Database, - user_id: Annotated[int, Path(description="用户ID")], - current_user: 
Annotated[User, Security(get_current_user, scopes=["edit"])], -): - """更新用户页面内容(匹配官方osu-web实现)""" - # 检查权限:只能编辑自己的页面(除非是管理员) - if user_id != current_user.id: - raise HTTPException(status_code=403, detail={"error": "Access denied"}) - - try: - # 处理BBCode内容 - processed_page = bbcode_service.process_userpage_content(request.body) - - # 更新数据库 - 直接更新用户对象 - current_user.page = Page(html=processed_page["html"], raw=processed_page["raw"]) - session.add(current_user) - await session.commit() - await session.refresh(current_user) - - # 返回官方格式的响应:只包含html - return UpdateUserpageResponse(html=processed_page["html"]) - - except UserpageError as e: - # 使用官方格式的错误响应:{'error': message} - raise HTTPException(status_code=422, detail={"error": e.message}) - except Exception: - raise HTTPException(status_code=500, detail={"error": "Failed to update user page"}) - - -@router.post( - "/me/validate-bbcode", - response_model=ValidateBBCodeResponse, - name="验证BBCode", - description="验证BBCode语法并返回预览。", - tags=["用户"], -) -async def validate_bbcode( - request: ValidateBBCodeRequest, -): - """验证BBCode语法""" - try: - # 验证BBCode语法 - errors = bbcode_service.validate_bbcode(request.content) - - # 生成预览(如果没有严重错误) - if len(errors) == 0: - preview = bbcode_service.process_userpage_content(request.content) - else: - preview = {"raw": request.content, "html": ""} - - return ValidateBBCodeResponse(valid=len(errors) == 0, errors=errors, preview=preview) - - except UserpageError as e: - return ValidateBBCodeResponse(valid=False, errors=[e.message], preview={"raw": request.content, "html": ""}) - except Exception: - raise HTTPException(status_code=500, detail={"error": "Failed to validate BBCode"}) +@router.post("/me/validate-bbcode", include_in_schema=False) +async def validate_bbcode(): + return RedirectResponse(url="/api/private/user/validate-bbcode", status_code=307) diff --git a/app/service/bbcode_service.py b/app/service/bbcode_service.py index bbacd1e..f890b85 100644 --- a/app/service/bbcode_service.py 
+++ b/app/service/bbcode_service.py @@ -8,7 +8,7 @@ import html import re from typing import ClassVar -from app.exceptions.userpage import ( +from app.models.userpage import ( ContentEmptyError, ContentTooLongError, ForbiddenTagError, From 238520c577d1514436d3663106823baf119fdc2a Mon Sep 17 00:00:00 2001 From: MingxuanGame Date: Sat, 4 Oct 2025 05:01:05 +0000 Subject: [PATCH 14/26] chore(linter): make ruff happy --- app/storage/local.py | 4 ++-- ...25-10-02_7fe1319250c5_auth_add_refresh_token_expires_at.py | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/app/storage/local.py b/app/storage/local.py index af0e8ed..957c80c 100644 --- a/app/storage/local.py +++ b/app/storage/local.py @@ -32,8 +32,8 @@ class LocalStorageService(StorageService): self, file_path: str, content: bytes, - content_type: str = "application/octet-stream", - cache_control: str = "public, max-age=31536000", + content_type: str = "application/octet-stream", # noqa: ARG002 + cache_control: str = "public, max-age=31536000", # noqa: ARG002 ) -> None: full_path = self._get_file_path(file_path) full_path.parent.mkdir(parents=True, exist_ok=True) diff --git a/migrations/versions/2025-10-02_7fe1319250c5_auth_add_refresh_token_expires_at.py b/migrations/versions/2025-10-02_7fe1319250c5_auth_add_refresh_token_expires_at.py index 836e01f..ceb26d4 100644 --- a/migrations/versions/2025-10-02_7fe1319250c5_auth_add_refresh_token_expires_at.py +++ b/migrations/versions/2025-10-02_7fe1319250c5_auth_add_refresh_token_expires_at.py @@ -25,8 +25,8 @@ def upgrade() -> None: op.add_column("oauth_tokens", sa.Column("refresh_token_expires_at", sa.DateTime(), nullable=True)) op.create_index(op.f("ix_oauth_tokens_expires_at"), "oauth_tokens", ["expires_at"], unique=False) expires_at = datetime.now() + timedelta(days=15) - expires_at_str = expires_at.strftime('%Y-%m-%d %H:%M:%S') - op.execute(f"UPDATE oauth_tokens SET refresh_token_expires_at = '{expires_at_str}'") + expires_at_str = 
expires_at.strftime("%Y-%m-%d %H:%M:%S") + op.execute(f"UPDATE oauth_tokens SET refresh_token_expires_at = '{expires_at_str}'") # noqa: S608 op.create_index( op.f("ix_oauth_tokens_refresh_token_expires_at"), "oauth_tokens", ["refresh_token_expires_at"], unique=False ) From 21da83e4eb5de20fc0824f521c4ae48c89b9c2fa Mon Sep 17 00:00:00 2001 From: MingxuanGame Date: Sat, 4 Oct 2025 05:09:31 +0000 Subject: [PATCH 15/26] refactor(middleware): remove unused setup method --- app/middleware/setup.py | 41 ----------------------------------------- 1 file changed, 41 deletions(-) delete mode 100644 app/middleware/setup.py diff --git a/app/middleware/setup.py b/app/middleware/setup.py deleted file mode 100644 index f822ece..0000000 --- a/app/middleware/setup.py +++ /dev/null @@ -1,41 +0,0 @@ -from app.config import settings -from app.middleware.verify_session import VerifySessionMiddleware - -from fastapi import FastAPI - - -def setup_session_verification_middleware(app: FastAPI) -> None: - """设置会话验证中间件 - - Args: - app: FastAPI应用实例 - """ - # 只在启用会话验证时添加中间件 - if settings.enable_session_verification: - app.add_middleware(VerifySessionMiddleware) - - # 可以在这里添加中间件配置日志 - from app.log import logger - - logger.info("[Middleware] Session verification middleware enabled") - else: - from app.log import logger - - logger.info("[Middleware] Session verification middleware disabled") - - -def setup_all_middlewares(app: FastAPI) -> None: - """设置所有中间件 - - Args: - app: FastAPI应用实例 - """ - # 设置会话验证中间件 - setup_session_verification_middleware(app) - - # 可以在这里添加其他中间件 - # app.add_middleware(OtherMiddleware) - - from app.log import logger - - logger.info("[Middleware] All middlewares configured") From c2bfafc67a826a2d97f1cd57ab0511d1fd4d833f Mon Sep 17 00:00:00 2001 From: MingxuanGame Date: Sat, 4 Oct 2025 05:26:37 +0000 Subject: [PATCH 16/26] refactor(message): replace synchronous Redis client with asynchronous client --- app/dependencies/database.py | 7 +- app/service/redis_message_system.py | 115 
++++++++++------------------ 2 files changed, 43 insertions(+), 79 deletions(-) diff --git a/app/dependencies/database.py b/app/dependencies/database.py index 49c377c..3d490f2 100644 --- a/app/dependencies/database.py +++ b/app/dependencies/database.py @@ -9,7 +9,6 @@ from app.config import settings from fastapi import Depends from pydantic import BaseModel -import redis as sync_redis import redis.asyncio as redis from sqlalchemy.ext.asyncio import create_async_engine from sqlmodel import SQLModel @@ -41,8 +40,8 @@ redis_client = redis.from_url(settings.redis_url, decode_responses=True) # Redis 二进制数据连接 (不自动解码响应,用于存储音频等二进制数据) redis_binary_client = redis.from_url(settings.redis_url, decode_responses=False) -# Redis 消息缓存连接 (db1) - 使用同步客户端在线程池中执行 -redis_message_client = sync_redis.from_url(settings.redis_url, decode_responses=True, db=1) +# Redis 消息缓存连接 (db1) +redis_message_client: redis.Redis = redis.from_url(settings.redis_url, decode_responses=True, db=1) # 数据库依赖 @@ -97,7 +96,7 @@ def get_redis_binary(): return redis_binary_client -def get_redis_message(): +def get_redis_message() -> redis.Redis: """获取消息专用的 Redis 客户端 (db1)""" return redis_message_client diff --git a/app/service/redis_message_system.py b/app/service/redis_message_system.py index 6bf4540..34fefba 100644 --- a/app/service/redis_message_system.py +++ b/app/service/redis_message_system.py @@ -6,7 +6,6 @@ """ import asyncio -from concurrent.futures import ThreadPoolExecutor from datetime import datetime import json import time @@ -23,18 +22,12 @@ class RedisMessageSystem: """Redis 消息系统""" def __init__(self): - self.redis = get_redis_message() - self.executor = ThreadPoolExecutor(max_workers=2) + self.redis: Any = get_redis_message() self._batch_timer: asyncio.Task | None = None self._running = False self.batch_interval = 5.0 # 5秒批量存储一次 self.max_batch_size = 100 # 每批最多处理100条消息 - async def _redis_exec(self, func, *args, **kwargs): - """在线程池中执行 Redis 操作""" - loop = asyncio.get_event_loop() - return await 
loop.run_in_executor(self.executor, lambda: func(*args, **kwargs)) - async def send_message( self, channel_id: int, @@ -216,10 +209,10 @@ class RedisMessageSystem: async def _generate_message_id(self, channel_id: int) -> int: """生成唯一的消息ID - 确保全局唯一且严格递增""" # 使用全局计数器确保所有频道的消息ID都是严格递增的 - message_id = await self._redis_exec(self.redis.incr, "global_message_id_counter") + message_id = await self.redis.incr("global_message_id_counter") # 同时更新频道的最后消息ID,用于客户端状态同步 - await self._redis_exec(self.redis.set, f"channel:{channel_id}:last_msg_id", message_id) + await self.redis.set(f"channel:{channel_id}:last_msg_id", message_id) return message_id @@ -230,73 +223,70 @@ class RedisMessageSystem: is_multiplayer = message_data.get("is_multiplayer", False) # 存储消息数据 - await self._redis_exec( - self.redis.hset, + await self.redis.hset( f"msg:{channel_id}:{message_id}", - mapping={k: json.dumps(v) if isinstance(v, dict | list) else str(v) for k, v in message_data.items()}, + mapping={k: json.dumps(v) if isinstance(v, (dict, list)) else str(v) for k, v in message_data.items()}, ) # 设置消息过期时间(7天) - await self._redis_exec(self.redis.expire, f"msg:{channel_id}:{message_id}", 604800) + await self.redis.expire(f"msg:{channel_id}:{message_id}", 604800) # 清理可能存在的错误类型键,然后添加到频道消息列表(按时间排序) channel_messages_key = f"channel:{channel_id}:messages" # 更健壮的键类型检查和清理 try: - key_type = await self._redis_exec(self.redis.type, channel_messages_key) + key_type = await self.redis.type(channel_messages_key) if key_type == "none": # 键不存在,这是正常的 pass elif key_type != "zset": # 键类型错误,需要清理 logger.warning(f"Deleting Redis key {channel_messages_key} with wrong type: {key_type}") - await self._redis_exec(self.redis.delete, channel_messages_key) + await self.redis.delete(channel_messages_key) # 验证删除是否成功 - verify_type = await self._redis_exec(self.redis.type, channel_messages_key) + verify_type = await self.redis.type(channel_messages_key) if verify_type != "none": logger.error( f"Failed to delete problematic key 
{channel_messages_key}, type is still {verify_type}" ) # 强制删除 - await self._redis_exec(self.redis.unlink, channel_messages_key) + await self.redis.unlink(channel_messages_key) except Exception as type_check_error: logger.warning(f"Failed to check key type for {channel_messages_key}: {type_check_error}") # 如果检查失败,尝试强制删除键以确保清理 try: - await self._redis_exec(self.redis.delete, channel_messages_key) + await self.redis.delete(channel_messages_key) except Exception: # 最后的努力:使用unlink try: - await self._redis_exec(self.redis.unlink, channel_messages_key) + await self.redis.unlink(channel_messages_key) except Exception as final_error: logger.error(f"Critical: Unable to clear problematic key {channel_messages_key}: {final_error}") # 添加到频道消息列表(sorted set) try: - await self._redis_exec( - self.redis.zadd, + await self.redis.zadd( channel_messages_key, - {f"msg:{channel_id}:{message_id}": message_id}, + mapping={f"msg:{channel_id}:{message_id}": message_id}, ) except Exception as zadd_error: logger.error(f"Failed to add message to sorted set {channel_messages_key}: {zadd_error}") # 如果添加失败,再次尝试清理并重试 - await self._redis_exec(self.redis.delete, channel_messages_key) - await self._redis_exec( - self.redis.zadd, + await self.redis.delete(channel_messages_key) + await self.redis.zadd( channel_messages_key, - {f"msg:{channel_id}:{message_id}": message_id}, + mapping={f"msg:{channel_id}:{message_id}": message_id}, ) # 保持频道消息列表大小(最多1000条) - await self._redis_exec(self.redis.zremrangebyrank, channel_messages_key, 0, -1001) + await self.redis.zremrangebyrank(channel_messages_key, 0, -1001) # 只有非多人房间消息才添加到待持久化队列 if not is_multiplayer: - await self._redis_exec(self.redis.lpush, "pending_messages", f"{channel_id}:{message_id}") + await self.redis.lpush("pending_messages", f"{channel_id}:{message_id}") logger.debug(f"Message {message_id} added to persistence queue") else: logger.debug(f"Message {message_id} in multiplayer room, skipped persistence queue") @@ -311,8 +301,7 @@ class 
RedisMessageSystem: # 获取消息键列表,按消息ID排序 if since > 0: # 获取指定ID之后的消息(正序) - message_keys = await self._redis_exec( - self.redis.zrangebyscore, + message_keys = await self.redis.zrangebyscore( f"channel:{channel_id}:messages", since + 1, "+inf", @@ -321,32 +310,24 @@ class RedisMessageSystem: ) else: # 获取最新的消息(倒序获取,然后反转) - message_keys = await self._redis_exec( - self.redis.zrevrange, f"channel:{channel_id}:messages", 0, limit - 1 - ) + message_keys = await self.redis.zrevrange(f"channel:{channel_id}:messages", 0, limit - 1) messages = [] for key in message_keys: - if isinstance(key, bytes): - key = key.decode("utf-8") - # 获取消息数据 - raw_data = await self._redis_exec(self.redis.hgetall, key) + raw_data = await self.redis.hgetall(key) if raw_data: # 解码数据 - message_data = {} + message_data: dict[str, Any] = {} for k, v in raw_data.items(): - if isinstance(k, bytes): - k = k.decode("utf-8") - if isinstance(v, bytes): - v = v.decode("utf-8") - # 尝试解析 JSON try: if k in ["grade_counts", "level"] or v.startswith(("{", "[")): message_data[k] = json.loads(v) elif k in ["message_id", "channel_id", "sender_id"]: message_data[k] = int(v) + elif k == "is_multiplayer": + message_data[k] = v == "True" elif k == "created_at": message_data[k] = float(v) else: @@ -442,12 +423,10 @@ class RedisMessageSystem: # 获取待处理的消息 message_keys = [] for _ in range(self.max_batch_size): - key = await self._redis_exec(self.redis.brpop, ["pending_messages"], timeout=1) + key = await self.redis.brpop("pending_messages", timeout=1) if key: # key 是 (queue_name, value) 的元组 - value = key[1] - if isinstance(value, bytes): - value = value.decode("utf-8") + _, value = key message_keys.append(value) else: break @@ -472,7 +451,7 @@ class RedisMessageSystem: channel_id, message_id = map(int, key.split(":")) # 从 Redis 获取消息数据 - raw_data = await self._redis_exec(self.redis.hgetall, f"msg:{channel_id}:{message_id}") + raw_data = await self.redis.hgetall(f"msg:{channel_id}:{message_id}") if not raw_data: continue @@ 
-480,18 +459,13 @@ class RedisMessageSystem: # 解码数据 message_data = {} for k, v in raw_data.items(): - if isinstance(k, bytes): - k = k.decode("utf-8") - if isinstance(v, bytes): - v = v.decode("utf-8") message_data[k] = v # 检查是否是多人房间消息,如果是则跳过数据库存储 is_multiplayer = message_data.get("is_multiplayer", "False") == "True" if is_multiplayer: # 多人房间消息不存储到数据库,直接标记为已跳过 - await self._redis_exec( - self.redis.hset, + await self.redis.hset( f"msg:{channel_id}:{message_id}", "status", "skipped_multiplayer", @@ -518,8 +492,7 @@ class RedisMessageSystem: session.add(db_message) # 更新 Redis 中的状态 - await self._redis_exec( - self.redis.hset, + await self.redis.hset( f"msg:{channel_id}:{message_id}", "status", "persisted", @@ -563,57 +536,54 @@ class RedisMessageSystem: max_id = result.one() or 0 # 检查 Redis 中的计数器值 - current_counter = await self._redis_exec(self.redis.get, "global_message_id_counter") + current_counter = await self.redis.get("global_message_id_counter") current_counter = int(current_counter) if current_counter else 0 # 设置计数器为两者中的最大值 initial_counter = max(max_id, current_counter) - await self._redis_exec(self.redis.set, "global_message_id_counter", initial_counter) + await self.redis.set("global_message_id_counter", initial_counter) logger.info(f"Initialized global message ID counter to {initial_counter}") except Exception as e: logger.error(f"Failed to initialize message counter: {e}") # 如果初始化失败,设置一个安全的起始值 - await self._redis_exec(self.redis.setnx, "global_message_id_counter", 1000000) + await self.redis.setnx("global_message_id_counter", 1000000) async def _cleanup_redis_keys(self): """清理可能存在问题的 Redis 键""" try: # 扫描所有 channel:*:messages 键并检查类型 keys_pattern = "channel:*:messages" - keys = await self._redis_exec(self.redis.keys, keys_pattern) + keys = await self.redis.keys(keys_pattern) fixed_count = 0 for key in keys: - if isinstance(key, bytes): - key = key.decode("utf-8") - try: - key_type = await self._redis_exec(self.redis.type, key) + key_type = await 
self.redis.type(key) if key_type == "none": # 键不存在,正常情况 continue elif key_type != "zset": logger.warning(f"Cleaning up Redis key {key} with wrong type: {key_type}") - await self._redis_exec(self.redis.delete, key) + await self.redis.delete(key) # 验证删除是否成功 - verify_type = await self._redis_exec(self.redis.type, key) + verify_type = await self.redis.type(key) if verify_type != "none": logger.error(f"Failed to delete problematic key {key}, trying unlink...") - await self._redis_exec(self.redis.unlink, key) + await self.redis.unlink(key) fixed_count += 1 except Exception as cleanup_error: logger.warning(f"Failed to cleanup key {key}: {cleanup_error}") # 强制删除问题键 try: - await self._redis_exec(self.redis.delete, key) + await self.redis.delete(key) fixed_count += 1 except Exception: try: - await self._redis_exec(self.redis.unlink, key) + await self.redis.unlink(key) fixed_count += 1 except Exception as final_error: logger.error(f"Critical: Unable to clear problematic key {key}: {final_error}") @@ -654,11 +624,6 @@ class RedisMessageSystem: self._batch_timer = None logger.info("Redis message system stopped") - def __del__(self): - """清理资源""" - if hasattr(self, "executor"): - self.executor.shutdown(wait=False) - # 全局消息系统实例 redis_message_system = RedisMessageSystem() From 216d3ab3bf7ab8cc2b987f9b38e0ad85728ac041 Mon Sep 17 00:00:00 2001 From: MingxuanGame Date: Sat, 4 Oct 2025 05:39:59 +0000 Subject: [PATCH 17/26] feat(redis): refactor Redis configuration to use multiple logical databases - Updated default REDIS_URL to remove explicit /0 suffix - Added dedicated Redis clients: - db0: general cache (redis_client) - db1: message cache (redis_message_client) - db2: binary storage (redis_binary_client) - db3: rate limiting (redis_rate_limit_client) - Updated configuration, Docker files, and main startup lifecycle accordingly - Replaced `get_redis()` usage in notification server with `redis_message_client` --- .env.example | 2 +- app/config.py | 2 +- app/dependencies/database.py | 
13 +++++--- app/router/notification/server.py | 4 +-- docker-compose-osurx.yml | 2 +- docker-compose.yml | 2 +- main.py | 55 ++++++++++++++++++++++--------- 7 files changed, 53 insertions(+), 27 deletions(-) diff --git a/.env.example b/.env.example index f1a89a1..94875da 100644 --- a/.env.example +++ b/.env.example @@ -6,7 +6,7 @@ MYSQL_DATABASE="osu_api" MYSQL_USER="osu_api" MYSQL_PASSWORD="password" MYSQL_ROOT_PASSWORD="password" -REDIS_URL="redis://127.0.0.1:6379/0" +REDIS_URL="redis://127.0.0.1:6379" # JWT Settings # Use `openssl rand -hex 32` to generate a secure key diff --git a/app/config.py b/app/config.py index 53949aa..e2650b0 100644 --- a/app/config.py +++ b/app/config.py @@ -141,7 +141,7 @@ STORAGE_SETTINGS='{ ] redis_url: Annotated[ str, - Field(default="redis://127.0.0.1:6379/0", description="Redis 连接 URL"), + Field(default="redis://127.0.0.1:6379", description="Redis 连接 URL"), "数据库设置", ] diff --git a/app/dependencies/database.py b/app/dependencies/database.py index 3d490f2..a058725 100644 --- a/app/dependencies/database.py +++ b/app/dependencies/database.py @@ -35,13 +35,16 @@ engine = create_async_engine( ) # Redis 连接 -redis_client = redis.from_url(settings.redis_url, decode_responses=True) - -# Redis 二进制数据连接 (不自动解码响应,用于存储音频等二进制数据) -redis_binary_client = redis.from_url(settings.redis_url, decode_responses=False) +redis_client = redis.from_url(settings.redis_url, decode_responses=True, db=0) # Redis 消息缓存连接 (db1) -redis_message_client: redis.Redis = redis.from_url(settings.redis_url, decode_responses=True, db=1) +redis_message_client = redis.from_url(settings.redis_url, decode_responses=True, db=1) + +# Redis 二进制数据连接 (不自动解码响应,用于存储音频等二进制数据,db2) +redis_binary_client = redis.from_url(settings.redis_url, decode_responses=False, db=2) + +# Redis 限流连接 (db3) +redis_rate_limit_client = redis.from_url(settings.redis_url, decode_responses=True, db=3) # 数据库依赖 diff --git a/app/router/notification/server.py b/app/router/notification/server.py index 
021a690..7818887 100644 --- a/app/router/notification/server.py +++ b/app/router/notification/server.py @@ -8,7 +8,7 @@ from app.dependencies.database import ( DBFactory, Redis, get_db_factory, - get_redis, + redis_message_client, with_db, ) from app.dependencies.user import get_current_user_and_token @@ -31,7 +31,7 @@ class ChatServer: def __init__(self): self.connect_client: dict[int, WebSocket] = {} self.channels: dict[int, list[int]] = {} - self.redis: Redis = get_redis() + self.redis: Redis = redis_message_client self.tasks: set[asyncio.Task] = set() self.ChatSubscriber = ChatSubscriber() diff --git a/docker-compose-osurx.yml b/docker-compose-osurx.yml index 9cd1c92..b06bad1 100644 --- a/docker-compose-osurx.yml +++ b/docker-compose-osurx.yml @@ -11,7 +11,7 @@ services: environment: - MYSQL_HOST=mysql - MYSQL_PORT=3306 - - REDIS_URL=redis://redis:6379/0 + - REDIS_URL=redis://redis:6379 - ENABLE_OSU_RX=true - ENABLE_OSU_AP=true - ENABLE_ALL_MODS_PP=true diff --git a/docker-compose.yml b/docker-compose.yml index 1a82af3..a2a769f 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -13,7 +13,7 @@ services: environment: - MYSQL_HOST=mysql - MYSQL_PORT=3306 - - REDIS_URL=redis://redis:6379/0 + - REDIS_URL=redis://redis:6379 env_file: - .env depends_on: diff --git a/main.py b/main.py index 6646afb..a9ebed6 100644 --- a/main.py +++ b/main.py @@ -4,7 +4,14 @@ from pathlib import Path from app.config import settings from app.database import User -from app.dependencies.database import Database, engine, get_redis, redis_client +from app.dependencies.database import ( + Database, + engine, + redis_binary_client, + redis_client, + redis_message_client, + redis_rate_limit_client, +) from app.dependencies.fetcher import get_fetcher from app.dependencies.scheduler import start_scheduler, stop_scheduler from app.log import system_logger @@ -50,39 +57,55 @@ import sentry_sdk @asynccontextmanager async def lifespan(app: FastAPI): # noqa: ARG001 - # on startup + # === on 
startup === + # init mods and achievements init_mods() init_ranked_mods() - await FastAPILimiter.init(get_redis()) - fetcher = await get_fetcher() # 初始化 fetcher - await init_geoip() # 初始化 GeoIP 数据库 + load_achievements() + + # init rate limiter + await FastAPILimiter.init(redis_rate_limit_client) + + # init fetcher + fetcher = await get_fetcher() + # init GeoIP + await init_geoip() + + # init game server await create_rx_statistics() await calculate_user_rank(True) await daily_challenge_job() await process_daily_challenge_top() await create_banchobot() - await start_email_processor() # 启动邮件队列处理器 - await download_service.start_health_check() # 启动下载服务健康检查 - await start_cache_tasks() # 启动缓存调度器 + + # services + await start_email_processor() + await download_service.start_health_check() + await start_cache_tasks() init_beatmapset_update_service(fetcher) # 初始化谱面集更新服务 - redis_message_system.start() # 启动 Redis 消息系统 - load_achievements() + redis_message_system.start() start_scheduler() - # 显示资源代理状态 + # show the status of AssetProxy if settings.enable_asset_proxy: system_logger("AssetProxy").info(f"Asset Proxy enabled - Domain: {settings.custom_asset_domain}") - # on shutdown yield + + # === on shutdown === + # stop services bg_tasks.stop() - redis_message_system.stop() # 停止 Redis 消息系统 - await stop_cache_tasks() # 停止缓存调度器 + await stop_cache_tasks() stop_scheduler() - await download_service.stop_health_check() # 停止下载服务健康检查 - await stop_email_processor() # 停止邮件队列处理器 + await download_service.stop_health_check() + await stop_email_processor() + + # close database & redis await engine.dispose() await redis_client.aclose() + await redis_binary_client.aclose() + await redis_message_client.aclose() + await redis_rate_limit_client.aclose() desc = f"""osu! API 模拟服务器,支持 osu! 
API v1, v2 和 osu!lazer 的绝大部分功能。 From c6058eb0d84cc8862c3413a0d8a208510b65182c Mon Sep 17 00:00:00 2001 From: MingxuanGame Date: Sat, 4 Oct 2025 06:10:40 +0000 Subject: [PATCH 18/26] docs(dev): update contribution guide & agent instructions --- .github/copilot-instructions.md | 166 +++++++++++++++++++++++++++ .gitignore | 1 - AGENTS.md | 195 ++++++++++++++------------------ CONTRIBUTING.md | 168 ++++++++++++++++++++------- 4 files changed, 376 insertions(+), 154 deletions(-) create mode 100644 .github/copilot-instructions.md diff --git a/.github/copilot-instructions.md b/.github/copilot-instructions.md new file mode 100644 index 0000000..42c417a --- /dev/null +++ b/.github/copilot-instructions.md @@ -0,0 +1,166 @@ +# copilot-instruction + +> 此文件是 AGENTS.md 的复制。一切以 AGENTS.md 为主。 + +> 使用自动化与 AI 代理(GitHub Copilot、依赖/CI 机器人,以及仓库中的运行时调度器/worker)的指导原则,适用于 g0v0-server 仓库。 + +--- + +## API 参考 + +本项目必须保持与公开的 osu! API 兼容。在添加或映射端点时请参考: + +- **v1(旧版):** [https://github.com/ppy/osu-api/wiki](https://github.com/ppy/osu-api/wiki) +- **v2(OpenAPI):** [https://osu.ppy.sh/docs/openapi.yaml](https://osu.ppy.sh/docs/openapi.yaml) + +任何在 `app/router/v1/`、`app/router/v2/` 或 `app/router/notification/` 中的实现必须与官方规范保持一致。自定义或实验性的端点应放在 `app/router/private/` 中。 + +--- + +## 代理类别 + +允许的代理分为三类: + +- **代码生成/补全代理**(如 GitHub Copilot 或其他 LLM)—— **仅当** 有维护者审核并批准输出时允许使用。 +- **自动维护代理**(如 Dependabot、Renovate、pre-commit.ci)—— 允许使用,但必须遵守严格的 PR 和 CI 政策。 +- **运行时/后台代理**(调度器、worker)—— 属于产品代码的一部分;必须遵守生命周期、并发和幂等性规范。 + +所有由代理生成或建议的更改必须遵守以下规则。 + +--- + +## 所有代理的规则 + +1. **单一职责的 PR。** 代理的 PR 必须只解决一个问题(一个功能、一个 bug 修复或一次依赖更新)。提交信息应使用 Angular 风格(如 `feat(api): add ...`)。 +2. **通过 Lint 与 CI 检查。** 每个 PR(包括代理创建的)在合并前必须通过 `pyright`、`ruff`、`pre-commit` 钩子和仓库 CI。PR 中应附带 CI 运行结果链接。 +3. **绝不可提交敏感信息。** 代理不得提交密钥、密码、token 或真实 `.env` 值。如果检测到可能的敏感信息,代理必须中止并通知指定的维护者。 +4. **API 位置限制。** 不得在 `app/router/v1` 或 `app/router/v2` 下添加新的公开端点,除非该端点在官方 v1/v2 规范中存在。自定义或实验性端点必须放在 `app/router/private/`。 +5. 
**保持公共契约稳定。** 未经批准的迁移计划,不得随意修改响应 schema、路由前缀或其他公共契约。若有变更,PR 中必须包含明确的兼容性说明。 + +--- + +## Copilot / LLM 使用 + +> 关于在本仓库中使用 GitHub Copilot 和其他基于 LLM 的辅助工具的统一指导。 + +### 关键项目结构(需要了解的内容) + +- **应用入口:** `main.py` —— FastAPI 应用,包含启动/关闭生命周期管理(fetchers、GeoIP、调度器、缓存与健康检查、Redis 消息、统计、成就系统)。 + +- **路由:** `app/router/` 包含所有路由组。主要的路由包括: + - `v1/`(v1 端点) + - `v2/`(v2 端点) + - `notification/` 路由(聊天/通知子系统) + - `auth.py`(认证/token 流程) + - `private/`(自定义或实验性的端点) + + **规则:** `v1/` 和 `v2/` 必须与官方 API 对应。仅内部或实验端点应放在 `app/router/private/`。 + +- **模型与数据库工具:** + - SQLModel/ORM 模型在 `app/database/`。 + - 非数据库模型在 `app/models/`。 + - 修改模型/schema 时必须生成 Alembic 迁移,并手动检查生成的 SQL 与索引。 + +- **服务层:** `app/service/` 保存领域逻辑(如缓存工具、通知/邮件逻辑)。复杂逻辑应放在 service,而不是路由处理器中。 + +- **任务:** `app/tasks/` 保存任务(定时任务、启动任务、关闭任务)。 + - 均在 `__init__.py` 进行导出。 + - 对于启动任务/关闭任务,在 `main.py` 的 `lifespan` 调用。 + - 定时任务使用 APScheduler + +- **缓存与依赖:** 使用 `app/dependencies/` 提供的 Redis 依赖和缓存服务(遵循现有 key 命名约定,如 `user:{id}:...`)。 + +- **日志:** 使用 `app/log` 提供的日志工具。 + +### 实用工作流(提示模式) + +- **添加 v2 端点(正确方式):** 在 `app/router/v2/` 下添加文件,导出路由,实现基于数据库与缓存依赖的异步处理函数。**不得**在 v1/v2 添加非官方端点。 +- **添加内部端点:** 放在 `app/router/private/`,保持处理器精简,将业务逻辑放入 `app/service/`。 +- **添加后台任务:** 将任务逻辑写在 `app/service/_job.py`(幂等、可重试)。调度器入口放在 `app/scheduler/_scheduler.py`,并在应用生命周期注册。 +- **数据库 schema 变更:** 修改 `app/models/` 中的 SQLModel 模型,运行 `alembic revision --autogenerate`,检查迁移并本地测试 `alembic upgrade head` 后再提交。 +- **缓存写入与响应:** 使用现有的 `UserResp` 模式和 `UserCacheService`;异步缓存写入应使用后台任务。 + +### 提示指导(给 LLM/Copilot 的输入) + +- 明确文件位置和限制(如:`Add an async endpoint under app/router/private/... 
DO NOT add to app/router/v1 or v2`)。 +- 要求异步处理函数、依赖注入 DB/Redis、复用已有服务/工具、加上类型注解,并生成最小化 pytest 测试样例。 + +### 约定与质量要求 + +- **使用 Annotated-style 依赖注入** 在路由处理器中。 +- **提交信息风格:** `type(scope): subject`(Angular 风格)。 +- **优先异步:** 路由必须为异步函数;避免阻塞事件循环。 +- **关注点分离:** 业务逻辑应放在 service,而不是路由中。 +- **错误处理:** 客户端错误用 `HTTPException`,服务端错误使用结构化日志。 +- **类型与 lint:** 在请求评审前,代码必须通过 `pyright` 和 `ruff` 检查。 +- **注释:** 避免过多注释,仅为晦涩逻辑添加简洁的“魔法注释”。 +- **日志:** 使用 `app.log` 提供的 `log` 函数获取 logger 实例。(服务、任务除外) + +### 工具参考 + +``` +uv sync +pre-commit install +pre-commit run --all-files +pyright +ruff . +alembic revision --autogenerate -m "feat(db): ..." +alembic upgrade head +uvicorn main:app --reload --host 0.0.0.0 --port 8000 +``` + +### PR 范围指导 + +- 保持 PR 专注:一次只做一件事(如端点或重构,不要混合)。 +- 不确定时,请参考现有服务,并添加简短说明性注释。 + +### PR 审核规则 + +> GitHub Copilot PR review 可参考。 + +1. 如果 PR 修改了端点,简要说明端点的用途和预期行为。同时检查是否满足上述的 API 位置限制。 +2. 如果 PR 修改了数据库模型,必须包含 Alembic 迁移。检查迁移的 SQL 语句和索引是否合理。 +3. 修改的其他功能需要提供简短的说明。 +4. 提供性能优化的建议(见下文)。 + +--- + +## 性能优化提示 + +以下为结合本仓库架构(FastAPI + SQLModel/SQLAlchemy、Redis 缓存、后台调度器)总结的性能优化建议: + +### 数据库 + +- **仅选择必要字段。** 使用 `select(Model.col1, Model.col2)`,避免 `select(Model)`。 + +```py +stmt = select(User.id, User.username).where(User.active == True) +rows = await session.execute(stmt) +``` + +- **使用 `select(exists())` 检查存在性。** 避免加载整行: + +```py +from sqlalchemy import select, exists +exists_stmt = select(exists().where(User.id == some_id)) +found = await session.scalar(exists_stmt) +``` + +- **避免 N+1 查询。** 需要关联对象时用 `selectinload`、`joinedload`。 + +- **批量操作。** 插入/更新时应批量执行,并放在一个事务中,而不是多个小事务。 + + +### 耗时任务 + +- 如果这个任务来自 API Router,请使用 FastAPI 提供的 [`BackgroundTasks`](https://fastapi.tiangolo.com/tutorial/background-tasks) +- 其他情况,使用 `app.utils` 的 `bg_tasks`,它提供了与 FastAPI 的 `BackgroundTasks` 类似的功能。 + +--- + +## 部分 LLM 的额外要求 + +### Claude Code + +- 禁止创建额外的测试脚本。 + diff --git a/.gitignore b/.gitignore index f174e46..6f15ff1 100644 --- a/.gitignore +++ b/.gitignore @@ -222,7 +222,6 @@ newrelic.ini logs/ 
osu-server-spectator-master/* spectator-server/ -.github/copilot-instructions.md osu-web-master/* osu-web-master/.env.dusk.local.example osu-web-master/.env.example diff --git a/AGENTS.md b/AGENTS.md index 3ddac59..ef8a10e 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -1,117 +1,100 @@ -# AGENTS.md +# AGENTS -> Guidelines for using automation and AI agents (GitHub Copilot, dependency/CI bots, and in-repo runtime schedulers/workers) with the g0v0-server repository. +> 使用自动化与 AI 代理(GitHub Copilot、依赖/CI 机器人,以及仓库中的运行时调度器/worker)的指导原则,适用于 g0v0-server 仓库。 --- -## API References +## API 参考 -This project must stay compatible with the public osu! APIs. Use these references when adding or mapping endpoints: +本项目必须保持与公开的 osu! API 兼容。在添加或映射端点时请参考: -- **v1 (legacy):** [https://github.com/ppy/osu-api/wiki](https://github.com/ppy/osu-api/wiki) -- **v2 (OpenAPI):** [https://osu.ppy.sh/docs/openapi.yaml](https://osu.ppy.sh/docs/openapi.yaml) +- **v1(旧版):** [https://github.com/ppy/osu-api/wiki](https://github.com/ppy/osu-api/wiki) +- **v2(OpenAPI):** [https://osu.ppy.sh/docs/openapi.yaml](https://osu.ppy.sh/docs/openapi.yaml) -Any implementation in `app/router/v1/`, `app/router/v2/`, or `app/router/notification/` must match official endpoints from the corresponding specification above. Custom or experimental endpoints belong in `app/router/private/`. +任何在 `app/router/v1/`、`app/router/v2/` 或 `app/router/notification/` 中的实现必须与官方规范保持一致。自定义或实验性的端点应放在 `app/router/private/` 中。 --- -## Agent Categories +## 代理类别 -Agents are allowed in three categories: +允许的代理分为三类: -- **Code authoring / completion agents** (e.g. GitHub Copilot or other LLMs) — allowed **only** when a human maintainer reviews and approves the output. -- **Automated maintenance agents** (e.g. Dependabot, Renovate, pre-commit.ci) — allowed but must follow strict PR and CI policies. -- **Runtime / background agents** (schedulers, workers) — part of the product code; must follow lifecycle, concurrency, and idempotency conventions. 
+- **代码生成/补全代理**(如 GitHub Copilot 或其他 LLM)—— **仅当** 有维护者审核并批准输出时允许使用。 +- **自动维护代理**(如 Dependabot、Renovate、pre-commit.ci)—— 允许使用,但必须遵守严格的 PR 和 CI 政策。 +- **运行时/后台代理**(调度器、worker)—— 属于产品代码的一部分;必须遵守生命周期、并发和幂等性规范。 -All changes produced or suggested by agents must comply with the rules below. +所有由代理生成或建议的更改必须遵守以下规则。 --- -## Rules for All Agents +## 所有代理的规则 -1. **Human review required.** Any code, configuration, or documentation generated by an AI or automation agent must be reviewed and approved by a human maintainer familiar with g0v0-server. Do not merge agent PRs without explicit human approval. -2. **Single-responsibility PRs.** Agent PRs must address one concern only (one feature, one bugfix, or one dependency update). Use Angular-style commit messages (e.g. `feat(api): add ...`). -3. **Lint & CI compliance.** Every PR (including agent-created ones) must pass `pyright`, `ruff`, `pre-commit` hooks, and the repository CI before merging. Include links to CI runs in the PR. -4. **Never commit secrets.** Agents must not add keys, passwords, tokens, or real `.env` values. If a suspected secret is detected, the agent must abort and notify a designated human. -5. **API location constraints.** Do not add new public endpoints under `app/router/v1` or `app/router/v2` unless the endpoints exist in the official v1/v2 specs. Custom or experimental endpoints must go under `app/router/private/`. -6. **Stable public contracts.** Avoid changing response schemas, route prefixes, or other public contracts without an approved migration plan and explicit compatibility notes in the PR. +1. **单一职责的 PR。** 代理的 PR 必须只解决一个问题(一个功能、一个 bug 修复或一次依赖更新)。提交信息应使用 Angular 风格(如 `feat(api): add ...`)。 +2. **通过 Lint 与 CI 检查。** 每个 PR(包括代理创建的)在合并前必须通过 `pyright`、`ruff`、`pre-commit` 钩子和仓库 CI。PR 中应附带 CI 运行结果链接。 +3. **绝不可提交敏感信息。** 代理不得提交密钥、密码、token 或真实 `.env` 值。如果检测到可能的敏感信息,代理必须中止并通知指定的维护者。 +4. 
**API 位置限制。** 不得在 `app/router/v1` 或 `app/router/v2` 下添加新的公开端点,除非该端点在官方 v1/v2 规范中存在。自定义或实验性端点必须放在 `app/router/private/`。 +5. **保持公共契约稳定。** 未经批准的迁移计划,不得随意修改响应 schema、路由前缀或其他公共契约。若有变更,PR 中必须包含明确的兼容性说明。 --- -## Copilot / LLM Usage +## Copilot / LLM 使用 -> Consolidated guidance for using GitHub Copilot and other LLM-based helpers with this repository. +> 关于在本仓库中使用 GitHub Copilot 和其他基于 LLM 的辅助工具的统一指导。 -### Key project structure (what you should know) +### 关键项目结构(需要了解的内容) -- **App entry:** `main.py` — FastAPI application with lifespan startup/shutdown orchestration (fetchers, GeoIP, schedulers, cache and health checks, Redis messaging, stats, achievements). +- **应用入口:** `main.py` —— FastAPI 应用,包含启动/关闭生命周期管理(fetchers、GeoIP、调度器、缓存与健康检查、Redis 消息、统计、成就系统)。 -- **Routers:** `app/router/` contains route groups. Important routers exposed by the project include: +- **路由:** `app/router/` 包含所有路由组。主要的路由包括: + - `v1/`(v1 端点) + - `v2/`(v2 端点) + - `notification/` 路由(聊天/通知子系统) + - `auth.py`(认证/token 流程) + - `private/`(自定义或实验性的端点) - - `api_v1_router` (v1 endpoints) - - `api_v2_router` (v2 endpoints) - - `notification` routers (chat/notification subsystems) - - `auth_router` (authentication/token flows) - - `private_router` (internal or server-specific endpoints) + **规则:** `v1/` 和 `v2/` 必须与官方 API 对应。仅内部或实验端点应放在 `app/router/private/`。 - **Rules:** `v1/` and `v2/` must mirror the official APIs. Put internal-only or experimental endpoints under `app/router/private/`. +- **模型与数据库工具:** + - SQLModel/ORM 模型在 `app/database/`。 + - 非数据库模型在 `app/models/`。 + - 修改模型/schema 时必须生成 Alembic 迁移,并手动检查生成的 SQL 与索引。 -- **Models & DB helpers:** +- **服务层:** `app/service/` 保存领域逻辑(如缓存工具、通知/邮件逻辑)。复杂逻辑应放在 service,而不是路由处理器中。 - - SQLModel/ORM models live in `app/models/`. - - DB access helpers and table-specific helpers live in `app/database/`. - - For model/schema changes, draft an Alembic migration and manually review the generated SQL and indexes before applying. 
+- **任务:** `app/tasks/` 保存任务(定时任务、启动任务、关闭任务)。 + - 均在 `__init__.py` 进行导出。 + - 对于启动任务/关闭任务,在 `main.py` 的 `lifespan` 调用。 + - 定时任务使用 APScheduler -- **Services:** `app/service/` holds domain logic (e.g., user ranking calculation, caching helpers, notification/email logic). Heavy logic belongs in services rather than in route handlers. +- **缓存与依赖:** 使用 `app/dependencies/` 提供的 Redis 依赖和缓存服务(遵循现有 key 命名约定,如 `user:{id}:...`)。 -- **Schedulers:** `app/scheduler/` contains scheduler starters; implement `start_*_scheduler()` and `stop_*_scheduler()` and register them in `main.py` lifespan handlers. +- **日志:** 使用 `app/log` 提供的日志工具。 -- **Caching & dependencies:** Use injected Redis dependencies from `app/dependencies/` and shared cache services (follow existing key naming conventions such as `user:{id}:...`). +### 实用工作流(提示模式) -- **Rust/native extensions:** `packages/msgpack_lazer_api` is a native MessagePack encoder/decoder. When changing native code, run `maturin develop -R` and validate compatibility with Python bindings. +- **添加 v2 端点(正确方式):** 在 `app/router/v2/` 下添加文件,导出路由,实现基于数据库与缓存依赖的异步处理函数。**不得**在 v1/v2 添加非官方端点。 +- **添加内部端点:** 放在 `app/router/private/`,保持处理器精简,将业务逻辑放入 `app/service/`。 +- **添加后台任务:** 将任务逻辑写在 `app/service/_job.py`(幂等、可重试)。调度器入口放在 `app/scheduler/_scheduler.py`,并在应用生命周期注册。 +- **数据库 schema 变更:** 修改 `app/models/` 中的 SQLModel 模型,运行 `alembic revision --autogenerate`,检查迁移并本地测试 `alembic upgrade head` 后再提交。 +- **缓存写入与响应:** 使用现有的 `UserResp` 模式和 `UserCacheService`;异步缓存写入应使用后台任务。 -### Practical playbooks (prompt patterns) +### 提示指导(给 LLM/Copilot 的输入) -- **Add a v2 endpoint (correct):** Add files under `app/router/v2/`, export the router, implement async path operations using DB and injected caching dependencies. Do **not** add non-official endpoints to v1/v2. -- **Add an internal endpoint:** Add under `app/router/private/`; keep route handlers thin and move business logic into `app/service/`. 
-- **Add a background job:** Put pure job logic in `app/service/_job.py` (idempotent, retry-safe). Add scheduler start/stop functions in `app/scheduler/_scheduler.py`, and register them in the app lifespan. -- **DB schema changes:** Update SQLModel models in `app/models/`, run `alembic revision --autogenerate`, inspect the migration, and validate locally with `alembic upgrade head` before committing. -- **Cache writes & responses:** Use existing `UserResp` patterns and `UserCacheService` where applicable; use background tasks for asynchronous cache writes. +- 明确文件位置和限制(如:`Add an async endpoint under app/router/private/... DO NOT add to app/router/v1 or v2`)。 +- 要求异步处理函数、依赖注入 DB/Redis、复用已有服务/工具、加上类型注解,并生成最小化 pytest 测试样例。 -### Prompt guidance (what to include for LLMs/Copilot) +### 约定与质量要求 -- Specify the exact file location and constraints (e.g. `Add an async endpoint under app/router/private/ ... DO NOT add to app/router/v1 or v2`). -- Ask for asynchronous handlers, dependency injection for DB/Redis, reuse of existing services/helpers, type annotations, and a minimal pytest skeleton. -- For native edits, require build instructions, ABI compatibility notes, and import validation steps. +- **使用 Annotated-style 依赖注入** 在路由处理器中。 +- **提交信息风格:** `type(scope): subject`(Angular 风格)。 +- **优先异步:** 路由必须为异步函数;避免阻塞事件循环。 +- **关注点分离:** 业务逻辑应放在 service,而不是路由中。 +- **错误处理:** 客户端错误用 `HTTPException`,服务端错误使用结构化日志。 +- **类型与 lint:** 在请求评审前,代码必须通过 `pyright` 和 `ruff` 检查。 +- **注释:** 避免过多注释,仅为晦涩逻辑添加简洁的“魔法注释”。 +- **日志:** 使用 `app.log` 提供的 `log` 函数获取 logger 实例。(服务、任务除外) -### Conventions & quality expectations - -- **Commit message style:** `type(scope): subject` (Angular-style). -- **Async-first:** Route handlers must be async; avoid blocking the event loop. -- **Separation of concerns:** Business logic should live in services, not inside route handlers. -- **Error handling:** Use `HTTPException` for client errors and structured logging for server-side issues. 
-- **Types & linting:** Aim for `pyright`-clean, `ruff`-clean code before requesting review. -- **Comments:** Avoid excessive inline comments. Add short, targeted comments to explain non-obvious or "magical" behavior. - -### Human reviewer checklist - -- Is the code async and non-blocking, with heavy logic in `app/service/`? -- Are DB and Redis dependencies injected via the project's dependency utilities? -- Are existing cache keys and services reused consistently? -- Are tests or test skeletons present and runnable? -- If models changed: is an Alembic migration drafted, reviewed, and applied locally? -- If native code changed: was `maturin develop -R` executed and validated? -- Do `pyright` and `ruff` pass locally? - -### Merge checklist - -- Run `uv sync` to install/update dependencies. -- Run `pre-commit` hooks and fix any failures. -- Run `pyright` and `ruff` locally and resolve issues. -- If native modules changed: run `maturin develop -R`. -- If DB migrations changed: run `alembic upgrade head` locally to validate. - -### Tooling reference +### 工具参考 ``` uv sync @@ -119,34 +102,41 @@ pre-commit install pre-commit run --all-files pyright ruff . -maturin develop -R # when native modules changed alembic revision --autogenerate -m "feat(db): ..." alembic upgrade head uvicorn main:app --reload --host 0.0.0.0 --port 8000 ``` -### PR scope guidance +### PR 范围指导 -- Keep PRs focused: one concern per PR (e.g., endpoint OR refactor, not both). -- Update README/config docs when adding new environment variables. -- If unsure about conventions, align with the closest existing service and leave a clarifying comment. +- 保持 PR 专注:一次只做一件事(如端点或重构,不要混合)。 +- 不确定时,请参考现有服务,并添加简短说明性注释。 + +### PR 审核规则 + +> GitHub Copilot PR review 可参考。 + +1. 如果 PR 修改了端点,简要说明端点的用途和预期行为。同时检查是否满足上述的 API 位置限制。 +2. 如果 PR 修改了数据库模型,必须包含 Alembic 迁移。检查迁移的 SQL 语句和索引是否合理。 +3. 修改的其他功能需要提供简短的说明。 +4. 
提供性能优化的建议(见下文)。 --- -## Performance Tips +## 性能优化提示 -Below are practical, project-specific performance tips derived from this repository's architecture (FastAPI + SQLModel/SQLAlchemy, Redis caching, background schedulers, and a Rust-native messagepack module). +以下为结合本仓库架构(FastAPI + SQLModel/SQLAlchemy、Redis 缓存、后台调度器)总结的性能优化建议: -### Database +### 数据库 -- **Select only required fields.** Fetch only the columns you need using `select(Model.col1, Model.col2)` instead of `select(Model)`. +- **仅选择必要字段。** 使用 `select(Model.col1, Model.col2)`,避免 `select(Model)`。 ```py stmt = select(User.id, User.username).where(User.active == True) rows = await session.execute(stmt) ``` -- **Use **``** for existence checks.** This avoids loading full rows: +- **使用 `select(exists())` 检查存在性。** 避免加载整行: ```py from sqlalchemy import select, exists @@ -154,34 +144,21 @@ exists_stmt = select(exists().where(User.id == some_id)) found = await session.scalar(exists_stmt) ``` -- **Avoid N+1 queries.** Use relationship loading strategies (`selectinload`, `joinedload`) when you need related objects. +- **避免 N+1 查询。** 需要关联对象时用 `selectinload`、`joinedload`。 -- **Batch operations.** For inserts/updates, use bulk or batched statements inside a single transaction rather than many small transactions. +- **批量操作。** 插入/更新时应批量执行,并放在一个事务中,而不是多个小事务。 -- **Indexes & EXPLAIN.** Add indexes on frequently filtered columns and use `EXPLAIN ANALYZE` to inspect slow queries. -- **Cursor / keyset pagination.** Prefer keyset pagination for large result sets instead of `OFFSET`/`LIMIT` to avoid high-cost scans. +### 耗时任务 -### Caching & Redis +- 如果这个任务来自 API Router,请使用 FastAPI 提供的 [`BackgroundTasks`](https://fastapi.tiangolo.com/tutorial/background-tasks) +- 其他情况,使用 `app.utils` 的 `bg_tasks`,它提供了与 FastAPI 的 `BackgroundTasks` 类似的功能。 -- **Cache hot reads.** Use `UserCacheService` to cache heavy or frequently-requested responses and store compact serialized forms (e.g., messagepack via the native module). 
+--- -- **Use pipelines and multi/exec.** When performing multiple Redis commands, pipeline them to reduce roundtrips. +## 部分 LLM 的额外要求 -- **Set appropriate TTLs.** Avoid never-expiring keys; choose TTLs that balance freshness and read amplification. +### Claude Code -- **Prevent cache stampedes.** Use early recompute with jitter or distributed locks (Redis `SET NX` or a small lock library) to avoid many processes rebuilding the same cache. +- 禁止创建额外的测试脚本。 -- **Atomic operations with Lua.** For complex multi-step Redis changes, consider a Lua script to keep operations atomic and fast. - -### Background & Long-running Tasks - -- **BackgroundTasks for lightweight work.** FastAPI's `BackgroundTasks` is fine for quick follow-up work (send email, async cache write). For heavy or long tasks, use a scheduler/worker (e.g., a dedicated async worker or job queue). - -- **Use schedulers or workers for heavy jobs.** For expensive recalculations, use the repository's `app/scheduler/` pattern or an external worker system. Keep request handlers responsive — return quickly and delegate. - -- **Throttling & batching.** When processing many items, batch them and apply concurrency limits (semaphore) to avoid saturating DB/Redis. 
- -### API & Response Performance - -- **Compress large payloads.** Enable gzip/deflate for large JSON responses diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 3f6e3e2..d6ee47d 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -54,9 +54,123 @@ dotnet run --project osu.Server.Spectator --urls "http://0.0.0.0:8086" uv sync ``` -## 代码质量和代码检查 +## 开发规范 -我们使用 `pre-commit` 在提交之前执行代码质量标准。这确保所有代码都通过 `ruff`(用于代码检查和格式化)和 `pyright`(用于类型检查)的检查。 +### 项目结构 + +以下是项目主要目录和文件的结构说明: + +- `main.py`: FastAPI 应用的主入口点,负责初始化和启动服务器。 +- `pyproject.toml`: 项目配置文件,用于管理依赖项 (uv)、代码格式化 (Ruff) 和类型检查 (Pyright)。 +- `alembic.ini`: Alembic 数据库迁移工具的配置文件。 +- `app/`: 存放所有核心应用代码。 + - `router/`: 包含所有 API 端点的定义,根据 API 版本和功能进行组织。 + - `service/`: 存放核心业务逻辑,例如用户排名计算、每日挑战处理等。 + - `database/`: 定义数据库模型 (SQLModel) 和会话管理。 + - `models/`: 定义非数据库模型和其他模型。 + - `tasks/`: 包含由 APScheduler 调度的后台任务和启动/关闭任务。 + - `dependencies/`: 管理 FastAPI 的依赖项注入。 + - `achievements/`: 存放与成就相关的逻辑。 + - `storage/`: 存储服务代码。 + - `fetcher/`: 用于从外部服务(如 osu! 官网)获取数据的模块。 + - `middleware/`: 定义中间件,例如会话验证。 + - `helpers/`: 存放辅助函数和工具类。 + - `config.py`: 应用配置,使用 pydantic-settings 管理。 + - `calculator.py`: 存放所有的计算逻辑,例如 pp 和等级。 + - `log.py`: 日志记录模块,提供统一的日志接口。 + - `const.py`: 定义常量。 + - `path.py`: 定义跨文件使用的常量。 +- `migrations/`: 存放 Alembic 生成的数据库迁移脚本。 +- `static/`: 存放静态文件,如 `mods.json`。 + +### 数据库模型定义 + +所有的数据库模型定义在 `app.database` 里,并且在 `__init__.py` 中导出。 + +如果这个模型的数据表结构和响应不完全相同,遵循 `Base` - `Table` - `Resp` 结构: + +```python +class ModelBase(SQLModel): + # 定义共有内容 + ... + + +class Model(ModelBase, table=True): + # 定义数据库表内容 + ... + + +class ModelResp(ModelBase): + # 定义响应内容 + ... + + @classmethod + def from_db(cls, db: Model) -> "ModelResp": + # 从数据库模型转换 + ... +``` + +数据库模块名应与表名相同,定义了多个模型的除外。 + +如果你需要使用 Session,使用 `app.dependencies.database` 提供的 `with_db`,注意手动使用 `COMMIT`。 + +```python +from app.dependencies.database import with_db + +async with with_db() as session: + ... 
+``` + +### Redis + +根据你需要的用途选择对应的 Redis 客户端。如果你的用途较为复杂或趋向一个较大的系统,考虑再创建一个 Redis 连接。 + +- `redis_client` (db0):标准用途,存储字符串、哈希等常规数据。 +- `redis_message_client` (db1):用于消息缓存,存储聊天记录等。 +- `redis_binary_client` (db2):用于存储二进制数据,如音频文件等。 +- `redis_rate_limit_client` (db3):仅用于 FastAPI-Limiter 使用。 + +### API Router + +所有的 API Router 定义在 `app.router` 里: + +- `app/router/v2` 存放所有 osu! v2 API 实现,**不允许添加额外的,原 v2 API 不存在的 Endpoint** +- `app/router/notification` **存放所有 osu! v2 API 聊天、通知和 BanchoBot 的实现,不允许添加额外的,原 v2 API 不存在的 Endpoint** +- `app/router/v1` 存放所有 osu! v1 API 实现,**不允许添加额外的,原 v1 API 不存在的 Endpoint** +- `app/router/auth.py` 存放账户鉴权/登录的 API +- `app/router/private` 存放服务器自定义 API (g0v0 API),供其他服务使用 + +任何 Router 需要满足: + +- 使用 Annotated-style 的依赖注入 +- 对于已经存在的依赖注入如 Database 和 Redis,使用 `app.dependencies` 中的实现 +- 需要拥有文档 +- 如果返回需要资源代理,使用 `app.helpers.asset_proxy_helper` 的 `asset_proxy_response` 装饰器。 +- 如果需要记录日志,请使用 `app.log` 提供的 `log` 函数获取一个 logger 实例 + +### Service + +所有的核心业务逻辑放在 `app.service` 里: + +- 业务逻辑需要以类实现 +- 日志只需要使用 `app.log` 中的 `logger` 即可。服务器会对 Service 的日志进行包装。 + +### 定时任务/启动任务/关闭任务 + +均定义在 `app.tasks` 里。 + +- 均在 `__init__.py` 进行导出 +- 对于启动任务/关闭任务,在 `main.py` 的 `lifespan` 调用。 +- 定时任务使用 APScheduler + +### 耗时任务 + +- 如果这个任务来自 API Router,请使用 FastAPI 提供的 [`BackgroundTasks`](https://fastapi.tiangolo.com/tutorial/background-tasks) +- 其他情况,使用 `app.utils` 的 `bg_tasks`,它提供了与 FastAPI 的 `BackgroundTasks` 类似的功能。 + +### 代码质量和代码检查 + +使用 `pre-commit` 在提交之前执行代码质量标准。这确保所有代码都通过 `ruff`(用于代码检查和格式化)和 `pyright`(用于类型检查)的检查。 ### 设置 @@ -70,19 +184,9 @@ pre-commit install pre-commit 不提供 pyright 的 hook,您需要手动运行 `pyright` 检查类型错误。 -## 提交信息指南 +### 提交信息指南 -我们遵循 [AngularJS 提交规范](https://github.com/angular/angular.js/blob/master/DEVELOPERS.md#commit-message-format) 来编写提交信息。这使得在查看项目历史记录时,信息更加可读且易于理解。 -每条提交信息由 **标题**、**主体**和 **页脚** 三部分组成。 -``` -(): - - -