refactor(tasks): move schedulers and startup/shutdown tasks into tasks directory
app/tasks/__init__.py (new file)
@@ -0,0 +1,28 @@
# ruff: noqa: F401
from __future__ import annotations

from . import (
    beatmapset_update,
    database_cleanup,
    recalculate_banned_beatmap,
    recalculate_failed_score,
)
from .cache import start_cache_tasks, stop_cache_tasks
from .calculate_all_user_rank import calculate_user_rank
from .create_banchobot import create_banchobot
from .daily_challenge import daily_challenge_job, process_daily_challenge_top
from .geoip import init_geoip
from .load_achievements import load_achievements
from .osu_rx_statistics import create_rx_statistics

__all__ = [
    "calculate_user_rank",
    "create_banchobot",
    "create_rx_statistics",
    "daily_challenge_job",
    "init_geoip",
    "load_achievements",
    "process_daily_challenge_top",
    "start_cache_tasks",
    "stop_cache_tasks",
]
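Note: importing this package is what registers every `@scheduled_job`-decorated task, so an application entrypoint only needs to import `app.tasks` and start the shared scheduler. A minimal sketch of that wiring (the `startup` hook is hypothetical and assumes `get_scheduler()` returns an already-configured AsyncIOScheduler; the imported names come from this commit):

    import app.tasks as tasks
    from app.dependencies.scheduler import get_scheduler

    async def startup() -> None:
        # One-shot startup tasks exported by the package.
        tasks.load_achievements()
        await tasks.create_banchobot()
        await tasks.create_rx_statistics()
        await tasks.init_geoip()
        await tasks.start_cache_tasks()
        # Interval/cron jobs were registered as a side effect of the import above.
        get_scheduler().start()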
app/tasks/beatmapset_update.py (new file)
@@ -0,0 +1,21 @@
from __future__ import annotations

from datetime import datetime, timedelta

from app.dependencies.scheduler import get_scheduler
from app.service.beatmapset_update_service import service
from app.utils import bg_tasks

SCHEDULER_INTERVAL_MINUTES = 2


@get_scheduler().scheduled_job(
    "interval",
    id="update_beatmaps",
    minutes=SCHEDULER_INTERVAL_MINUTES,
    next_run_time=datetime.now() + timedelta(minutes=1),
)
async def beatmapset_update_job():
    if service is not None:
        bg_tasks.add_task(service.add_missing_beatmapsets)
        await service._update_beatmaps()
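Note: the decorator runs at import time, so `next_run_time=datetime.now() + timedelta(minutes=1)` is evaluated once and pins the first run to one minute after `app.tasks` is imported. The decorator is equivalent to this explicit registration (sketch only; registering both would conflict on the shared id):

    from datetime import datetime, timedelta

    from app.dependencies.scheduler import get_scheduler
    from app.tasks.beatmapset_update import SCHEDULER_INTERVAL_MINUTES, beatmapset_update_job

    get_scheduler().add_job(
        beatmapset_update_job,
        "interval",
        id="update_beatmaps",
        minutes=SCHEDULER_INTERVAL_MINUTES,
        next_run_time=datetime.now() + timedelta(minutes=1),  # evaluated once, here
    )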
app/tasks/cache.py (new file)
@@ -0,0 +1,254 @@
"""Entry points for cache-related APScheduler jobs."""

from __future__ import annotations

import asyncio
from datetime import UTC, timedelta
from typing import Final

from app.config import settings
from app.database.score import Score
from app.dependencies.database import get_redis
from app.dependencies.fetcher import get_fetcher
from app.dependencies.scheduler import get_scheduler
from app.log import logger
from app.service.ranking_cache_service import schedule_ranking_refresh_task
from app.service.user_cache_service import get_user_cache_service
from app.utils import utcnow

from apscheduler.jobstores.base import JobLookupError
from apscheduler.triggers.interval import IntervalTrigger
from sqlmodel import col, func, select

CACHE_JOB_IDS: Final[dict[str, str]] = {
    "beatmap_warmup": "cache:beatmap:warmup",
    "ranking_refresh": "cache:ranking:refresh",
    "user_preload": "cache:user:preload",
    "user_cleanup": "cache:user:cleanup",
}


async def warmup_cache() -> None:
    """Run the beatmap cache warmup."""
    try:
        logger.info("Starting beatmap cache warmup...")

        fetcher = await get_fetcher()
        redis = get_redis()

        await fetcher.warmup_homepage_cache(redis)

        logger.info("Beatmap cache warmup completed successfully")
    except Exception as e:
        logger.error("Beatmap cache warmup failed: %s", e)


async def refresh_ranking_cache() -> None:
    """Refresh the ranking cache."""
    try:
        logger.info("Starting ranking cache refresh...")

        redis = get_redis()

        from app.dependencies.database import with_db

        async with with_db() as session:
            await schedule_ranking_refresh_task(session, redis)

        logger.info("Ranking cache refresh completed successfully")
    except Exception as e:
        logger.error("Ranking cache refresh failed: %s", e)


async def schedule_user_cache_preload_task() -> None:
    """Scheduled user-cache preload task."""
    enable_user_cache_preload = getattr(settings, "enable_user_cache_preload", True)
    if not enable_user_cache_preload:
        return

    try:
        logger.info("Starting user cache preload task...")

        redis = get_redis()
        cache_service = get_user_cache_service(redis)

        from app.dependencies.database import with_db

        async with with_db() as session:
            recent_time = utcnow() - timedelta(hours=24)

            score_count = func.count().label("score_count")
            active_user_ids = (
                await session.exec(
                    select(Score.user_id, score_count)
                    .where(col(Score.ended_at) >= recent_time)
                    .group_by(col(Score.user_id))
                    .order_by(score_count.desc())
                    .limit(settings.user_cache_max_preload_users)
                )
            ).all()

            if active_user_ids:
                user_ids = [row[0] for row in active_user_ids]
                await cache_service.preload_user_cache(session, user_ids)
                logger.info("Preloaded cache for %s active users", len(user_ids))
            else:
                logger.info("No active users found for cache preload")

        logger.info("User cache preload task completed successfully")
    except Exception as e:
        logger.error("User cache preload task failed: %s", e)


async def schedule_user_cache_warmup_task() -> None:
    """Scheduled user-cache warmup task: preload the top 100 ranked users."""
    try:
        logger.info("Starting user cache warmup task...")

        redis = get_redis()
        cache_service = get_user_cache_service(redis)

        from app.dependencies.database import with_db

        async with with_db() as session:
            from app.database.statistics import UserStatistics
            from app.models.score import GameMode

            for mode in GameMode:
                try:
                    top_users = (
                        await session.exec(
                            select(UserStatistics.user_id)
                            .where(UserStatistics.mode == mode)
                            .order_by(col(UserStatistics.pp).desc())
                            .limit(100)
                        )
                    ).all()

                    if top_users:
                        user_ids = list(top_users)
                        await cache_service.preload_user_cache(session, user_ids)
                        logger.info("Warmed cache for top 100 users in %s", mode)

                    await asyncio.sleep(1)
                except Exception as e:
                    logger.error("Failed to warm cache for %s: %s", mode, e)
                    continue

        logger.info("User cache warmup task completed successfully")
    except Exception as e:
        logger.error("User cache warmup task failed: %s", e)


async def schedule_user_cache_cleanup_task() -> None:
    """Scheduled user-cache cleanup task."""
    try:
        logger.info("Starting user cache cleanup task...")

        redis = get_redis()

        cache_service = get_user_cache_service(redis)
        stats = await cache_service.get_cache_stats()

        logger.info("User cache stats: %s", stats)
        logger.info("User cache cleanup task completed successfully")
    except Exception as e:
        logger.error("User cache cleanup task failed: %s", e)


async def warmup_user_cache() -> None:
    """Warm up the user cache."""
    try:
        await schedule_user_cache_warmup_task()
    except Exception as e:
        logger.error("User cache warmup failed: %s", e)


async def preload_user_cache() -> None:
    """Preload the user cache."""
    try:
        await schedule_user_cache_preload_task()
    except Exception as e:
        logger.error("User cache preload failed: %s", e)


async def cleanup_user_cache() -> None:
    """Clean up the user cache."""
    try:
        await schedule_user_cache_cleanup_task()
    except Exception as e:
        logger.error("User cache cleanup failed: %s", e)


def register_cache_jobs() -> None:
    """Register the cache-related APScheduler jobs."""
    scheduler = get_scheduler()

    scheduler.add_job(
        warmup_cache,
        trigger=IntervalTrigger(minutes=30, timezone=UTC),
        id=CACHE_JOB_IDS["beatmap_warmup"],
        replace_existing=True,
        coalesce=True,
        max_instances=1,
        misfire_grace_time=300,
    )

    scheduler.add_job(
        refresh_ranking_cache,
        trigger=IntervalTrigger(
            minutes=settings.ranking_cache_refresh_interval_minutes,
            timezone=UTC,
        ),
        id=CACHE_JOB_IDS["ranking_refresh"],
        replace_existing=True,
        coalesce=True,
        max_instances=1,
        misfire_grace_time=300,
    )

    scheduler.add_job(
        preload_user_cache,
        trigger=IntervalTrigger(minutes=15, timezone=UTC),
        id=CACHE_JOB_IDS["user_preload"],
        replace_existing=True,
        coalesce=True,
        max_instances=1,
        misfire_grace_time=300,
    )

    scheduler.add_job(
        cleanup_user_cache,
        trigger=IntervalTrigger(hours=1, timezone=UTC),
        id=CACHE_JOB_IDS["user_cleanup"],
        replace_existing=True,
        coalesce=True,
        max_instances=1,
        misfire_grace_time=300,
    )

    logger.info("Registered cache APScheduler jobs")


async def start_cache_tasks() -> None:
    """Register the APScheduler jobs and run startup-time tasks."""
    register_cache_jobs()
    logger.info("Cache APScheduler jobs registered; running initial tasks")


async def stop_cache_tasks() -> None:
    """Remove the APScheduler jobs."""
    scheduler = get_scheduler()
    for job_id in CACHE_JOB_IDS.values():
        try:
            scheduler.remove_job(job_id)
        except JobLookupError:
            continue

    logger.info("Cache APScheduler jobs removed")
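Note: because every job here carries a stable id from `CACHE_JOB_IDS` and `replace_existing=True`, operational tooling can address the jobs directly. A minimal sketch (assuming APScheduler 3.x and a started scheduler) that forces the beatmap warmup to fire immediately:

    from datetime import UTC, datetime

    from app.dependencies.scheduler import get_scheduler
    from app.tasks.cache import CACHE_JOB_IDS

    def trigger_warmup_now() -> None:
        job = get_scheduler().get_job(CACHE_JOB_IDS["beatmap_warmup"])
        if job is not None:
            # Moving next_run_time forward makes the scheduler pick the job up
            # on its next wakeup instead of waiting out the 30-minute interval.
            job.modify(next_run_time=datetime.now(UTC))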
app/tasks/calculate_all_user_rank.py (new file)
@@ -0,0 +1,100 @@
from __future__ import annotations

from datetime import timedelta

from app.database import RankHistory, UserStatistics
from app.database.rank_history import RankTop
from app.dependencies.database import with_db
from app.dependencies.scheduler import get_scheduler
from app.log import logger
from app.models.score import GameMode
from app.utils import utcnow

from sqlmodel import col, exists, select, update


@get_scheduler().scheduled_job("cron", hour=0, minute=0, second=0, id="calculate_user_rank")
async def calculate_user_rank(is_today: bool = False):
    today = utcnow().date()
    target_date = today if is_today else today - timedelta(days=1)
    logger.info("Starting user rank calculation for {}", target_date)
    async with with_db() as session:
        for gamemode in GameMode:
            logger.info("Calculating ranks for {} on {}", gamemode.name, target_date)
            users = await session.exec(
                select(UserStatistics)
                .where(
                    UserStatistics.mode == gamemode,
                    UserStatistics.pp > 0,
                    col(UserStatistics.is_ranked).is_(True),
                )
                .order_by(
                    col(UserStatistics.pp).desc(),
                    col(UserStatistics.total_score).desc(),
                )
            )
            rank = 1
            processed_users = 0
            for user in users:
                is_exist = (
                    await session.exec(
                        select(exists()).where(
                            RankHistory.user_id == user.user_id,
                            RankHistory.mode == gamemode,
                            RankHistory.date == target_date,
                        )
                    )
                ).first()
                if not is_exist:
                    rank_history = RankHistory(
                        user_id=user.user_id,
                        mode=gamemode,
                        rank=rank,
                        date=target_date,  # align with the existence check on target_date above
                    )
                    session.add(rank_history)
                else:
                    await session.execute(
                        update(RankHistory)
                        .where(
                            col(RankHistory.user_id) == user.user_id,
                            col(RankHistory.mode) == gamemode,
                            col(RankHistory.date) == target_date,
                        )
                        .values(rank=rank)
                    )

                rank_top = (
                    await session.exec(
                        select(RankTop).where(
                            RankTop.user_id == user.user_id,
                            RankTop.mode == gamemode,
                        )
                    )
                ).first()
                if not rank_top:
                    rank_top = RankTop(
                        user_id=user.user_id,
                        mode=gamemode,
                        rank=rank,
                        date=today,
                    )
                    session.add(rank_top)
                elif rank_top.rank > rank:
                    rank_top.rank = rank
                    rank_top.date = today

                rank += 1
                processed_users += 1
            await session.commit()
            if processed_users > 0:
                logger.info(
                    "Updated ranks for {} on {} ({} users)",
                    gamemode.name,
                    target_date,
                    processed_users,
                )
            else:
                logger.info("No users found for {} on {}", gamemode.name, target_date)
    logger.success("User rank calculation completed for {}", target_date)
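Note: the `is_today` flag exists so the same coroutine can be reused outside the midnight cron, e.g. to backfill today's snapshot from a maintenance shell. A minimal sketch:

    import asyncio

    from app.tasks import calculate_user_rank

    # Write rank history for today instead of yesterday (the cron default).
    asyncio.run(calculate_user_rank(is_today=True))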
app/tasks/create_banchobot.py (new file)
@@ -0,0 +1,31 @@
from __future__ import annotations

from app.const import BANCHOBOT_ID
from app.database.statistics import UserStatistics
from app.database.user import User
from app.dependencies.database import with_db
from app.log import logger
from app.models.score import GameMode

from sqlmodel import exists, select


async def create_banchobot():
    async with with_db() as session:
        is_exist = (await session.exec(select(exists()).where(User.id == BANCHOBOT_ID))).first()
        if not is_exist:
            banchobot = User(
                username="BanchoBot",
                email="banchobot@ppy.sh",
                is_bot=True,
                pw_bcrypt="0",
                id=BANCHOBOT_ID,
                avatar_url="https://a.ppy.sh/3",
                country_code="SH",
                website="https://twitter.com/banchoboat",
            )
            session.add(banchobot)
            statistics = UserStatistics(user_id=BANCHOBOT_ID, mode=GameMode.OSU)
            session.add(statistics)
            await session.commit()
            logger.success("BanchoBot user created")
app/tasks/daily_challenge.py (new file)
@@ -0,0 +1,181 @@
from __future__ import annotations

import json
from datetime import UTC, timedelta

from app.const import BANCHOBOT_ID
from app.database.daily_challenge import DailyChallengeStats
from app.database.playlist_best_score import PlaylistBestScore
from app.database.playlists import Playlist
from app.database.room import Room
from app.database.score import Score
from app.database.user import User
from app.dependencies.database import get_redis, with_db
from app.dependencies.scheduler import get_scheduler
from app.log import logger
from app.models.metadata_hub import DailyChallengeInfo
from app.models.mods import APIMod, get_available_mods
from app.models.room import RoomCategory
from app.service.room import create_playlist_room
from app.utils import are_same_weeks, utcnow

from sqlmodel import col, select


async def create_daily_challenge_room(
    beatmap: int,
    ruleset_id: int,
    duration: int,
    required_mods: list[APIMod] = [],
    allowed_mods: list[APIMod] = [],
) -> Room:
    async with with_db() as session:
        today = utcnow().date()
        return await create_playlist_room(
            session=session,
            name=str(today),
            host_id=BANCHOBOT_ID,
            playlist=[
                Playlist(
                    id=0,
                    room_id=0,
                    owner_id=BANCHOBOT_ID,
                    ruleset_id=ruleset_id,
                    beatmap_id=beatmap,
                    required_mods=required_mods,
                    allowed_mods=allowed_mods,
                )
            ],
            category=RoomCategory.DAILY_CHALLENGE,
            duration=duration,
        )


@get_scheduler().scheduled_job("cron", hour=0, minute=0, second=0, id="daily_challenge")
async def daily_challenge_job():
    from app.signalr.hub import MetadataHubs

    now = utcnow()
    redis = get_redis()
    key = f"daily_challenge:{now.date()}"
    if not await redis.exists(key):
        return
    async with with_db() as session:
        room = (
            await session.exec(
                select(Room).where(
                    Room.category == RoomCategory.DAILY_CHALLENGE,
                    col(Room.ends_at) > utcnow(),
                )
            )
        ).first()
        if room:
            return

    try:
        beatmap = await redis.hget(key, "beatmap")  # pyright: ignore[reportGeneralTypeIssues]
        ruleset_id = await redis.hget(key, "ruleset_id")  # pyright: ignore[reportGeneralTypeIssues]
        required_mods = await redis.hget(key, "required_mods")  # pyright: ignore[reportGeneralTypeIssues]
        allowed_mods = await redis.hget(key, "allowed_mods")  # pyright: ignore[reportGeneralTypeIssues]

        if beatmap is None or ruleset_id is None:
            logger.warning(
                f"[DailyChallenge] Missing required data for daily challenge {now}. Will try again in 5 minutes."
            )
            get_scheduler().add_job(
                daily_challenge_job,
                "date",
                run_date=utcnow() + timedelta(minutes=5),
            )
            return

        beatmap_int = int(beatmap)
        ruleset_id_int = int(ruleset_id)

        required_mods_list = []
        allowed_mods_list = []
        if required_mods:
            required_mods_list = json.loads(required_mods)
        if allowed_mods:
            allowed_mods_list = json.loads(allowed_mods)
        else:
            allowed_mods_list = get_available_mods(ruleset_id_int, required_mods_list)

        next_day = (now + timedelta(days=1)).replace(hour=0, minute=0, second=0, microsecond=0)
        room = await create_daily_challenge_room(
            beatmap=beatmap_int,
            ruleset_id=ruleset_id_int,
            required_mods=required_mods_list,
            allowed_mods=allowed_mods_list,
            duration=int((next_day - now - timedelta(minutes=2)).total_seconds() / 60),
        )
        await MetadataHubs.broadcast_call("DailyChallengeUpdated", DailyChallengeInfo(room_id=room.id))
        logger.success(f"[DailyChallenge] Added today's daily challenge: {beatmap=}, {ruleset_id=}, {required_mods=}")
        return
    except (ValueError, json.JSONDecodeError) as e:
        logger.warning(f"[DailyChallenge] Error processing daily challenge data: {e} Will try again in 5 minutes.")
    except Exception as e:
        logger.exception(f"[DailyChallenge] Unexpected error in daily challenge job: {e} Will try again in 5 minutes.")
    get_scheduler().add_job(
        daily_challenge_job,
        "date",
        run_date=utcnow() + timedelta(minutes=5),
    )


@get_scheduler().scheduled_job("cron", hour=0, minute=1, second=0, id="daily_challenge_last_top")
async def process_daily_challenge_top():
    async with with_db() as session:
        now = utcnow()
        room = (
            await session.exec(
                select(Room).where(
                    Room.category == RoomCategory.DAILY_CHALLENGE,
                    col(Room.ends_at) > now - timedelta(days=1),
                    col(Room.ends_at) < now,
                )
            )
        ).first()
        participated_users = []
        if room is not None:
            scores = (
                await session.exec(
                    select(PlaylistBestScore)
                    .where(
                        PlaylistBestScore.room_id == room.id,
                        PlaylistBestScore.playlist_id == 0,
                        col(PlaylistBestScore.score).has(col(Score.passed).is_(True)),
                    )
                    .order_by(col(PlaylistBestScore.total_score).desc())
                )
            ).all()
            total_score_count = len(scores)
            for i, score in enumerate(scores):
                stats = await session.get(DailyChallengeStats, score.user_id)
                if stats is None:  # should not happen
                    continue
                if stats.last_update is None or stats.last_update.replace(tzinfo=UTC).date() != now.date():
                    # Placement fraction of rank (i + 1) within the field
                    # (was `ceil(i + 1 / total_score_count)`; precedence fixed).
                    placement = (i + 1) / total_score_count
                    if total_score_count < 10 or placement <= 0.1:
                        stats.top_10p_placements += 1
                    if total_score_count < 2 or placement <= 0.5:
                        stats.top_50p_placements += 1
                participated_users.append(score.user_id)
                stats.last_update = now
            await session.commit()

        user_ids = (await session.exec(select(User.id).where(col(User.id).not_in(participated_users)))).all()
        for id in user_ids:
            stats = await session.get(DailyChallengeStats, id)
            if stats is None:  # should not happen
                continue
            stats.daily_streak_current = 0
            if stats.last_weekly_streak and not are_same_weeks(
                stats.last_weekly_streak.replace(tzinfo=UTC), now - timedelta(days=7)
            ):
                stats.weekly_streak_current = 0
            stats.last_update = now
        await session.commit()
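Note: `daily_challenge_job` only consumes the `daily_challenge:{date}` hash; something else must seed it before midnight. A sketch of the producer side (field names are taken from the `hget` calls above; the seeding function itself is hypothetical):

    import json

    from app.dependencies.database import get_redis
    from app.utils import utcnow

    async def seed_daily_challenge(beatmap_id: int, ruleset_id: int) -> None:
        redis = get_redis()
        key = f"daily_challenge:{utcnow().date()}"
        await redis.hset(
            key,
            mapping={
                "beatmap": beatmap_id,
                "ruleset_id": ruleset_id,
                # Optional; the job falls back to get_available_mods when
                # allowed_mods is absent.
                "required_mods": json.dumps([]),
            },
        )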
app/tasks/database_cleanup.py (new file)
@@ -0,0 +1,21 @@
from __future__ import annotations

from app.dependencies.database import with_db
from app.dependencies.scheduler import get_scheduler
from app.log import logger
from app.service.database_cleanup_service import DatabaseCleanupService


@get_scheduler().scheduled_job(
    "interval",
    id="cleanup_database",
    hours=1,
)
async def scheduled_cleanup_job():
    async with with_db() as session:
        logger.info("Starting database cleanup...")
        results = await DatabaseCleanupService.run_full_cleanup(session)
        total = sum(results.values())
        if total > 0:
            logger.success(f"Cleanup completed, total records cleaned: {total}")
        return results
app/tasks/geoip.py (new file)
@@ -0,0 +1,57 @@
"""
Scheduled Update Service

Periodically update the MaxMind GeoIP database
"""

from __future__ import annotations

import asyncio

from app.config import settings
from app.dependencies.geoip import get_geoip_helper
from app.dependencies.scheduler import get_scheduler
from app.log import logger


@get_scheduler().scheduled_job(
    "cron",
    day_of_week=settings.geoip_update_day,
    hour=settings.geoip_update_hour,
    minute=0,
    id="geoip_weekly_update",
    name="Weekly GeoIP database update",
)
async def update_geoip_database():
    """Asynchronous task to update the GeoIP database."""
    try:
        logger.info("Starting scheduled GeoIP database update...")
        geoip = get_geoip_helper()

        # Run the synchronous update method in a background thread
        loop = asyncio.get_running_loop()
        await loop.run_in_executor(None, lambda: geoip.update(force=False))

        logger.info("Scheduled GeoIP database update completed successfully")
    except Exception as e:
        logger.error(f"Scheduled GeoIP database update failed: {e}")


async def init_geoip():
    """Asynchronously initialize the GeoIP database."""
    try:
        geoip = get_geoip_helper()
        logger.info("Initializing GeoIP database...")

        # Run the synchronous update method in a background thread;
        # force=False means only download if files don't exist or are expired
        loop = asyncio.get_running_loop()
        await loop.run_in_executor(None, lambda: geoip.update(force=False))

        logger.info("GeoIP database initialization completed")
    except Exception as e:
        logger.error(f"GeoIP database initialization failed: {e}")
        # Do not raise an exception to avoid blocking application startup
app/tasks/load_achievements.py (new file)
@@ -0,0 +1,18 @@
from __future__ import annotations

import importlib

from app.log import logger
from app.models.achievement import MEDALS, Medals
from app.path import ACHIEVEMENTS_DIR


def load_achievements() -> Medals:
    for module in ACHIEVEMENTS_DIR.iterdir():
        if module.is_file() and module.suffix == ".py":
            module_name = module.stem
            module_achievements = importlib.import_module(f"app.achievements.{module_name}")
            medals = getattr(module_achievements, "MEDALS", {})
            MEDALS.update(medals)
            logger.success(f"Successfully loaded {len(medals)} achievements from {module_name}.py")
    return MEDALS
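Note: the loader's contract with `app/achievements/*.py` is loose by design: `getattr(module, "MEDALS", {})` means a module without a `MEDALS` mapping is silently skipped rather than failing startup. Typical usage is just:

    from app.tasks import load_achievements

    medals = load_achievements()
    print(f"{len(medals)} achievements registered")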
app/tasks/osu_rx_statistics.py (new file)
@@ -0,0 +1,63 @@
from __future__ import annotations

from app.config import settings
from app.const import BANCHOBOT_ID
from app.database.statistics import UserStatistics
from app.database.user import User
from app.dependencies.database import with_db
from app.log import logger
from app.models.score import GameMode

from sqlalchemy import exists
from sqlmodel import select


async def create_rx_statistics():
    async with with_db() as session:
        users = (await session.exec(select(User.id))).all()
        total_users = len(users)
        logger.info("Ensuring RX/AP statistics exist for %s users", total_users)
        rx_created = 0
        ap_created = 0
        for i in users:
            if i == BANCHOBOT_ID:
                continue

            if settings.enable_rx:
                for mode in (
                    GameMode.OSURX,
                    GameMode.TAIKORX,
                    GameMode.FRUITSRX,
                ):
                    is_exist = (
                        await session.exec(
                            select(exists()).where(
                                UserStatistics.user_id == i,
                                UserStatistics.mode == mode,
                            )
                        )
                    ).first()
                    if not is_exist:
                        statistics_rx = UserStatistics(mode=mode, user_id=i)
                        session.add(statistics_rx)
                        rx_created += 1
            if settings.enable_ap:
                is_exist = (
                    await session.exec(
                        select(exists()).where(
                            UserStatistics.user_id == i,
                            UserStatistics.mode == GameMode.OSUAP,
                        )
                    )
                ).first()
                if not is_exist:
                    statistics_ap = UserStatistics(mode=GameMode.OSUAP, user_id=i)
                    session.add(statistics_ap)
                    ap_created += 1
        await session.commit()
        if rx_created or ap_created:
            logger.success(
                "Created %s RX statistics rows and %s AP statistics rows during backfill",
                rx_created,
                ap_created,
            )
app/tasks/recalculate_banned_beatmap.py (new file)
@@ -0,0 +1,130 @@
from __future__ import annotations

import asyncio
import json

from app.calculator import calculate_pp
from app.config import settings
from app.database.beatmap import BannedBeatmaps, Beatmap
from app.database.best_scores import PPBestScore
from app.database.score import Score, calculate_user_pp
from app.database.statistics import UserStatistics
from app.dependencies.database import get_redis, with_db
from app.dependencies.fetcher import get_fetcher
from app.dependencies.scheduler import get_scheduler
from app.log import logger
from app.models.mods import mods_can_get_pp

from sqlmodel import col, delete, select


@get_scheduler().scheduled_job("interval", id="recalculate_banned_beatmap", hours=1)
async def recalculate_banned_beatmap():
    redis = get_redis()
    last_banned_beatmaps = set()
    last_banned = await redis.get("last_banned_beatmap")
    if last_banned:
        last_banned_beatmaps = set(json.loads(last_banned))
    affected_users = set()

    async with with_db() as session:
        query = select(BannedBeatmaps.beatmap_id).distinct()
        if last_banned_beatmaps:
            query = query.where(col(BannedBeatmaps.beatmap_id).not_in(last_banned_beatmaps))
        new_banned_beatmaps = (await session.exec(query)).all()

        current_banned = (await session.exec(select(BannedBeatmaps.beatmap_id).distinct())).all()
        unbanned_beatmaps = [b for b in last_banned_beatmaps if b not in current_banned]
        for i in new_banned_beatmaps:
            last_banned_beatmaps.add(i)
            await session.execute(delete(PPBestScore).where(col(PPBestScore.beatmap_id) == i))
            scores = (await session.exec(select(Score).where(Score.beatmap_id == i, Score.pp > 0))).all()
            for score in scores:
                score.pp = 0
                affected_users.add((score.user_id, score.gamemode))

        if unbanned_beatmaps:
            fetcher = await get_fetcher()
            for beatmap_id in unbanned_beatmaps:
                last_banned_beatmaps.discard(beatmap_id)
                try:
                    scores = (
                        await session.exec(
                            select(Score).where(
                                Score.beatmap_id == beatmap_id,
                                col(Score.passed).is_(True),
                            )
                        )
                    ).all()
                except Exception:
                    logger.exception(f"Failed to query scores for unbanned beatmap {beatmap_id}")
                    continue

                prev: dict[tuple[int, int], PPBestScore] = {}
                for score in scores:
                    attempts = 3
                    while attempts > 0:
                        try:
                            db_beatmap = await fetcher.get_or_fetch_beatmap_raw(redis, beatmap_id)
                            break
                        except Exception:
                            attempts -= 1
                            await asyncio.sleep(1)
                    else:
                        logger.warning(f"Could not fetch beatmap raw for {beatmap_id}, skipping pp calc")
                        continue

                    try:
                        beatmap_obj = await Beatmap.get_or_fetch(session, fetcher, bid=beatmap_id)
                    except Exception:
                        beatmap_obj = None

                    ranked = (
                        beatmap_obj.beatmap_status.has_pp() if beatmap_obj else False
                    ) or settings.enable_all_beatmap_pp

                    if not ranked or not mods_can_get_pp(int(score.gamemode), score.mods):
                        continue

                    try:
                        pp = await calculate_pp(score, db_beatmap, session)
                        if not pp:
                            continue
                        key = (score.beatmap_id, score.user_id)
                        if key not in prev or prev[key].pp < pp:
                            best_score = PPBestScore(
                                user_id=score.user_id,
                                beatmap_id=beatmap_id,
                                acc=score.accuracy,
                                score_id=score.id,
                                pp=pp,
                                gamemode=score.gamemode,
                            )
                            prev[key] = best_score
                            affected_users.add((score.user_id, score.gamemode))
                            score.pp = pp
                    except Exception:
                        logger.exception(f"Error calculating pp for score {score.id} on unbanned beatmap {beatmap_id}")
                        continue

                for best in prev.values():
                    session.add(best)

        for user_id, gamemode in affected_users:
            statistics = (
                await session.exec(
                    select(UserStatistics)
                    .where(UserStatistics.user_id == user_id)
                    .where(col(UserStatistics.mode) == gamemode)
                )
            ).first()
            if not statistics:
                continue
            statistics.pp, statistics.hit_accuracy = await calculate_user_pp(session, statistics.user_id, gamemode)

        await session.commit()
        logger.info(
            f"Recalculated banned beatmaps, banned {len(new_banned_beatmaps)} beatmaps, "
            f"unbanned {len(unbanned_beatmaps)} beatmaps, affected {len(affected_users)} users"
        )
    await redis.set("last_banned_beatmap", json.dumps(list(last_banned_beatmaps)))
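Note: the `last_banned_beatmap` Redis key is the job's only memory of what it has already processed, which gives a simple operational lever: deleting it makes the next hourly run treat every currently banned beatmap as newly banned. A hypothetical maintenance snippet:

    from app.dependencies.database import get_redis

    async def reset_banned_beatmap_bookkeeping() -> None:
        # Next run of recalculate_banned_beatmap re-zeroes pp on all banned maps.
        await get_redis().delete("last_banned_beatmap")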
app/tasks/recalculate_failed_score.py (new file)
@@ -0,0 +1,53 @@
from __future__ import annotations

from app.calculator import pre_fetch_and_calculate_pp
from app.database.score import Score, calculate_user_pp
from app.database.statistics import UserStatistics
from app.dependencies.database import get_redis, with_db
from app.dependencies.fetcher import get_fetcher
from app.dependencies.scheduler import get_scheduler
from app.log import logger

from sqlmodel import select


@get_scheduler().scheduled_job("interval", id="recalculate_failed_score", minutes=5)
async def recalculate_failed_score():
    redis = get_redis()
    fetcher = await get_fetcher()
    need_add = set()
    affected_user = set()
    while True:
        scores = await redis.lpop("score:need_recalculate", 100)  # pyright: ignore[reportGeneralTypeIssues]
        if not scores:
            break
        if isinstance(scores, bytes):
            scores = [scores]
        async with with_db() as session:
            for score_id in scores:
                score_id = int(score_id)
                score = await session.get(Score, score_id)
                if score is None:
                    continue
                pp, succeeded = await pre_fetch_and_calculate_pp(score, session, redis, fetcher)
                if not succeeded:
                    need_add.add(score_id)
                else:
                    score.pp = pp
                    logger.info(
                        f"Recalculated PP for score {score.id} (user: {score.user_id}) at {score.ended_at}: {pp}"
                    )
                    affected_user.add((score.user_id, score.gamemode))
            await session.commit()
            for user_id, gamemode in affected_user:
                stats = (
                    await session.exec(
                        select(UserStatistics).where(UserStatistics.user_id == user_id, UserStatistics.mode == gamemode)
                    )
                ).first()
                if not stats:
                    continue
                stats.pp, stats.hit_accuracy = await calculate_user_pp(session, user_id, gamemode)
            await session.commit()
    if need_add:
        await redis.rpush("score:need_recalculate", *need_add)  # pyright: ignore[reportGeneralTypeIssues]
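Note: this job's id was originally pasted as "recalculate_banned_beatmap", which would have collided with the hourly job above; it is corrected here. The queue it drains is plain Redis, so the producer side is a one-liner. A minimal sketch (the submission-path context is hypothetical):

    from app.dependencies.database import get_redis

    async def queue_for_recalculation(score_id: int) -> None:
        # Picked up by recalculate_failed_score on its next 5-minute pass.
        await get_redis().rpush("score:need_recalculate", score_id)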