feat(performance-point): switch performance calculator to performance-server (#80)
* feat(config): make `performance_server` the default calculator
* deploy(docker): use osu-performance-server
* docs(readme): add ruleset download instructions
* chore(dev): update development environment
* feat(dev): update development environment setup and service startup order
* fix(deps): move `rosu-pp-py` to `project.optional-dependencies`
* feat(beatmap): handle deleted beatmaps
* feat(performance-server): add a long timeout for calculation
* feat(recalculate): enhance CLI arguments for performance, leaderboard, and rating recalculations with CSV output support
* fix(recalculate): resolve review feedback
* Apply suggestions from code review

  Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>

* fix(beatmapsync): resolve too-long line

---------

Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
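With this change the server defaults to the external performance calculator. As a rough configuration sketch, the values below simply mirror the new defaults and the documented `CALCULATOR_CONFIG` example from this diff; point the URL at wherever osu-performance-server actually runs:

```bash
# Sketch only — mirrors the defaults introduced in this PR.
CALCULATOR="performance_server"
CALCULATOR_CONFIG='{"server_url":"http://localhost:5225"}'

# The in-process calculator remains available as an optional extra:
#   uv sync --extra rosu
#   CALCULATOR="rosu"
#   CALCULATOR_CONFIG='{}'
```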
@@ -107,6 +107,6 @@
     80,
     8080
   ],
-  "postCreateCommand": "uv sync --dev && uv run alembic upgrade head && uv run pre-commit install && cd spectator-server && dotnet restore",
+  "postCreateCommand": "uv sync --dev --all-extras && uv run alembic upgrade head && uv run pre-commit install && cd spectator-server && dotnet restore && cd ../performance-server && dotnet restore",
   "remoteUser": "vscode"
 }

@@ -22,6 +22,7 @@ services:
       REDIS_URL: redis://redis:6379/0
       OSU_CLIENT_ID: "5"
       OSU_CLIENT_SECRET: "FGc9GAtyHzeQDshWP5Ah7dega8hJACAJpQtw6OXk"
+      CALCULATOR_CONFIG: '{"server_url":"http://localhost:8090"}'
 
       # Spectator Server 环境变量
       SAVE_REPLAYS: "0"
@@ -34,11 +35,12 @@ services:
       SHARED_INTEROP_SECRET: "dev-interop-secret"
       SENTRY_DSN: "https://5840d8cb8d2b4d238369443bedef1d74@glitchtip.g0v0.top/4"
       USE_LEGACY_RSA_AUTH: "0"
 
       # .NET 环境变量
       DOTNET_CLI_TELEMETRY_OPTOUT: "1"
       DOTNET_NOLOGO: "1"
+      RULESETS_PATH: "/workspaces/osu_lazer_api/rulesets"
 
   mysql:
     image: mysql:8.0
     restart: unless-stopped

@@ -1,7 +1,7 @@
 #!/bin/bash
 
 # 开发环境启动脚本
-# 同时启动 FastAPI 和 Spectator Server
+# 按依赖顺序启动:Performance Server → FastAPI → Spectator Server
 
 set -e
 
@@ -16,27 +16,92 @@ fi
 
 echo "🚀 启动开发环境..."
 
-# 启动 FastAPI 服务器
+# 清理函数
+cleanup() {
+    echo "🛑 正在停止服务..."
+    [ ! -z "$SPECTATOR_PID" ] && kill $SPECTATOR_PID 2>/dev/null || true
+    [ ! -z "$FASTAPI_PID" ] && kill $FASTAPI_PID 2>/dev/null || true
+    [ ! -z "$PERFORMANCE_PID" ] && kill $PERFORMANCE_PID 2>/dev/null || true
+    exit ${1:-0}
+}
+
+# 捕获中断信号和错误
+trap 'cleanup 1' INT TERM ERR
+
+# 健康检查函数
+wait_for_service() {
+    local url=$1
+    local service_name=$2
+    local pre_sleep=$3
+    local max_attempts=30
+    local attempt=0
+
+    echo "等待 $service_name 启动..."
+    if [ ! -z "$pre_sleep" ]; then
+        sleep $pre_sleep
+    fi
+
+    while [ $attempt -lt $max_attempts ]; do
+        # 使用 curl 检查,添加 10 秒超时,区分连接失败和 HTTP 错误
+        http_code=$(curl -s -o /dev/null -w "%{http_code}" --connect-timeout 5 --max-time 5 "$url" 2>/dev/null || echo "000")
+
+        if [ "$http_code" = "200" ] || [ "$http_code" = "404" ]; then
+            echo "✅ $service_name 已就绪 (HTTP $http_code)"
+            return 0
+        elif [ "$http_code" = "000" ]; then
+            # 连接被拒绝或超时,服务还在启动中
+            echo " ⏳ $service_name 正在启动... (尝试 $((attempt + 1))/$max_attempts)"
+        else
+            # 其他 HTTP 状态码
+            echo " ⚠️ $service_name 返回 HTTP $http_code (尝试 $((attempt + 1))/$max_attempts)"
+        fi
+
+        attempt=$((attempt + 1))
+        sleep 2
+    done
+
+    echo "❌ $service_name 启动超时"
+    return 1
+}
+
+# 1. 启动 Performance Server (最底层依赖)
+echo "启动 Performance Server..."
+cd /workspaces/osu_lazer_api/performance-server
+dotnet run --project PerformanceServer --urls "http://0.0.0.0:8090" &
+PERFORMANCE_PID=$!
+
+# 等待 Performance Server 就绪
+if ! wait_for_service "http://localhost:8090" "Performance Server"; then
+    echo "Performance Server 启动失败,停止启动流程"
+    cleanup 1
+fi
+
+# 2. 启动 FastAPI 服务器 (依赖 Performance Server)
 echo "启动 FastAPI 服务器..."
 cd /workspaces/osu_lazer_api
 uv run uvicorn main:app --host 0.0.0.0 --port 8000 --reload &
 FASTAPI_PID=$!
 
-# 启动 Spectator Server
+# 等待 FastAPI 就绪
+if ! wait_for_service "http://localhost:8000/health" "FastAPI"; then
+    echo "FastAPI 启动失败,停止启动流程"
+    cleanup 1
+fi
+
+# 3. 启动 Spectator Server (依赖 FastAPI)
 echo "启动 Spectator Server..."
 cd /workspaces/osu_lazer_api/spectator-server
 dotnet run --project osu.Server.Spectator --urls "http://0.0.0.0:8086" &
 SPECTATOR_PID=$!
 
-echo "✅ 服务已启动:"
+echo ""
+echo "✅ 所有服务已启动:"
 echo " - FastAPI: http://localhost:8000"
 echo " - Spectator Server: http://localhost:8086"
+echo " - Performance Server: http://localhost:8090"
 echo " - Nginx (统一入口): http://localhost:8080"
 echo ""
 echo "按 Ctrl+C 停止所有服务"
 
 # 等待用户中断
-trap 'echo "🛑 正在停止服务..."; kill $FASTAPI_PID $SPECTATOR_PID; exit 0' INT
 
-# 保持脚本运行
 wait

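For a quick manual check of the dev environment, the same kind of probe the script's `wait_for_service` helper performs can be run by hand; this is only an illustration using the ports configured in the script above:

```bash
# Probe each dev service once (ports taken from the startup script above).
for url in http://localhost:8090 http://localhost:8000/health http://localhost:8086; do
    code=$(curl -s -o /dev/null -w "%{http_code}" --max-time 5 "$url" || echo "000")
    echo "$url -> HTTP $code"
done
```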
.github/workflows/docker-publish-osurx.yml (vendored, 57 lines deleted)
@@ -1,57 +0,0 @@
-name: Build and Push Docker Image (osu!RX)
-
-on:
-  push:
-    branches: [ main ]
-    paths-ignore:
-      - '*.md'
-      - '**/*.md'
-      - 'docs/**'
-
-env:
-  IMAGE_NAME: mingxuangame/g0v0-server
-
-jobs:
-  build-and-push-osurx:
-    runs-on: ubuntu-latest
-    permissions:
-      contents: read
-      packages: write
-
-    steps:
-      - name: Checkout repository
-        uses: actions/checkout@v5
-
-      - name: Set up QEMU
-        uses: docker/setup-qemu-action@v3
-
-      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v3
-
-      - name: Log in to Docker Hub
-        uses: docker/login-action@v3
-        with:
-          username: ${{ secrets.DOCKERHUB_USERNAME }}
-          password: ${{ secrets.DOCKERHUB_TOKEN }}
-
-      - name: Extract metadata
-        id: meta
-        uses: docker/metadata-action@v5
-        with:
-          images: ${{ env.IMAGE_NAME }}
-          tags: |
-            type=raw,value=osurx
-            type=sha,prefix=osurx-
-
-      - name: Build and push Docker image (osu!RX)
-        uses: docker/build-push-action@v6
-        with:
-          context: .
-          file: ./Dockerfile-osurx
-          platforms: linux/amd64, linux/arm64
-          push: true
-          tags: |
-            ${{ env.IMAGE_NAME }}:osurx
-            ${{ env.IMAGE_NAME }}:osurx-${{ github.sha }}
-          cache-from: type=gha,scope=osurx
-          cache-to: type=gha,mode=max,scope=osurx

.gitignore (vendored, 3 lines changed)
@@ -216,7 +216,7 @@ bancho.py-master/*
 storage/
 replays/
 osu-master/*
+rulesets/
 geoip/*
 newrelic.ini
 logs/
@@ -230,3 +230,4 @@ config/*
 !config/
 !config/.gitkeep
 osu-web-master/*
+performance-server

@@ -6,11 +6,18 @@
 git clone https://github.com/GooGuTeam/g0v0-server.git
 ```
 
-此外,您还需要 clone 一个 spectator-server 到 g0v0-server 的文件夹。
+此外,您还需要:
 
+- clone 旁观服务器到 g0v0-server 的文件夹。
 ```bash
 git clone https://github.com/GooGuTeam/osu-server-spectator.git spectator-server
 ```
+- clone 表现分计算器到 g0v0-server 的文件夹。
+
+```bash
+git clone https://github.com/GooGuTeam/osu-performance-server.git performance-server
+```
+- 下载并放置自定义规则集 DLL 到 `rulesets/` 目录(如果需要)。
+
 ## 开发环境
 

@@ -1,55 +0,0 @@
-FROM ghcr.io/astral-sh/uv:python3.13-bookworm-slim AS builder
-WORKDIR /app
-
-RUN apt-get update \
-    && apt-get install -y gcc pkg-config default-libmysqlclient-dev git \
-    && rm -rf /var/lib/apt/lists/* \
-    && curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y
-
-ENV PATH="/root/.cargo/bin:${PATH}" \
-    PYTHONUNBUFFERED=1 PYTHONDONTWRITEBYTECODE=1 UV_PROJECT_ENVIRONMENT=/app/.venv
-
-ENV PYTHONUNBUFFERED=1
-ENV PYTHONDONTWRITEBYTECODE=1
-ENV UV_PROJECT_ENVIRONMENT=/app/.venv
-
-COPY pyproject.toml uv.lock ./
-
-RUN uv sync --frozen --no-dev
-RUN uv pip install git+https://github.com/GooGuTeam/gu-pp-py.git
-
-COPY alembic.ini ./
-COPY tools/ ./tools/
-COPY migrations/ ./migrations/
-COPY static/ ./app/static/
-COPY app/ ./app/
-COPY main.py ./
-
-# ---
-
-FROM ghcr.io/astral-sh/uv:python3.13-bookworm-slim
-WORKDIR /app
-
-RUN apt-get update \
-    && apt-get install -y curl netcat-openbsd \
-    && rm -rf /var/lib/apt/lists/*
-
-ENV PATH="/app/.venv/bin:${PATH}" \
-    PYTHONUNBUFFERED=1 PYTHONDONTWRITEBYTECODE=1
-
-COPY --from=builder /app/.venv /app/.venv
-COPY --from=builder /app /app
-
-RUN mkdir -p /app/logs
-VOLUME ["/app/logs"]
-
-COPY docker-entrypoint.sh /app/docker-entrypoint.sh
-RUN chmod +x /app/docker-entrypoint.sh
-
-EXPOSE 8000
-
-HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \
-    CMD curl -f http://localhost:8000/health || exit 1
-
-ENTRYPOINT ["/app/docker-entrypoint.sh"]
-CMD ["uv", "run", "--no-sync", "uvicorn", "main:app", "--host", "0.0.0.0", "--port", "8000"]

@@ -56,14 +56,16 @@ Go to [custom-rulesets](https://github.com/GooGuTeam/custom-rulesets) to downloa
 ```bash
 cp .env.example .env
 ```
-3. Start the service
+3. (Optional) Download rulesets
+Go to [custom-rulesets](https://github.com/GooGuTeam/custom-rulesets/releases/latest) to download the custom rulesets modified for g0v0-server. Place the downloaded DLLs into the `rulesets/` directory to enable custom ruleset support in the spectator server and performance calculator.
+4. Start the service
 ```bash
 # Standard server
 docker-compose -f docker-compose.yml up -d
 # Enable osu!RX and osu!AP statistics (Gu pp algorithm based on ppy-sb pp algorithm)
 docker-compose -f docker-compose-osurx.yml up -d
 ```
-4. Connect to the server from the game
+5. Connect to the server from the game
 
 Use a [custom osu!lazer client](https://github.com/GooGuTeam/osu), or use [LazerAuthlibInjection](https://github.com/MingxuanGame/LazerAuthlibInjection), and change the server settings to the server's address.
 

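If it helps, staging the downloaded ruleset DLLs could look like the sketch below; the DLL filename is made up, and `rulesets/` is the directory the compose files in this PR mount into the spectator and performance-server containers:

```bash
# Illustrative only — the DLL name is hypothetical; use the files from the custom-rulesets release.
mkdir -p rulesets
cp ~/Downloads/osu.Game.Rulesets.Example.dll rulesets/
# docker-compose mounts ./rulesets to /data/rulesets inside the spectator and performance-server containers.
```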
@@ -60,7 +60,11 @@ cd g0v0-server
 cp .env.example .env
 ```
 
-3. 启动服务
+3. (可选)下载 rulesets
+
+前往 [custom-rulesets](https://github.com/GooGuTeam/custom-rulesets/releases/latest) 下载为 g0v0-server 修改的自定义 ruleset。将下载的 DLL 放入 `rulesets/` 目录,以在旁观服务器和表现分计算器中启用自定义 ruleset 支持。
+
+4. 启动服务
 ```bash
 # 标准服务器
 docker-compose -f docker-compose.yml up -d
@@ -68,7 +72,7 @@ docker-compose -f docker-compose.yml up -d
 docker-compose -f docker-compose-osurx.yml up -d
 ```
 
-4. 通过游戏连接服务器
+5. 通过游戏连接服务器
 
 使用[自定义的 osu!lazer 客户端](https://github.com/GooGuTeam/osu),或者使用 [LazerAuthlibInjection](https://github.com/MingxuanGame/LazerAuthlibInjection),修改服务器设置为服务器的 IP
 

@@ -83,7 +83,7 @@ class PerformanceServerPerformanceCalculator(BasePerformanceCalculator):
 
     async def calculate_performance(self, beatmap_raw: str, score: "Score") -> PerformanceAttributes:
         # https://github.com/GooGuTeam/osu-performance-server#post-performance
-        async with AsyncClient() as client:
+        async with AsyncClient(timeout=15) as client:
             try:
                 resp = await client.post(
                     f"{self.server_url}/performance",
@@ -121,7 +121,7 @@ class PerformanceServerPerformanceCalculator(BasePerformanceCalculator):
         self, beatmap_raw: str, mods: list[APIMod] | None = None, gamemode: GameMode | None = None
     ) -> DifficultyAttributes:
         # https://github.com/GooGuTeam/osu-performance-server#post-difficulty
-        async with AsyncClient() as client:
+        async with AsyncClient(timeout=15) as client:
             try:
                 resp = await client.post(
                     f"{self.server_url}/difficulty",

@@ -114,14 +114,7 @@ STORAGE_SETTINGS='{
 """,
     "表现计算设置": """配置表现分计算器及其参数。
 
-### rosu-pp-py (默认)
-
-```bash
-CALCULATOR="rosu"
-CALCULATOR_CONFIG='{}'
-```
-
-### [osu-performance-server](https://github.com/GooGuTeam/osu-performance-server)
+### [osu-performance-server](https://github.com/GooGuTeam/osu-performance-server) (默认)
 
 ```bash
 CALCULATOR="performance_server"
@@ -129,6 +122,13 @@ CALCULATOR_CONFIG='{
     "server_url": "http://localhost:5225"
 }'
 ```
+
+### rosu-pp-py
+
+```bash
+CALCULATOR="rosu"
+CALCULATOR_CONFIG='{}'
+```
 """,
     }
 },
@@ -533,13 +533,13 @@ CALCULATOR_CONFIG='{
     # 表现计算设置
     calculator: Annotated[
         Literal["rosu", "performance_server"],
-        Field(default="rosu", description="表现分计算器"),
+        Field(default="performance_server", description="表现分计算器"),
         "表现计算设置",
     ]
     calculator_config: Annotated[
         dict[str, Any],
         Field(
-            default={},
+            default={"server_url": "http://localhost:5225"},
             description="表现分计算器配置 (JSON 格式),具体配置项请参考上方",
         ),
         "表现计算设置",

@@ -160,6 +160,7 @@ class BeatmapResp(BeatmapBase):
     failtimes: FailTimeResp | None = None
     top_tag_ids: list[APIBeatmapTag] | None = None
     current_user_tag_ids: list[int] | None = None
+    is_deleted: bool = False
 
     @classmethod
     async def from_db(
@@ -184,6 +185,7 @@ class BeatmapResp(BeatmapBase):
         beatmap_["status"] = beatmap_status.name.lower()
         beatmap_["ranked"] = beatmap_status.value
         beatmap_["mode_int"] = int(beatmap.mode)
+        beatmap_["is_deleted"] = beatmap.deleted_at is not None
         if not from_set:
             beatmap_["beatmapset"] = await BeatmapsetResp.from_db(beatmap.beatmapset, session=session, user=user)
         if beatmap.failtimes is not None:

@@ -23,11 +23,13 @@ class BeatmapRawFetcher(BaseFetcher):
             resp = await self._request(req_url)
             if resp.status_code >= 400:
                 continue
+            if not resp.text:
+                continue
             return resp.text
         raise HTTPError("Failed to fetch beatmap")
 
     async def _request(self, url: str) -> Response:
-        async with AsyncClient() as client:
+        async with AsyncClient(timeout=15) as client:
             response = await client.get(
                 url,
             )

@@ -182,6 +182,7 @@ class BeatmapsetUpdateService:
                 logger.error(f"failed to add missing beatmapset {missing}: {e}")
         if total > 0:
             logger.opt(colors=True).info(f"added {total} missing beatmapset")
+        await session.commit()
         self._adding_missing = False
 
     async def add(self, beatmapset: BeatmapsetResp, calculate_next_sync: bool = True):
@@ -397,7 +398,15 @@ class BeatmapsetUpdateService:
             existing_beatmap = await session.get(Beatmap, change.beatmap_id)
             if existing_beatmap:
                 await session.merge(new_db_beatmap)
+                if change.type == BeatmapChangeType.MAP_DELETED:
+                    existing_beatmap.deleted_at = utcnow()
                 await session.commit()
+            else:
+                if change.type == BeatmapChangeType.MAP_DELETED:
+                    logger.opt(colors=True).warning(
+                        f"<g>[beatmap: {change.beatmap_id}]</g> MAP_DELETED received "
+                        f"but beatmap not found in database; deletion skipped"
+                    )
             if change.type != BeatmapChangeType.STATUS_CHANGED:
                 await _process_update_or_delete_beatmaps(change.beatmap_id)
             await get_beatmapset_cache_service(get_redis()).invalidate_beatmap_lookup_cache(change.beatmap_id)

@@ -3,10 +3,10 @@ version: '3.8'
 services:
   app:
     # or use
-    # image: mingxuangame/g0v0-server:osurx
+    # image: mingxuangame/g0v0-server:latest
     build:
       context: .
-      dockerfile: Dockerfile-osurx
+      dockerfile: Dockerfile
     container_name: osu_api_server_osurx
     environment:
       - MYSQL_HOST=mysql
@@ -17,8 +17,7 @@ services:
       - ENABLE_ALL_MODS_PP=true
      - ENABLE_SUPPORTER_FOR_ALL_USERS=true
       - ENABLE_ALL_BEATMAP_LEADERBOARD=true
-      # - CALCULATOR=performance_server
-      # - CALCULATOR_CONFIG='{"server_url":"http://performance-server:8080"}'
+      - CALCULATOR_CONFIG='{"server_url":"http://performance-server:8080"}'
     env_file:
       - .env
     depends_on:
@@ -79,7 +78,7 @@ services:
     command: redis-server --appendonly yes
 
   spectator:
-    image: ghcr.io/googuteam/osu-server-spectator-custom-rulesets:master
+    image: ghcr.io/googuteam/osu-server-spectator:master
     pull_policy: never
     environment:
       - REPLAY_UPLOAD_THREADS=${REPLAY_UPLOAD_THREADS:-1}
@@ -98,6 +97,8 @@ services:
       - mysql
       - redis
     restart: unless-stopped
+    volumes:
+      - ./rulesets:/data/rulesets
     networks:
       - osu-network
 
@@ -111,14 +112,16 @@ services:
     networks:
       - osu-network
 
-  # performance-server:
-  #   image: ghcr.io/googuteam/osu-performance-server-osurx:custom-rulesets
-  #   container_name: performance_server_osurx
-  #   environment:
-  #     - SAVE_BEATMAP_FILES=false
-  #   restart: unless-stopped
-  #   networks:
-  #     - osu-network
+  performance-server:
+    image: ghcr.io/googuteam/osu-performance-server-osurx
+    container_name: performance_server_osurx
+    environment:
+      - SAVE_BEATMAP_FILES=false
+    restart: unless-stopped
+    networks:
+      - osu-network
+    volumes:
+      - ./rulesets:/data/rulesets
 
 volumes:
   mysql_data:

@@ -14,8 +14,7 @@ services:
       - MYSQL_HOST=mysql
       - MYSQL_PORT=3306
       - REDIS_URL=redis://redis:6379
-      # - CALCULATOR=performance_server
-      # - CALCULATOR_CONFIG='{"server_url":"http://performance-server:8080"}'
+      - CALCULATOR_CONFIG='{"server_url":"http://performance-server:8080"}'
     env_file:
       - .env
     depends_on:
@@ -23,6 +22,8 @@ services:
         condition: service_healthy
       redis:
         condition: service_healthy
+      performance-server:
+        condition: service_healthy
     volumes:
       - ./replays:/app/replays
       - ./storage:/app/storage
@@ -56,7 +57,7 @@ services:
       - osu-network
 
   spectator:
-    image: ghcr.io/googuteam/osu-server-spectator-custom-rulesets:master
+    image: ghcr.io/googuteam/osu-server-spectator:master
     pull_policy: never
     environment:
      - REPLAY_UPLOAD_THREADS=${REPLAY_UPLOAD_THREADS:-1}
@@ -75,6 +76,8 @@ services:
       - mysql
       - redis
     restart: unless-stopped
+    volumes:
+      - ./rulesets:/data/rulesets
     networks:
       - osu-network
 
@@ -104,14 +107,16 @@ services:
       - osu-network
     command: redis-server --appendonly yes
 
-  # performance-server:
-  #   image: ghcr.io/googuteam/osu-performance-server:custom-rulesets
-  #   container_name: performance_server
-  #   environment:
-  #     - SAVE_BEATMAP_FILES=false
-  #   restart: unless-stopped
-  #   networks:
-  #     - osu-network
+  performance-server:
+    image: ghcr.io/googuteam/osu-performance-server:latest
+    container_name: performance_server
+    environment:
+      - SAVE_BEATMAP_FILES=false
+    volumes:
+      - ./rulesets:/data/rulesets
+    restart: unless-stopped
+    networks:
+      - osu-network
 
 volumes:
   mysql_data:

@@ -5,6 +5,9 @@
     },
     {
       "path": "spectator-server"
+    },
+    {
+      "path": "performance-server"
     }
   ]
 }

@@ -31,7 +31,6 @@ dependencies = [
     "python-jose[cryptography]>=3.3.0",
     "python-multipart>=0.0.6",
     "redis>=5.0.1",
-    "rosu-pp-py>=3.1.0",
     "sentry-sdk[fastapi,httpx,loguru,sqlalchemy]>=2.34.1",
     "sqlalchemy>=2.0.23",
     "sqlmodel>=0.0.24",
@@ -40,6 +39,11 @@ dependencies = [
 ]
 authors = [{ name = "GooGuTeam" }]
 
+[project.optional-dependencies]
+rosu = [
+    "rosu-pp-py>=3.1.0",
+]
+
 [tool.ruff]
 line-length = 120
 target-version = "py312"

@@ -3,20 +3,23 @@ from __future__ import annotations
 import argparse
 import asyncio
 from collections.abc import Awaitable, Sequence
+import csv
 from dataclasses import dataclass
 from datetime import UTC, datetime
 from email.utils import parsedate_to_datetime
 import os
+from pathlib import Path
 import sys
 import warnings
 
 sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), "..")))
 
 from app.calculator import calculate_pp, calculate_score_to_level, init_calculator
+from app.calculators.performance import CalculateError
 from app.config import settings
 from app.const import BANCHOBOT_ID
 from app.database import TotalScoreBestScore, UserStatistics
-from app.database.beatmap import Beatmap
+from app.database.beatmap import Beatmap, calculate_beatmap_attributes
 from app.database.best_scores import BestScore
 from app.database.score import Score, calculate_playtime, calculate_user_pp
 from app.dependencies.database import engine, get_redis
@@ -38,40 +41,44 @@ warnings.filterwarnings("ignore")
 
 
 @dataclass(frozen=True)
-class RecalculateConfig:
+class GlobalConfig:
+    dry_run: bool
+    concurrency: int
+    output_csv: str | None
+
+
+@dataclass(frozen=True)
+class PerformanceConfig:
     user_ids: set[int]
     modes: set[GameMode]
     mods: set[str]
     beatmap_ids: set[int]
     beatmapset_ids: set[int]
-    dry_run: bool
-    concurrency: int
     recalculate_all: bool
 
 
-def parse_cli_args(argv: list[str]) -> RecalculateConfig:
+@dataclass(frozen=True)
+class LeaderboardConfig:
+    user_ids: set[int]
+    modes: set[GameMode]
+    mods: set[str]
+    beatmap_ids: set[int]
+    beatmapset_ids: set[int]
+    recalculate_all: bool
+
+
+@dataclass(frozen=True)
+class RatingConfig:
+    modes: set[GameMode]
+    beatmap_ids: set[int]
+    beatmapset_ids: set[int]
+    recalculate_all: bool
+
+
+def parse_cli_args(
+    argv: list[str],
+) -> tuple[str, GlobalConfig, PerformanceConfig | LeaderboardConfig | RatingConfig | None]:
     parser = argparse.ArgumentParser(description="Recalculate stored performance data")
-    parser.add_argument("--user-id", dest="user_ids", action="append", type=int, help="Filter by user id")
-    parser.add_argument(
-        "--mode",
-        dest="modes",
-        action="append",
-        help="Filter by game mode (accepts names like osu, taiko or numeric ids)",
-    )
-    parser.add_argument(
-        "--mod",
-        dest="mods",
-        action="append",
-        help="Filter by mod acronym (can be passed multiple times or comma separated)",
-    )
-    parser.add_argument("--beatmap-id", dest="beatmap_ids", action="append", type=int, help="Filter by beatmap id")
-    parser.add_argument(
-        "--beatmapset-id",
-        dest="beatmapset_ids",
-        action="append",
-        type=int,
-        help="Filter by beatmapset id",
-    )
     parser.add_argument("--dry-run", dest="dry_run", action="store_true", help="Execute without committing changes")
     parser.add_argument(
         "--concurrency",
@@ -81,13 +88,118 @@ def parse_cli_args(argv: list[str]) -> RecalculateConfig:
         help="Maximum number of concurrent recalculation tasks",
     )
     parser.add_argument(
+        "--output-csv",
+        dest="output_csv",
+        type=str,
+        help="Output results to a CSV file at the specified path",
+    )
+
+    subparsers = parser.add_subparsers(dest="command", help="Available commands")
+
+    # performance subcommand
+    perf_parser = subparsers.add_parser("performance", help="Recalculate performance points (pp) and best scores")
+    perf_parser.add_argument("--user-id", dest="user_ids", action="append", type=int, help="Filter by user id")
+    perf_parser.add_argument(
+        "--mode",
+        dest="modes",
+        action="append",
+        help="Filter by game mode (accepts names like osu, taiko or numeric ids)",
+    )
+    perf_parser.add_argument(
+        "--mod",
+        dest="mods",
+        action="append",
+        help="Filter by mod acronym (can be passed multiple times or comma separated)",
+    )
+    perf_parser.add_argument("--beatmap-id", dest="beatmap_ids", action="append", type=int, help="Filter by beatmap id")
+    perf_parser.add_argument(
+        "--beatmapset-id",
+        dest="beatmapset_ids",
+        action="append",
+        type=int,
+        help="Filter by beatmapset id",
+    )
+    perf_parser.add_argument(
         "--all",
         dest="recalculate_all",
         action="store_true",
         help="Recalculate all users across all modes (ignores filter requirement)",
     )
+
+    # leaderboard subcommand
+    lead_parser = subparsers.add_parser("leaderboard", help="Recalculate leaderboard (TotalScoreBestScore)")
+    lead_parser.add_argument("--user-id", dest="user_ids", action="append", type=int, help="Filter by user id")
+    lead_parser.add_argument(
+        "--mode",
+        dest="modes",
+        action="append",
+        help="Filter by game mode (accepts names like osu, taiko or numeric ids)",
+    )
+    lead_parser.add_argument(
+        "--mod",
+        dest="mods",
+        action="append",
+        help="Filter by mod acronym (can be passed multiple times or comma separated)",
+    )
+    lead_parser.add_argument("--beatmap-id", dest="beatmap_ids", action="append", type=int, help="Filter by beatmap id")
+    lead_parser.add_argument(
+        "--beatmapset-id",
+        dest="beatmapset_ids",
+        action="append",
+        type=int,
+        help="Filter by beatmapset id",
+    )
+    lead_parser.add_argument(
+        "--all",
+        dest="recalculate_all",
+        action="store_true",
+        help="Recalculate all users across all modes (ignores filter requirement)",
+    )
+
+    # rating subcommand
+    rating_parser = subparsers.add_parser("rating", help="Recalculate beatmap difficulty ratings")
+    rating_parser.add_argument(
+        "--mode",
+        dest="modes",
+        action="append",
+        help="Filter by game mode (accepts names like osu, taiko or numeric ids)",
+    )
+    rating_parser.add_argument(
+        "--beatmap-id", dest="beatmap_ids", action="append", type=int, help="Filter by beatmap id"
+    )
+    rating_parser.add_argument(
+        "--beatmapset-id",
+        dest="beatmapset_ids",
+        action="append",
+        type=int,
+        help="Filter by beatmapset id",
+    )
+    rating_parser.add_argument(
+        "--all",
+        dest="recalculate_all",
+        action="store_true",
+        help="Recalculate all beatmaps",
+    )
+
+    # all subcommand
+    subparsers.add_parser("all", help="Execute performance, leaderboard, and rating with --all")
+
     args = parser.parse_args(argv)
+
+    if not args.command:
+        parser.print_help(sys.stderr)
+        parser.exit(1, "\nNo command specified.\n")
+
+    global_config = GlobalConfig(
+        dry_run=args.dry_run,
+        concurrency=max(1, args.concurrency),
+        output_csv=args.output_csv,
+    )
+
+    if args.command == "all":
+        return args.command, global_config, None
+
+    if args.command in ("performance", "leaderboard"):
         if not args.recalculate_all and not any(
             (
                 args.user_ids,
@@ -97,11 +209,11 @@
                 args.beatmapset_ids,
             )
         ):
-            parser.print_help(sys.stderr)
-            parser.exit(1, "\nNo filters provided; please specify at least one target option.\n")
+            parser.error(
+                f"\n{args.command}: No filters provided; please specify at least one target option or use --all.\n"
+            )
 
         user_ids = set(args.user_ids or [])
 
         modes: set[GameMode] = set()
         for raw in args.modes or []:
             for piece in raw.split(","):
@@ -116,18 +228,199 @@
         mods = {mod.strip().upper() for raw in args.mods or [] for mod in raw.split(",") if mod.strip()}
         beatmap_ids = set(args.beatmap_ids or [])
         beatmapset_ids = set(args.beatmapset_ids or [])
-    concurrency = max(1, args.concurrency)
 
-    return RecalculateConfig(
+        if args.command == "performance":
+            return (
+                args.command,
+                global_config,
+                PerformanceConfig(
                     user_ids=user_ids,
                     modes=modes,
                     mods=mods,
                     beatmap_ids=beatmap_ids,
                     beatmapset_ids=beatmapset_ids,
-                    dry_run=args.dry_run,
-                    concurrency=concurrency,
                     recalculate_all=args.recalculate_all,
+                ),
             )
+        else:  # leaderboard
+            return (
+                args.command,
+                global_config,
+                LeaderboardConfig(
+                    user_ids=user_ids,
+                    modes=modes,
+                    mods=mods,
+                    beatmap_ids=beatmap_ids,
+                    beatmapset_ids=beatmapset_ids,
+                    recalculate_all=args.recalculate_all,
+                ),
+            )
+
+    elif args.command == "rating":
+        if not args.recalculate_all and not any(
+            (
+                args.modes,
+                args.beatmap_ids,
+                args.beatmapset_ids,
+            )
+        ):
+            parser.error("\nrating: No filters provided; please specify at least one target option or use --all.\n")
+
+        rating_modes: set[GameMode] = set()
+        for raw in args.modes or []:
+            for piece in raw.split(","):
+                piece = piece.strip()
+                if not piece:
+                    continue
+                mode = GameMode.parse(piece)
+                if mode is None:
+                    parser.error(f"Unknown game mode: {piece}")
+                rating_modes.add(mode)
+
+        beatmap_ids = set(args.beatmap_ids or [])
+        beatmapset_ids = set(args.beatmapset_ids or [])
+
+        return (
+            args.command,
+            global_config,
+            RatingConfig(
+                modes=rating_modes,
+                beatmap_ids=beatmap_ids,
+                beatmapset_ids=beatmapset_ids,
+                recalculate_all=args.recalculate_all,
+            ),
+        )
+
+    return args.command, global_config, None
+
+
+class CSVWriter:
+    """Helper class to write recalculation results to CSV files."""
+
+    def __init__(self, csv_path: str | None):
+        self.csv_path = csv_path
+        self.file = None
+        self.writer = None
+        self.lock = asyncio.Lock()
+
+    async def __aenter__(self):
+        if self.csv_path:
+            # Create directory if it doesn't exist
+            Path(self.csv_path).parent.mkdir(parents=True, exist_ok=True)
+            self.file = open(self.csv_path, "w", newline="", encoding="utf-8")  # noqa: ASYNC230, SIM115
+        return self
+
+    async def __aexit__(self, exc_type, exc_val, exc_tb):
+        if self.file:
+            self.file.close()
+            self.writer = None
+
+    async def write_performance(
+        self,
+        user_id: int,
+        mode: str,
+        recalculated: int,
+        failed: int,
+        old_pp: float,
+        new_pp: float,
+        old_acc: float,
+        new_acc: float,
+    ):
+        """Write performance recalculation result."""
+        if not self.file:
+            return
+
+        async with self.lock:
+            if not self.writer:
+                self.writer = csv.writer(self.file)
+                self.writer.writerow(
+                    [
+                        "type",
+                        "user_id",
+                        "mode",
+                        "recalculated",
+                        "failed",
+                        "old_pp",
+                        "new_pp",
+                        "pp_diff",
+                        "old_acc",
+                        "new_acc",
+                        "acc_diff",
+                    ]
+                )
+
+            self.writer.writerow(
+                [
+                    "performance",
+                    user_id,
+                    mode,
+                    recalculated,
+                    failed,
+                    f"{old_pp:.2f}",
+                    f"{new_pp:.2f}",
+                    f"{new_pp - old_pp:.2f}",
+                    f"{old_acc:.2f}",
+                    f"{new_acc:.2f}",
+                    f"{new_acc - old_acc:.2f}",
+                ]
+            )
+            self.file.flush()
+
+    async def write_leaderboard(self, user_id: int, mode: str, count: int, changes: dict[str, int]):
+        """Write leaderboard recalculation result."""
+        if not self.file:
+            return
+
+        async with self.lock:
+            if not self.writer:
+                self.writer = csv.writer(self.file)
+                self.writer.writerow(
+                    [
+                        "type",
+                        "user_id",
+                        "mode",
+                        "entries",
+                        "ranked_score_diff",
+                        "max_combo_diff",
+                        "ss_diff",
+                        "ssh_diff",
+                        "s_diff",
+                        "sh_diff",
+                        "a_diff",
+                    ]
+                )
+
+            self.writer.writerow(
+                [
+                    "leaderboard",
+                    user_id,
+                    mode,
+                    count,
+                    changes["ranked_score"],
+                    changes["maximum_combo"],
+                    changes["grade_ss"],
+                    changes["grade_ssh"],
+                    changes["grade_s"],
+                    changes["grade_sh"],
+                    changes["grade_a"],
+                ]
+            )
+            self.file.flush()
+
+    async def write_rating(self, beatmap_id: int, old_rating: float, new_rating: float):
+        """Write beatmap rating recalculation result."""
+        if not self.file:
+            return
+
+        async with self.lock:
+            if not self.writer:
+                self.writer = csv.writer(self.file)
+                self.writer.writerow(["type", "beatmap_id", "old_rating", "new_rating", "rating_diff"])
+
+            self.writer.writerow(
+                ["rating", beatmap_id, f"{old_rating:.2f}", f"{new_rating:.2f}", f"{new_rating - old_rating:.2f}"]
+            )
+            self.file.flush()
 
 
 async def run_in_batches(coros: Sequence[Awaitable[None]], batch_size: int) -> None:
@@ -168,7 +461,9 @@ def _retry_wait_seconds(exc: HTTPError) -> float | None:
     return max(delay, 1.0)
 
 
-async def determine_targets(config: RecalculateConfig) -> dict[tuple[int, GameMode], set[int] | None]:
+async def determine_targets(
+    config: PerformanceConfig | LeaderboardConfig,
+) -> dict[tuple[int, GameMode], set[int] | None]:
     targets: dict[tuple[int, GameMode], set[int] | None] = {}
     if config.mods or config.beatmap_ids or config.beatmapset_ids:
         await _populate_targets_from_scores(config, targets)
@@ -188,7 +483,7 @@ async def determine_targets(config: RecalculateConfig) -> dict[tuple[int, GameMo
 
 
 async def _populate_targets_from_scores(
-    config: RecalculateConfig,
+    config: PerformanceConfig | LeaderboardConfig,
     targets: dict[tuple[int, GameMode], set[int] | None],
 ) -> None:
     async with AsyncSession(engine, expire_on_commit=False, autoflush=False) as session:
@@ -218,7 +513,7 @@ async def _populate_targets_from_scores(
 
 
 async def _populate_targets_from_statistics(
-    config: RecalculateConfig,
+    config: PerformanceConfig | LeaderboardConfig,
     targets: dict[tuple[int, GameMode], set[int] | None],
     user_filter: set[int] | None,
 ) -> None:
@@ -425,15 +720,17 @@
     statistics.level_current = calculate_score_to_level(statistics.total_score)
 
 
-async def recalculate_user_mode(
+async def recalculate_user_mode_performance(
     user_id: int,
     gamemode: GameMode,
     score_filter: set[int] | None,
-    config: RecalculateConfig,
+    global_config: GlobalConfig,
     fetcher: Fetcher,
     redis: Redis,
     semaphore: asyncio.Semaphore,
+    csv_writer: CSVWriter | None = None,
 ) -> None:
+    """Recalculate performance points and best scores (without TotalScoreBestScore)."""
     async with semaphore, AsyncSession(engine, expire_on_commit=False, autoflush=False) as session:
         try:
             statistics = (
@@ -477,7 +774,6 @@
             recalculated += 1
 
             best_scores = build_best_scores(user_id, gamemode, passed_scores)
-            total_best_scores = build_total_score_best_scores(passed_scores)
 
             await session.execute(
                 delete(BestScore).where(
@@ -485,14 +781,7 @@
                     col(BestScore.gamemode) == gamemode,
                 )
             )
-            await session.execute(
-                delete(TotalScoreBestScore).where(
-                    col(TotalScoreBestScore.user_id) == user_id,
-                    col(TotalScoreBestScore.gamemode) == gamemode,
-                )
-            )
             session.add_all(best_scores)
-            session.add_all(total_best_scores)
             await session.flush()
 
             await _recalculate_statistics(statistics, session, scores)
@@ -510,7 +799,7 @@
                 "pp {old_pp:.2f} -> {new_pp:.2f} | acc {old_acc:.2f} -> {new_acc:.2f}"
             )
 
-            if config.dry_run:
+            if global_config.dry_run:
                 await session.rollback()
                 logger.info(
                     message.format(
@@ -538,13 +827,237 @@ async def recalculate_user_mode(
|
|||||||
new_acc=new_acc,
|
new_acc=new_acc,
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
|
|
||||||
|
# Write to CSV if enabled
|
||||||
|
if csv_writer:
|
||||||
|
await csv_writer.write_performance(
|
||||||
|
user_id, str(gamemode), recalculated, failed, old_pp, new_pp, old_acc, new_acc
|
||||||
|
)
|
||||||
except Exception:
|
except Exception:
|
||||||
if session.in_transaction():
|
if session.in_transaction():
|
||||||
await session.rollback()
|
await session.rollback()
|
||||||
logger.exception(f"Failed to process user {user_id} mode {gamemode}")
|
logger.exception(f"Failed to process user {user_id} mode {gamemode}")
|
||||||
|
|
||||||
|
|
||||||
async def recalculate(config: RecalculateConfig) -> None:
|
async def recalculate_user_mode_leaderboard(
|
||||||
|
user_id: int,
|
||||||
|
gamemode: GameMode,
|
||||||
|
score_filter: set[int] | None,
|
||||||
|
global_config: GlobalConfig,
|
||||||
|
semaphore: asyncio.Semaphore,
|
||||||
|
csv_writer: CSVWriter | None = None,
|
||||||
|
) -> None:
|
||||||
|
"""Recalculate leaderboard (TotalScoreBestScore only)."""
|
||||||
|
async with semaphore, AsyncSession(engine, expire_on_commit=False, autoflush=False) as session:
|
||||||
|
try:
|
||||||
|
# Get statistics
|
||||||
|
statistics = (
|
||||||
|
await session.exec(
|
||||||
|
select(UserStatistics).where(
|
||||||
|
UserStatistics.user_id == user_id,
|
||||||
|
UserStatistics.mode == gamemode,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
).first()
|
||||||
|
if statistics is None:
|
||||||
|
logger.warning(f"No statistics found for user {user_id} mode {gamemode}")
|
||||||
|
return
|
||||||
|
previous_data = {
|
||||||
|
"ranked_score": statistics.ranked_score,
|
||||||
|
"maximum_combo": statistics.maximum_combo,
|
||||||
|
"grade_ss": statistics.grade_ss,
|
||||||
|
"grade_ssh": statistics.grade_ssh,
|
||||||
|
"grade_s": statistics.grade_s,
|
||||||
|
"grade_sh": statistics.grade_sh,
|
||||||
|
"grade_a": statistics.grade_a,
|
||||||
|
}
|
||||||
|
|
||||||
|
score_stmt = (
|
||||||
|
select(Score)
|
||||||
|
.where(Score.user_id == user_id, Score.gamemode == gamemode)
|
||||||
|
.options(joinedload(Score.beatmap))
|
||||||
|
)
|
||||||
|
result = await session.exec(score_stmt)
|
||||||
|
scores: list[Score] = list(result)
|
||||||
|
|
||||||
|
passed_scores = [score for score in scores if score.passed]
|
||||||
|
target_set = score_filter if score_filter is not None else {score.id for score in passed_scores}
|
||||||
|
if score_filter is not None and not target_set:
|
||||||
|
logger.info(f"User {user_id} mode {gamemode}: no scores matched filters")
|
||||||
|
return
|
||||||
|
|
||||||
|
total_best_scores = build_total_score_best_scores(passed_scores)
|
||||||
|
|
||||||
|
await session.execute(
|
||||||
|
delete(TotalScoreBestScore).where(
|
||||||
|
col(TotalScoreBestScore.user_id) == user_id,
|
||||||
|
col(TotalScoreBestScore.gamemode) == gamemode,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
session.add_all(total_best_scores)
|
||||||
|
await session.flush()
|
||||||
|
|
||||||
|
# Recalculate statistics using the helper function
|
||||||
|
await _recalculate_statistics(statistics, session, scores)
|
||||||
|
await session.flush()
|
||||||
|
changes = {
|
||||||
|
"ranked_score": statistics.ranked_score - previous_data["ranked_score"],
|
||||||
|
"maximum_combo": statistics.maximum_combo - previous_data["maximum_combo"],
|
||||||
|
"grade_ss": statistics.grade_ss - previous_data["grade_ss"],
|
||||||
|
"grade_ssh": statistics.grade_ssh - previous_data["grade_ssh"],
|
||||||
|
"grade_s": statistics.grade_s - previous_data["grade_s"],
|
||||||
|
"grade_sh": statistics.grade_sh - previous_data["grade_sh"],
|
||||||
|
"grade_a": statistics.grade_a - previous_data["grade_a"],
|
||||||
|
}
|
||||||
|
|
||||||
|
message = (
|
||||||
|
"Dry-run | user {user_id} mode {mode} | {count} leaderboard entries | "
|
||||||
|
"ranked_score: {ranked_score:+d} | max_combo: {max_combo:+d} | "
|
||||||
|
"SS: {ss:+d} | SSH: {ssh:+d} | S: {s:+d} | SH: {sh:+d} | A: {a:+d}"
|
||||||
|
)
|
||||||
|
success_message = (
|
||||||
|
"Recalculated leaderboard | user {user_id} mode {mode} | {count} entries | "
|
||||||
|
"ranked_score: {ranked_score:+d} | max_combo: {max_combo:+d} | "
|
||||||
|
"SS: {ss:+d} | SSH: {ssh:+d} | S: {s:+d} | SH: {sh:+d} | A: {a:+d}"
|
||||||
|
)
|
||||||
|
|
||||||
|
if global_config.dry_run:
|
||||||
|
await session.rollback()
|
||||||
|
logger.info(
|
||||||
|
message.format(
|
||||||
|
user_id=user_id,
|
||||||
|
mode=gamemode,
|
||||||
|
count=len(total_best_scores),
|
||||||
|
ranked_score=changes["ranked_score"],
|
||||||
|
max_combo=changes["maximum_combo"],
|
||||||
|
ss=changes["grade_ss"],
|
||||||
|
ssh=changes["grade_ssh"],
|
||||||
|
s=changes["grade_s"],
|
||||||
|
sh=changes["grade_sh"],
|
||||||
|
a=changes["grade_a"],
|
||||||
|
)
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
await session.commit()
|
||||||
|
logger.success(
|
||||||
|
success_message.format(
|
||||||
|
user_id=user_id,
|
||||||
|
mode=gamemode,
|
||||||
|
count=len(total_best_scores),
|
||||||
|
ranked_score=changes["ranked_score"],
|
||||||
|
max_combo=changes["maximum_combo"],
|
||||||
|
ss=changes["grade_ss"],
|
||||||
|
ssh=changes["grade_ssh"],
|
||||||
|
s=changes["grade_s"],
|
||||||
|
sh=changes["grade_sh"],
|
||||||
|
a=changes["grade_a"],
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
# Write to CSV if enabled
|
||||||
|
if csv_writer:
|
||||||
|
await csv_writer.write_leaderboard(user_id, str(gamemode), len(total_best_scores), changes)
|
||||||
|
except Exception:
|
||||||
|
if session.in_transaction():
|
||||||
|
await session.rollback()
|
||||||
|
logger.exception(f"Failed to process leaderboard for user {user_id} mode {gamemode}")
|
||||||
|
|
||||||
|
|
||||||
|
async def recalculate_beatmap_rating(
    beatmap_id: int,
    global_config: GlobalConfig,
    fetcher: Fetcher,
    redis: Redis,
    semaphore: asyncio.Semaphore,
    csv_writer: CSVWriter | None = None,
) -> None:
    """Recalculate difficulty rating for a beatmap."""
    async with semaphore, AsyncSession(engine, expire_on_commit=False, autoflush=False) as session:
        try:
            beatmap = await session.get(Beatmap, beatmap_id)
            if beatmap is None:
                logger.warning(f"Beatmap {beatmap_id} not found")
                return
            if beatmap.deleted_at is not None:
                logger.warning(f"Beatmap {beatmap_id} is deleted; skipping")
                return

            old_rating = beatmap.difficulty_rating

            attempts = 10
            while attempts > 0:
                try:
                    ruleset = GameMode(beatmap.mode) if isinstance(beatmap.mode, int) else beatmap.mode
                    attributes = await calculate_beatmap_attributes(beatmap_id, ruleset, [], redis, fetcher)
                    beatmap.difficulty_rating = attributes.star_rating
                    break
                except CalculateError as exc:
                    attempts -= 1
                    if attempts > 0:
                        logger.warning(
                            f"CalculateError for beatmap {beatmap_id} (attempts remaining: {attempts}); retrying..."
                        )
                        await asyncio.sleep(1)
                    else:
                        logger.error(f"Failed to calculate rating for beatmap {beatmap_id} after 10 attempts: {exc}")
                        return
                except HTTPError as exc:
                    wait = _retry_wait_seconds(exc)
                    if wait is not None:
                        logger.warning(
                            f"Rate limited while calculating rating for beatmap {beatmap_id}; "
                            f"waiting {wait:.1f}s before retry"
                        )
                        await asyncio.sleep(wait)
                        continue
                    attempts -= 1
                    if attempts > 0:
                        await asyncio.sleep(2)
                    else:
                        logger.exception(f"Failed to calculate rating for beatmap {beatmap_id} after multiple attempts")
                        return
                except Exception:
                    logger.exception(f"Unexpected error calculating rating for beatmap {beatmap_id}")
                    return

            new_rating = beatmap.difficulty_rating

            message = "Dry-run | beatmap {beatmap_id} | rating {old_rating:.2f} -> {new_rating:.2f}"
            success_message = "Recalculated beatmap {beatmap_id} | rating {old_rating:.2f} -> {new_rating:.2f}"

            if global_config.dry_run:
                await session.rollback()
                logger.info(
                    message.format(
                        beatmap_id=beatmap_id,
                        old_rating=old_rating,
                        new_rating=new_rating,
                    )
                )
            else:
                await session.commit()
                logger.success(
                    success_message.format(
                        beatmap_id=beatmap_id,
                        old_rating=old_rating,
                        new_rating=new_rating,
                    )
                )

            # Write to CSV if enabled
            if csv_writer:
                await csv_writer.write_rating(beatmap_id, old_rating, new_rating)
        except Exception:
            if session.in_transaction():
                await session.rollback()
            logger.exception(f"Failed to process beatmap {beatmap_id}")


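Note on the HTTPError branch above: `_retry_wait_seconds` is defined elsewhere in this script and is not part of this hunk. A minimal sketch of what such a helper could look like, assuming HTTPError comes from httpx and that the helper reads a Retry-After header from 429 responses, returning None for anything that is not a rate limit:

# Sketch only, not from the diff; assumes httpx-style exceptions.
def _retry_wait_seconds(exc: HTTPError) -> float | None:
    """Return the delay before retrying a rate-limited request, or None if not rate limited."""
    # Only HTTPStatusError carries a response; connection errors and timeouts do not.
    response = getattr(exc, "response", None)
    if response is None or response.status_code != 429:
        return None
    # Honour the server-provided Retry-After value, falling back to a small fixed delay.
    retry_after = response.headers.get("Retry-After")
    if retry_after is None:
        return 5.0
    try:
        return max(float(retry_after), 0.0)
    except ValueError:
        return 5.0

Whatever the real implementation, the loop above only sleeps and continues when the helper returns a value; otherwise it falls through to the generic retry path with a fixed 2 second back-off.
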
async def recalculate_performance(
    config: PerformanceConfig,
    global_config: GlobalConfig,
) -> None:
    """Execute performance recalculation."""
    fetcher = await get_fetcher()
    redis = get_redis()

@@ -555,27 +1068,184 @@ async def recalculate(config: RecalculateConfig) -> None:
    targets = await determine_targets(config)
    if not targets:
        logger.info("No targets matched the provided filters; nothing to recalculate")
        await engine.dispose()
        return

    scope = "full" if config.recalculate_all else "filtered"
    logger.info(
        "Recalculating {} user/mode pairs ({}) | dry-run={} | concurrency={}",
        "Recalculating performance for {} user/mode pairs ({}) | dry-run={} | concurrency={}",
        len(targets),
        scope,
        config.dry_run,
        global_config.dry_run,
        config.concurrency,
        global_config.concurrency,
    )

    semaphore = asyncio.Semaphore(config.concurrency)
    async with CSVWriter(global_config.output_csv) as csv_writer:
        semaphore = asyncio.Semaphore(global_config.concurrency)
        coroutines = [
            recalculate_user_mode(user_id, mode, score_ids, config, fetcher, redis, semaphore)
            recalculate_user_mode_performance(
                user_id, mode, score_ids, global_config, fetcher, redis, semaphore, csv_writer
            )
            for (user_id, mode), score_ids in targets.items()
        ]
        await run_in_batches(coroutines, config.concurrency)
        await run_in_batches(coroutines, global_config.concurrency)


async def recalculate_leaderboard(
    config: LeaderboardConfig,
    global_config: GlobalConfig,
) -> None:
    """Execute leaderboard recalculation."""
    targets = await determine_targets(config)
    if not targets:
        logger.info("No targets matched the provided filters; nothing to recalculate")
        return

    scope = "full" if config.recalculate_all else "filtered"
    logger.info(
        "Recalculating leaderboard for {} user/mode pairs ({}) | dry-run={} | concurrency={}",
        len(targets),
        scope,
        global_config.dry_run,
        global_config.concurrency,
    )

    async with CSVWriter(global_config.output_csv) as csv_writer:
        semaphore = asyncio.Semaphore(global_config.concurrency)
        coroutines = [
            recalculate_user_mode_leaderboard(user_id, mode, score_ids, global_config, semaphore, csv_writer)
            for (user_id, mode), score_ids in targets.items()
        ]
        await run_in_batches(coroutines, global_config.concurrency)


async def recalculate_rating(
    config: RatingConfig,
    global_config: GlobalConfig,
) -> None:
    """Execute beatmap rating recalculation."""
    fetcher = await get_fetcher()
    redis = get_redis()

    await init_calculator()

    # Determine beatmaps to recalculate
    async with AsyncSession(engine, expire_on_commit=False, autoflush=False) as session:
        stmt = select(Beatmap.id)
        if not config.recalculate_all:
            if config.beatmap_ids:
                stmt = stmt.where(col(Beatmap.id).in_(list(config.beatmap_ids)))
            if config.beatmapset_ids:
                stmt = stmt.where(col(Beatmap.beatmapset_id).in_(list(config.beatmapset_ids)))
            if config.modes:
                stmt = stmt.where(col(Beatmap.mode).in_(list(config.modes)))

        result = await session.exec(stmt)
        beatmap_ids = list(result)

    if not beatmap_ids:
        logger.info("No beatmaps matched the provided filters; nothing to recalculate")
        return

    scope = "full" if config.recalculate_all else "filtered"
    logger.info(
        "Recalculating rating for {} beatmaps ({}) | dry-run={} | concurrency={}",
        len(beatmap_ids),
        scope,
        global_config.dry_run,
        global_config.concurrency,
    )

    async with CSVWriter(global_config.output_csv) as csv_writer:
        semaphore = asyncio.Semaphore(global_config.concurrency)
        coroutines = [
            recalculate_beatmap_rating(beatmap_id, global_config, fetcher, redis, semaphore, csv_writer)
            for beatmap_id in beatmap_ids
        ]
        await run_in_batches(coroutines, global_config.concurrency)


def _get_csv_path_for_subcommand(base_path: str | None, subcommand: str) -> str | None:
    """Generate a CSV path with subcommand name inserted before extension."""
    if base_path is None:
        return None

    path = Path(base_path)
    # Insert subcommand name before the extension
    # e.g., "results.csv" -> "results.performance.csv"
    new_name = f"{path.stem}.{subcommand}{path.suffix}"
    if path.parent == Path("."):
        return new_name
    return str(path.parent / new_name)


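For reference, the helper above behaves as follows on a few illustrative inputs (not part of the diff):

    _get_csv_path_for_subcommand("results.csv", "performance")   # -> "results.performance.csv"
    _get_csv_path_for_subcommand("out/results.csv", "rating")    # -> "out/results.rating.csv" (POSIX-style path)
    _get_csv_path_for_subcommand(None, "leaderboard")            # -> None
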
async def main() -> None:
    """Main entry point."""
    command, global_config, sub_config = parse_cli_args(sys.argv[1:])

    if command == "all":
        logger.info("Executing all recalculations (performance, leaderboard, rating) with --all")

        # Rating
        rating_config = RatingConfig(
            modes=set(),
            beatmap_ids=set(),
            beatmapset_ids=set(),
            recalculate_all=True,
        )
        rating_csv_path = _get_csv_path_for_subcommand(global_config.output_csv, "rating")
        rating_global_config = GlobalConfig(
            dry_run=global_config.dry_run,
            concurrency=global_config.concurrency,
            output_csv=rating_csv_path,
        )
        await recalculate_rating(rating_config, rating_global_config)

        # Performance
        perf_config = PerformanceConfig(
            user_ids=set(),
            modes=set(),
            mods=set(),
            beatmap_ids=set(),
            beatmapset_ids=set(),
            recalculate_all=True,
        )
        perf_csv_path = _get_csv_path_for_subcommand(global_config.output_csv, "performance")
        perf_global_config = GlobalConfig(
            dry_run=global_config.dry_run,
            concurrency=global_config.concurrency,
            output_csv=perf_csv_path,
        )
        await recalculate_performance(perf_config, perf_global_config)

        # Leaderboard
        lead_config = LeaderboardConfig(
            user_ids=set(),
            modes=set(),
            mods=set(),
            beatmap_ids=set(),
            beatmapset_ids=set(),
            recalculate_all=True,
        )
        lead_csv_path = _get_csv_path_for_subcommand(global_config.output_csv, "leaderboard")
        lead_global_config = GlobalConfig(
            dry_run=global_config.dry_run,
            concurrency=global_config.concurrency,
            output_csv=lead_csv_path,
        )
        await recalculate_leaderboard(lead_config, lead_global_config)

    elif command == "performance":
        assert isinstance(sub_config, PerformanceConfig)
        await recalculate_performance(sub_config, global_config)
    elif command == "leaderboard":
        assert isinstance(sub_config, LeaderboardConfig)
        await recalculate_leaderboard(sub_config, global_config)
    elif command == "rating":
        assert isinstance(sub_config, RatingConfig)
        await recalculate_rating(sub_config, global_config)

    await engine.dispose()


if __name__ == "__main__":
    config = parse_cli_args(sys.argv[1:])
    asyncio.run(main())
    asyncio.run(recalculate(config))

uv.lock (generated, 15 lines changed)
@@ -745,7 +745,6 @@ dependencies = [
    { name = "python-jose", extra = ["cryptography"] },
    { name = "python-multipart" },
    { name = "redis" },
    { name = "rosu-pp-py" },
    { name = "sentry-sdk", extra = ["fastapi", "httpx", "loguru", "sqlalchemy"] },
    { name = "sqlalchemy" },
    { name = "sqlmodel" },
@@ -753,6 +752,11 @@ dependencies = [
    { name = "uvicorn", extra = ["standard"] },
]

[package.optional-dependencies]
rosu = [
    { name = "rosu-pp-py" },
]

[package.dev-dependencies]
dev = [
    { name = "datamodel-code-generator" },

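With rosu-pp-py now behind the optional `rosu` extra, a plain `uv sync` no longer installs the local calculator; it should still be installable with `uv sync --extra rosu` (or `uv sync --all-extras`), which only matters when falling back from the performance-server calculator to the in-process one.
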
@@ -790,13 +794,14 @@ requires-dist = [
    { name = "python-jose", extras = ["cryptography"], specifier = ">=3.3.0" },
    { name = "python-multipart", specifier = ">=0.0.6" },
    { name = "redis", specifier = ">=5.0.1" },
    { name = "rosu-pp-py", specifier = ">=3.1.0" },
    { name = "rosu-pp-py", marker = "extra == 'rosu'", specifier = ">=3.1.0" },
    { name = "sentry-sdk", extras = ["fastapi", "httpx", "loguru", "sqlalchemy"], specifier = ">=2.34.1" },
    { name = "sqlalchemy", specifier = ">=2.0.23" },
    { name = "sqlmodel", specifier = ">=0.0.24" },
    { name = "tinycss2", specifier = ">=1.4.0" },
    { name = "uvicorn", extras = ["standard"], specifier = ">=0.24.0" },
]

provides-extras = ["rosu"]

[package.metadata.requires-dev]
dev = [
@@ -830,6 +835,8 @@ wheels = [
    { url = "https://files.pythonhosted.org/packages/19/0d/6660d55f7373b2ff8152401a83e02084956da23ae58cddbfb0b330978fe9/greenlet-3.2.4-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3b3812d8d0c9579967815af437d96623f45c0f2ae5f04e366de62a12d83a8fb0", size = 607586, upload-time = "2025-08-07T13:18:28.544Z" },
    { url = "https://files.pythonhosted.org/packages/8e/1a/c953fdedd22d81ee4629afbb38d2f9d71e37d23caace44775a3a969147d4/greenlet-3.2.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:abbf57b5a870d30c4675928c37278493044d7c14378350b3aa5d484fa65575f0", size = 1123281, upload-time = "2025-08-07T13:42:39.858Z" },
    { url = "https://files.pythonhosted.org/packages/3f/c7/12381b18e21aef2c6bd3a636da1088b888b97b7a0362fac2e4de92405f97/greenlet-3.2.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:20fb936b4652b6e307b8f347665e2c615540d4b42b3b4c8a321d8286da7e520f", size = 1151142, upload-time = "2025-08-07T13:18:22.981Z" },
    { url = "https://files.pythonhosted.org/packages/27/45/80935968b53cfd3f33cf99ea5f08227f2646e044568c9b1555b58ffd61c2/greenlet-3.2.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ee7a6ec486883397d70eec05059353b8e83eca9168b9f3f9a361971e77e0bcd0", size = 1564846, upload-time = "2025-11-04T12:42:15.191Z" },
    { url = "https://files.pythonhosted.org/packages/69/02/b7c30e5e04752cb4db6202a3858b149c0710e5453b71a3b2aec5d78a1aab/greenlet-3.2.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:326d234cbf337c9c3def0676412eb7040a35a768efc92504b947b3e9cfc7543d", size = 1633814, upload-time = "2025-11-04T12:42:17.175Z" },
    { url = "https://files.pythonhosted.org/packages/e9/08/b0814846b79399e585f974bbeebf5580fbe59e258ea7be64d9dfb253c84f/greenlet-3.2.4-cp312-cp312-win_amd64.whl", hash = "sha256:a7d4e128405eea3814a12cc2605e0e6aedb4035bf32697f72deca74de4105e02", size = 299899, upload-time = "2025-08-07T13:38:53.448Z" },
    { url = "https://files.pythonhosted.org/packages/49/e8/58c7f85958bda41dafea50497cbd59738c5c43dbbea5ee83d651234398f4/greenlet-3.2.4-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:1a921e542453fe531144e91e1feedf12e07351b1cf6c9e8a3325ea600a715a31", size = 272814, upload-time = "2025-08-07T13:15:50.011Z" },
    { url = "https://files.pythonhosted.org/packages/62/dd/b9f59862e9e257a16e4e610480cfffd29e3fae018a68c2332090b53aac3d/greenlet-3.2.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cd3c8e693bff0fff6ba55f140bf390fa92c994083f838fece0f63be121334945", size = 641073, upload-time = "2025-08-07T13:42:57.23Z" },
@@ -839,6 +846,8 @@ wheels = [
    { url = "https://files.pythonhosted.org/packages/ee/43/3cecdc0349359e1a527cbf2e3e28e5f8f06d3343aaf82ca13437a9aa290f/greenlet-3.2.4-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:23768528f2911bcd7e475210822ffb5254ed10d71f4028387e5a99b4c6699671", size = 610497, upload-time = "2025-08-07T13:18:31.636Z" },
    { url = "https://files.pythonhosted.org/packages/b8/19/06b6cf5d604e2c382a6f31cafafd6f33d5dea706f4db7bdab184bad2b21d/greenlet-3.2.4-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:00fadb3fedccc447f517ee0d3fd8fe49eae949e1cd0f6a611818f4f6fb7dc83b", size = 1121662, upload-time = "2025-08-07T13:42:41.117Z" },
    { url = "https://files.pythonhosted.org/packages/a2/15/0d5e4e1a66fab130d98168fe984c509249c833c1a3c16806b90f253ce7b9/greenlet-3.2.4-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:d25c5091190f2dc0eaa3f950252122edbbadbb682aa7b1ef2f8af0f8c0afefae", size = 1149210, upload-time = "2025-08-07T13:18:24.072Z" },
    { url = "https://files.pythonhosted.org/packages/1c/53/f9c440463b3057485b8594d7a638bed53ba531165ef0ca0e6c364b5cc807/greenlet-3.2.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6e343822feb58ac4d0a1211bd9399de2b3a04963ddeec21530fc426cc121f19b", size = 1564759, upload-time = "2025-11-04T12:42:19.395Z" },
    { url = "https://files.pythonhosted.org/packages/47/e4/3bb4240abdd0a8d23f4f88adec746a3099f0d86bfedb623f063b2e3b4df0/greenlet-3.2.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ca7f6f1f2649b89ce02f6f229d7c19f680a6238af656f61e0115b24857917929", size = 1634288, upload-time = "2025-11-04T12:42:21.174Z" },
    { url = "https://files.pythonhosted.org/packages/0b/55/2321e43595e6801e105fcfdee02b34c0f996eb71e6ddffca6b10b7e1d771/greenlet-3.2.4-cp313-cp313-win_amd64.whl", hash = "sha256:554b03b6e73aaabec3745364d6239e9e012d64c68ccd0b8430c64ccc14939a8b", size = 299685, upload-time = "2025-08-07T13:24:38.824Z" },
    { url = "https://files.pythonhosted.org/packages/22/5c/85273fd7cc388285632b0498dbbab97596e04b154933dfe0f3e68156c68c/greenlet-3.2.4-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:49a30d5fda2507ae77be16479bdb62a660fa51b1eb4928b524975b3bde77b3c0", size = 273586, upload-time = "2025-08-07T13:16:08.004Z" },
    { url = "https://files.pythonhosted.org/packages/d1/75/10aeeaa3da9332c2e761e4c50d4c3556c21113ee3f0afa2cf5769946f7a3/greenlet-3.2.4-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:299fd615cd8fc86267b47597123e3f43ad79c9d8a22bebdce535e53550763e2f", size = 686346, upload-time = "2025-08-07T13:42:59.944Z" },
@@ -846,6 +855,8 @@ wheels = [
    { url = "https://files.pythonhosted.org/packages/dc/8b/29aae55436521f1d6f8ff4e12fb676f3400de7fcf27fccd1d4d17fd8fecd/greenlet-3.2.4-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:b4a1870c51720687af7fa3e7cda6d08d801dae660f75a76f3845b642b4da6ee1", size = 694659, upload-time = "2025-08-07T13:53:17.759Z" },
    { url = "https://files.pythonhosted.org/packages/92/2e/ea25914b1ebfde93b6fc4ff46d6864564fba59024e928bdc7de475affc25/greenlet-3.2.4-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:061dc4cf2c34852b052a8620d40f36324554bc192be474b9e9770e8c042fd735", size = 695355, upload-time = "2025-08-07T13:18:34.517Z" },
    { url = "https://files.pythonhosted.org/packages/72/60/fc56c62046ec17f6b0d3060564562c64c862948c9d4bc8aa807cf5bd74f4/greenlet-3.2.4-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:44358b9bf66c8576a9f57a590d5f5d6e72fa4228b763d0e43fee6d3b06d3a337", size = 657512, upload-time = "2025-08-07T13:18:33.969Z" },
    { url = "https://files.pythonhosted.org/packages/23/6e/74407aed965a4ab6ddd93a7ded3180b730d281c77b765788419484cdfeef/greenlet-3.2.4-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:2917bdf657f5859fbf3386b12d68ede4cf1f04c90c3a6bc1f013dd68a22e2269", size = 1612508, upload-time = "2025-11-04T12:42:23.427Z" },
    { url = "https://files.pythonhosted.org/packages/0d/da/343cd760ab2f92bac1845ca07ee3faea9fe52bee65f7bcb19f16ad7de08b/greenlet-3.2.4-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:015d48959d4add5d6c9f6c5210ee3803a830dce46356e3bc326d6776bde54681", size = 1680760, upload-time = "2025-11-04T12:42:25.341Z" },
    { url = "https://files.pythonhosted.org/packages/e3/a5/6ddab2b4c112be95601c13428db1d8b6608a8b6039816f2ba09c346c08fc/greenlet-3.2.4-cp314-cp314-win_amd64.whl", hash = "sha256:e37ab26028f12dbb0ff65f29a8d3d44a765c61e729647bf2ddfbbed621726f01", size = 303425, upload-time = "2025-08-07T13:32:27.59Z" },
]