Compare commits
327 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
1ce5f3cc16 | ||
|
|
96c8b70df6 | ||
|
|
8923d714a7 | ||
|
|
e5802aefbb | ||
|
|
c323373510 | ||
|
|
fe0c13bdd3 | ||
|
|
1c3b309804 | ||
|
|
282eda3250 | ||
|
|
38a2c8720b | ||
|
|
87ffc6f581 | ||
|
|
735a22d500 | ||
|
|
a6c596318e | ||
|
|
fed1471129 | ||
|
|
a58b4cb172 | ||
|
|
e5a4a0d9e4 | ||
|
|
10095f7da2 | ||
|
|
18574587e3 | ||
|
|
f628061971 | ||
|
|
a24302c2df | ||
|
|
bb181d930a | ||
|
|
511150cc4c | ||
|
|
e0c3e06ffe | ||
|
|
0f51d7a834 | ||
|
|
a62ef049ff | ||
|
|
36f5bd2ab3 | ||
|
|
355f13273f | ||
|
|
d87b0fe505 | ||
|
|
ab8c809ea0 | ||
|
|
31ccc9f0a2 | ||
|
|
e688d2777d | ||
|
|
855cd2933d | ||
|
|
aa9387c53a | ||
|
|
db430db01b | ||
|
|
7b34bd8b0b | ||
|
|
e049056534 | ||
|
|
819f52450c | ||
|
|
40da994ae8 | ||
|
|
42f1d53d3e | ||
|
|
7bec5870f1 | ||
|
|
7063f409b1 | ||
|
|
4b942e3cb3 | ||
|
|
97f8713c71 | ||
|
|
f0ed4450af | ||
|
|
b37300220a | ||
|
|
ec8bb02935 | ||
|
|
3a00ca9b91 | ||
|
|
2bd770a995 | ||
|
|
20ccd3e979 | ||
|
|
05ccf5991f | ||
|
|
27f345cb75 | ||
|
|
05540d44d0 | ||
|
|
6753843158 | ||
|
|
a46b17fce4 | ||
|
|
5c2687e1e4 | ||
|
|
0be3e903d4 | ||
|
|
293e57aea3 | ||
|
|
f289e8b74b | ||
|
|
7d6d5696db | ||
|
|
18caf1763f | ||
|
|
aa5592917d | ||
|
|
d9d26d0523 | ||
|
|
ef3a900de0 | ||
|
|
65d7fd241b | ||
|
|
c177735fd0 | ||
|
|
6a5ad0f57c | ||
|
|
a4599de6f9 | ||
|
|
811cbdf340 | ||
|
|
9be89d764c | ||
|
|
05134d260e | ||
|
|
39daebd589 | ||
|
|
bb24b5ae4d | ||
|
|
55604621cf | ||
|
|
cf160f1357 | ||
|
|
2519301690 | ||
|
|
f6e2e8ed44 | ||
|
|
33f321952d | ||
|
|
8f4a9d5fed | ||
|
|
2c81e22749 | ||
|
|
f792d146b5 | ||
|
|
07bf9864d2 | ||
|
|
965c96d0b2 | ||
|
|
a4dbb9a167 | ||
|
|
b180d3f39d | ||
|
|
8790ccad64 | ||
|
|
563a30d28f | ||
|
|
2b78920fed | ||
|
|
de917b7456 | ||
|
|
a10c07dc57 | ||
|
|
0e2df8dfef | ||
|
|
a8af03a464 | ||
|
|
73d25c7604 | ||
|
|
301130df02 | ||
|
|
6731373ded | ||
|
|
a32976857f | ||
|
|
6af0f814aa | ||
|
|
45ed9e51a9 | ||
|
|
e2f3c5099f | ||
|
|
3dd74fc703 | ||
|
|
85ac57a584 | ||
|
|
1d1221a740 | ||
|
|
b4e164faad | ||
|
|
c0575f3274 | ||
|
|
c76d6163ac | ||
|
|
fa81f837a0 | ||
|
|
64e221c641 | ||
|
|
60049a777f | ||
|
|
95f7c010af | ||
|
|
10caa82320 | ||
|
|
febc1d761f | ||
|
|
d19f82df80 | ||
|
|
1db34bf5c5 | ||
|
|
8884f8993c | ||
|
|
bb4d454093 | ||
|
|
6f522847ec | ||
|
|
38671cd471 | ||
|
|
6f48dd4575 | ||
|
|
3f702dc5ec | ||
|
|
6c23694061 | ||
|
|
806dd819fb | ||
|
|
96d96ddcd3 | ||
|
|
d1d2bd55a0 | ||
|
|
1163a93053 | ||
|
|
80ba65c746 | ||
|
|
9fe493a12e | ||
|
|
470e8b3c06 | ||
|
|
00f53f0dc0 | ||
|
|
238cb96c12 | ||
|
|
ca7c74f709 | ||
|
|
c6058eb0d8 | ||
|
|
216d3ab3bf | ||
|
|
c2bfafc67a | ||
|
|
21da83e4eb | ||
|
|
238520c577 | ||
|
|
382c1721df | ||
|
|
7c18fc5fb6 | ||
|
|
2bfde24b84 | ||
|
|
dfd656f2fb | ||
|
|
046f894407 | ||
|
|
d490239f46 | ||
|
|
b10425ad91 | ||
|
|
1e775c9a36 | ||
|
|
0d9019c6cc | ||
|
|
d23f32f08d | ||
|
|
fce88272b5 | ||
|
|
56fcc84196 | ||
|
|
afd5018bcd | ||
|
|
346c2557cf | ||
|
|
e6b60c54bd | ||
|
|
37b4eadf79 | ||
|
|
40670c094b | ||
|
|
f34ed53a55 | ||
|
|
2e1d922f59 | ||
|
|
9a77c8d246 | ||
|
|
e57702073f | ||
|
|
f31056ced3 | ||
|
|
bb1c09f4fd | ||
|
|
7deb328b09 | ||
|
|
5ff6708781 | ||
|
|
6252e04253 | ||
|
|
3f6776847e | ||
|
|
860ebe9fa9 | ||
|
|
017b058e63 | ||
|
|
0f637446df | ||
|
|
4017f2af73 | ||
|
|
bc6553f143 | ||
|
|
3a434ee02c | ||
|
|
86c7bbb74e | ||
|
|
8054281b15 | ||
|
|
953f33be4f | ||
|
|
99d6af1c1f | ||
|
|
7d6eeae073 | ||
|
|
884a4cad2c | ||
|
|
79805c2858 | ||
|
|
0cd10bc543 | ||
|
|
bb06643de8 | ||
|
|
f8e5fdc08e | ||
|
|
edbf01daa1 | ||
|
|
1527e23b43 | ||
|
|
7b4ff1224d | ||
|
|
ccab7bc584 | ||
|
|
509f759b1a | ||
|
|
bef303cc93 | ||
|
|
19c7303b83 | ||
|
|
ca9a2abc6f | ||
|
|
9e7409c9f7 | ||
|
|
ad78663ba0 | ||
|
|
b834799a2d | ||
|
|
08ff35509e | ||
|
|
acb12120a2 | ||
|
|
6330e9b6e1 | ||
|
|
dbbabc8a15 | ||
|
|
e3976c0a11 | ||
|
|
0913625836 | ||
|
|
0b43f2aad8 | ||
|
|
55225366dc | ||
|
|
0bfe4d3702 | ||
|
|
1554f448d0 | ||
|
|
7a3752669f | ||
|
|
6baaeda1af | ||
|
|
fa6b7487aa | ||
|
|
98f5a1d184 | ||
|
|
7f1aee3d4a | ||
|
|
c31c05d3f6 | ||
|
|
61639454a3 | ||
|
|
19f94fffbb | ||
|
|
e591280620 | ||
|
|
66b5610dea | ||
|
|
f4e6c3a58f | ||
|
|
ad6bed4333 | ||
|
|
ca89ddf657 | ||
|
|
c4f002fd45 | ||
|
|
e553b0cc40 | ||
|
|
6382b744c1 | ||
|
|
7d0b2c657d | ||
|
|
ffe4c5b14d | ||
|
|
c072dde9d5 | ||
|
|
e589e68881 | ||
|
|
7d79f3cee7 | ||
|
|
c167cbcea6 | ||
|
|
617fdc2cfc | ||
|
|
59ee88c603 | ||
|
|
76f38b84b9 | ||
|
|
e872c25918 | ||
|
|
554f1e6432 | ||
|
|
6c2e88c485 | ||
|
|
d38cf12826 | ||
|
|
224e890e31 | ||
|
|
fdb08fe31f | ||
|
|
4324d7ed2b | ||
|
|
e35e7ee0ef | ||
|
|
bc2f481323 | ||
|
|
d5aaab480b | ||
|
|
aa239a81fa | ||
|
|
c7f6c76b0f | ||
|
|
3c5336ed61 | ||
|
|
80b102af2d | ||
|
|
cbc46d63b6 | ||
|
|
d1fc10626b | ||
|
|
7a0283086d | ||
|
|
1f53c66700 | ||
|
|
dfe173b40f | ||
|
|
7ec716d4de | ||
|
|
d2e3b86bd1 | ||
|
|
bc12182770 | ||
|
|
60745c1269 | ||
|
|
f8535fdce4 | ||
|
|
8d9f1d2750 | ||
|
|
46b60e555f | ||
|
|
900fa9b121 | ||
|
|
9681aa68b4 | ||
|
|
d11303b562 | ||
|
|
81a07b5544 | ||
|
|
8810eabfd3 | ||
|
|
db30cb31dc | ||
|
|
d873c227c1 | ||
|
|
bab6f843a5 | ||
|
|
d470ee1482 | ||
|
|
29857506f2 | ||
|
|
292e6b0eba | ||
|
|
debb4cb110 | ||
|
|
6ae64fd50c | ||
|
|
d08b39d482 | ||
|
|
9d92fa0a68 | ||
|
|
2f033f108a | ||
|
|
8d38f230cb | ||
|
|
6c04b15764 | ||
|
|
86c6c291e4 | ||
|
|
7eea68aa9a | ||
|
|
282ae3895e | ||
|
|
ccfafd9c5f | ||
|
|
7625cd99f5 | ||
|
|
b4fd4e0256 | ||
|
|
8adc88bf00 | ||
|
|
d13e5ba5cd | ||
|
|
ea1a6cd007 | ||
|
|
eae3b44310 | ||
|
|
616656638d | ||
|
|
0cf3061f8a | ||
|
|
71acc7182d | ||
|
|
6fa9d5ba79 | ||
|
|
35a7bc19da | ||
|
|
f796c3c1d2 | ||
|
|
56d6911589 | ||
|
|
fabc1e9e88 | ||
|
|
97dcc86d4d | ||
|
|
9043389f25 | ||
|
|
c99807dabb | ||
|
|
573a9a52ea | ||
|
|
ea575de250 | ||
|
|
4c653552a1 | ||
|
|
bec6aa2639 | ||
|
|
403d395ccc | ||
|
|
8e6b462645 | ||
|
|
e0aae80f4b | ||
|
|
5959254de6 | ||
|
|
d4f542c64b | ||
|
|
3cd6cd81a0 | ||
|
|
373a2a31f6 | ||
|
|
5c57ec63d9 | ||
|
|
254a56437d | ||
|
|
207fc086c0 | ||
|
|
6bcd8c1a21 | ||
|
|
2ac56ed3bf | ||
|
|
1f40c6f70d | ||
|
|
32df7a4ce8 | ||
|
|
9e5b9f023d | ||
|
|
4566ecbab5 | ||
|
|
93257f424c | ||
|
|
9b00dbda28 | ||
|
|
da66420eaa | ||
|
|
545fc9e0c6 | ||
|
|
e3cbd31312 | ||
|
|
a0b76bbde3 | ||
|
|
fdd57c54f7 | ||
|
|
f23d8298b9 | ||
|
|
c02aa8941e | ||
|
|
bff9a87a74 | ||
|
|
4b2e467128 | ||
|
|
598fcc8b38 | ||
|
|
3b1d7a2234 | ||
|
|
e293d7541b | ||
|
|
b1f0cbfed1 | ||
|
|
8c6f7aa0ef | ||
|
|
c8a68f97ed | ||
|
|
3350081e80 | ||
|
|
b300ce9b09 | ||
|
|
6136b9fed3 |
63
.all-contributorsrc
Normal file
63
.all-contributorsrc
Normal file
@@ -0,0 +1,63 @@
|
|||||||
|
{
|
||||||
|
"projectName": "g0v0-server",
|
||||||
|
"projectOwner": "GooGuTeam",
|
||||||
|
"repoType": "github",
|
||||||
|
"repoHost": "https://github.com",
|
||||||
|
"files": [
|
||||||
|
"README.md",
|
||||||
|
"README.en.md"
|
||||||
|
],
|
||||||
|
"imageSize": 100,
|
||||||
|
"commit": true,
|
||||||
|
"commitConvention": "angular",
|
||||||
|
"contributors": [
|
||||||
|
{
|
||||||
|
"login": "GooGuJiang",
|
||||||
|
"name": "咕谷酱",
|
||||||
|
"avatar_url": "https://avatars.githubusercontent.com/u/74496778?v=4",
|
||||||
|
"profile": "https://github.com/GooGuJiang",
|
||||||
|
"contributions": [
|
||||||
|
"code"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"login": "MingxuanGame",
|
||||||
|
"name": "MingxuanGame",
|
||||||
|
"avatar_url": "https://avatars.githubusercontent.com/u/68982190?v=4",
|
||||||
|
"profile": "https://blog.mxgame.top/",
|
||||||
|
"contributions": [
|
||||||
|
"code"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"login": "chenjintang-shrimp",
|
||||||
|
"name": "陈晋瑭",
|
||||||
|
"avatar_url": "https://avatars.githubusercontent.com/u/110657724?v=4",
|
||||||
|
"profile": "https://github.com/chenjintang-shrimp",
|
||||||
|
"contributions": [
|
||||||
|
"code"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"login": "4aya",
|
||||||
|
"name": "4ayo",
|
||||||
|
"avatar_url": "https://avatars.githubusercontent.com/u/115783539?v=4",
|
||||||
|
"profile": "https://4ayo.ovh",
|
||||||
|
"contributions": [
|
||||||
|
"ideas"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"login": "kyiuu1",
|
||||||
|
"name": "kyiuu1",
|
||||||
|
"avatar_url": "https://avatars.githubusercontent.com/u/188347675?v=4",
|
||||||
|
"profile": "https://github.com/kyiuu1",
|
||||||
|
"contributions": [
|
||||||
|
"ideas"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"contributorsPerLine": 7,
|
||||||
|
"linkToUsage": false,
|
||||||
|
"commitType": "docs"
|
||||||
|
}
|
||||||
@@ -6,11 +6,17 @@
|
|||||||
"workspaceFolder": "/workspaces/osu_lazer_api",
|
"workspaceFolder": "/workspaces/osu_lazer_api",
|
||||||
"containerEnv": {
|
"containerEnv": {
|
||||||
"MYSQL_DATABASE": "osu_api",
|
"MYSQL_DATABASE": "osu_api",
|
||||||
"MYSQL_USER": "osu_user",
|
"MYSQL_USER": "osu_api",
|
||||||
"MYSQL_PASSWORD": "osu_password",
|
"MYSQL_PASSWORD": "password",
|
||||||
"MYSQL_HOST": "mysql",
|
"MYSQL_HOST": "mysql",
|
||||||
"MYSQL_PORT": "3306"
|
"MYSQL_PORT": "3306"
|
||||||
},
|
},
|
||||||
|
"runArgs": [
|
||||||
|
"--name",
|
||||||
|
"g0v0-devcontainer",
|
||||||
|
"--label",
|
||||||
|
"jetbrains-attach=true"
|
||||||
|
],
|
||||||
"customizations": {
|
"customizations": {
|
||||||
"vscode": {
|
"vscode": {
|
||||||
"extensions": [
|
"extensions": [
|
||||||
@@ -21,7 +27,12 @@
|
|||||||
"ms-vscode.vscode-json",
|
"ms-vscode.vscode-json",
|
||||||
"redhat.vscode-yaml",
|
"redhat.vscode-yaml",
|
||||||
"ms-vscode.docker",
|
"ms-vscode.docker",
|
||||||
"rust-lang.rust-analyzer"
|
"rust-lang.rust-analyzer",
|
||||||
|
"ms-dotnettools.csdevkit",
|
||||||
|
"ms-dotnettools.csharp",
|
||||||
|
"ms-dotnettools.vscode-dotnet-runtime",
|
||||||
|
"ms-dotnettools.blazorwasm-companion",
|
||||||
|
"editorconfig.editorconfig"
|
||||||
],
|
],
|
||||||
"settings": {
|
"settings": {
|
||||||
"python.defaultInterpreterPath": "/usr/local/bin/python",
|
"python.defaultInterpreterPath": "/usr/local/bin/python",
|
||||||
@@ -57,7 +68,24 @@
|
|||||||
"[rust]": {
|
"[rust]": {
|
||||||
"editor.defaultFormatter": "rust-lang.rust-analyzer",
|
"editor.defaultFormatter": "rust-lang.rust-analyzer",
|
||||||
"editor.formatOnSave": true
|
"editor.formatOnSave": true
|
||||||
}
|
},
|
||||||
|
"dotnet.defaultSolution": "/workspaces/osu_lazer_api/spectator-server/osu.Server.Spectator.sln",
|
||||||
|
"[csharp]": {
|
||||||
|
"editor.formatOnSave": true,
|
||||||
|
"editor.codeActionsOnSave": {
|
||||||
|
"source.organizeImports": "explicit"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"dotnet.completion.showCompletionItemsFromUnimportedNamespaces": true,
|
||||||
|
"dotnet.inlayHints.enableInlayHintsForParameters": true,
|
||||||
|
"dotnet.inlayHints.enableInlayHintsForLiteralParameters": true,
|
||||||
|
"dotnet.inlayHints.enableInlayHintsForIndexerParameters": true,
|
||||||
|
"dotnet.inlayHints.enableInlayHintsForObjectCreationParameters": true,
|
||||||
|
"dotnet.inlayHints.enableInlayHintsForOtherParameters": true,
|
||||||
|
"dotnet.inlayHints.enableInlayHintsForTypes": true,
|
||||||
|
"dotnet.inlayHints.enableInlayHintsForImplicitVariableTypes": true,
|
||||||
|
"dotnet.inlayHints.enableInlayHintsForLambdaParameterTypes": true,
|
||||||
|
"dotnet.inlayHints.enableInlayHintsForImplicitObjectCreation": true
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
@@ -66,13 +94,19 @@
|
|||||||
"ghcr.io/devcontainers/features/rust:1": {
|
"ghcr.io/devcontainers/features/rust:1": {
|
||||||
"version": "latest",
|
"version": "latest",
|
||||||
"profile": "default"
|
"profile": "default"
|
||||||
|
},
|
||||||
|
"ghcr.io/devcontainers/features/dotnet:2": {
|
||||||
|
"version": "8.0"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"forwardPorts": [
|
"forwardPorts": [
|
||||||
8000,
|
8000,
|
||||||
3306,
|
3306,
|
||||||
6379
|
6379,
|
||||||
|
8086,
|
||||||
|
80,
|
||||||
|
8080
|
||||||
],
|
],
|
||||||
"postCreateCommand": "uv sync --dev && uv pip install rosu-pp-py && uv run alembic upgrade head && uv run pre-commit install && cd packages/msgpack_lazer_api && cargo check",
|
"postCreateCommand": "uv sync --dev --all-extras && uv run alembic upgrade head && uv run pre-commit install && cd spectator-server && dotnet restore && cd ../performance-server && dotnet restore",
|
||||||
"remoteUser": "vscode"
|
"remoteUser": "vscode"
|
||||||
}
|
}
|
||||||
@@ -12,12 +12,34 @@ services:
|
|||||||
depends_on:
|
depends_on:
|
||||||
- mysql
|
- mysql
|
||||||
- redis
|
- redis
|
||||||
|
- nginx
|
||||||
environment:
|
environment:
|
||||||
DATABASE_URL: mysql+aiomysql://osu_user:osu_password@mysql:3306/osu_api
|
# Python/FastAPI 环境变量
|
||||||
|
MYSQL_ROOT_PASSWORD: password
|
||||||
|
MYSQL_DATABASE: osu_api
|
||||||
|
MYSQL_USER: osu_api
|
||||||
|
MYSQL_PASSWORD: password
|
||||||
REDIS_URL: redis://redis:6379/0
|
REDIS_URL: redis://redis:6379/0
|
||||||
SECRET_KEY: dev-secret-key-change-in-production
|
|
||||||
OSU_CLIENT_ID: "5"
|
OSU_CLIENT_ID: "5"
|
||||||
OSU_CLIENT_SECRET: "FGc9GAtyHzeQDshWP5Ah7dega8hJACAJpQtw6OXk"
|
OSU_CLIENT_SECRET: "FGc9GAtyHzeQDshWP5Ah7dega8hJACAJpQtw6OXk"
|
||||||
|
CALCULATOR_CONFIG: '{"server_url":"http://localhost:8090"}'
|
||||||
|
|
||||||
|
# Spectator Server 环境变量
|
||||||
|
SAVE_REPLAYS: "0"
|
||||||
|
REPLAY_UPLOAD_THREADS: "1"
|
||||||
|
TRACK_BUILD_USER_COUNTS: "1"
|
||||||
|
SERVER_PORT: "8086"
|
||||||
|
REDIS_HOST: "redis"
|
||||||
|
DD_AGENT_HOST: "localhost"
|
||||||
|
SHARED_INTEROP_DOMAIN: "http://localhost:8000"
|
||||||
|
SHARED_INTEROP_SECRET: "dev-interop-secret"
|
||||||
|
SENTRY_DSN: "https://5840d8cb8d2b4d238369443bedef1d74@glitchtip.g0v0.top/4"
|
||||||
|
USE_LEGACY_RSA_AUTH: "0"
|
||||||
|
# .NET 环境变量
|
||||||
|
DOTNET_CLI_TELEMETRY_OPTOUT: "1"
|
||||||
|
DOTNET_NOLOGO: "1"
|
||||||
|
|
||||||
|
RULESETS_PATH: "/workspaces/osu_lazer_api/rulesets"
|
||||||
|
|
||||||
mysql:
|
mysql:
|
||||||
image: mysql:8.0
|
image: mysql:8.0
|
||||||
@@ -25,8 +47,8 @@ services:
|
|||||||
environment:
|
environment:
|
||||||
MYSQL_ROOT_PASSWORD: password
|
MYSQL_ROOT_PASSWORD: password
|
||||||
MYSQL_DATABASE: osu_api
|
MYSQL_DATABASE: osu_api
|
||||||
MYSQL_USER: osu_user
|
MYSQL_USER: osu_api
|
||||||
MYSQL_PASSWORD: osu_password
|
MYSQL_PASSWORD: password
|
||||||
ports:
|
ports:
|
||||||
- "3306:3306"
|
- "3306:3306"
|
||||||
volumes:
|
volumes:
|
||||||
@@ -47,6 +69,19 @@ services:
|
|||||||
- devcontainer-network
|
- devcontainer-network
|
||||||
command: redis-server --appendonly yes
|
command: redis-server --appendonly yes
|
||||||
|
|
||||||
|
nginx:
|
||||||
|
image: nginx:alpine
|
||||||
|
restart: unless-stopped
|
||||||
|
ports:
|
||||||
|
- "8080:80"
|
||||||
|
volumes:
|
||||||
|
- ./nginx.conf:/etc/nginx/conf.d/default.conf:ro
|
||||||
|
networks:
|
||||||
|
- devcontainer-network
|
||||||
|
depends_on:
|
||||||
|
- mysql
|
||||||
|
- redis
|
||||||
|
|
||||||
networks:
|
networks:
|
||||||
devcontainer-network:
|
devcontainer-network:
|
||||||
driver: bridge
|
driver: bridge
|
||||||
|
|||||||
82
.devcontainer/nginx.conf
Normal file
82
.devcontainer/nginx.conf
Normal file
@@ -0,0 +1,82 @@
|
|||||||
|
resolver 127.0.0.11 valid=10s ipv6=off;
|
||||||
|
|
||||||
|
map $http_upgrade $connection_upgrade {
|
||||||
|
default upgrade;
|
||||||
|
'' close;
|
||||||
|
}
|
||||||
|
|
||||||
|
upstream app {
|
||||||
|
zone app_backend 64k;
|
||||||
|
server devcontainer:8000 resolve;
|
||||||
|
}
|
||||||
|
|
||||||
|
upstream spectator {
|
||||||
|
zone app_backend 64k;
|
||||||
|
server devcontainer:8086 resolve;
|
||||||
|
}
|
||||||
|
|
||||||
|
server {
|
||||||
|
listen 80;
|
||||||
|
server_name _;
|
||||||
|
|
||||||
|
client_max_body_size 50m;
|
||||||
|
|
||||||
|
# 屏蔽 /_lio/ 及其所有子路径的外部访问
|
||||||
|
location ~ ^/_lio/ {
|
||||||
|
return 403;
|
||||||
|
}
|
||||||
|
|
||||||
|
# Spectator Server SignalR Hub
|
||||||
|
location /signalr/ {
|
||||||
|
proxy_pass http://spectator/;
|
||||||
|
|
||||||
|
proxy_http_version 1.1;
|
||||||
|
proxy_set_header Upgrade $http_upgrade;
|
||||||
|
proxy_set_header Connection $connection_upgrade;
|
||||||
|
|
||||||
|
proxy_set_header Host $host;
|
||||||
|
proxy_set_header X-Real-IP $remote_addr;
|
||||||
|
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||||
|
proxy_set_header X-Forwarded-Proto $scheme;
|
||||||
|
|
||||||
|
proxy_set_header Authorization $http_authorization;
|
||||||
|
|
||||||
|
proxy_read_timeout 86400s;
|
||||||
|
proxy_send_timeout 86400s;
|
||||||
|
proxy_connect_timeout 60s;
|
||||||
|
proxy_cache_bypass $http_upgrade;
|
||||||
|
|
||||||
|
proxy_buffering off;
|
||||||
|
}
|
||||||
|
|
||||||
|
# Health check for spectator server
|
||||||
|
location /health {
|
||||||
|
proxy_pass http://spectator/health;
|
||||||
|
|
||||||
|
proxy_set_header Host $host;
|
||||||
|
proxy_set_header X-Real-IP $remote_addr;
|
||||||
|
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||||
|
proxy_set_header X-Forwarded-Proto $scheme;
|
||||||
|
}
|
||||||
|
|
||||||
|
# FastAPI application
|
||||||
|
location / {
|
||||||
|
proxy_pass http://app;
|
||||||
|
|
||||||
|
proxy_http_version 1.1;
|
||||||
|
proxy_set_header Upgrade $http_upgrade;
|
||||||
|
proxy_set_header Connection $connection_upgrade;
|
||||||
|
|
||||||
|
proxy_set_header Host $host;
|
||||||
|
proxy_set_header X-Real-IP $remote_addr;
|
||||||
|
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||||
|
proxy_set_header X-Forwarded-Proto $scheme;
|
||||||
|
|
||||||
|
proxy_read_timeout 86400s;
|
||||||
|
proxy_send_timeout 86400s;
|
||||||
|
proxy_connect_timeout 60s;
|
||||||
|
proxy_cache_bypass $http_upgrade;
|
||||||
|
|
||||||
|
proxy_buffering off;
|
||||||
|
}
|
||||||
|
}
|
||||||
107
.devcontainer/start-dev.sh
Executable file
107
.devcontainer/start-dev.sh
Executable file
@@ -0,0 +1,107 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
|
||||||
|
# 开发环境启动脚本
|
||||||
|
# 按依赖顺序启动:Performance Server → FastAPI → Spectator Server
|
||||||
|
|
||||||
|
set -e
|
||||||
|
|
||||||
|
if [ -f .env ]; then
|
||||||
|
echo "加载 .env 文件中的环境变量..."
|
||||||
|
set -a
|
||||||
|
source .env
|
||||||
|
set +a
|
||||||
|
else
|
||||||
|
echo ".env 文件未找到,跳过加载环境变量。"
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo "🚀 启动开发环境..."
|
||||||
|
|
||||||
|
# 清理函数
|
||||||
|
cleanup() {
|
||||||
|
echo "🛑 正在停止服务..."
|
||||||
|
[ ! -z "$SPECTATOR_PID" ] && kill $SPECTATOR_PID 2>/dev/null || true
|
||||||
|
[ ! -z "$FASTAPI_PID" ] && kill $FASTAPI_PID 2>/dev/null || true
|
||||||
|
[ ! -z "$PERFORMANCE_PID" ] && kill $PERFORMANCE_PID 2>/dev/null || true
|
||||||
|
exit ${1:-0}
|
||||||
|
}
|
||||||
|
|
||||||
|
# 捕获中断信号和错误
|
||||||
|
trap 'cleanup 1' INT TERM ERR
|
||||||
|
|
||||||
|
# 健康检查函数
|
||||||
|
wait_for_service() {
|
||||||
|
local url=$1
|
||||||
|
local service_name=$2
|
||||||
|
local pre_sleep=$3
|
||||||
|
local max_attempts=30
|
||||||
|
local attempt=0
|
||||||
|
|
||||||
|
echo "等待 $service_name 启动..."
|
||||||
|
if [ ! -z "$pre_sleep" ]; then
|
||||||
|
sleep $pre_sleep
|
||||||
|
fi
|
||||||
|
|
||||||
|
while [ $attempt -lt $max_attempts ]; do
|
||||||
|
# 使用 curl 检查,添加 10 秒超时,区分连接失败和 HTTP 错误
|
||||||
|
http_code=$(curl -s -o /dev/null -w "%{http_code}" --connect-timeout 5 --max-time 5 "$url" 2>/dev/null || echo "000")
|
||||||
|
|
||||||
|
if [ "$http_code" = "200" ] || [ "$http_code" = "404" ]; then
|
||||||
|
echo "✅ $service_name 已就绪 (HTTP $http_code)"
|
||||||
|
return 0
|
||||||
|
elif [ "$http_code" = "000" ]; then
|
||||||
|
# 连接被拒绝或超时,服务还在启动中
|
||||||
|
echo " ⏳ $service_name 正在启动... (尝试 $((attempt + 1))/$max_attempts)"
|
||||||
|
else
|
||||||
|
# 其他 HTTP 状态码
|
||||||
|
echo " ⚠️ $service_name 返回 HTTP $http_code (尝试 $((attempt + 1))/$max_attempts)"
|
||||||
|
fi
|
||||||
|
|
||||||
|
attempt=$((attempt + 1))
|
||||||
|
sleep 2
|
||||||
|
done
|
||||||
|
|
||||||
|
echo "❌ $service_name 启动超时"
|
||||||
|
return 1
|
||||||
|
}
|
||||||
|
|
||||||
|
# 1. 启动 Performance Server (最底层依赖)
|
||||||
|
echo "启动 Performance Server..."
|
||||||
|
cd /workspaces/osu_lazer_api/performance-server
|
||||||
|
dotnet run --project PerformanceServer --urls "http://0.0.0.0:8090" &
|
||||||
|
PERFORMANCE_PID=$!
|
||||||
|
|
||||||
|
# 等待 Performance Server 就绪
|
||||||
|
if ! wait_for_service "http://localhost:8090" "Performance Server"; then
|
||||||
|
echo "Performance Server 启动失败,停止启动流程"
|
||||||
|
cleanup 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
# 2. 启动 FastAPI 服务器 (依赖 Performance Server)
|
||||||
|
echo "启动 FastAPI 服务器..."
|
||||||
|
cd /workspaces/osu_lazer_api
|
||||||
|
uv run uvicorn main:app --host 0.0.0.0 --port 8000 --reload &
|
||||||
|
FASTAPI_PID=$!
|
||||||
|
|
||||||
|
# 等待 FastAPI 就绪
|
||||||
|
if ! wait_for_service "http://localhost:8000/health" "FastAPI"; then
|
||||||
|
echo "FastAPI 启动失败,停止启动流程"
|
||||||
|
cleanup 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
# 3. 启动 Spectator Server (依赖 FastAPI)
|
||||||
|
echo "启动 Spectator Server..."
|
||||||
|
cd /workspaces/osu_lazer_api/spectator-server
|
||||||
|
dotnet run --project osu.Server.Spectator --urls "http://0.0.0.0:8086" &
|
||||||
|
SPECTATOR_PID=$!
|
||||||
|
|
||||||
|
echo ""
|
||||||
|
echo "✅ 所有服务已启动:"
|
||||||
|
echo " - FastAPI: http://localhost:8000"
|
||||||
|
echo " - Spectator Server: http://localhost:8086"
|
||||||
|
echo " - Performance Server: http://localhost:8090"
|
||||||
|
echo " - Nginx (统一入口): http://localhost:8080"
|
||||||
|
echo ""
|
||||||
|
echo "按 Ctrl+C 停止所有服务"
|
||||||
|
|
||||||
|
# 等待用户中断
|
||||||
|
wait
|
||||||
229
.editorconfig
229
.editorconfig
@@ -26,3 +26,232 @@ indent_size = 2
|
|||||||
|
|
||||||
[{*.py,*.pyi}]
|
[{*.py,*.pyi}]
|
||||||
indent_size = 4
|
indent_size = 4
|
||||||
|
|
||||||
|
# C# files
|
||||||
|
[*.cs]
|
||||||
|
|
||||||
|
#### Core EditorConfig Options ####
|
||||||
|
|
||||||
|
# Indentation and spacing
|
||||||
|
indent_size = 4
|
||||||
|
indent_style = space
|
||||||
|
tab_width = 4
|
||||||
|
|
||||||
|
# New line preferences
|
||||||
|
end_of_line = crlf
|
||||||
|
insert_final_newline = false
|
||||||
|
|
||||||
|
#### .NET Coding Conventions ####
|
||||||
|
|
||||||
|
# Organize usings
|
||||||
|
dotnet_separate_import_directive_groups = false
|
||||||
|
dotnet_sort_system_directives_first = false
|
||||||
|
file_header_template = unset
|
||||||
|
|
||||||
|
# this. and Me. preferences
|
||||||
|
dotnet_style_qualification_for_event = false
|
||||||
|
dotnet_style_qualification_for_field = false
|
||||||
|
dotnet_style_qualification_for_method = false
|
||||||
|
dotnet_style_qualification_for_property = false
|
||||||
|
|
||||||
|
# Language keywords vs BCL types preferences
|
||||||
|
dotnet_style_predefined_type_for_locals_parameters_members = true
|
||||||
|
dotnet_style_predefined_type_for_member_access = true
|
||||||
|
|
||||||
|
# Parentheses preferences
|
||||||
|
dotnet_style_parentheses_in_arithmetic_binary_operators = always_for_clarity
|
||||||
|
dotnet_style_parentheses_in_other_binary_operators = always_for_clarity
|
||||||
|
dotnet_style_parentheses_in_other_operators = never_if_unnecessary
|
||||||
|
dotnet_style_parentheses_in_relational_binary_operators = always_for_clarity
|
||||||
|
|
||||||
|
# Modifier preferences
|
||||||
|
dotnet_style_require_accessibility_modifiers = for_non_interface_members
|
||||||
|
|
||||||
|
# Expression-level preferences
|
||||||
|
dotnet_style_coalesce_expression = true
|
||||||
|
dotnet_style_collection_initializer = true
|
||||||
|
dotnet_style_explicit_tuple_names = true
|
||||||
|
dotnet_style_namespace_match_folder = true
|
||||||
|
dotnet_style_null_propagation = true
|
||||||
|
dotnet_style_object_initializer = true
|
||||||
|
dotnet_style_operator_placement_when_wrapping = beginning_of_line
|
||||||
|
dotnet_style_prefer_auto_properties = true
|
||||||
|
dotnet_style_prefer_collection_expression = when_types_loosely_match
|
||||||
|
dotnet_style_prefer_compound_assignment = true
|
||||||
|
dotnet_style_prefer_conditional_expression_over_assignment = true
|
||||||
|
dotnet_style_prefer_conditional_expression_over_return = true
|
||||||
|
dotnet_style_prefer_foreach_explicit_cast_in_source = when_strongly_typed
|
||||||
|
dotnet_style_prefer_inferred_anonymous_type_member_names = true
|
||||||
|
dotnet_style_prefer_inferred_tuple_names = true
|
||||||
|
dotnet_style_prefer_is_null_check_over_reference_equality_method = true
|
||||||
|
dotnet_style_prefer_simplified_boolean_expressions = true
|
||||||
|
dotnet_style_prefer_simplified_interpolation = true
|
||||||
|
|
||||||
|
# Field preferences
|
||||||
|
dotnet_style_readonly_field = true
|
||||||
|
|
||||||
|
# Parameter preferences
|
||||||
|
dotnet_code_quality_unused_parameters = all:silent
|
||||||
|
|
||||||
|
# Suppression preferences
|
||||||
|
dotnet_remove_unnecessary_suppression_exclusions = none
|
||||||
|
|
||||||
|
# New line preferences
|
||||||
|
dotnet_style_allow_multiple_blank_lines_experimental = true
|
||||||
|
dotnet_style_allow_statement_immediately_after_block_experimental = true
|
||||||
|
|
||||||
|
#### C# Coding Conventions ####
|
||||||
|
|
||||||
|
# var preferences
|
||||||
|
csharp_style_var_elsewhere = false
|
||||||
|
csharp_style_var_for_built_in_types = false
|
||||||
|
csharp_style_var_when_type_is_apparent = false
|
||||||
|
|
||||||
|
# Expression-bodied members
|
||||||
|
csharp_style_expression_bodied_accessors = true
|
||||||
|
csharp_style_expression_bodied_constructors = false
|
||||||
|
csharp_style_expression_bodied_indexers = true
|
||||||
|
csharp_style_expression_bodied_lambdas = true
|
||||||
|
csharp_style_expression_bodied_local_functions = false
|
||||||
|
csharp_style_expression_bodied_methods = false
|
||||||
|
csharp_style_expression_bodied_operators = false
|
||||||
|
csharp_style_expression_bodied_properties = true
|
||||||
|
|
||||||
|
# Pattern matching preferences
|
||||||
|
csharp_style_pattern_matching_over_as_with_null_check = true
|
||||||
|
csharp_style_pattern_matching_over_is_with_cast_check = true
|
||||||
|
csharp_style_prefer_extended_property_pattern = true
|
||||||
|
csharp_style_prefer_not_pattern = true
|
||||||
|
csharp_style_prefer_pattern_matching = true
|
||||||
|
csharp_style_prefer_switch_expression = true
|
||||||
|
|
||||||
|
# Null-checking preferences
|
||||||
|
csharp_style_conditional_delegate_call = true
|
||||||
|
|
||||||
|
# Modifier preferences
|
||||||
|
csharp_prefer_static_local_function = true
|
||||||
|
csharp_preferred_modifier_order = public,private,protected,internal,file,static,extern,new,virtual,abstract,sealed,override,readonly,unsafe,required,volatile,async
|
||||||
|
csharp_style_prefer_readonly_struct = true
|
||||||
|
csharp_style_prefer_readonly_struct_member = true
|
||||||
|
|
||||||
|
# Code-block preferences
|
||||||
|
csharp_prefer_braces = true
|
||||||
|
csharp_prefer_simple_using_statement = true
|
||||||
|
csharp_style_namespace_declarations = block_scoped
|
||||||
|
csharp_style_prefer_method_group_conversion = true
|
||||||
|
csharp_style_prefer_primary_constructors = true
|
||||||
|
csharp_style_prefer_top_level_statements = true
|
||||||
|
|
||||||
|
# Expression-level preferences
|
||||||
|
csharp_prefer_simple_default_expression = true
|
||||||
|
csharp_style_deconstructed_variable_declaration = true
|
||||||
|
csharp_style_implicit_object_creation_when_type_is_apparent = true
|
||||||
|
csharp_style_inlined_variable_declaration = true
|
||||||
|
csharp_style_prefer_index_operator = true
|
||||||
|
csharp_style_prefer_local_over_anonymous_function = true
|
||||||
|
csharp_style_prefer_null_check_over_type_check = true
|
||||||
|
csharp_style_prefer_range_operator = true
|
||||||
|
csharp_style_prefer_tuple_swap = true
|
||||||
|
csharp_style_prefer_utf8_string_literals = true
|
||||||
|
csharp_style_throw_expression = true
|
||||||
|
csharp_style_unused_value_assignment_preference = discard_variable
|
||||||
|
csharp_style_unused_value_expression_statement_preference = discard_variable
|
||||||
|
|
||||||
|
# 'using' directive preferences
|
||||||
|
csharp_using_directive_placement = outside_namespace
|
||||||
|
|
||||||
|
# New line preferences
|
||||||
|
csharp_style_allow_blank_line_after_colon_in_constructor_initializer_experimental = true
|
||||||
|
csharp_style_allow_blank_line_after_token_in_arrow_expression_clause_experimental = true
|
||||||
|
csharp_style_allow_blank_line_after_token_in_conditional_expression_experimental = true
|
||||||
|
csharp_style_allow_blank_lines_between_consecutive_braces_experimental = true
|
||||||
|
csharp_style_allow_embedded_statements_on_same_line_experimental = true
|
||||||
|
|
||||||
|
#### C# Formatting Rules ####
|
||||||
|
|
||||||
|
# New line preferences
|
||||||
|
csharp_new_line_before_catch = true
|
||||||
|
csharp_new_line_before_else = true
|
||||||
|
csharp_new_line_before_finally = true
|
||||||
|
csharp_new_line_before_members_in_anonymous_types = true
|
||||||
|
csharp_new_line_before_members_in_object_initializers = true
|
||||||
|
csharp_new_line_before_open_brace = all
|
||||||
|
csharp_new_line_between_query_expression_clauses = true
|
||||||
|
|
||||||
|
# Indentation preferences
|
||||||
|
csharp_indent_block_contents = true
|
||||||
|
csharp_indent_braces = false
|
||||||
|
csharp_indent_case_contents = true
|
||||||
|
csharp_indent_case_contents_when_block = true
|
||||||
|
csharp_indent_labels = one_less_than_current
|
||||||
|
csharp_indent_switch_labels = true
|
||||||
|
|
||||||
|
# Space preferences
|
||||||
|
csharp_space_after_cast = false
|
||||||
|
csharp_space_after_colon_in_inheritance_clause = true
|
||||||
|
csharp_space_after_comma = true
|
||||||
|
csharp_space_after_dot = false
|
||||||
|
csharp_space_after_keywords_in_control_flow_statements = true
|
||||||
|
csharp_space_after_semicolon_in_for_statement = true
|
||||||
|
csharp_space_around_binary_operators = before_and_after
|
||||||
|
csharp_space_around_declaration_statements = false
|
||||||
|
csharp_space_before_colon_in_inheritance_clause = true
|
||||||
|
csharp_space_before_comma = false
|
||||||
|
csharp_space_before_dot = false
|
||||||
|
csharp_space_before_open_square_brackets = false
|
||||||
|
csharp_space_before_semicolon_in_for_statement = false
|
||||||
|
csharp_space_between_empty_square_brackets = false
|
||||||
|
csharp_space_between_method_call_empty_parameter_list_parentheses = false
|
||||||
|
csharp_space_between_method_call_name_and_opening_parenthesis = false
|
||||||
|
csharp_space_between_method_call_parameter_list_parentheses = false
|
||||||
|
csharp_space_between_method_declaration_empty_parameter_list_parentheses = false
|
||||||
|
csharp_space_between_method_declaration_name_and_open_parenthesis = false
|
||||||
|
csharp_space_between_method_declaration_parameter_list_parentheses = false
|
||||||
|
csharp_space_between_parentheses = false
|
||||||
|
csharp_space_between_square_brackets = false
|
||||||
|
|
||||||
|
# Wrapping preferences
|
||||||
|
csharp_preserve_single_line_blocks = true
|
||||||
|
csharp_preserve_single_line_statements = true
|
||||||
|
|
||||||
|
#### Naming styles ####
|
||||||
|
|
||||||
|
# Naming rules
|
||||||
|
|
||||||
|
dotnet_naming_rule.interface_should_be_begins_with_i.severity = suggestion
|
||||||
|
dotnet_naming_rule.interface_should_be_begins_with_i.symbols = interface
|
||||||
|
dotnet_naming_rule.interface_should_be_begins_with_i.style = begins_with_i
|
||||||
|
|
||||||
|
dotnet_naming_rule.types_should_be_pascal_case.severity = suggestion
|
||||||
|
dotnet_naming_rule.types_should_be_pascal_case.symbols = types
|
||||||
|
dotnet_naming_rule.types_should_be_pascal_case.style = pascal_case
|
||||||
|
|
||||||
|
dotnet_naming_rule.non_field_members_should_be_pascal_case.severity = suggestion
|
||||||
|
dotnet_naming_rule.non_field_members_should_be_pascal_case.symbols = non_field_members
|
||||||
|
dotnet_naming_rule.non_field_members_should_be_pascal_case.style = pascal_case
|
||||||
|
|
||||||
|
# Symbol specifications
|
||||||
|
|
||||||
|
dotnet_naming_symbols.interface.applicable_kinds = interface
|
||||||
|
dotnet_naming_symbols.interface.applicable_accessibilities = public, internal, private, protected, protected_internal, private_protected
|
||||||
|
dotnet_naming_symbols.interface.required_modifiers =
|
||||||
|
|
||||||
|
dotnet_naming_symbols.types.applicable_kinds = class, struct, interface, enum
|
||||||
|
dotnet_naming_symbols.types.applicable_accessibilities = public, internal, private, protected, protected_internal, private_protected
|
||||||
|
dotnet_naming_symbols.types.required_modifiers =
|
||||||
|
|
||||||
|
dotnet_naming_symbols.non_field_members.applicable_kinds = property, event, method
|
||||||
|
dotnet_naming_symbols.non_field_members.applicable_accessibilities = public, internal, private, protected, protected_internal, private_protected
|
||||||
|
dotnet_naming_symbols.non_field_members.required_modifiers =
|
||||||
|
|
||||||
|
# Naming styles
|
||||||
|
|
||||||
|
dotnet_naming_style.pascal_case.required_prefix =
|
||||||
|
dotnet_naming_style.pascal_case.required_suffix =
|
||||||
|
dotnet_naming_style.pascal_case.word_separator =
|
||||||
|
dotnet_naming_style.pascal_case.capitalization = pascal_case
|
||||||
|
|
||||||
|
dotnet_naming_style.begins_with_i.required_prefix = I
|
||||||
|
dotnet_naming_style.begins_with_i.required_suffix =
|
||||||
|
dotnet_naming_style.begins_with_i.word_separator =
|
||||||
|
dotnet_naming_style.begins_with_i.capitalization = pascal_case
|
||||||
|
|||||||
172
.env.example
172
.env.example
@@ -1,94 +1,122 @@
|
|||||||
# 数据库设置
|
# see https://github.com/GooGuTeam/g0v0-server/wiki/Configuration
|
||||||
|
# Database Settings
|
||||||
MYSQL_HOST="localhost"
|
MYSQL_HOST="localhost"
|
||||||
MYSQL_PORT=3306
|
MYSQL_PORT=3306
|
||||||
MYSQL_DATABASE="osu_api"
|
MYSQL_DATABASE="osu_api"
|
||||||
MYSQL_USER="osu_api"
|
MYSQL_USER="osu_api"
|
||||||
MYSQL_PASSWORD="password"
|
MYSQL_PASSWORD="password"
|
||||||
MYSQL_ROOT_PASSWORD="password"
|
MYSQL_ROOT_PASSWORD="password"
|
||||||
# Redis URL
|
REDIS_URL="redis://127.0.0.1:6379"
|
||||||
REDIS_URL="redis://127.0.0.1:6379/0"
|
|
||||||
|
|
||||||
# JWT 密钥,使用 openssl rand -hex 32 生成
|
# JWT Settings
|
||||||
|
# Use `openssl rand -hex 32` to generate a secure key
|
||||||
JWT_SECRET_KEY="your_jwt_secret_here"
|
JWT_SECRET_KEY="your_jwt_secret_here"
|
||||||
# JWT 算法
|
|
||||||
ALGORITHM="HS256"
|
ALGORITHM="HS256"
|
||||||
# JWT 过期时间
|
|
||||||
ACCESS_TOKEN_EXPIRE_MINUTES=1440
|
ACCESS_TOKEN_EXPIRE_MINUTES=1440
|
||||||
|
|
||||||
# 服务器地址
|
# OAuth Settings
|
||||||
|
OSU_CLIENT_ID=5
|
||||||
|
OSU_CLIENT_SECRET="FGc9GAtyHzeQDshWP5Ah7dega8hJACAJpQtw6OXk"
|
||||||
|
OSU_WEB_CLIENT_ID=6
|
||||||
|
# Use `openssl rand -hex 40` to generate a secure key
|
||||||
|
OSU_WEB_CLIENT_SECRET="your_osu_web_client_secret_here"
|
||||||
|
|
||||||
|
# Server Settings
|
||||||
HOST="0.0.0.0"
|
HOST="0.0.0.0"
|
||||||
PORT=8000
|
PORT=8000
|
||||||
# 服务器 URL
|
|
||||||
SERVER_URL="http://localhost:8000"
|
|
||||||
# 额外的 CORS 允许的域名列表
|
|
||||||
CORS_URLS='[]'
|
|
||||||
# 前端 URL,当访问从游戏打开的 URL 时会重定向到这个 URL,为空表示不重定向
|
|
||||||
FRONTEND_URL
|
|
||||||
# 调试模式,生产环境请设置为 false
|
|
||||||
DEBUG=false
|
DEBUG=false
|
||||||
|
CORS_URLS='[]'
|
||||||
|
SERVER_URL="http://localhost:8000"
|
||||||
|
FRONTEND_URL=
|
||||||
|
ENABLE_RATE_LIMIT=true
|
||||||
|
|
||||||
# osu! 登录设置
|
# Fetcher Settings
|
||||||
OSU_CLIENT_ID=5 # lazer client ID
|
|
||||||
OSU_CLIENT_SECRET="FGc9GAtyHzeQDshWP5Ah7dega8hJACAJpQtw6OXk" # lazer client secret
|
|
||||||
OSU_WEB_CLIENT_ID=6 # 网页端 client ID
|
|
||||||
OSU_WEB_CLIENT_SECRET="your_osu_web_client_secret_here" # 网页端 client secret,使用 openssl rand -hex 40 生成
|
|
||||||
|
|
||||||
# SignalR 服务器设置
|
|
||||||
SIGNALR_NEGOTIATE_TIMEOUT=30
|
|
||||||
SIGNALR_PING_INTERVAL=15
|
|
||||||
|
|
||||||
# Fetcher 设置
|
|
||||||
FETCHER_CLIENT_ID=""
|
FETCHER_CLIENT_ID=""
|
||||||
FETCHER_CLIENT_SECRET=""
|
FETCHER_CLIENT_SECRET=""
|
||||||
FETCHER_SCOPES=public
|
FETCHER_SCOPES="public"
|
||||||
|
|
||||||
# 日志设置
|
# Logging Settings
|
||||||
LOG_LEVEL="INFO"
|
LOG_LEVEL="INFO"
|
||||||
|
|
||||||
# 邮件服务设置
|
# Verification Settings
|
||||||
SMTP_SERVER="smtp.gmail.com" # SMTP 服务器地址
|
EMAIL_PROVIDER=mailersend
|
||||||
SMTP_PORT=587 # SMTP 端口
|
MAILERSEND_API_KEY=mlsn.xxxxxxxxxxxxxxxxxxx
|
||||||
SMTP_USERNAME="your-email@gmail.com" # 邮箱用户名
|
MAILERSEND_FROM_EMAIL=no-reply@xxxxxxx.mlsender.net
|
||||||
SMTP_PASSWORD="your-app-password" # 邮箱密码或应用专用密码
|
ENABLE_TOTP_VERIFICATION=true
|
||||||
FROM_EMAIL="noreply@your-server.com" # 发送方邮箱
|
TOTP_ISSUER="osu! server"
|
||||||
FROM_NAME="osu! Private Server" # 发送方名称
|
TOTP_SERVICE_NAME="g0v0! Lazer Server"
|
||||||
|
ENABLE_EMAIL_VERIFICATION=false
|
||||||
|
ENABLE_SESSION_VERIFICATION=false
|
||||||
|
ENABLE_MULTI_DEVICE_LOGIN=true
|
||||||
|
MAX_TOKENS_PER_CLIENT=10
|
||||||
|
DEVICE_TRUST_DURATION_DAYS=30
|
||||||
|
SMTP_SERVER="localhost"
|
||||||
|
SMTP_PORT=587
|
||||||
|
SMTP_USERNAME=""
|
||||||
|
SMTP_PASSWORD=""
|
||||||
|
FROM_EMAIL="noreply@example.com"
|
||||||
|
FROM_NAME="osu! server"
|
||||||
|
|
||||||
# 邮件验证功能开关
|
# Sentry Configuration
|
||||||
ENABLE_EMAIL_VERIFICATION=true # 是否启用邮件验证功能(新位置登录时需要邮件验证)
|
SENTRY_DSN=
|
||||||
ENABLE_EMAIL_SENDING=false # 是否真实发送邮件(false时仅模拟发送,输出到日志)
|
|
||||||
|
|
||||||
# Sentry 设置,为空表示不启用
|
# New Relic Configuration
|
||||||
SENTRY_DSN
|
NEW_RELIC_ENVIRONMENT=
|
||||||
|
|
||||||
# GeoIP 配置 - MaxMind License Key(用于 IP 地址地理位置查询)
|
# GeoIP Configuration
|
||||||
MAXMIND_LICENSE_KEY=""
|
MAXMIND_LICENSE_KEY=""
|
||||||
# GeoIP 数据库存储目录
|
|
||||||
GEOIP_DEST_DIR="./geoip"
|
GEOIP_DEST_DIR="./geoip"
|
||||||
# GeoIP 每周更新的星期几(0=周一,6=周日)
|
|
||||||
GEOIP_UPDATE_DAY=1
|
GEOIP_UPDATE_DAY=1
|
||||||
# GeoIP 每周更新时间(小时,0-23)
|
|
||||||
GEOIP_UPDATE_HOUR=2
|
GEOIP_UPDATE_HOUR=2
|
||||||
|
|
||||||
# 游戏设置
|
# Game Settings
|
||||||
ENABLE_RX=false # 启用 RX mod 统计数据
|
ENABLE_RX=false
|
||||||
ENABLE_AP=false # 启用 AP mod Z统计数据
|
ENABLE_AP=false
|
||||||
ENABLE_ALL_MODS_PP=false # 启用所有 Mod 的 PP 计算
|
ENABLE_SUPPORTER_FOR_ALL_USERS=false
|
||||||
ENABLE_SUPPORTER_FOR_ALL_USERS=false # 启用所有新注册用户的支持者状态
|
ENABLE_ALL_BEATMAP_LEADERBOARD=false
|
||||||
ENABLE_ALL_BEATMAP_LEADERBOARD=false # 启用所有谱面的排行榜(没有排行榜的谱面会以 APPROVED 状态返回)
|
ENABLE_ALL_BEATMAP_PP=false
|
||||||
ENABLE_ALL_BEATMAP_PP=false # 允许任何谱面获得 PP
|
SEASONAL_BACKGROUNDS='[]'
|
||||||
SUSPICIOUS_SCORE_CHECK=true # 是否检查可疑的分数,默认开启
|
BEATMAP_TAG_TOP_COUNT=2
|
||||||
SEASONAL_BACKGROUNDS='[]' # 季节背景图 URL 列表
|
OLD_SCORE_PROCESSING_MODE=normal
|
||||||
BANNED_NAME='["mrekk", "vaxei", "btmc", "cookiezi", "peppy", "saragi", "chocomint"]' # 禁止使用的用户名列表
|
|
||||||
|
|
||||||
# 存储服务设置
|
# Beatmap Cache Settings
|
||||||
# 支持的存储类型:local(本地存储)、r2(Cloudflare R2)、s3(AWS S3)
|
ENABLE_BEATMAP_PRELOAD=true
|
||||||
|
BEATMAP_CACHE_EXPIRE_HOURS=24
|
||||||
|
BEATMAPSET_CACHE_EXPIRE_SECONDS=3600
|
||||||
|
|
||||||
|
# Ranking Cache Settings
|
||||||
|
ENABLE_RANKING_CACHE=true
|
||||||
|
RANKING_CACHE_EXPIRE_MINUTES=10
|
||||||
|
RANKING_CACHE_REFRESH_INTERVAL_MINUTES=10
|
||||||
|
RANKING_CACHE_MAX_PAGES=20
|
||||||
|
RANKING_CACHE_TOP_COUNTRIES=20
|
||||||
|
|
||||||
|
# User Cache Settings
|
||||||
|
ENABLE_USER_CACHE_PRELOAD=true
|
||||||
|
USER_CACHE_EXPIRE_SECONDS=300
|
||||||
|
USER_SCORES_CACHE_EXPIRE_SECONDS=60
|
||||||
|
USER_BEATMAPSETS_CACHE_EXPIRE_SECONDS=600
|
||||||
|
USER_CACHE_MAX_PRELOAD_USERS=200
|
||||||
|
USER_CACHE_CONCURRENT_LIMIT=10
|
||||||
|
|
||||||
|
# Anti-cheat Settings
|
||||||
|
SUSPICIOUS_SCORE_CHECK=true
|
||||||
|
BANNED_NAME='["mrekk", "vaxei", "btmc", "cookiezi", "peppy", "saragi", "chocomint"]'
|
||||||
|
ALLOW_DELETE_SCORES=false
|
||||||
|
|
||||||
|
# Beatmap Syncing Settings
|
||||||
|
# POST `/api/private/beatmapsets/{beatmapset_id}/sync?immediate=true` to sync a beatmapset immediately
|
||||||
|
ENABLE_AUTO_BEATMAP_SYNC=false
|
||||||
|
BEATMAP_SYNC_INTERVAL_MINUTES=60
|
||||||
|
|
||||||
|
# Storage Settings
|
||||||
|
# Supported storage services: local, r2, s3
|
||||||
STORAGE_SERVICE="local"
|
STORAGE_SERVICE="local"
|
||||||
|
|
||||||
# 存储服务配置 (JSON 格式)
|
# Local Storage Settings (when STORAGE_SERVICE=local)
|
||||||
# 本地存储配置(当 STORAGE_SERVICE=local 时)
|
# STORAGE_SETTINGS='{"local_storage_path": "./storage"}'
|
||||||
STORAGE_SETTINGS='{"local_storage_path": "./storage"}'
|
|
||||||
|
|
||||||
# Cloudflare R2 存储配置(当 STORAGE_SERVICE=r2 时)
|
# Cloudflare R2 Storage Settings (when STORAGE_SERVICE=r2)
|
||||||
# STORAGE_SETTINGS='{
|
# STORAGE_SETTINGS='{
|
||||||
# "r2_account_id": "your_cloudflare_r2_account_id",
|
# "r2_account_id": "your_cloudflare_r2_account_id",
|
||||||
# "r2_access_key_id": "your_r2_access_key_id",
|
# "r2_access_key_id": "your_r2_access_key_id",
|
||||||
@@ -97,11 +125,31 @@ STORAGE_SETTINGS='{"local_storage_path": "./storage"}'
|
|||||||
# "r2_public_url_base": "https://your-custom-domain.com"
|
# "r2_public_url_base": "https://your-custom-domain.com"
|
||||||
# }'
|
# }'
|
||||||
|
|
||||||
# AWS S3 存储配置(当 STORAGE_SERVICE=s3 时)
|
# AWS S3 Storage Settings (when STORAGE_SERVICE=s3)
|
||||||
# STORAGE_SETTINGS='{
|
# STORAGE_SETTINGS='{
|
||||||
# "s3_access_key_id": "your_aws_access_key_id",
|
# "s3_access_key_id": "your_aws_access_key_id",
|
||||||
# "s3_secret_access_key": "your_aws_secret_access_key",
|
# "s3_secret_access_key": "your_aws_secret_access_key",
|
||||||
# "s3_bucket_name": "your_s3_bucket_name",
|
# "s3_bucket_name": "your_s3_bucket_name",
|
||||||
# "s3_region_name": "us-east-1",
|
# "s3_region_name": "us-east-1",
|
||||||
# "s3_public_url_base": "https://your-custom-domain.com"
|
# "s3_public_url_base": "https://your-custom
|
||||||
# }'
|
|
||||||
|
# Asset Proxy
|
||||||
|
ENABLE_ASSET_PROXY=true
|
||||||
|
CUSTOM_ASSET_DOMAIN=g0v0.top
|
||||||
|
ASSET_PROXY_PREFIX=assets-ppy
|
||||||
|
AVATAR_PROXY_PREFIX=a-ppy
|
||||||
|
BEATMAP_PROXY_PREFIX=b-ppy
|
||||||
|
|
||||||
|
# Spectator Server
|
||||||
|
# Other configurations see https://github.com/GooGuTeam/osu-server-spectator
|
||||||
|
SAVE_REPLAYS=0
|
||||||
|
REDIS_HOST=localhost
|
||||||
|
SHARED_INTEROP_DOMAIN=http://localhost:8000
|
||||||
|
SERVER_PORT=80
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
ENABLE_TURNSTILE_VERIFICATION=true
|
||||||
|
TURNSTILE_SECRET_KEY="1x0000000000000000000000000000000AA"
|
||||||
|
TURNSTILE_DEV_MODE=true
|
||||||
|
|||||||
1
.github/FUNDING.yml
vendored
Normal file
1
.github/FUNDING.yml
vendored
Normal file
@@ -0,0 +1 @@
|
|||||||
|
custom: https://afdian.com/a/g0v0_server
|
||||||
184
.github/copilot-instructions.md
vendored
Normal file
184
.github/copilot-instructions.md
vendored
Normal file
@@ -0,0 +1,184 @@
|
|||||||
|
# copilot-instruction
|
||||||
|
|
||||||
|
> 此文件是 AGENTS.md 的复制。一切以 AGENTS.md 为主。
|
||||||
|
|
||||||
|
> 使用自动化与 AI 代理(GitHub Copilot、依赖/CI 机器人,以及仓库中的运行时调度器/worker)的指导原则,适用于 g0v0-server 仓库。
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## API 参考
|
||||||
|
|
||||||
|
本项目必须保持与公开的 osu! API 兼容。在添加或映射端点时请参考:
|
||||||
|
|
||||||
|
- **v1(旧版):** [https://github.com/ppy/osu-api/wiki](https://github.com/ppy/osu-api/wiki)
|
||||||
|
- **v2(OpenAPI):** [https://osu.ppy.sh/docs/openapi.yaml](https://osu.ppy.sh/docs/openapi.yaml)
|
||||||
|
|
||||||
|
任何在 `app/router/v1/`、`app/router/v2/` 或 `app/router/notification/` 中的实现必须与官方规范保持一致。自定义或实验性的端点应放在 `app/router/private/` 中。
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 代理类别
|
||||||
|
|
||||||
|
允许的代理分为三类:
|
||||||
|
|
||||||
|
- **代码生成/补全代理**(如 GitHub Copilot 或其他 LLM)—— **仅当** 有维护者审核并批准输出时允许使用。
|
||||||
|
- **自动维护代理**(如 Dependabot、Renovate、pre-commit.ci)—— 允许使用,但必须遵守严格的 PR 和 CI 政策。
|
||||||
|
- **运行时/后台代理**(调度器、worker)—— 属于产品代码的一部分;必须遵守生命周期、并发和幂等性规范。
|
||||||
|
|
||||||
|
所有由代理生成或建议的更改必须遵守以下规则。
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 所有代理的规则
|
||||||
|
|
||||||
|
1. **单一职责的 PR。** 代理的 PR 必须只解决一个问题(一个功能、一个 bug 修复或一次依赖更新)。提交信息应使用 Angular 风格(如 `feat(api): add ...`)。
|
||||||
|
2. **通过 Lint 与 CI 检查。** 每个 PR(包括代理创建的)在合并前必须通过 `pyright`、`ruff`、`pre-commit` 钩子和仓库 CI。PR 中应附带 CI 运行结果链接。
|
||||||
|
3. **绝不可提交敏感信息。** 代理不得提交密钥、密码、token 或真实 `.env` 值。如果检测到可能的敏感信息,代理必须中止并通知指定的维护者。
|
||||||
|
4. **API 位置限制。** 不得在 `app/router/v1` 或 `app/router/v2` 下添加新的公开端点,除非该端点在官方 v1/v2 规范中存在。自定义或实验性端点必须放在 `app/router/private/`。
|
||||||
|
5. **保持公共契约稳定。** 未经批准的迁移计划,不得随意修改响应 schema、路由前缀或其他公共契约。若有变更,PR 中必须包含明确的兼容性说明。
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Copilot / LLM 使用
|
||||||
|
|
||||||
|
> 关于在本仓库中使用 GitHub Copilot 和其他基于 LLM 的辅助工具的统一指导。
|
||||||
|
|
||||||
|
### 关键项目结构(需要了解的内容)
|
||||||
|
|
||||||
|
- **应用入口:** `main.py` —— FastAPI 应用,包含启动/关闭生命周期管理(fetchers、GeoIP、调度器、缓存与健康检查、Redis 消息、统计、成就系统)。
|
||||||
|
|
||||||
|
- **路由:** `app/router/` 包含所有路由组。主要的路由包括:
|
||||||
|
- `v1/`(v1 端点)
|
||||||
|
- `v2/`(v2 端点)
|
||||||
|
- `notification/` 路由(聊天/通知子系统)
|
||||||
|
- `auth.py`(认证/token 流程)
|
||||||
|
- `private/`(自定义或实验性的端点)
|
||||||
|
|
||||||
|
**规则:** `v1/` 和 `v2/` 必须与官方 API 对应。仅内部或实验端点应放在 `app/router/private/`。
|
||||||
|
|
||||||
|
- **模型与数据库工具:**
|
||||||
|
- SQLModel/ORM 模型在 `app/database/`。
|
||||||
|
- 非数据库模型在 `app/models/`。
|
||||||
|
- 修改模型/schema 时必须生成 Alembic 迁移,并手动检查生成的 SQL 与索引。
|
||||||
|
|
||||||
|
- **服务层:** `app/service/` 保存领域逻辑(如缓存工具、通知/邮件逻辑)。复杂逻辑应放在 service,而不是路由处理器中。
|
||||||
|
|
||||||
|
- **任务:** `app/tasks/` 保存任务(定时任务、启动任务、关闭任务)。
|
||||||
|
- 均在 `__init__.py` 进行导出。
|
||||||
|
- 对于启动任务/关闭任务,在 `main.py` 的 `lifespan` 调用。
|
||||||
|
- 定时任务使用 APScheduler
|
||||||
|
|
||||||
|
- **缓存与依赖:** 使用 `app/dependencies/` 提供的 Redis 依赖和缓存服务(遵循现有 key 命名约定,如 `user:{id}:...`)。
|
||||||
|
|
||||||
|
- **日志:** 使用 `app/log` 提供的日志工具。
|
||||||
|
|
||||||
|
### 实用工作流(提示模式)
|
||||||
|
|
||||||
|
- **添加 v2 端点(正确方式):** 在 `app/router/v2/` 下添加文件,导出路由,实现基于数据库与缓存依赖的异步处理函数。**不得**在 v1/v2 添加非官方端点。
|
||||||
|
- **添加自定义端点:** 放在 `app/router/private/`,保持处理器精简,将业务逻辑放入 `app/service/`。
|
||||||
|
- **鉴权:** 使用 [`app.dependencies.user`](../app/dependencies/user.py) 提供的依赖注入,如 `ClientUser` 和 `get_current_user`,参考下方。
|
||||||
|
|
||||||
|
```python
|
||||||
|
from typing import Annotated
|
||||||
|
from fastapi import Security
|
||||||
|
from app.dependencies.user import ClientUser, get_current_user
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/some-api")
|
||||||
|
async def _(current_user: Annotated[User, Security(get_current_user, scopes=["public"])]):
|
||||||
|
...
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/some-client-api")
|
||||||
|
async def _(current_user: ClientUser):
|
||||||
|
...
|
||||||
|
```
|
||||||
|
|
||||||
|
- **添加后台任务:** 将任务逻辑写在 `app/service/_job.py`(幂等、可重试)。调度器入口放在 `app/scheduler/_scheduler.py`,并在应用生命周期注册。
|
||||||
|
- **数据库 schema 变更:** 修改 `app/models/` 中的 SQLModel 模型,运行 `alembic revision --autogenerate`,检查迁移并本地测试 `alembic upgrade head` 后再提交。
|
||||||
|
- **缓存写入与响应:** 使用现有的 `UserResp` 模式和 `UserCacheService`;异步缓存写入应使用后台任务。
|
||||||
|
|
||||||
|
### 提示指导(给 LLM/Copilot 的输入)
|
||||||
|
|
||||||
|
- 明确文件位置和限制(如:`Add an async endpoint under app/router/private/... DO NOT add to app/router/v1 or v2`)。
|
||||||
|
- 要求异步处理函数、依赖注入 DB/Redis、复用已有服务/工具、加上类型注解,并生成最小化 pytest 测试样例。
|
||||||
|
|
||||||
|
### 约定与质量要求
|
||||||
|
|
||||||
|
- **使用 Annotated-style 依赖注入** 在路由处理器中。
|
||||||
|
- **提交信息风格:** `type(scope): subject`(Angular 风格)。
|
||||||
|
- **优先异步:** 路由必须为异步函数;避免阻塞事件循环。
|
||||||
|
- **关注点分离:** 业务逻辑应放在 service,而不是路由中。
|
||||||
|
- **错误处理:** 客户端错误用 `HTTPException`,服务端错误使用结构化日志。
|
||||||
|
- **类型与 lint:** 在请求评审前,代码必须通过 `pyright` 和 `ruff` 检查。
|
||||||
|
- **注释:** 避免过多注释,仅为晦涩逻辑添加简洁的“魔法注释”。
|
||||||
|
- **日志:** 使用 `app.log` 提供的 `log` 函数获取 logger 实例。(服务、任务除外)
|
||||||
|
|
||||||
|
### 工具参考
|
||||||
|
|
||||||
|
```
|
||||||
|
uv sync
|
||||||
|
pre-commit install
|
||||||
|
pre-commit run --all-files
|
||||||
|
pyright
|
||||||
|
ruff .
|
||||||
|
alembic revision --autogenerate -m "feat(db): ..."
|
||||||
|
alembic upgrade head
|
||||||
|
uvicorn main:app --reload --host 0.0.0.0 --port 8000
|
||||||
|
```
|
||||||
|
|
||||||
|
### PR 范围指导
|
||||||
|
|
||||||
|
- 保持 PR 专注:一次只做一件事(如端点或重构,不要混合)。
|
||||||
|
- 不确定时,请参考现有服务,并添加简短说明性注释。
|
||||||
|
|
||||||
|
### PR 审核规则
|
||||||
|
|
||||||
|
> GitHub Copilot PR review 可参考。
|
||||||
|
|
||||||
|
1. 如果 PR 修改了端点,简要说明端点的用途和预期行为。同时检查是否满足上述的 API 位置限制。
|
||||||
|
2. 如果 PR 修改了数据库模型,必须包含 Alembic 迁移。检查迁移的 SQL 语句和索引是否合理。
|
||||||
|
3. 修改的其他功能需要提供简短的说明。
|
||||||
|
4. 提供性能优化的建议(见下文)。
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 性能优化提示
|
||||||
|
|
||||||
|
以下为结合本仓库架构(FastAPI + SQLModel/SQLAlchemy、Redis 缓存、后台调度器)总结的性能优化建议:
|
||||||
|
|
||||||
|
### 数据库
|
||||||
|
|
||||||
|
- **仅选择必要字段。** 使用 `select(Model.col1, Model.col2)`,避免 `select(Model)`。
|
||||||
|
|
||||||
|
```py
|
||||||
|
stmt = select(User.id, User.username).where(User.active == True)
|
||||||
|
rows = await session.execute(stmt)
|
||||||
|
```
|
||||||
|
|
||||||
|
- **使用 `select(exists())` 检查存在性。** 避免加载整行:
|
||||||
|
|
||||||
|
```py
|
||||||
|
from sqlalchemy import select, exists
|
||||||
|
exists_stmt = select(exists().where(User.id == some_id))
|
||||||
|
found = await session.scalar(exists_stmt)
|
||||||
|
```
|
||||||
|
|
||||||
|
- **避免 N+1 查询。** 需要关联对象时用 `selectinload`、`joinedload`。
|
||||||
|
|
||||||
|
- **批量操作。** 插入/更新时应批量执行,并放在一个事务中,而不是多个小事务。
|
||||||
|
|
||||||
|
|
||||||
|
### 耗时任务
|
||||||
|
|
||||||
|
- 如果这个任务来自 API Router,请使用 FastAPI 提供的 [`BackgroundTasks`](https://fastapi.tiangolo.com/tutorial/background-tasks)
|
||||||
|
- 其他情况,使用 `app.utils` 的 `bg_tasks`,它提供了与 FastAPI 的 `BackgroundTasks` 类似的功能。
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 部分 LLM 的额外要求
|
||||||
|
|
||||||
|
### Claude Code
|
||||||
|
|
||||||
|
- 禁止创建额外的测试脚本。
|
||||||
|
|
||||||
61
.github/dependabot.yml
vendored
Normal file
61
.github/dependabot.yml
vendored
Normal file
@@ -0,0 +1,61 @@
|
|||||||
|
version: 2
|
||||||
|
updates:
|
||||||
|
# Python dependencies (uv ecosystem)
|
||||||
|
- package-ecosystem: "uv"
|
||||||
|
directory: "/"
|
||||||
|
schedule:
|
||||||
|
interval: "weekly"
|
||||||
|
day: "sunday"
|
||||||
|
time: "09:00"
|
||||||
|
timezone: "Asia/Shanghai"
|
||||||
|
open-pull-requests-limit: 10
|
||||||
|
labels:
|
||||||
|
- "type/dependencies"
|
||||||
|
commit-message:
|
||||||
|
prefix: "chore"
|
||||||
|
prefix-development: "chore"
|
||||||
|
include: "scope"
|
||||||
|
groups:
|
||||||
|
# Group all patch and minor updates together
|
||||||
|
minor-and-patch:
|
||||||
|
patterns:
|
||||||
|
- "*"
|
||||||
|
update-types:
|
||||||
|
- "minor"
|
||||||
|
- "patch"
|
||||||
|
# Separate major updates
|
||||||
|
major:
|
||||||
|
patterns:
|
||||||
|
- "*"
|
||||||
|
update-types:
|
||||||
|
- "major"
|
||||||
|
|
||||||
|
# GitHub Actions
|
||||||
|
- package-ecosystem: "github-actions"
|
||||||
|
directory: "/"
|
||||||
|
schedule:
|
||||||
|
interval: "weekly"
|
||||||
|
day: "sunday"
|
||||||
|
time: "09:00"
|
||||||
|
timezone: "Asia/Shanghai"
|
||||||
|
open-pull-requests-limit: 5
|
||||||
|
labels:
|
||||||
|
- "type/dependencies"
|
||||||
|
commit-message:
|
||||||
|
prefix: "chore"
|
||||||
|
include: "scope"
|
||||||
|
|
||||||
|
# Docker
|
||||||
|
- package-ecosystem: "docker"
|
||||||
|
directory: "/"
|
||||||
|
schedule:
|
||||||
|
interval: "weekly"
|
||||||
|
day: "sunday"
|
||||||
|
time: "09:00"
|
||||||
|
timezone: "Asia/Shanghai"
|
||||||
|
open-pull-requests-limit: 5
|
||||||
|
labels:
|
||||||
|
- "type/dependencies"
|
||||||
|
commit-message:
|
||||||
|
prefix: "chore"
|
||||||
|
include: "scope"
|
||||||
57
.github/workflows/docker-publish.yml
vendored
Normal file
57
.github/workflows/docker-publish.yml
vendored
Normal file
@@ -0,0 +1,57 @@
|
|||||||
|
name: Build and Push Docker Image
|
||||||
|
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches: [ main ]
|
||||||
|
paths-ignore:
|
||||||
|
- '*.md'
|
||||||
|
- '**/*.md'
|
||||||
|
- 'docs/**'
|
||||||
|
|
||||||
|
env:
|
||||||
|
IMAGE_NAME: mingxuangame/g0v0-server
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
build-and-push:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
permissions:
|
||||||
|
contents: read
|
||||||
|
packages: write
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Checkout repository
|
||||||
|
uses: actions/checkout@v6
|
||||||
|
|
||||||
|
- name: Set up QEMU
|
||||||
|
uses: docker/setup-qemu-action@v3
|
||||||
|
|
||||||
|
- name: Set up Docker Buildx
|
||||||
|
uses: docker/setup-buildx-action@v3
|
||||||
|
|
||||||
|
- name: Log in to Docker Hub
|
||||||
|
uses: docker/login-action@v3
|
||||||
|
with:
|
||||||
|
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||||
|
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||||
|
|
||||||
|
- name: Extract metadata
|
||||||
|
id: meta
|
||||||
|
uses: docker/metadata-action@v5
|
||||||
|
with:
|
||||||
|
images: ${{ env.IMAGE_NAME }}
|
||||||
|
tags: |
|
||||||
|
type=raw,value=latest
|
||||||
|
type=sha,prefix={{branch}}-
|
||||||
|
|
||||||
|
- name: Build and push Docker image
|
||||||
|
uses: docker/build-push-action@v6
|
||||||
|
with:
|
||||||
|
context: .
|
||||||
|
file: ./Dockerfile
|
||||||
|
platforms: linux/amd64, linux/arm64
|
||||||
|
push: true
|
||||||
|
tags: |
|
||||||
|
${{ env.IMAGE_NAME }}:latest
|
||||||
|
${{ env.IMAGE_NAME }}:${{ github.sha }}
|
||||||
|
cache-from: type=gha
|
||||||
|
cache-to: type=gha,mode=max
|
||||||
56
.github/workflows/generate-configuration-doc.yml
vendored
Normal file
56
.github/workflows/generate-configuration-doc.yml
vendored
Normal file
@@ -0,0 +1,56 @@
|
|||||||
|
name: Generate configuration Docs to Wiki
|
||||||
|
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches:
|
||||||
|
- main
|
||||||
|
paths:
|
||||||
|
- "app/config.py"
|
||||||
|
- "scripts/generate_config_doc.py"
|
||||||
|
workflow_dispatch:
|
||||||
|
|
||||||
|
permissions:
|
||||||
|
contents: write
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
generate-wiki:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Checkout main repository
|
||||||
|
uses: actions/checkout@v6
|
||||||
|
with:
|
||||||
|
path: project
|
||||||
|
|
||||||
|
- name: Checkout repo
|
||||||
|
uses: actions/checkout@v6
|
||||||
|
with:
|
||||||
|
repository: ${{ github.repository }}.wiki
|
||||||
|
token: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
path: wiki
|
||||||
|
|
||||||
|
- name: Set up Python
|
||||||
|
uses: actions/setup-python@v6
|
||||||
|
with:
|
||||||
|
python-version: 3.12
|
||||||
|
|
||||||
|
- name: Install dependencies
|
||||||
|
run: |
|
||||||
|
python -m pip install --upgrade pip
|
||||||
|
pip install pydantic pydantic-settings
|
||||||
|
|
||||||
|
- name: Generate Markdown
|
||||||
|
run: |
|
||||||
|
cd project
|
||||||
|
python ./scripts/generate_config_doc.py ${{ github.sha }} > ../wiki/Configuration.md
|
||||||
|
|
||||||
|
- name: Commit and push to Wiki
|
||||||
|
run: |
|
||||||
|
cd wiki
|
||||||
|
git config user.name "github-actions[bot]"
|
||||||
|
git config user.email "github-actions[bot]@users.noreply.github.com"
|
||||||
|
git add .
|
||||||
|
git commit -m "Update configuration docs from Actions [skip ci]" || echo "No changes"
|
||||||
|
git push origin master
|
||||||
|
env:
|
||||||
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
38
.github/workflows/pyright.yml
vendored
Normal file
38
.github/workflows/pyright.yml
vendored
Normal file
@@ -0,0 +1,38 @@
|
|||||||
|
name: Pyright Lint
|
||||||
|
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches:
|
||||||
|
- main
|
||||||
|
pull_request:
|
||||||
|
paths:
|
||||||
|
- "app/**"
|
||||||
|
- "main.py"
|
||||||
|
- ".github/workflows/pyright.yml"
|
||||||
|
- "pyproject.toml"
|
||||||
|
- "uv.lock"
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
pyright:
|
||||||
|
name: Pyright Lint
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v6
|
||||||
|
|
||||||
|
- uses: astral-sh/setup-uv@v7
|
||||||
|
with:
|
||||||
|
python-version: "3.13"
|
||||||
|
|
||||||
|
- run: |
|
||||||
|
uv sync --all-extras --locked
|
||||||
|
shell: bash
|
||||||
|
|
||||||
|
- run: |
|
||||||
|
echo "$(dirname $(uv python find))" >> $GITHUB_PATH
|
||||||
|
shell: bash
|
||||||
|
|
||||||
|
- name: Run Pyright
|
||||||
|
uses: jakebailey/pyright-action@v2
|
||||||
|
with:
|
||||||
|
pylance-version: latest-release
|
||||||
24
.github/workflows/ruff.yml
vendored
Normal file
24
.github/workflows/ruff.yml
vendored
Normal file
@@ -0,0 +1,24 @@
|
|||||||
|
name: Ruff Lint
|
||||||
|
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches:
|
||||||
|
- main
|
||||||
|
pull_request:
|
||||||
|
paths:
|
||||||
|
- "app/**"
|
||||||
|
- "main.py"
|
||||||
|
- ".github/workflows/ruff.yml"
|
||||||
|
- "pyproject.toml"
|
||||||
|
- "uv.lock"
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
ruff:
|
||||||
|
name: Ruff Lint
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v6
|
||||||
|
|
||||||
|
- name: Run Ruff Lint
|
||||||
|
uses: astral-sh/ruff-action@v3
|
||||||
16
.gitignore
vendored
16
.gitignore
vendored
@@ -176,7 +176,8 @@ cython_debug/
|
|||||||
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
|
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
|
||||||
# and can be added to the global gitignore or merged into this file. For a more nuclear
|
# and can be added to the global gitignore or merged into this file. For a more nuclear
|
||||||
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
|
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
|
||||||
#.idea/
|
.idea/
|
||||||
|
./spectator-server/.idea/
|
||||||
|
|
||||||
# Abstra
|
# Abstra
|
||||||
# Abstra is an AI-powered process automation framework.
|
# Abstra is an AI-powered process automation framework.
|
||||||
@@ -215,8 +216,19 @@ bancho.py-master/*
|
|||||||
storage/
|
storage/
|
||||||
replays/
|
replays/
|
||||||
osu-master/*
|
osu-master/*
|
||||||
|
rulesets/
|
||||||
geoip/*
|
geoip/*
|
||||||
newrelic.ini
|
newrelic.ini
|
||||||
logs/
|
logs/
|
||||||
osu-server-spectator-master/*
|
osu-server-spectator-master/*
|
||||||
|
spectator-server/
|
||||||
|
osu-web-master/*
|
||||||
|
osu-web-master/.env.dusk.local.example
|
||||||
|
osu-web-master/.env.example
|
||||||
|
osu-web-master/.env.testing.example
|
||||||
|
config/*
|
||||||
|
!config/
|
||||||
|
!config/.gitkeep
|
||||||
|
osu-web-master/*
|
||||||
|
performance-server
|
||||||
|
plugins/
|
||||||
|
|||||||
@@ -1,13 +1,13 @@
|
|||||||
default_install_hook_types: [pre-commit, prepare-commit-msg]
|
default_install_hook_types: [pre-commit, prepare-commit-msg]
|
||||||
ci:
|
ci:
|
||||||
autofix_commit_msg: "chore(deps): auto fix by pre-commit hooks"
|
autofix_commit_msg: "chore(linter): auto fix by pre-commit hooks"
|
||||||
autofix_prs: true
|
autofix_prs: true
|
||||||
autoupdate_branch: master
|
autoupdate_branch: main
|
||||||
autoupdate_schedule: monthly
|
autoupdate_schedule: monthly
|
||||||
autoupdate_commit_msg: "chore(deps): auto update by pre-commit hooks"
|
autoupdate_commit_msg: "chore(deps): auto update by pre-commit hooks"
|
||||||
repos:
|
repos:
|
||||||
- repo: https://github.com/astral-sh/ruff-pre-commit
|
- repo: https://github.com/astral-sh/ruff-pre-commit
|
||||||
rev: v0.12.2
|
rev: v0.14.10
|
||||||
hooks:
|
hooks:
|
||||||
- id: ruff-check
|
- id: ruff-check
|
||||||
args: [--fix]
|
args: [--fix]
|
||||||
|
|||||||
182
AGENTS.md
Normal file
182
AGENTS.md
Normal file
@@ -0,0 +1,182 @@
|
|||||||
|
# AGENTS
|
||||||
|
|
||||||
|
> 使用自动化与 AI 代理(GitHub Copilot、依赖/CI 机器人,以及仓库中的运行时调度器/worker)的指导原则,适用于 g0v0-server 仓库。
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## API 参考
|
||||||
|
|
||||||
|
本项目必须保持与公开的 osu! API 兼容。在添加或映射端点时请参考:
|
||||||
|
|
||||||
|
- **v1(旧版):** [https://github.com/ppy/osu-api/wiki](https://github.com/ppy/osu-api/wiki)
|
||||||
|
- **v2(OpenAPI):** [https://osu.ppy.sh/docs/openapi.yaml](https://osu.ppy.sh/docs/openapi.yaml)
|
||||||
|
|
||||||
|
任何在 `app/router/v1/`、`app/router/v2/` 或 `app/router/notification/` 中的实现必须与官方规范保持一致。自定义或实验性的端点应放在 `app/router/private/` 中。
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 代理类别
|
||||||
|
|
||||||
|
允许的代理分为三类:
|
||||||
|
|
||||||
|
- **代码生成/补全代理**(如 GitHub Copilot 或其他 LLM)—— **仅当** 有维护者审核并批准输出时允许使用。
|
||||||
|
- **自动维护代理**(如 Dependabot、Renovate、pre-commit.ci)—— 允许使用,但必须遵守严格的 PR 和 CI 政策。
|
||||||
|
- **运行时/后台代理**(调度器、worker)—— 属于产品代码的一部分;必须遵守生命周期、并发和幂等性规范。
|
||||||
|
|
||||||
|
所有由代理生成或建议的更改必须遵守以下规则。
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 所有代理的规则
|
||||||
|
|
||||||
|
1. **单一职责的 PR。** 代理的 PR 必须只解决一个问题(一个功能、一个 bug 修复或一次依赖更新)。提交信息应使用 Angular 风格(如 `feat(api): add ...`)。
|
||||||
|
2. **通过 Lint 与 CI 检查。** 每个 PR(包括代理创建的)在合并前必须通过 `pyright`、`ruff`、`pre-commit` 钩子和仓库 CI。PR 中应附带 CI 运行结果链接。
|
||||||
|
3. **绝不可提交敏感信息。** 代理不得提交密钥、密码、token 或真实 `.env` 值。如果检测到可能的敏感信息,代理必须中止并通知指定的维护者。
|
||||||
|
4. **API 位置限制。** 不得在 `app/router/v1` 或 `app/router/v2` 下添加新的公开端点,除非该端点在官方 v1/v2 规范中存在。自定义或实验性端点必须放在 `app/router/private/`。
|
||||||
|
5. **保持公共契约稳定。** 未经批准的迁移计划,不得随意修改响应 schema、路由前缀或其他公共契约。若有变更,PR 中必须包含明确的兼容性说明。
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Copilot / LLM 使用
|
||||||
|
|
||||||
|
> 关于在本仓库中使用 GitHub Copilot 和其他基于 LLM 的辅助工具的统一指导。
|
||||||
|
|
||||||
|
### 关键项目结构(需要了解的内容)
|
||||||
|
|
||||||
|
- **应用入口:** `main.py` —— FastAPI 应用,包含启动/关闭生命周期管理(fetchers、GeoIP、调度器、缓存与健康检查、Redis 消息、统计、成就系统)。
|
||||||
|
|
||||||
|
- **路由:** `app/router/` 包含所有路由组。主要的路由包括:
|
||||||
|
- `v1/`(v1 端点)
|
||||||
|
- `v2/`(v2 端点)
|
||||||
|
- `notification/` 路由(聊天/通知子系统)
|
||||||
|
- `auth.py`(认证/token 流程)
|
||||||
|
- `private/`(自定义或实验性的端点)
|
||||||
|
|
||||||
|
**规则:** `v1/` 和 `v2/` 必须与官方 API 对应。仅内部或实验端点应放在 `app/router/private/`。
|
||||||
|
|
||||||
|
- **模型与数据库工具:**
|
||||||
|
- SQLModel/ORM 模型在 `app/database/`。
|
||||||
|
- 非数据库模型在 `app/models/`。
|
||||||
|
- 修改模型/schema 时必须生成 Alembic 迁移,并手动检查生成的 SQL 与索引。
|
||||||
|
|
||||||
|
- **服务层:** `app/service/` 保存领域逻辑(如缓存工具、通知/邮件逻辑)。复杂逻辑应放在 service,而不是路由处理器中。
|
||||||
|
|
||||||
|
- **任务:** `app/tasks/` 保存任务(定时任务、启动任务、关闭任务)。
|
||||||
|
- 均在 `__init__.py` 进行导出。
|
||||||
|
- 对于启动任务/关闭任务,在 `main.py` 的 `lifespan` 调用。
|
||||||
|
- 定时任务使用 APScheduler
|
||||||
|
|
||||||
|
- **缓存与依赖:** 使用 `app/dependencies/` 提供的 Redis 依赖和缓存服务(遵循现有 key 命名约定,如 `user:{id}:...`)。
|
||||||
|
|
||||||
|
- **日志:** 使用 `app/log` 提供的日志工具。
|
||||||
|
|
||||||
|
### 实用工作流(提示模式)
|
||||||
|
|
||||||
|
- **添加 v2 端点(正确方式):** 在 `app/router/v2/` 下添加文件,导出路由,实现基于数据库与缓存依赖的异步处理函数。**不得**在 v1/v2 添加非官方端点。
|
||||||
|
- **添加自定义端点:** 放在 `app/router/private/`,保持处理器精简,将业务逻辑放入 `app/service/`。
|
||||||
|
- **鉴权:** 使用 [`app.dependencies.user`](./app/dependencies/user.py) 提供的依赖注入,如 `ClientUser` 和 `get_current_user`,参考下方。
|
||||||
|
|
||||||
|
```python
|
||||||
|
from typing import Annotated
|
||||||
|
from fastapi import Security
|
||||||
|
from app.dependencies.user import ClientUser, get_current_user
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/some-api")
|
||||||
|
async def _(current_user: Annotated[User, Security(get_current_user, scopes=["public"])]):
|
||||||
|
...
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/some-client-api")
|
||||||
|
async def _(current_user: ClientUser):
|
||||||
|
...
|
||||||
|
```
|
||||||
|
|
||||||
|
- **添加后台任务:** 将任务逻辑写在 `app/service/_job.py`(幂等、可重试)。调度器入口放在 `app/scheduler/_scheduler.py`,并在应用生命周期注册。
|
||||||
|
- **数据库 schema 变更:** 修改 `app/models/` 中的 SQLModel 模型,运行 `alembic revision --autogenerate`,检查迁移并本地测试 `alembic upgrade head` 后再提交。
|
||||||
|
- **缓存写入与响应:** 使用现有的 `UserResp` 模式和 `UserCacheService`;异步缓存写入应使用后台任务。
|
||||||
|
|
||||||
|
### 提示指导(给 LLM/Copilot 的输入)
|
||||||
|
|
||||||
|
- 明确文件位置和限制(如:`Add an async endpoint under app/router/private/... DO NOT add to app/router/v1 or v2`)。
|
||||||
|
- 要求异步处理函数、依赖注入 DB/Redis、复用已有服务/工具、加上类型注解,并生成最小化 pytest 测试样例。
|
||||||
|
|
||||||
|
### 约定与质量要求
|
||||||
|
|
||||||
|
- **使用 Annotated-style 依赖注入** 在路由处理器中。
|
||||||
|
- **提交信息风格:** `type(scope): subject`(Angular 风格)。
|
||||||
|
- **优先异步:** 路由必须为异步函数;避免阻塞事件循环。
|
||||||
|
- **关注点分离:** 业务逻辑应放在 service,而不是路由中。
|
||||||
|
- **错误处理:** 客户端错误用 `HTTPException`,服务端错误使用结构化日志。
|
||||||
|
- **类型与 lint:** 在请求评审前,代码必须通过 `pyright` 和 `ruff` 检查。
|
||||||
|
- **注释:** 避免过多注释,仅为晦涩逻辑添加简洁的“魔法注释”。
|
||||||
|
- **日志:** 使用 `app.log` 提供的 `log` 函数获取 logger 实例。(服务、任务除外)
|
||||||
|
|
||||||
|
### 工具参考
|
||||||
|
|
||||||
|
```
|
||||||
|
uv sync
|
||||||
|
pre-commit install
|
||||||
|
pre-commit run --all-files
|
||||||
|
pyright
|
||||||
|
ruff .
|
||||||
|
alembic revision --autogenerate -m "feat(db): ..."
|
||||||
|
alembic upgrade head
|
||||||
|
uvicorn main:app --reload --host 0.0.0.0 --port 8000
|
||||||
|
```
|
||||||
|
|
||||||
|
### PR 范围指导
|
||||||
|
|
||||||
|
- 保持 PR 专注:一次只做一件事(如端点或重构,不要混合)。
|
||||||
|
- 不确定时,请参考现有服务,并添加简短说明性注释。
|
||||||
|
|
||||||
|
### PR 审核规则
|
||||||
|
|
||||||
|
> GitHub Copilot PR review 可参考。
|
||||||
|
|
||||||
|
1. 如果 PR 修改了端点,简要说明端点的用途和预期行为。同时检查是否满足上述的 API 位置限制。
|
||||||
|
2. 如果 PR 修改了数据库模型,必须包含 Alembic 迁移。检查迁移的 SQL 语句和索引是否合理。
|
||||||
|
3. 修改的其他功能需要提供简短的说明。
|
||||||
|
4. 提供性能优化的建议(见下文)。
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 性能优化提示
|
||||||
|
|
||||||
|
以下为结合本仓库架构(FastAPI + SQLModel/SQLAlchemy、Redis 缓存、后台调度器)总结的性能优化建议:
|
||||||
|
|
||||||
|
### 数据库
|
||||||
|
|
||||||
|
- **仅选择必要字段。** 使用 `select(Model.col1, Model.col2)`,避免 `select(Model)`。
|
||||||
|
|
||||||
|
```py
|
||||||
|
stmt = select(User.id, User.username).where(User.active == True)
|
||||||
|
rows = await session.execute(stmt)
|
||||||
|
```
|
||||||
|
|
||||||
|
- **使用 `select(exists())` 检查存在性。** 避免加载整行:
|
||||||
|
|
||||||
|
```py
|
||||||
|
from sqlalchemy import select, exists
|
||||||
|
exists_stmt = select(exists().where(User.id == some_id))
|
||||||
|
found = await session.scalar(exists_stmt)
|
||||||
|
```
|
||||||
|
|
||||||
|
- **避免 N+1 查询。** 需要关联对象时用 `selectinload`、`joinedload`。
|
||||||
|
|
||||||
|
- **批量操作。** 插入/更新时应批量执行,并放在一个事务中,而不是多个小事务。
|
||||||
|
|
||||||
|
|
||||||
|
### 耗时任务
|
||||||
|
|
||||||
|
- 如果这个任务来自 API Router,请使用 FastAPI 提供的 [`BackgroundTasks`](https://fastapi.tiangolo.com/tutorial/background-tasks)
|
||||||
|
- 其他情况,使用 `app.utils` 的 `bg_tasks`,它提供了与 FastAPI 的 `BackgroundTasks` 类似的功能。
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 部分 LLM 的额外要求
|
||||||
|
|
||||||
|
### Claude Code
|
||||||
|
|
||||||
|
- 禁止创建额外的测试脚本。
|
||||||
|
|
||||||
315
CONTRIBUTING.md
Normal file
315
CONTRIBUTING.md
Normal file
@@ -0,0 +1,315 @@
|
|||||||
|
# 贡献指南
|
||||||
|
|
||||||
|
## 克隆项目
|
||||||
|
|
||||||
|
```bash
|
||||||
|
git clone https://github.com/GooGuTeam/g0v0-server.git
|
||||||
|
```
|
||||||
|
|
||||||
|
此外,您还需要:
|
||||||
|
|
||||||
|
- clone 旁观服务器到 g0v0-server 的文件夹。
|
||||||
|
|
||||||
|
```bash
|
||||||
|
git clone https://github.com/GooGuTeam/osu-server-spectator.git spectator-server
|
||||||
|
```
|
||||||
|
|
||||||
|
- clone 表现分计算器到 g0v0-server 的文件夹。
|
||||||
|
|
||||||
|
```bash
|
||||||
|
git clone https://github.com/GooGuTeam/osu-performance-server.git performance-server
|
||||||
|
```
|
||||||
|
|
||||||
|
- 下载并放置自定义规则集 DLL 到 `rulesets/` 目录(如果需要)。
|
||||||
|
|
||||||
|
## 开发环境
|
||||||
|
|
||||||
|
为了确保一致的开发环境,我们强烈建议使用提供的 Dev Container。这将设置一个容器化的环境,预先安装所有必要的工具和依赖项。
|
||||||
|
|
||||||
|
1. 安装 [Docker](https://www.docker.com/products/docker-desktop/)。
|
||||||
|
2. 在 Visual Studio Code 中安装 [Dev Containers extension](https://marketplace.visualstudio.com/items?itemName=ms-vscode-remote.remote-containers)。
|
||||||
|
3. 在 VS Code 中打开项目。当被提示时,点击“在容器中重新打开”以启动开发容器。
|
||||||
|
|
||||||
|
## 配置项目
|
||||||
|
|
||||||
|
修改 `.env` 配置(参考 [wiki](https://github.com/GooGuTeam/g0v0-server/wiki/Configuration)),生成并填充 JWT 密钥。
|
||||||
|
|
||||||
|
如果在 Dev Container 运行,请修改 `MYSQL_HOST` 为 `mysql`,`REDIS_URL` 为 `redis://redis/0`。
|
||||||
|
|
||||||
|
## 启动项目
|
||||||
|
|
||||||
|
.devcontainer 文件夹提供了一个启动脚本 `start-dev.sh`,这个脚本会从 `.env` 加载环境变量并同时启动 g0v0-server(端口 `8000`)和 spectator-server(端口 `8086`)。
|
||||||
|
|
||||||
|
Dev Container 提供了 NGINX 进行转发,对外访问端口是 `8080`。
|
||||||
|
|
||||||
|
如果您的服务器没有配置 HTTPS,可以在启动 osu! 的时候指定环境变量 `OSU_INSECURE_REQUESTS=1` 禁用 SSL 检查,或者应用 [osu!lazer wiki](https://github.com/ppy/osu/wiki/Testing-web-server-full-stack-with-osu!#basics) 提供的 diff。
|
||||||
|
|
||||||
|
或者使用下方的命令手动启动:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# g0v0-server
|
||||||
|
uv run uvicorn main:app --host 0.0.0.0 --port 8000 --reload
|
||||||
|
# spectator-server
|
||||||
|
cd spectator-server
|
||||||
|
dotnet run --project osu.Server.Spectator --urls "http://0.0.0.0:8086"
|
||||||
|
```
|
||||||
|
|
||||||
|
## 依赖管理
|
||||||
|
|
||||||
|
使用 `uv` 进行快速高效的 Python 包管理。
|
||||||
|
|
||||||
|
要安装依赖项,请在终端中运行以下命令:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
uv sync
|
||||||
|
```
|
||||||
|
|
||||||
|
## 开发规范
|
||||||
|
|
||||||
|
### 项目结构
|
||||||
|
|
||||||
|
以下是项目主要目录和文件的结构说明:
|
||||||
|
|
||||||
|
- `main.py`: FastAPI 应用的主入口点,负责初始化和启动服务器。
|
||||||
|
- `pyproject.toml`: 项目配置文件,用于管理依赖项 (uv)、代码格式化 (Ruff) 和类型检查 (Pyright)。
|
||||||
|
- `alembic.ini`: Alembic 数据库迁移工具的配置文件。
|
||||||
|
- `app/`: 存放所有核心应用代码。
|
||||||
|
- `router/`: 包含所有 API 端点的定义,根据 API 版本和功能进行组织。
|
||||||
|
- `service/`: 存放核心业务逻辑,例如用户排名计算、每日挑战处理等。
|
||||||
|
- `database/`: 定义数据库模型 (SQLModel) 和会话管理。
|
||||||
|
- `models/`: 定义非数据库模型和其他模型。
|
||||||
|
- `tasks/`: 包含由 APScheduler 调度的后台任务和启动/关闭任务。
|
||||||
|
- `dependencies/`: 管理 FastAPI 的依赖项注入。
|
||||||
|
- `achievements/`: 存放与成就相关的逻辑。
|
||||||
|
- `storage/`: 存储服务代码。
|
||||||
|
- `fetcher/`: 用于从外部服务(如 osu! 官网)获取数据的模块。
|
||||||
|
- `middleware/`: 定义中间件,例如会话验证。
|
||||||
|
- `helpers/`: 存放辅助函数和工具类。
|
||||||
|
- `config.py`: 应用配置,使用 pydantic-settings 管理。
|
||||||
|
- `calculator.py`: 存放所有的计算逻辑,例如 pp 和等级。
|
||||||
|
- `log.py`: 日志记录模块,提供统一的日志接口。
|
||||||
|
- `const.py`: 定义常量。
|
||||||
|
- `path.py`: 定义跨文件使用的常量。
|
||||||
|
- `migrations/`: 存放 Alembic 生成的数据库迁移脚本。
|
||||||
|
- `static/`: 存放静态文件,如 `mods.json`。
|
||||||
|
|
||||||
|
### 数据库模型定义
|
||||||
|
|
||||||
|
所有的数据库模型定义在 `app.database` 里,并且在 `__init__.py` 中导出。
|
||||||
|
|
||||||
|
本项目使用一种“按需返回”的设计模式,遵循 `Dict` - `Model` - `Table` 结构。详细设计思路请参考[这篇文章](https://blog.mxgame.top/2025/11/22/An-On-Demand-Design-Within-SQLModel/)。
|
||||||
|
|
||||||
|
#### 基本结构
|
||||||
|
|
||||||
|
1. **Dict**: 定义模型转换后的字典结构,用于类型检查。必须在 Model 之前定义。
|
||||||
|
2. **Model**: 继承自 `DatabaseModel[Dict]`,定义字段和计算属性。
|
||||||
|
3. **Table**: 继承自 `Model`,定义数据库表结构。
|
||||||
|
|
||||||
|
```python
|
||||||
|
from typing import TypedDict, NotRequired
|
||||||
|
from app.database._base import DatabaseModel, OnDemand, included, ondemand
|
||||||
|
from sqlmodel import Field
|
||||||
|
|
||||||
|
# 1. 定义 Dict
|
||||||
|
class UserDict(TypedDict):
|
||||||
|
id: int
|
||||||
|
username: str
|
||||||
|
email: NotRequired[str] # 可选字段
|
||||||
|
followers_count: int # 计算属性
|
||||||
|
|
||||||
|
# 2. 定义 Model
|
||||||
|
class UserModel(DatabaseModel[UserDict]):
|
||||||
|
id: int = Field(primary_key=True)
|
||||||
|
username: str
|
||||||
|
email: OnDemand[str] # 使用 OnDemand 标记可选字段
|
||||||
|
|
||||||
|
# 普通计算属性 (总是返回)
|
||||||
|
@included
|
||||||
|
@staticmethod
|
||||||
|
async def followers_count(session: AsyncSession, instance: "User") -> int:
|
||||||
|
return await session.scalar(select(func.count()).where(Follower.followed_id == instance.id))
|
||||||
|
|
||||||
|
# 可选计算属性 (仅在 includes 中指定时返回)
|
||||||
|
@ondemand
|
||||||
|
@staticmethod
|
||||||
|
async def some_optional_property(session: AsyncSession, instance: "User") -> str:
|
||||||
|
...
|
||||||
|
|
||||||
|
# 3. 定义 Table
|
||||||
|
class User(UserModel, table=True):
|
||||||
|
password: str # 仅在数据库中存在的字段
|
||||||
|
...
|
||||||
|
```
|
||||||
|
|
||||||
|
#### 字段类型
|
||||||
|
|
||||||
|
- **普通属性**: 直接定义在 Model 中,总是返回。
|
||||||
|
- **可选属性**: 使用 `OnDemand[T]` 标记,仅在 `includes` 中指定时返回。
|
||||||
|
- **普通计算属性**: 使用 `@included` 装饰的静态方法,总是返回。
|
||||||
|
- **可选计算属性**: 使用 `@ondemand` 装饰的静态方法,仅在 `includes` 中指定时返回。
|
||||||
|
|
||||||
|
#### 使用方法
|
||||||
|
|
||||||
|
**转换模型**:
|
||||||
|
|
||||||
|
使用 `Model.transform` 方法将数据库实例转换为字典:
|
||||||
|
|
||||||
|
```python
|
||||||
|
user = await session.get(User, 1)
|
||||||
|
user_dict = await UserModel.transform(
|
||||||
|
user,
|
||||||
|
includes=["email"], # 指定需要返回的可选字段
|
||||||
|
some_context="foo-bar", # 如果计算属性需要上下文,可以传入额外参数
|
||||||
|
session=session # 可选传入自己的 session
|
||||||
|
)
|
||||||
|
```
|
||||||
|
|
||||||
|
**API 文档**:
|
||||||
|
|
||||||
|
在 FastAPI 路由中,使用 `Model.generate_typeddict` 生成准确的响应文档:
|
||||||
|
|
||||||
|
```python
|
||||||
|
@router.get("/users/{id}", response_model=UserModel.generate_typeddict(includes=("email",)))
|
||||||
|
async def get_user(id: int) -> dict:
|
||||||
|
...
|
||||||
|
return await UserModel.transform(user, includes=["email"])
|
||||||
|
```
|
||||||
|
|
||||||
|
数据库模块名应与表名相同,定义了多个模型的除外。
|
||||||
|
|
||||||
|
如果你需要使用 Session,使用 `app.dependencies.database` 提供的 `with_db`,注意手动使用 `COMMIT`。
|
||||||
|
|
||||||
|
```python
|
||||||
|
from app.dependencies.database import with_db
|
||||||
|
|
||||||
|
async with with_db() as session:
|
||||||
|
...
|
||||||
|
```
|
||||||
|
|
||||||
|
### Redis
|
||||||
|
|
||||||
|
根据你需要的用途选择对应的 Redis 客户端。如果你的用途较为复杂或趋向一个较大的系统,考虑再创建一个 Redis 连接。
|
||||||
|
|
||||||
|
- `redis_client` (db0):标准用途,存储字符串、哈希等常规数据。
|
||||||
|
- `redis_message_client` (db1):用于消息缓存,存储聊天记录等。
|
||||||
|
- `redis_binary_client` (db2):用于存储二进制数据,如音频文件等。
|
||||||
|
- `redis_rate_limit_client` (db3):仅用于 FastAPI-Limiter 使用。
|
||||||
|
|
||||||
|
### API Router
|
||||||
|
|
||||||
|
所有的 API Router 定义在 `app.router` 里:
|
||||||
|
|
||||||
|
- `app/router/v2` 存放所有 osu! v2 API 实现,**不允许添加额外的,原 v2 API 不存在的 Endpoint**
|
||||||
|
- `app/router/notification` **存放所有 osu! v2 API 聊天、通知和 BanchoBot 的实现,不允许添加额外的,原 v2 API 不存在的 Endpoint**
|
||||||
|
- `app/router/v1` 存放所有 osu! v1 API 实现,**不允许添加额外的,原 v1 API 不存在的 Endpoint**
|
||||||
|
- `app/router/auth.py` 存放账户鉴权/登录的 API
|
||||||
|
- `app/router/private` 存放服务器自定义 API (g0v0 API),供其他服务使用
|
||||||
|
|
||||||
|
任何 Router 需要满足:
|
||||||
|
|
||||||
|
- 使用 Annotated-style 的依赖注入
|
||||||
|
- 对于已经存在的依赖注入如 Database 和 Redis,使用 `app.dependencies` 中的实现
|
||||||
|
- 需要拥有文档
|
||||||
|
- 如果返回需要资源代理,使用 `app.helpers.asset_proxy_helper` 的 `asset_proxy_response` 装饰器。
|
||||||
|
- 如果需要记录日志,请使用 `app.log` 提供的 `log` 函数获取一个 logger 实例
|
||||||
|
|
||||||
|
#### 鉴权
|
||||||
|
|
||||||
|
如果这个 Router 可以为公开使用(客户端、前端、OAuth 程序),考虑使用 `Security(get_current_user, scopes=["some_scope"])`,例如:
|
||||||
|
|
||||||
|
```python
|
||||||
|
from typing import Annotated
|
||||||
|
from fastapi import Security
|
||||||
|
from app.dependencies.user import get_current_user
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/some-api")
|
||||||
|
async def _(current_user: Annotated[User, Security(get_current_user, scopes=["public"])]):
|
||||||
|
...
|
||||||
|
```
|
||||||
|
|
||||||
|
其中 scopes 选择请参考 [`app.dependencies.user`](./app/dependencies/user.py) 的 `oauth2_code` 中的 `scopes`。
|
||||||
|
|
||||||
|
如果这个 Router 仅限客户端和前端使用,请使用 `ClientUser` 依赖注入。
|
||||||
|
|
||||||
|
```python
|
||||||
|
from app.dependencies.user import ClientUser
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/some-api")
|
||||||
|
async def _(current_user: ClientUser):
|
||||||
|
...
|
||||||
|
```
|
||||||
|
|
||||||
|
此外还存在 `get_current_user_and_token` 和 `get_client_user_and_token` 变种,用来同时获得当前用户的 token。
|
||||||
|
|
||||||
|
### Service
|
||||||
|
|
||||||
|
所有的核心业务逻辑放在 `app.service` 里:
|
||||||
|
|
||||||
|
- 业务逻辑需要以类实现
|
||||||
|
- 日志只需要使用 `app.log` 中的 `logger` 即可。服务器会对 Service 的日志进行包装。
|
||||||
|
|
||||||
|
### 定时任务/启动任务/关闭任务
|
||||||
|
|
||||||
|
均定义在 `app.tasks` 里。
|
||||||
|
|
||||||
|
- 均在 `__init__.py` 进行导出
|
||||||
|
- 对于启动任务/关闭任务,在 `main.py` 的 `lifespan` 调用。
|
||||||
|
- 定时任务使用 APScheduler
|
||||||
|
|
||||||
|
### 耗时任务
|
||||||
|
|
||||||
|
- 如果这个任务来自 API Router,请使用 FastAPI 提供的 [`BackgroundTasks`](https://fastapi.tiangolo.com/tutorial/background-tasks)
|
||||||
|
- 其他情况,使用 `app.utils` 的 `bg_tasks`,它提供了与 FastAPI 的 `BackgroundTasks` 类似的功能。
|
||||||
|
|
||||||
|
### 代码质量和代码检查
|
||||||
|
|
||||||
|
使用 `pre-commit` 在提交之前执行代码质量标准。这确保所有代码都通过 `ruff`(用于代码检查和格式化)和 `pyright`(用于类型检查)的检查。
|
||||||
|
|
||||||
|
#### 设置
|
||||||
|
|
||||||
|
要设置 `pre-commit`,请运行以下命令:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
pre-commit install
|
||||||
|
```
|
||||||
|
|
||||||
|
这将安装 pre-commit 钩子,每次提交时会自动运行。如果任何检查失败,提交将被中止。您需要修复报告的问题并暂存更改,然后再尝试提交。
|
||||||
|
|
||||||
|
pre-commit 不提供 pyright 的 hook,您需要手动运行 `pyright` 检查类型错误。
|
||||||
|
|
||||||
|
### 提交信息指南
|
||||||
|
|
||||||
|
遵循 [AngularJS 提交规范](https://github.com/angular/angular.js/blob/master/DEVELOPERS.md#commit-message-format) 来编写提交信息。
|
||||||
|
|
||||||
|
**类型** 必须是以下之一:
|
||||||
|
|
||||||
|
- **feat**:新功能
|
||||||
|
- **fix**:错误修复
|
||||||
|
- **docs**:仅文档更改
|
||||||
|
- **style**:不影响代码含义的更改(空格、格式、缺少分号等)
|
||||||
|
- **refactor**:代码重构
|
||||||
|
- **perf**:改善性能的代码更改
|
||||||
|
- **test**:添加缺失的测试或修正现有测试
|
||||||
|
- **chore**:对构建过程或辅助工具和库(如文档生成)的更改
|
||||||
|
- **ci**:持续集成相关的更改
|
||||||
|
- **deploy**: 部署相关的更改
|
||||||
|
|
||||||
|
**范围** 可以是任何指定提交更改位置的内容。例如 `api`、`db`、`auth` 等等。对整个项目的更改使用 `project`。
|
||||||
|
|
||||||
|
**主题** 包含对更改的简洁描述。
|
||||||
|
|
||||||
|
### 持续集成检查
|
||||||
|
|
||||||
|
所有提交应该通过以下 CI 检查:
|
||||||
|
|
||||||
|
- Ruff Lint
|
||||||
|
- Pyright Lint
|
||||||
|
- pre-commit
|
||||||
|
|
||||||
|
## 添加贡献者
|
||||||
|
|
||||||
|
仓库维护者需要使用 [all-contributors-bot](https://allcontributors.org/docs/en/bot/usage) 添加有效贡献者。
|
||||||
|
|
||||||
|
感谢您的贡献!
|
||||||
10
Dockerfile
10
Dockerfile
@@ -2,7 +2,7 @@ FROM ghcr.io/astral-sh/uv:python3.13-bookworm-slim AS builder
|
|||||||
WORKDIR /app
|
WORKDIR /app
|
||||||
|
|
||||||
RUN apt-get update \
|
RUN apt-get update \
|
||||||
&& apt-get install -y gcc pkg-config default-libmysqlclient-dev \
|
&& apt-get install -y git gcc pkg-config default-libmysqlclient-dev \
|
||||||
&& rm -rf /var/lib/apt/lists/* \
|
&& rm -rf /var/lib/apt/lists/* \
|
||||||
&& curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y
|
&& curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y
|
||||||
|
|
||||||
@@ -17,9 +17,13 @@ COPY pyproject.toml uv.lock ./
|
|||||||
COPY packages/ ./packages/
|
COPY packages/ ./packages/
|
||||||
|
|
||||||
RUN uv sync --frozen --no-dev
|
RUN uv sync --frozen --no-dev
|
||||||
RUN uv pip install rosu-pp-py
|
|
||||||
|
|
||||||
COPY . .
|
COPY alembic.ini ./
|
||||||
|
COPY tools/ ./tools/
|
||||||
|
COPY migrations/ ./migrations/
|
||||||
|
COPY static/ ./static/
|
||||||
|
COPY app/ ./app/
|
||||||
|
COPY main.py ./
|
||||||
|
|
||||||
# ---
|
# ---
|
||||||
|
|
||||||
|
|||||||
@@ -1,51 +0,0 @@
|
|||||||
FROM ghcr.io/astral-sh/uv:python3.13-bookworm-slim AS builder
|
|
||||||
WORKDIR /app
|
|
||||||
|
|
||||||
RUN apt-get update \
|
|
||||||
&& apt-get install -y gcc pkg-config default-libmysqlclient-dev git \
|
|
||||||
&& rm -rf /var/lib/apt/lists/* \
|
|
||||||
&& curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y
|
|
||||||
|
|
||||||
ENV PATH="/root/.cargo/bin:${PATH}" \
|
|
||||||
PYTHONUNBUFFERED=1 PYTHONDONTWRITEBYTECODE=1 UV_PROJECT_ENVIRONMENT=/app/.venv
|
|
||||||
|
|
||||||
ENV PYTHONUNBUFFERED=1
|
|
||||||
ENV PYTHONDONTWRITEBYTECODE=1
|
|
||||||
ENV UV_PROJECT_ENVIRONMENT=/app/.venv
|
|
||||||
|
|
||||||
COPY pyproject.toml uv.lock ./
|
|
||||||
COPY packages/ ./packages/
|
|
||||||
|
|
||||||
RUN uv sync --frozen --no-dev
|
|
||||||
RUN uv pip install git+https://github.com/ppy-sb/rosu-pp-py.git
|
|
||||||
|
|
||||||
COPY . .
|
|
||||||
|
|
||||||
# ---
|
|
||||||
|
|
||||||
FROM ghcr.io/astral-sh/uv:python3.13-bookworm-slim
|
|
||||||
WORKDIR /app
|
|
||||||
|
|
||||||
RUN apt-get update \
|
|
||||||
&& apt-get install -y curl netcat-openbsd \
|
|
||||||
&& rm -rf /var/lib/apt/lists/*
|
|
||||||
|
|
||||||
ENV PATH="/app/.venv/bin:${PATH}" \
|
|
||||||
PYTHONUNBUFFERED=1 PYTHONDONTWRITEBYTECODE=1
|
|
||||||
|
|
||||||
COPY --from=builder /app/.venv /app/.venv
|
|
||||||
COPY --from=builder /app /app
|
|
||||||
|
|
||||||
RUN mkdir -p /app/logs
|
|
||||||
VOLUME ["/app/logs"]
|
|
||||||
|
|
||||||
COPY docker-entrypoint.sh /app/docker-entrypoint.sh
|
|
||||||
RUN chmod +x /app/docker-entrypoint.sh
|
|
||||||
|
|
||||||
EXPOSE 8000
|
|
||||||
|
|
||||||
HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \
|
|
||||||
CMD curl -f http://localhost:8000/health || exit 1
|
|
||||||
|
|
||||||
ENTRYPOINT ["/app/docker-entrypoint.sh"]
|
|
||||||
CMD ["uv", "run", "--no-sync", "uvicorn", "main:app", "--host", "0.0.0.0", "--port", "8000"]
|
|
||||||
674
LICENSE
674
LICENSE
@@ -1,21 +1,661 @@
|
|||||||
MIT License
|
GNU AFFERO GENERAL PUBLIC LICENSE
|
||||||
|
Version 3, 19 November 2007
|
||||||
|
|
||||||
Copyright (c) 2025 GooGuTeam
|
Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
|
||||||
|
Everyone is permitted to copy and distribute verbatim copies
|
||||||
|
of this license document, but changing it is not allowed.
|
||||||
|
|
||||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
Preamble
|
||||||
of this software and associated documentation files (the "Software"), to deal
|
|
||||||
in the Software without restriction, including without limitation the rights
|
|
||||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
||||||
copies of the Software, and to permit persons to whom the Software is
|
|
||||||
furnished to do so, subject to the following conditions:
|
|
||||||
|
|
||||||
The above copyright notice and this permission notice shall be included in all
|
The GNU Affero General Public License is a free, copyleft license for
|
||||||
copies or substantial portions of the Software.
|
software and other kinds of works, specifically designed to ensure
|
||||||
|
cooperation with the community in the case of network server software.
|
||||||
|
|
||||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
The licenses for most software and other practical works are designed
|
||||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
to take away your freedom to share and change the works. By contrast,
|
||||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
our General Public Licenses are intended to guarantee your freedom to
|
||||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
share and change all versions of a program--to make sure it remains free
|
||||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
software for all its users.
|
||||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
||||||
SOFTWARE.
|
When we speak of free software, we are referring to freedom, not
|
||||||
|
price. Our General Public Licenses are designed to make sure that you
|
||||||
|
have the freedom to distribute copies of free software (and charge for
|
||||||
|
them if you wish), that you receive source code or can get it if you
|
||||||
|
want it, that you can change the software or use pieces of it in new
|
||||||
|
free programs, and that you know you can do these things.
|
||||||
|
|
||||||
|
Developers that use our General Public Licenses protect your rights
|
||||||
|
with two steps: (1) assert copyright on the software, and (2) offer
|
||||||
|
you this License which gives you legal permission to copy, distribute
|
||||||
|
and/or modify the software.
|
||||||
|
|
||||||
|
A secondary benefit of defending all users' freedom is that
|
||||||
|
improvements made in alternate versions of the program, if they
|
||||||
|
receive widespread use, become available for other developers to
|
||||||
|
incorporate. Many developers of free software are heartened and
|
||||||
|
encouraged by the resulting cooperation. However, in the case of
|
||||||
|
software used on network servers, this result may fail to come about.
|
||||||
|
The GNU General Public License permits making a modified version and
|
||||||
|
letting the public access it on a server without ever releasing its
|
||||||
|
source code to the public.
|
||||||
|
|
||||||
|
The GNU Affero General Public License is designed specifically to
|
||||||
|
ensure that, in such cases, the modified source code becomes available
|
||||||
|
to the community. It requires the operator of a network server to
|
||||||
|
provide the source code of the modified version running there to the
|
||||||
|
users of that server. Therefore, public use of a modified version, on
|
||||||
|
a publicly accessible server, gives the public access to the source
|
||||||
|
code of the modified version.
|
||||||
|
|
||||||
|
An older license, called the Affero General Public License and
|
||||||
|
published by Affero, was designed to accomplish similar goals. This is
|
||||||
|
a different license, not a version of the Affero GPL, but Affero has
|
||||||
|
released a new version of the Affero GPL which permits relicensing under
|
||||||
|
this license.
|
||||||
|
|
||||||
|
The precise terms and conditions for copying, distribution and
|
||||||
|
modification follow.
|
||||||
|
|
||||||
|
TERMS AND CONDITIONS
|
||||||
|
|
||||||
|
0. Definitions.
|
||||||
|
|
||||||
|
"This License" refers to version 3 of the GNU Affero General Public License.
|
||||||
|
|
||||||
|
"Copyright" also means copyright-like laws that apply to other kinds of
|
||||||
|
works, such as semiconductor masks.
|
||||||
|
|
||||||
|
"The Program" refers to any copyrightable work licensed under this
|
||||||
|
License. Each licensee is addressed as "you". "Licensees" and
|
||||||
|
"recipients" may be individuals or organizations.
|
||||||
|
|
||||||
|
To "modify" a work means to copy from or adapt all or part of the work
|
||||||
|
in a fashion requiring copyright permission, other than the making of an
|
||||||
|
exact copy. The resulting work is called a "modified version" of the
|
||||||
|
earlier work or a work "based on" the earlier work.
|
||||||
|
|
||||||
|
A "covered work" means either the unmodified Program or a work based
|
||||||
|
on the Program.
|
||||||
|
|
||||||
|
To "propagate" a work means to do anything with it that, without
|
||||||
|
permission, would make you directly or secondarily liable for
|
||||||
|
infringement under applicable copyright law, except executing it on a
|
||||||
|
computer or modifying a private copy. Propagation includes copying,
|
||||||
|
distribution (with or without modification), making available to the
|
||||||
|
public, and in some countries other activities as well.
|
||||||
|
|
||||||
|
To "convey" a work means any kind of propagation that enables other
|
||||||
|
parties to make or receive copies. Mere interaction with a user through
|
||||||
|
a computer network, with no transfer of a copy, is not conveying.
|
||||||
|
|
||||||
|
An interactive user interface displays "Appropriate Legal Notices"
|
||||||
|
to the extent that it includes a convenient and prominently visible
|
||||||
|
feature that (1) displays an appropriate copyright notice, and (2)
|
||||||
|
tells the user that there is no warranty for the work (except to the
|
||||||
|
extent that warranties are provided), that licensees may convey the
|
||||||
|
work under this License, and how to view a copy of this License. If
|
||||||
|
the interface presents a list of user commands or options, such as a
|
||||||
|
menu, a prominent item in the list meets this criterion.
|
||||||
|
|
||||||
|
1. Source Code.
|
||||||
|
|
||||||
|
The "source code" for a work means the preferred form of the work
|
||||||
|
for making modifications to it. "Object code" means any non-source
|
||||||
|
form of a work.
|
||||||
|
|
||||||
|
A "Standard Interface" means an interface that either is an official
|
||||||
|
standard defined by a recognized standards body, or, in the case of
|
||||||
|
interfaces specified for a particular programming language, one that
|
||||||
|
is widely used among developers working in that language.
|
||||||
|
|
||||||
|
The "System Libraries" of an executable work include anything, other
|
||||||
|
than the work as a whole, that (a) is included in the normal form of
|
||||||
|
packaging a Major Component, but which is not part of that Major
|
||||||
|
Component, and (b) serves only to enable use of the work with that
|
||||||
|
Major Component, or to implement a Standard Interface for which an
|
||||||
|
implementation is available to the public in source code form. A
|
||||||
|
"Major Component", in this context, means a major essential component
|
||||||
|
(kernel, window system, and so on) of the specific operating system
|
||||||
|
(if any) on which the executable work runs, or a compiler used to
|
||||||
|
produce the work, or an object code interpreter used to run it.
|
||||||
|
|
||||||
|
The "Corresponding Source" for a work in object code form means all
|
||||||
|
the source code needed to generate, install, and (for an executable
|
||||||
|
work) run the object code and to modify the work, including scripts to
|
||||||
|
control those activities. However, it does not include the work's
|
||||||
|
System Libraries, or general-purpose tools or generally available free
|
||||||
|
programs which are used unmodified in performing those activities but
|
||||||
|
which are not part of the work. For example, Corresponding Source
|
||||||
|
includes interface definition files associated with source files for
|
||||||
|
the work, and the source code for shared libraries and dynamically
|
||||||
|
linked subprograms that the work is specifically designed to require,
|
||||||
|
such as by intimate data communication or control flow between those
|
||||||
|
subprograms and other parts of the work.
|
||||||
|
|
||||||
|
The Corresponding Source need not include anything that users
|
||||||
|
can regenerate automatically from other parts of the Corresponding
|
||||||
|
Source.
|
||||||
|
|
||||||
|
The Corresponding Source for a work in source code form is that
|
||||||
|
same work.
|
||||||
|
|
||||||
|
2. Basic Permissions.
|
||||||
|
|
||||||
|
All rights granted under this License are granted for the term of
|
||||||
|
copyright on the Program, and are irrevocable provided the stated
|
||||||
|
conditions are met. This License explicitly affirms your unlimited
|
||||||
|
permission to run the unmodified Program. The output from running a
|
||||||
|
covered work is covered by this License only if the output, given its
|
||||||
|
content, constitutes a covered work. This License acknowledges your
|
||||||
|
rights of fair use or other equivalent, as provided by copyright law.
|
||||||
|
|
||||||
|
You may make, run and propagate covered works that you do not
|
||||||
|
convey, without conditions so long as your license otherwise remains
|
||||||
|
in force. You may convey covered works to others for the sole purpose
|
||||||
|
of having them make modifications exclusively for you, or provide you
|
||||||
|
with facilities for running those works, provided that you comply with
|
||||||
|
the terms of this License in conveying all material for which you do
|
||||||
|
not control copyright. Those thus making or running the covered works
|
||||||
|
for you must do so exclusively on your behalf, under your direction
|
||||||
|
and control, on terms that prohibit them from making any copies of
|
||||||
|
your copyrighted material outside their relationship with you.
|
||||||
|
|
||||||
|
Conveying under any other circumstances is permitted solely under
|
||||||
|
the conditions stated below. Sublicensing is not allowed; section 10
|
||||||
|
makes it unnecessary.
|
||||||
|
|
||||||
|
3. Protecting Users' Legal Rights From Anti-Circumvention Law.
|
||||||
|
|
||||||
|
No covered work shall be deemed part of an effective technological
|
||||||
|
measure under any applicable law fulfilling obligations under article
|
||||||
|
11 of the WIPO copyright treaty adopted on 20 December 1996, or
|
||||||
|
similar laws prohibiting or restricting circumvention of such
|
||||||
|
measures.
|
||||||
|
|
||||||
|
When you convey a covered work, you waive any legal power to forbid
|
||||||
|
circumvention of technological measures to the extent such circumvention
|
||||||
|
is effected by exercising rights under this License with respect to
|
||||||
|
the covered work, and you disclaim any intention to limit operation or
|
||||||
|
modification of the work as a means of enforcing, against the work's
|
||||||
|
users, your or third parties' legal rights to forbid circumvention of
|
||||||
|
technological measures.
|
||||||
|
|
||||||
|
4. Conveying Verbatim Copies.
|
||||||
|
|
||||||
|
You may convey verbatim copies of the Program's source code as you
|
||||||
|
receive it, in any medium, provided that you conspicuously and
|
||||||
|
appropriately publish on each copy an appropriate copyright notice;
|
||||||
|
keep intact all notices stating that this License and any
|
||||||
|
non-permissive terms added in accord with section 7 apply to the code;
|
||||||
|
keep intact all notices of the absence of any warranty; and give all
|
||||||
|
recipients a copy of this License along with the Program.
|
||||||
|
|
||||||
|
You may charge any price or no price for each copy that you convey,
|
||||||
|
and you may offer support or warranty protection for a fee.
|
||||||
|
|
||||||
|
5. Conveying Modified Source Versions.
|
||||||
|
|
||||||
|
You may convey a work based on the Program, or the modifications to
|
||||||
|
produce it from the Program, in the form of source code under the
|
||||||
|
terms of section 4, provided that you also meet all of these conditions:
|
||||||
|
|
||||||
|
a) The work must carry prominent notices stating that you modified
|
||||||
|
it, and giving a relevant date.
|
||||||
|
|
||||||
|
b) The work must carry prominent notices stating that it is
|
||||||
|
released under this License and any conditions added under section
|
||||||
|
7. This requirement modifies the requirement in section 4 to
|
||||||
|
"keep intact all notices".
|
||||||
|
|
||||||
|
c) You must license the entire work, as a whole, under this
|
||||||
|
License to anyone who comes into possession of a copy. This
|
||||||
|
License will therefore apply, along with any applicable section 7
|
||||||
|
additional terms, to the whole of the work, and all its parts,
|
||||||
|
regardless of how they are packaged. This License gives no
|
||||||
|
permission to license the work in any other way, but it does not
|
||||||
|
invalidate such permission if you have separately received it.
|
||||||
|
|
||||||
|
d) If the work has interactive user interfaces, each must display
|
||||||
|
Appropriate Legal Notices; however, if the Program has interactive
|
||||||
|
interfaces that do not display Appropriate Legal Notices, your
|
||||||
|
work need not make them do so.
|
||||||
|
|
||||||
|
A compilation of a covered work with other separate and independent
|
||||||
|
works, which are not by their nature extensions of the covered work,
|
||||||
|
and which are not combined with it such as to form a larger program,
|
||||||
|
in or on a volume of a storage or distribution medium, is called an
|
||||||
|
"aggregate" if the compilation and its resulting copyright are not
|
||||||
|
used to limit the access or legal rights of the compilation's users
|
||||||
|
beyond what the individual works permit. Inclusion of a covered work
|
||||||
|
in an aggregate does not cause this License to apply to the other
|
||||||
|
parts of the aggregate.
|
||||||
|
|
||||||
|
6. Conveying Non-Source Forms.
|
||||||
|
|
||||||
|
You may convey a covered work in object code form under the terms
|
||||||
|
of sections 4 and 5, provided that you also convey the
|
||||||
|
machine-readable Corresponding Source under the terms of this License,
|
||||||
|
in one of these ways:
|
||||||
|
|
||||||
|
a) Convey the object code in, or embodied in, a physical product
|
||||||
|
(including a physical distribution medium), accompanied by the
|
||||||
|
Corresponding Source fixed on a durable physical medium
|
||||||
|
customarily used for software interchange.
|
||||||
|
|
||||||
|
b) Convey the object code in, or embodied in, a physical product
|
||||||
|
(including a physical distribution medium), accompanied by a
|
||||||
|
written offer, valid for at least three years and valid for as
|
||||||
|
long as you offer spare parts or customer support for that product
|
||||||
|
model, to give anyone who possesses the object code either (1) a
|
||||||
|
copy of the Corresponding Source for all the software in the
|
||||||
|
product that is covered by this License, on a durable physical
|
||||||
|
medium customarily used for software interchange, for a price no
|
||||||
|
more than your reasonable cost of physically performing this
|
||||||
|
conveying of source, or (2) access to copy the
|
||||||
|
Corresponding Source from a network server at no charge.
|
||||||
|
|
||||||
|
c) Convey individual copies of the object code with a copy of the
|
||||||
|
written offer to provide the Corresponding Source. This
|
||||||
|
alternative is allowed only occasionally and noncommercially, and
|
||||||
|
only if you received the object code with such an offer, in accord
|
||||||
|
with subsection 6b.
|
||||||
|
|
||||||
|
d) Convey the object code by offering access from a designated
|
||||||
|
place (gratis or for a charge), and offer equivalent access to the
|
||||||
|
Corresponding Source in the same way through the same place at no
|
||||||
|
further charge. You need not require recipients to copy the
|
||||||
|
Corresponding Source along with the object code. If the place to
|
||||||
|
copy the object code is a network server, the Corresponding Source
|
||||||
|
may be on a different server (operated by you or a third party)
|
||||||
|
that supports equivalent copying facilities, provided you maintain
|
||||||
|
clear directions next to the object code saying where to find the
|
||||||
|
Corresponding Source. Regardless of what server hosts the
|
||||||
|
Corresponding Source, you remain obligated to ensure that it is
|
||||||
|
available for as long as needed to satisfy these requirements.
|
||||||
|
|
||||||
|
e) Convey the object code using peer-to-peer transmission, provided
|
||||||
|
you inform other peers where the object code and Corresponding
|
||||||
|
Source of the work are being offered to the general public at no
|
||||||
|
charge under subsection 6d.
|
||||||
|
|
||||||
|
A separable portion of the object code, whose source code is excluded
|
||||||
|
from the Corresponding Source as a System Library, need not be
|
||||||
|
included in conveying the object code work.
|
||||||
|
|
||||||
|
A "User Product" is either (1) a "consumer product", which means any
|
||||||
|
tangible personal property which is normally used for personal, family,
|
||||||
|
or household purposes, or (2) anything designed or sold for incorporation
|
||||||
|
into a dwelling. In determining whether a product is a consumer product,
|
||||||
|
doubtful cases shall be resolved in favor of coverage. For a particular
|
||||||
|
product received by a particular user, "normally used" refers to a
|
||||||
|
typical or common use of that class of product, regardless of the status
|
||||||
|
of the particular user or of the way in which the particular user
|
||||||
|
actually uses, or expects or is expected to use, the product. A product
|
||||||
|
is a consumer product regardless of whether the product has substantial
|
||||||
|
commercial, industrial or non-consumer uses, unless such uses represent
|
||||||
|
the only significant mode of use of the product.
|
||||||
|
|
||||||
|
"Installation Information" for a User Product means any methods,
|
||||||
|
procedures, authorization keys, or other information required to install
|
||||||
|
and execute modified versions of a covered work in that User Product from
|
||||||
|
a modified version of its Corresponding Source. The information must
|
||||||
|
suffice to ensure that the continued functioning of the modified object
|
||||||
|
code is in no case prevented or interfered with solely because
|
||||||
|
modification has been made.
|
||||||
|
|
||||||
|
If you convey an object code work under this section in, or with, or
|
||||||
|
specifically for use in, a User Product, and the conveying occurs as
|
||||||
|
part of a transaction in which the right of possession and use of the
|
||||||
|
User Product is transferred to the recipient in perpetuity or for a
|
||||||
|
fixed term (regardless of how the transaction is characterized), the
|
||||||
|
Corresponding Source conveyed under this section must be accompanied
|
||||||
|
by the Installation Information. But this requirement does not apply
|
||||||
|
if neither you nor any third party retains the ability to install
|
||||||
|
modified object code on the User Product (for example, the work has
|
||||||
|
been installed in ROM).
|
||||||
|
|
||||||
|
The requirement to provide Installation Information does not include a
|
||||||
|
requirement to continue to provide support service, warranty, or updates
|
||||||
|
for a work that has been modified or installed by the recipient, or for
|
||||||
|
the User Product in which it has been modified or installed. Access to a
|
||||||
|
network may be denied when the modification itself materially and
|
||||||
|
adversely affects the operation of the network or violates the rules and
|
||||||
|
protocols for communication across the network.
|
||||||
|
|
||||||
|
Corresponding Source conveyed, and Installation Information provided,
|
||||||
|
in accord with this section must be in a format that is publicly
|
||||||
|
documented (and with an implementation available to the public in
|
||||||
|
source code form), and must require no special password or key for
|
||||||
|
unpacking, reading or copying.
|
||||||
|
|
||||||
|
7. Additional Terms.
|
||||||
|
|
||||||
|
"Additional permissions" are terms that supplement the terms of this
|
||||||
|
License by making exceptions from one or more of its conditions.
|
||||||
|
Additional permissions that are applicable to the entire Program shall
|
||||||
|
be treated as though they were included in this License, to the extent
|
||||||
|
that they are valid under applicable law. If additional permissions
|
||||||
|
apply only to part of the Program, that part may be used separately
|
||||||
|
under those permissions, but the entire Program remains governed by
|
||||||
|
this License without regard to the additional permissions.
|
||||||
|
|
||||||
|
When you convey a copy of a covered work, you may at your option
|
||||||
|
remove any additional permissions from that copy, or from any part of
|
||||||
|
it. (Additional permissions may be written to require their own
|
||||||
|
removal in certain cases when you modify the work.) You may place
|
||||||
|
additional permissions on material, added by you to a covered work,
|
||||||
|
for which you have or can give appropriate copyright permission.
|
||||||
|
|
||||||
|
Notwithstanding any other provision of this License, for material you
|
||||||
|
add to a covered work, you may (if authorized by the copyright holders of
|
||||||
|
that material) supplement the terms of this License with terms:
|
||||||
|
|
||||||
|
a) Disclaiming warranty or limiting liability differently from the
|
||||||
|
terms of sections 15 and 16 of this License; or
|
||||||
|
|
||||||
|
b) Requiring preservation of specified reasonable legal notices or
|
||||||
|
author attributions in that material or in the Appropriate Legal
|
||||||
|
Notices displayed by works containing it; or
|
||||||
|
|
||||||
|
c) Prohibiting misrepresentation of the origin of that material, or
|
||||||
|
requiring that modified versions of such material be marked in
|
||||||
|
reasonable ways as different from the original version; or
|
||||||
|
|
||||||
|
d) Limiting the use for publicity purposes of names of licensors or
|
||||||
|
authors of the material; or
|
||||||
|
|
||||||
|
e) Declining to grant rights under trademark law for use of some
|
||||||
|
trade names, trademarks, or service marks; or
|
||||||
|
|
||||||
|
f) Requiring indemnification of licensors and authors of that
|
||||||
|
material by anyone who conveys the material (or modified versions of
|
||||||
|
it) with contractual assumptions of liability to the recipient, for
|
||||||
|
any liability that these contractual assumptions directly impose on
|
||||||
|
those licensors and authors.
|
||||||
|
|
||||||
|
All other non-permissive additional terms are considered "further
|
||||||
|
restrictions" within the meaning of section 10. If the Program as you
|
||||||
|
received it, or any part of it, contains a notice stating that it is
|
||||||
|
governed by this License along with a term that is a further
|
||||||
|
restriction, you may remove that term. If a license document contains
|
||||||
|
a further restriction but permits relicensing or conveying under this
|
||||||
|
License, you may add to a covered work material governed by the terms
|
||||||
|
of that license document, provided that the further restriction does
|
||||||
|
not survive such relicensing or conveying.
|
||||||
|
|
||||||
|
If you add terms to a covered work in accord with this section, you
|
||||||
|
must place, in the relevant source files, a statement of the
|
||||||
|
additional terms that apply to those files, or a notice indicating
|
||||||
|
where to find the applicable terms.
|
||||||
|
|
||||||
|
Additional terms, permissive or non-permissive, may be stated in the
|
||||||
|
form of a separately written license, or stated as exceptions;
|
||||||
|
the above requirements apply either way.
|
||||||
|
|
||||||
|
8. Termination.
|
||||||
|
|
||||||
|
You may not propagate or modify a covered work except as expressly
|
||||||
|
provided under this License. Any attempt otherwise to propagate or
|
||||||
|
modify it is void, and will automatically terminate your rights under
|
||||||
|
this License (including any patent licenses granted under the third
|
||||||
|
paragraph of section 11).
|
||||||
|
|
||||||
|
However, if you cease all violation of this License, then your
|
||||||
|
license from a particular copyright holder is reinstated (a)
|
||||||
|
provisionally, unless and until the copyright holder explicitly and
|
||||||
|
finally terminates your license, and (b) permanently, if the copyright
|
||||||
|
holder fails to notify you of the violation by some reasonable means
|
||||||
|
prior to 60 days after the cessation.
|
||||||
|
|
||||||
|
Moreover, your license from a particular copyright holder is
|
||||||
|
reinstated permanently if the copyright holder notifies you of the
|
||||||
|
violation by some reasonable means, this is the first time you have
|
||||||
|
received notice of violation of this License (for any work) from that
|
||||||
|
copyright holder, and you cure the violation prior to 30 days after
|
||||||
|
your receipt of the notice.
|
||||||
|
|
||||||
|
Termination of your rights under this section does not terminate the
|
||||||
|
licenses of parties who have received copies or rights from you under
|
||||||
|
this License. If your rights have been terminated and not permanently
|
||||||
|
reinstated, you do not qualify to receive new licenses for the same
|
||||||
|
material under section 10.
|
||||||
|
|
||||||
|
9. Acceptance Not Required for Having Copies.
|
||||||
|
|
||||||
|
You are not required to accept this License in order to receive or
|
||||||
|
run a copy of the Program. Ancillary propagation of a covered work
|
||||||
|
occurring solely as a consequence of using peer-to-peer transmission
|
||||||
|
to receive a copy likewise does not require acceptance. However,
|
||||||
|
nothing other than this License grants you permission to propagate or
|
||||||
|
modify any covered work. These actions infringe copyright if you do
|
||||||
|
not accept this License. Therefore, by modifying or propagating a
|
||||||
|
covered work, you indicate your acceptance of this License to do so.
|
||||||
|
|
||||||
|
10. Automatic Licensing of Downstream Recipients.
|
||||||
|
|
||||||
|
Each time you convey a covered work, the recipient automatically
|
||||||
|
receives a license from the original licensors, to run, modify and
|
||||||
|
propagate that work, subject to this License. You are not responsible
|
||||||
|
for enforcing compliance by third parties with this License.
|
||||||
|
|
||||||
|
An "entity transaction" is a transaction transferring control of an
|
||||||
|
organization, or substantially all assets of one, or subdividing an
|
||||||
|
organization, or merging organizations. If propagation of a covered
|
||||||
|
work results from an entity transaction, each party to that
|
||||||
|
transaction who receives a copy of the work also receives whatever
|
||||||
|
licenses to the work the party's predecessor in interest had or could
|
||||||
|
give under the previous paragraph, plus a right to possession of the
|
||||||
|
Corresponding Source of the work from the predecessor in interest, if
|
||||||
|
the predecessor has it or can get it with reasonable efforts.
|
||||||
|
|
||||||
|
You may not impose any further restrictions on the exercise of the
|
||||||
|
rights granted or affirmed under this License. For example, you may
|
||||||
|
not impose a license fee, royalty, or other charge for exercise of
|
||||||
|
rights granted under this License, and you may not initiate litigation
|
||||||
|
(including a cross-claim or counterclaim in a lawsuit) alleging that
|
||||||
|
any patent claim is infringed by making, using, selling, offering for
|
||||||
|
sale, or importing the Program or any portion of it.
|
||||||
|
|
||||||
|
11. Patents.
|
||||||
|
|
||||||
|
A "contributor" is a copyright holder who authorizes use under this
|
||||||
|
License of the Program or a work on which the Program is based. The
|
||||||
|
work thus licensed is called the contributor's "contributor version".
|
||||||
|
|
||||||
|
A contributor's "essential patent claims" are all patent claims
|
||||||
|
owned or controlled by the contributor, whether already acquired or
|
||||||
|
hereafter acquired, that would be infringed by some manner, permitted
|
||||||
|
by this License, of making, using, or selling its contributor version,
|
||||||
|
but do not include claims that would be infringed only as a
|
||||||
|
consequence of further modification of the contributor version. For
|
||||||
|
purposes of this definition, "control" includes the right to grant
|
||||||
|
patent sublicenses in a manner consistent with the requirements of
|
||||||
|
this License.
|
||||||
|
|
||||||
|
Each contributor grants you a non-exclusive, worldwide, royalty-free
|
||||||
|
patent license under the contributor's essential patent claims, to
|
||||||
|
make, use, sell, offer for sale, import and otherwise run, modify and
|
||||||
|
propagate the contents of its contributor version.
|
||||||
|
|
||||||
|
In the following three paragraphs, a "patent license" is any express
|
||||||
|
agreement or commitment, however denominated, not to enforce a patent
|
||||||
|
(such as an express permission to practice a patent or covenant not to
|
||||||
|
sue for patent infringement). To "grant" such a patent license to a
|
||||||
|
party means to make such an agreement or commitment not to enforce a
|
||||||
|
patent against the party.
|
||||||
|
|
||||||
|
If you convey a covered work, knowingly relying on a patent license,
|
||||||
|
and the Corresponding Source of the work is not available for anyone
|
||||||
|
to copy, free of charge and under the terms of this License, through a
|
||||||
|
publicly available network server or other readily accessible means,
|
||||||
|
then you must either (1) cause the Corresponding Source to be so
|
||||||
|
available, or (2) arrange to deprive yourself of the benefit of the
|
||||||
|
patent license for this particular work, or (3) arrange, in a manner
|
||||||
|
consistent with the requirements of this License, to extend the patent
|
||||||
|
license to downstream recipients. "Knowingly relying" means you have
|
||||||
|
actual knowledge that, but for the patent license, your conveying the
|
||||||
|
covered work in a country, or your recipient's use of the covered work
|
||||||
|
in a country, would infringe one or more identifiable patents in that
|
||||||
|
country that you have reason to believe are valid.
|
||||||
|
|
||||||
|
If, pursuant to or in connection with a single transaction or
|
||||||
|
arrangement, you convey, or propagate by procuring conveyance of, a
|
||||||
|
covered work, and grant a patent license to some of the parties
|
||||||
|
receiving the covered work authorizing them to use, propagate, modify
|
||||||
|
or convey a specific copy of the covered work, then the patent license
|
||||||
|
you grant is automatically extended to all recipients of the covered
|
||||||
|
work and works based on it.
|
||||||
|
|
||||||
|
A patent license is "discriminatory" if it does not include within
|
||||||
|
the scope of its coverage, prohibits the exercise of, or is
|
||||||
|
conditioned on the non-exercise of one or more of the rights that are
|
||||||
|
specifically granted under this License. You may not convey a covered
|
||||||
|
work if you are a party to an arrangement with a third party that is
|
||||||
|
in the business of distributing software, under which you make payment
|
||||||
|
to the third party based on the extent of your activity of conveying
|
||||||
|
the work, and under which the third party grants, to any of the
|
||||||
|
parties who would receive the covered work from you, a discriminatory
|
||||||
|
patent license (a) in connection with copies of the covered work
|
||||||
|
conveyed by you (or copies made from those copies), or (b) primarily
|
||||||
|
for and in connection with specific products or compilations that
|
||||||
|
contain the covered work, unless you entered into that arrangement,
|
||||||
|
or that patent license was granted, prior to 28 March 2007.
|
||||||
|
|
||||||
|
Nothing in this License shall be construed as excluding or limiting
|
||||||
|
any implied license or other defenses to infringement that may
|
||||||
|
otherwise be available to you under applicable patent law.
|
||||||
|
|
||||||
|
12. No Surrender of Others' Freedom.
|
||||||
|
|
||||||
|
If conditions are imposed on you (whether by court order, agreement or
|
||||||
|
otherwise) that contradict the conditions of this License, they do not
|
||||||
|
excuse you from the conditions of this License. If you cannot convey a
|
||||||
|
covered work so as to satisfy simultaneously your obligations under this
|
||||||
|
License and any other pertinent obligations, then as a consequence you may
|
||||||
|
not convey it at all. For example, if you agree to terms that obligate you
|
||||||
|
to collect a royalty for further conveying from those to whom you convey
|
||||||
|
the Program, the only way you could satisfy both those terms and this
|
||||||
|
License would be to refrain entirely from conveying the Program.
|
||||||
|
|
||||||
|
13. Remote Network Interaction; Use with the GNU General Public License.
|
||||||
|
|
||||||
|
Notwithstanding any other provision of this License, if you modify the
|
||||||
|
Program, your modified version must prominently offer all users
|
||||||
|
interacting with it remotely through a computer network (if your version
|
||||||
|
supports such interaction) an opportunity to receive the Corresponding
|
||||||
|
Source of your version by providing access to the Corresponding Source
|
||||||
|
from a network server at no charge, through some standard or customary
|
||||||
|
means of facilitating copying of software. This Corresponding Source
|
||||||
|
shall include the Corresponding Source for any work covered by version 3
|
||||||
|
of the GNU General Public License that is incorporated pursuant to the
|
||||||
|
following paragraph.
|
||||||
|
|
||||||
|
Notwithstanding any other provision of this License, you have
|
||||||
|
permission to link or combine any covered work with a work licensed
|
||||||
|
under version 3 of the GNU General Public License into a single
|
||||||
|
combined work, and to convey the resulting work. The terms of this
|
||||||
|
License will continue to apply to the part which is the covered work,
|
||||||
|
but the work with which it is combined will remain governed by version
|
||||||
|
3 of the GNU General Public License.
|
||||||
|
|
||||||
|
14. Revised Versions of this License.
|
||||||
|
|
||||||
|
The Free Software Foundation may publish revised and/or new versions of
|
||||||
|
the GNU Affero General Public License from time to time. Such new versions
|
||||||
|
will be similar in spirit to the present version, but may differ in detail to
|
||||||
|
address new problems or concerns.
|
||||||
|
|
||||||
|
Each version is given a distinguishing version number. If the
|
||||||
|
Program specifies that a certain numbered version of the GNU Affero General
|
||||||
|
Public License "or any later version" applies to it, you have the
|
||||||
|
option of following the terms and conditions either of that numbered
|
||||||
|
version or of any later version published by the Free Software
|
||||||
|
Foundation. If the Program does not specify a version number of the
|
||||||
|
GNU Affero General Public License, you may choose any version ever published
|
||||||
|
by the Free Software Foundation.
|
||||||
|
|
||||||
|
If the Program specifies that a proxy can decide which future
|
||||||
|
versions of the GNU Affero General Public License can be used, that proxy's
|
||||||
|
public statement of acceptance of a version permanently authorizes you
|
||||||
|
to choose that version for the Program.
|
||||||
|
|
||||||
|
Later license versions may give you additional or different
|
||||||
|
permissions. However, no additional obligations are imposed on any
|
||||||
|
author or copyright holder as a result of your choosing to follow a
|
||||||
|
later version.
|
||||||
|
|
||||||
|
15. Disclaimer of Warranty.
|
||||||
|
|
||||||
|
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
|
||||||
|
APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
|
||||||
|
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
|
||||||
|
OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
|
||||||
|
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
|
||||||
|
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
|
||||||
|
IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
|
||||||
|
ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
|
||||||
|
|
||||||
|
16. Limitation of Liability.
|
||||||
|
|
||||||
|
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
|
||||||
|
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
|
||||||
|
THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
|
||||||
|
GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
|
||||||
|
USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
|
||||||
|
DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
|
||||||
|
PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
|
||||||
|
EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
|
||||||
|
SUCH DAMAGES.
|
||||||
|
|
||||||
|
17. Interpretation of Sections 15 and 16.
|
||||||
|
|
||||||
|
If the disclaimer of warranty and limitation of liability provided
|
||||||
|
above cannot be given local legal effect according to their terms,
|
||||||
|
reviewing courts shall apply local law that most closely approximates
|
||||||
|
an absolute waiver of all civil liability in connection with the
|
||||||
|
Program, unless a warranty or assumption of liability accompanies a
|
||||||
|
copy of the Program in return for a fee.
|
||||||
|
|
||||||
|
END OF TERMS AND CONDITIONS
|
||||||
|
|
||||||
|
How to Apply These Terms to Your New Programs
|
||||||
|
|
||||||
|
If you develop a new program, and you want it to be of the greatest
|
||||||
|
possible use to the public, the best way to achieve this is to make it
|
||||||
|
free software which everyone can redistribute and change under these terms.
|
||||||
|
|
||||||
|
To do so, attach the following notices to the program. It is safest
|
||||||
|
to attach them to the start of each source file to most effectively
|
||||||
|
state the exclusion of warranty; and each file should have at least
|
||||||
|
the "copyright" line and a pointer to where the full notice is found.
|
||||||
|
|
||||||
|
<one line to give the program's name and a brief idea of what it does.>
|
||||||
|
Copyright (C) <year> <name of author>
|
||||||
|
|
||||||
|
This program is free software: you can redistribute it and/or modify
|
||||||
|
it under the terms of the GNU Affero General Public License as published
|
||||||
|
by the Free Software Foundation, either version 3 of the License, or
|
||||||
|
(at your option) any later version.
|
||||||
|
|
||||||
|
This program is distributed in the hope that it will be useful,
|
||||||
|
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
GNU Affero General Public License for more details.
|
||||||
|
|
||||||
|
You should have received a copy of the GNU Affero General Public License
|
||||||
|
along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
Also add information on how to contact you by electronic and paper mail.
|
||||||
|
|
||||||
|
If your software can interact with users remotely through a computer
|
||||||
|
network, you should also make sure that it provides a way for users to
|
||||||
|
get its source. For example, if your program is a web application, its
|
||||||
|
interface could display a "Source" link that leads users to an archive
|
||||||
|
of the code. There are many ways you could offer source, and different
|
||||||
|
solutions will be better for different programs; see section 13 for the
|
||||||
|
specific requirements.
|
||||||
|
|
||||||
|
You should also get your employer (if you work as a programmer) or school,
|
||||||
|
if any, to sign a "copyright disclaimer" for the program, if necessary.
|
||||||
|
For more information on this, and how to apply and follow the GNU AGPL, see
|
||||||
|
<https://www.gnu.org/licenses/>.
|
||||||
212
README.en.md
212
README.en.md
@@ -1,5 +1,12 @@
|
|||||||
# g0v0-server
|
# g0v0-server
|
||||||
|
|
||||||
|
[](https://www.python.org/downloads/)
|
||||||
|
[](https://docs.astral.sh/ruff/)
|
||||||
|
[](https://www.codefactor.io/repository/github/GooGuTeam/g0v0-server)
|
||||||
|
[](https://results.pre-commit.ci/latest/github/GooGuTeam/g0v0-server/main)
|
||||||
|
[](./LICENSE)
|
||||||
|
[](https://discord.gg/AhzJXXWYfF)
|
||||||
|
|
||||||
[简体中文](./README.md) | English
|
[简体中文](./README.md) | English
|
||||||
|
|
||||||
This is an osu! API server implemented with FastAPI + MySQL + Redis, supporting most features of osu! API v1, v2, and osu!lazer.
|
This is an osu! API server implemented with FastAPI + MySQL + Redis, supporting most features of osu! API v1, v2, and osu!lazer.
|
||||||
@@ -8,12 +15,32 @@ This is an osu! API server implemented with FastAPI + MySQL + Redis, supporting
|
|||||||
|
|
||||||
- **OAuth 2.0 Authentication**: Supports password and refresh token flows.
|
- **OAuth 2.0 Authentication**: Supports password and refresh token flows.
|
||||||
- **User Data Management**: Complete user information, statistics, achievements, etc.
|
- **User Data Management**: Complete user information, statistics, achievements, etc.
|
||||||
- **Multi-game Mode Support**: osu! (RX, AP), taiko (RX), catch (RX), mania.
|
- **Multi-game Mode Support**: osu! (RX, AP), taiko (RX), catch (RX), mania and custom rulesets (see below).
|
||||||
- **Database Persistence**: MySQL for storing user data.
|
- **Database Persistence**: MySQL for storing user data.
|
||||||
- **Cache Support**: Redis for caching tokens and session information.
|
- **Cache Support**: Redis for caching tokens and session information.
|
||||||
- **Multiple Storage Backends**: Supports local storage, Cloudflare R2, and AWS S3.
|
- **Multiple Storage Backends**: Supports local storage, Cloudflare R2, and AWS S3.
|
||||||
- **Containerized Deployment**: Docker and Docker Compose support.
|
- **Containerized Deployment**: Docker and Docker Compose support.
|
||||||
|
|
||||||
|
## Supported Rulesets
|
||||||
|
|
||||||
|
**Ruleset**|**ID**|**ShortName**|**PP Algorithm (rosu)**|**PP Algorithm (performance-server)**
|
||||||
|
:-----:|:-----:|:-----:|:-----:|:-----:
|
||||||
|
osu!|`0`|`osu`|✅|✅
|
||||||
|
osu!taiko|`1`|`taiko`|✅|✅
|
||||||
|
osu!catch|`2`|`fruits`|✅|✅
|
||||||
|
osu!mania|`3`|`mania`|✅|✅
|
||||||
|
osu! (RX)|`4`|`osurx`|✅|✅
|
||||||
|
osu! (AP)|`5`|`osuap`|✅|✅
|
||||||
|
osu!taiko (RX)|`6`|`taikorx`|✅|✅
|
||||||
|
osu!catch (RX)|`7`|`fruitsrx`|✅|✅
|
||||||
|
[Sentakki](https://github.com/LumpBloom7/sentakki)|`10`|`Sentakki`|❌|❌
|
||||||
|
[tau](https://github.com/taulazer/tau)|`11`|`tau`|❌|✅
|
||||||
|
[Rush!](https://github.com/Beamographic/rush)|`12`|`rush`|❌|❌
|
||||||
|
[hishigata](https://github.com/LumpBloom7/hishigata)|`13`|`hishigata`|❌|❌
|
||||||
|
[soyokaze!](https://github.com/goodtrailer/soyokaze)|`14`|`soyokaze`|❌|✅
|
||||||
|
|
||||||
|
Go to [custom-rulesets](https://github.com/GooGuTeam/custom-rulesets) to download the custom rulesets modified for g0v0-server.
|
||||||
|
|
||||||
## Quick Start
|
## Quick Start
|
||||||
|
|
||||||
### Using Docker Compose (Recommended)
|
### Using Docker Compose (Recommended)
|
||||||
@@ -25,158 +52,81 @@ This is an osu! API server implemented with FastAPI + MySQL + Redis, supporting
|
|||||||
```
|
```
|
||||||
2. Create a `.env` file
|
2. Create a `.env` file
|
||||||
|
|
||||||
Please see the server configuration below to modify the .env file.
|
Please see [wiki](https://github.com/GooGuTeam/g0v0-server/wiki/Configuration) to modify the .env file.
|
||||||
```bash
|
```bash
|
||||||
cp .env.example .env
|
cp .env.example .env
|
||||||
```
|
```
|
||||||
3. Start the service
|
3. (Optional) Download rulesets
|
||||||
|
Go to [custom-rulesets](https://github.com/GooGuTeam/custom-rulesets/releases/latest) to download the custom rulesets modified for g0v0-server. Place the downloaded DLLs into the `rulesets/` directory to enable custom ruleset support in the spectator server and performance calculator.
|
||||||
|
4. Start the service
|
||||||
```bash
|
```bash
|
||||||
# Standard server
|
# Standard server
|
||||||
docker-compose -f docker-compose.yml up -d
|
docker-compose -f docker-compose.yml up -d
|
||||||
# Enable osu!RX and osu!AP statistics (ppy-sb pp algorithm)
|
# Enable osu!RX and osu!AP statistics (Gu pp algorithm based on ppy-sb pp algorithm)
|
||||||
docker-compose -f docker-compose-osurx.yml up -d
|
docker-compose -f docker-compose-osurx.yml up -d
|
||||||
```
|
```
|
||||||
4. Connect to the server from the game
|
5. Connect to the server from the game
|
||||||
|
|
||||||
Use a [custom osu!lazer client](https://github.com/GooGuTeam/osu), or use [LazerAuthlibInjection](https://github.com/MingxuanGame/LazerAuthlibInjection), and change the server settings to the server's address.
|
Use a [custom osu!lazer client](https://github.com/GooGuTeam/osu), or use [LazerAuthlibInjection](https://github.com/MingxuanGame/LazerAuthlibInjection), and change the server settings to the server's address.
|
||||||
|
|
||||||
## Configuration
|
|
||||||
|
|
||||||
### Database Settings
|
|
||||||
| Variable Name | Description | Default Value |
|
|
||||||
|---|---|---|
|
|
||||||
| `MYSQL_HOST` | MySQL host address | `localhost` |
|
|
||||||
| `MYSQL_PORT` | MySQL port | `3306` |
|
|
||||||
| `MYSQL_DATABASE` | MySQL database name | `osu_api` |
|
|
||||||
| `MYSQL_USER` | MySQL username | `osu_api` |
|
|
||||||
| `MYSQL_PASSWORD` | MySQL password | `password` |
|
|
||||||
| `MYSQL_ROOT_PASSWORD` | MySQL root password | `password` |
|
|
||||||
| `REDIS_URL` | Redis connection string | `redis://127.0.0.1:6379/0` |
|
|
||||||
|
|
||||||
### JWT Settings
|
|
||||||
| Variable Name | Description | Default Value |
|
|
||||||
|---|---|---|
|
|
||||||
| `JWT_SECRET_KEY` | JWT signing key | `your_jwt_secret_here` |
|
|
||||||
| `ALGORITHM` | JWT algorithm | `HS256` |
|
|
||||||
| `ACCESS_TOKEN_EXPIRE_MINUTES` | Access token expiration time (minutes) | `1440` |
|
|
||||||
|
|
||||||
### Server Settings
|
|
||||||
| Variable Name | Description | Default Value |
|
|
||||||
|---|---|---|
|
|
||||||
| `HOST` | Server listening address | `0.0.0.0` |
|
|
||||||
| `PORT` | Server listening port | `8000` |
|
|
||||||
| `DEBUG` | Debug mode | `false` |
|
|
||||||
| `SERVER_URL` | Server URL | `http://localhost:8000` |
|
|
||||||
| `CORS_URLS` | Additional CORS allowed domain list (JSON format) | `[]` |
|
|
||||||
| `FRONTEND_URL` | Frontend URL, redirects to this URL when accessing URLs opened from the game. Empty means no redirection. | `(null)` |
|
|
||||||
|
|
||||||
### OAuth Settings
|
|
||||||
| Variable Name | Description | Default Value |
|
|
||||||
|---|---|---|
|
|
||||||
| `OSU_CLIENT_ID` | OAuth client ID | `5` |
|
|
||||||
| `OSU_CLIENT_SECRET` | OAuth client secret | `FGc9GAtyHzeQDshWP5Ah7dega8hJACAJpQtw6OXk` |
|
|
||||||
| `OSU_WEB_CLIENT_ID` | Web OAuth client ID | `6` |
|
|
||||||
| `OSU_WEB_CLIENT_SECRET` | Web OAuth client secret | `your_osu_web_client_secret_here` |
|
|
||||||
|
|
||||||
### SignalR Server Settings
|
|
||||||
| Variable Name | Description | Default Value |
|
|
||||||
|---|---|---|
|
|
||||||
| `SIGNALR_NEGOTIATE_TIMEOUT` | SignalR negotiation timeout (seconds) | `30` |
|
|
||||||
| `SIGNALR_PING_INTERVAL` | SignalR ping interval (seconds) | `15` |
|
|
||||||
|
|
||||||
### Fetcher Settings
|
|
||||||
|
|
||||||
The Fetcher is used to get data from the official osu! API using OAuth 2.0 authentication.
|
|
||||||
|
|
||||||
| Variable Name | Description | Default Value |
|
|
||||||
|---|---|---|
|
|
||||||
| `FETCHER_CLIENT_ID` | Fetcher client ID | `""` |
|
|
||||||
| `FETCHER_CLIENT_SECRET` | Fetcher client secret | `""` |
|
|
||||||
| `FETCHER_SCOPES` | Fetcher scopes | `public` |
|
|
||||||
|
|
||||||
### Log Settings
|
|
||||||
| Variable Name | Description | Default Value |
|
|
||||||
|---|---|---|
|
|
||||||
| `LOG_LEVEL` | Log level | `INFO` |
|
|
||||||
|
|
||||||
### Sentry Settings
|
|
||||||
| Variable Name | Description | Default Value |
|
|
||||||
|---|---|---|
|
|
||||||
| `SENTRY_DSN` | Sentry DSN, empty to disable Sentry | `(null)` |
|
|
||||||
|
|
||||||
### Game Settings
|
|
||||||
| Variable Name | Description | Default Value |
|
|
||||||
|---|---|---|
|
|
||||||
| `ENABLE_RX` | Enable RX mod statistics | `false` |
|
|
||||||
| `ENABLE_AP` | Enable AP mod statistics | `false` |
|
|
||||||
| `ENABLE_ALL_MODS_PP` | Enable PP calculation for all mods | `false` |
|
|
||||||
| `ENABLE_SUPPORTER_FOR_ALL_USERS` | Enable supporter status for all new users | `false` |
|
|
||||||
| `ENABLE_ALL_BEATMAP_LEADERBOARD` | Enable leaderboards for all beatmaps | `false` |
|
|
||||||
| `ENABLE_ALL_BEATMAP_PP` | Allow any beatmap to grant PP | `false` |
|
|
||||||
| `SUSPICIOUS_SCORE_CHECK` | Enable suspicious score check (star>25 & acc<80 or pp>2300) | `true` |
|
|
||||||
| `SEASONAL_BACKGROUNDS` | List of seasonal background URLs | `[]` |
|
|
||||||
| `BANNED_NAME` | List of banned usernames | `["mrekk", "vaxei", "btmc", "cookiezi", "peppy", "saragi", "chocomint"]` |
|
|
||||||
|
|
||||||
### Storage Service Settings
|
|
||||||
|
|
||||||
Used for storing replay files, avatars, and other static assets.
|
|
||||||
|
|
||||||
| Variable Name | Description | Default Value |
|
|
||||||
|---|---|---|
|
|
||||||
| `STORAGE_SERVICE` | Storage service type: `local`, `r2`, `s3` | `local` |
|
|
||||||
| `STORAGE_SETTINGS` | Storage service configuration (JSON format), see below for configuration | `{"local_storage_path": "./storage"}` |
|
|
||||||
|
|
||||||
## Storage Service Configuration
|
|
||||||
|
|
||||||
### Local Storage (Recommended for development)
|
|
||||||
|
|
||||||
Local storage saves files to the server's local filesystem, suitable for development and small-scale deployments.
|
|
||||||
|
|
||||||
```bash
|
|
||||||
STORAGE_SERVICE="local"
|
|
||||||
STORAGE_SETTINGS='{"local_storage_path": "./storage"}'
|
|
||||||
```
|
|
||||||
|
|
||||||
### Cloudflare R2 Storage (Recommended for production)
|
|
||||||
|
|
||||||
```bash
|
|
||||||
STORAGE_SERVICE="r2"
|
|
||||||
STORAGE_SETTINGS='{
|
|
||||||
"r2_account_id": "your_cloudflare_account_id",
|
|
||||||
"r2_access_key_id": "your_r2_access_key_id",
|
|
||||||
"r2_secret_access_key": "your_r2_secret_access_key",
|
|
||||||
"r2_bucket_name": "your_bucket_name",
|
|
||||||
"r2_public_url_base": "https://your-custom-domain.com"
|
|
||||||
}'
|
|
||||||
```
|
|
||||||
|
|
||||||
### AWS S3 Storage
|
|
||||||
|
|
||||||
```bash
|
|
||||||
STORAGE_SERVICE="s3"
|
|
||||||
STORAGE_SETTINGS='{
|
|
||||||
"s3_access_key_id": "your_aws_access_key_id",
|
|
||||||
"s3_secret_access_key": "your_aws_secret_access_key",
|
|
||||||
"s3_bucket_name": "your_s3_bucket_name",
|
|
||||||
"s3_region_name": "us-east-1",
|
|
||||||
"s3_public_url_base": "https://your-custom-domain.com"
|
|
||||||
}'
|
|
||||||
```
|
|
||||||
|
|
||||||
> **Note**: In a production environment, be sure to change the default keys and passwords!
|
|
||||||
|
|
||||||
### Updating the Database
|
### Updating the Database
|
||||||
|
|
||||||
Refer to the [Database Migration Guide](https://github.com/GooGuTeam/g0v0-server/wiki/Migrate-Database)
|
Refer to the [Database Migration Guide](https://github.com/GooGuTeam/g0v0-server/wiki/Migrate-Database)
|
||||||
|
|
||||||
|
## Security
|
||||||
|
|
||||||
|
Use `openssl rand -hex 32` to generate the JWT secret key to ensure the security of the server and the normal operation of the observer server.
|
||||||
|
|
||||||
|
Use `openssl rand -hex 40` to generate the frontend secret key.
|
||||||
|
|
||||||
|
**If it is in a public network environment, please block external requests to the `/_lio` path.**
|
||||||
|
|
||||||
|
## Documentation
|
||||||
|
|
||||||
|
Visit the [wiki](https://github.com/GooGuTeam/g0v0-server/wiki) for more information.
|
||||||
|
|
||||||
## License
|
## License
|
||||||
|
|
||||||
MIT License
|
This project is licensed under the **GNU Affero General Public License v3.0 (AGPL-3.0-only)**.
|
||||||
|
Any derivative work, modification, or deployment **MUST clearly and prominently attribute** the original authors:
|
||||||
|
**GooGuTeam - https://github.com/GooGuTeam/g0v0-server**
|
||||||
|
|
||||||
## Contributing
|
## Contributing
|
||||||
|
|
||||||
The project is currently in a state of rapid iteration. Issues and Pull Requests are welcome!
|
The project is currently in a state of rapid iteration. Issues and Pull Requests are welcome!
|
||||||
|
|
||||||
|
See [Contributing Guide](./CONTRIBUTING.md) for more information.
|
||||||
|
|
||||||
|
## Contributors
|
||||||
|
|
||||||
|
<!-- ALL-CONTRIBUTORS-BADGE:START - Do not remove or modify this section -->
|
||||||
|
[](#contributors-)
|
||||||
|
<!-- ALL-CONTRIBUTORS-BADGE:END -->
|
||||||
|
|
||||||
|
Thanks goes to these wonderful people ([emoji key](https://allcontributors.org/docs/en/emoji-key)):
|
||||||
|
|
||||||
|
<!-- ALL-CONTRIBUTORS-LIST:START - Do not remove or modify this section -->
|
||||||
|
<!-- prettier-ignore-start -->
|
||||||
|
<!-- markdownlint-disable -->
|
||||||
|
<table>
|
||||||
|
<tbody>
|
||||||
|
<tr>
|
||||||
|
<td align="center" valign="top" width="14.28%"><a href="https://github.com/GooGuJiang"><img src="https://avatars.githubusercontent.com/u/74496778?v=4?s=100" width="100px;" alt="咕谷酱"/><br /><sub><b>咕谷酱</b></sub></a><br /><a href="https://github.com/GooGuTeam/g0v0-server/commits?author=GooGuJiang" title="Code">💻</a></td>
|
||||||
|
<td align="center" valign="top" width="14.28%"><a href="https://blog.mxgame.top/"><img src="https://avatars.githubusercontent.com/u/68982190?v=4?s=100" width="100px;" alt="MingxuanGame"/><br /><sub><b>MingxuanGame</b></sub></a><br /><a href="https://github.com/GooGuTeam/g0v0-server/commits?author=MingxuanGame" title="Code">💻</a></td>
|
||||||
|
<td align="center" valign="top" width="14.28%"><a href="https://github.com/chenjintang-shrimp"><img src="https://avatars.githubusercontent.com/u/110657724?v=4?s=100" width="100px;" alt="陈晋瑭"/><br /><sub><b>陈晋瑭</b></sub></a><br /><a href="https://github.com/GooGuTeam/g0v0-server/commits?author=chenjintang-shrimp" title="Code">💻</a></td>
|
||||||
|
<td align="center" valign="top" width="14.28%"><a href="https://4ayo.ovh"><img src="https://avatars.githubusercontent.com/u/115783539?v=4?s=100" width="100px;" alt="4ayo"/><br /><sub><b>4ayo</b></sub></a><br /><a href="#ideas-4aya" title="Ideas, Planning, & Feedback">🤔</a></td>
|
||||||
|
<td align="center" valign="top" width="14.28%"><a href="https://github.com/kyiuu1"><img src="https://avatars.githubusercontent.com/u/188347675?v=4?s=100" width="100px;" alt="kyiuu1"/><br /><sub><b>kyiuu1</b></sub></a><br /><a href="#ideas-kyiuu1" title="Ideas, Planning, & Feedback">🤔</a></td>
|
||||||
|
</tr>
|
||||||
|
</tbody>
|
||||||
|
</table>
|
||||||
|
|
||||||
|
<!-- markdownlint-restore -->
|
||||||
|
<!-- prettier-ignore-end -->
|
||||||
|
|
||||||
|
<!-- ALL-CONTRIBUTORS-LIST:END -->
|
||||||
|
|
||||||
|
This project follows the [all-contributors](https://github.com/all-contributors/all-contributors) specification. Contributions of any kind welcome!
|
||||||
|
|
||||||
## Discussion
|
## Discussion
|
||||||
|
|
||||||
- Discord: https://discord.gg/AhzJXXWYfF
|
- Discord: https://discord.gg/AhzJXXWYfF
|
||||||
|
|||||||
234
README.md
234
README.md
@@ -1,6 +1,13 @@
|
|||||||
# g0v0-server
|
# g0v0-server
|
||||||
|
|
||||||
[English](./README.en.md) | 简体中文
|
[](https://www.python.org/downloads/)
|
||||||
|
[](https://docs.astral.sh/ruff/)
|
||||||
|
[](https://www.codefactor.io/repository/github/GooGuTeam/g0v0-server)
|
||||||
|
[](https://results.pre-commit.ci/latest/github/GooGuTeam/g0v0-server/main)
|
||||||
|
[](./LICENSE)
|
||||||
|
[](https://discord.gg/AhzJXXWYfF)
|
||||||
|
|
||||||
|
简体中文 | [English](./README.en.md)
|
||||||
|
|
||||||
这是一个使用 FastAPI + MySQL + Redis 实现的 osu! API 模拟服务器,支持 osu! API v1, v2 和 osu!lazer 的绝大部分功能。
|
这是一个使用 FastAPI + MySQL + Redis 实现的 osu! API 模拟服务器,支持 osu! API v1, v2 和 osu!lazer 的绝大部分功能。
|
||||||
|
|
||||||
@@ -8,11 +15,32 @@
|
|||||||
|
|
||||||
- **OAuth 2.0 认证**: 支持密码流和刷新令牌流
|
- **OAuth 2.0 认证**: 支持密码流和刷新令牌流
|
||||||
- **用户数据管理**: 完整的用户信息、统计数据、成就等
|
- **用户数据管理**: 完整的用户信息、统计数据、成就等
|
||||||
- **多游戏模式支持**: osu! (RX, AP), taiko (RX), catch (RX), mania
|
- **多游戏模式支持**: osu! (RX, AP), taiko (RX), catch (RX), mania 和自定义 ruleset(见下)
|
||||||
- **数据库持久化**: MySQL 存储用户数据
|
- **数据库持久化**: MySQL 存储用户数据
|
||||||
- **缓存支持**: Redis 缓存令牌和会话信息
|
- **缓存支持**: Redis 缓存令牌和会话信息
|
||||||
- **多种存储后端**: 支持本地存储、Cloudflare R2、AWS S3
|
- **多种存储后端**: 支持本地存储、Cloudflare R2、AWS S3
|
||||||
- **容器化部署**: Docker 和 Docker Compose 支持
|
- **容器化部署**: Docker 和 Docker Compose 支持
|
||||||
|
- **资源文件反向代理**: 可以将 osu! 官方的资源链接(头像、谱面封面、音频等)替换为自定义域名。
|
||||||
|
|
||||||
|
## 支持的 ruleset
|
||||||
|
|
||||||
|
**Ruleset**|**ID**|**ShortName**|**PP 算法 (rosu)**|**PP 算法 (performance-server)**
|
||||||
|
:-----:|:-----:|:-----:|:-----:|:-----:
|
||||||
|
osu!|`0`|`osu`|✅|✅
|
||||||
|
osu!taiko|`1`|`taiko`|✅|✅
|
||||||
|
osu!catch|`2`|`fruits`|✅|✅
|
||||||
|
osu!mania|`3`|`mania`|✅|✅
|
||||||
|
osu! (RX)|`4`|`osurx`|✅|✅
|
||||||
|
osu! (AP)|`5`|`osuap`|✅|✅
|
||||||
|
osu!taiko (RX)|`6`|`taikorx`|✅|✅
|
||||||
|
osu!catch (RX)|`7`|`fruitsrx`|✅|✅
|
||||||
|
[Sentakki](https://github.com/LumpBloom7/sentakki)|`10`|`Sentakki`|❌|❌
|
||||||
|
[tau](https://github.com/taulazer/tau)|`11`|`tau`|❌|✅
|
||||||
|
[Rush!](https://github.com/Beamographic/rush)|`12`|`rush`|❌|❌
|
||||||
|
[hishigata](https://github.com/LumpBloom7/hishigata)|`13`|`hishigata`|❌|❌
|
||||||
|
[soyokaze!](https://github.com/goodtrailer/soyokaze)|`14`|`soyokaze`|❌|✅
|
||||||
|
|
||||||
|
前往 [custom-rulesets](https://github.com/GooGuTeam/custom-rulesets/releases/latest) 下载为 g0v0-server 修改的自定义 ruleset。
|
||||||
|
|
||||||
## 快速开始
|
## 快速开始
|
||||||
|
|
||||||
@@ -26,176 +54,86 @@ cd g0v0-server
|
|||||||
|
|
||||||
2. 创建 `.env` 文件
|
2. 创建 `.env` 文件
|
||||||
|
|
||||||
请参考下方的服务器配置修改 .env 文件
|
请参考 [wiki](https://github.com/GooGuTeam/g0v0-server/wiki/Configuration) 来修改 `.env` 文件
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
cp .env.example .env
|
cp .env.example .env
|
||||||
```
|
```
|
||||||
|
|
||||||
3. 启动服务
|
3. (可选)下载 rulesets
|
||||||
|
|
||||||
|
前往 [custom-rulesets](https://github.com/GooGuTeam/custom-rulesets/releases/latest) 下载为 g0v0-server 修改的自定义 ruleset。将下载的 DLL 放入 `rulesets/` 目录,以在旁观服务器和表现分计算器中启用自定义 ruleset 支持。
|
||||||
|
|
||||||
|
4. 启动服务
|
||||||
```bash
|
```bash
|
||||||
# 标准服务器
|
# 标准服务器
|
||||||
docker-compose -f docker-compose.yml up -d
|
docker-compose -f docker-compose.yml up -d
|
||||||
# 启用 osu!RX 和 osu!AP 模式 (偏偏要上班 pp 算法)
|
# 启用 osu!RX 和 osu!AP 模式 (基于偏偏要上班 pp 算法的 Gu pp 算法)
|
||||||
docker-compose -f docker-compose-osurx.yml up -d
|
docker-compose -f docker-compose-osurx.yml up -d
|
||||||
```
|
```
|
||||||
|
|
||||||
4. 通过游戏连接服务器
|
5. 通过游戏连接服务器
|
||||||
|
|
||||||
使用[自定义的 osu!lazer 客户端](https://github.com/GooGuTeam/osu),或者使用 [LazerAuthlibInjection](https://github.com/MingxuanGame/LazerAuthlibInjection),修改服务器设置为服务器的 IP
|
使用[自定义的 osu!lazer 客户端](https://github.com/GooGuTeam/osu),或者使用 [LazerAuthlibInjection](https://github.com/MingxuanGame/LazerAuthlibInjection),修改服务器设置为服务器的 IP
|
||||||
|
|
||||||
## 环境变量配置
|
## 更新数据库
|
||||||
|
|
||||||
### 数据库设置
|
|
||||||
| 变量名 | 描述 | 默认值 |
|
|
||||||
|--------|------|--------|
|
|
||||||
| `MYSQL_HOST` | MySQL 主机地址 | `localhost` |
|
|
||||||
| `MYSQL_PORT` | MySQL 端口 | `3306` |
|
|
||||||
| `MYSQL_DATABASE` | MySQL 数据库名 | `osu_api` |
|
|
||||||
| `MYSQL_USER` | MySQL 用户名 | `osu_api` |
|
|
||||||
| `MYSQL_PASSWORD` | MySQL 密码 | `password` |
|
|
||||||
| `MYSQL_ROOT_PASSWORD` | MySQL root 密码 | `password` |
|
|
||||||
| `REDIS_URL` | Redis 连接字符串 | `redis://127.0.0.1:6379/0` |
|
|
||||||
|
|
||||||
### JWT 设置
|
|
||||||
| 变量名 | 描述 | 默认值 |
|
|
||||||
|--------|------|--------|
|
|
||||||
| `JWT_SECRET_KEY` | JWT 签名密钥 | `your_jwt_secret_here` |
|
|
||||||
| `ALGORITHM` | JWT 算法 | `HS256` |
|
|
||||||
| `ACCESS_TOKEN_EXPIRE_MINUTES` | 访问令牌过期时间(分钟) | `1440` |
|
|
||||||
|
|
||||||
### 服务器设置
|
|
||||||
| 变量名 | 描述 | 默认值 |
|
|
||||||
|--------|------|--------|
|
|
||||||
| `HOST` | 服务器监听地址 | `0.0.0.0` |
|
|
||||||
| `PORT` | 服务器监听端口 | `8000` |
|
|
||||||
| `DEBUG` | 调试模式 | `false` |
|
|
||||||
| `SERVER_URL` | 服务器 URL | `http://localhost:8000` |
|
|
||||||
| `CORS_URLS` | 额外的 CORS 允许的域名列表 (JSON 格式) | `[]` |
|
|
||||||
| `FRONTEND_URL` | 前端 URL,当访问从游戏打开的 URL 时会重定向到这个 URL,为空表示不重定向 | `(null)` |
|
|
||||||
|
|
||||||
### OAuth 设置
|
|
||||||
| 变量名 | 描述 | 默认值 |
|
|
||||||
|--------|------|--------|
|
|
||||||
| `OSU_CLIENT_ID` | OAuth 客户端 ID | `5` |
|
|
||||||
| `OSU_CLIENT_SECRET` | OAuth 客户端密钥 | `FGc9GAtyHzeQDshWP5Ah7dega8hJACAJpQtw6OXk` |
|
|
||||||
| `OSU_WEB_CLIENT_ID` | Web OAuth 客户端 ID | `6` |
|
|
||||||
| `OSU_WEB_CLIENT_SECRET` | Web OAuth 客户端密钥 | `your_osu_web_client_secret_here`
|
|
||||||
|
|
||||||
### SignalR 服务器设置
|
|
||||||
| 变量名 | 描述 | 默认值 |
|
|
||||||
|--------|------|--------|
|
|
||||||
| `SIGNALR_NEGOTIATE_TIMEOUT` | SignalR 协商超时时间(秒) | `30` |
|
|
||||||
| `SIGNALR_PING_INTERVAL` | SignalR ping 间隔(秒) | `15` |
|
|
||||||
|
|
||||||
### Fetcher 设置
|
|
||||||
|
|
||||||
Fetcher 用于从 osu! 官方 API 获取数据,使用 osu! 官方 API 的 OAuth 2.0 认证
|
|
||||||
|
|
||||||
| 变量名 | 描述 | 默认值 |
|
|
||||||
|--------|------|--------|
|
|
||||||
| `FETCHER_CLIENT_ID` | Fetcher 客户端 ID | `""` |
|
|
||||||
| `FETCHER_CLIENT_SECRET` | Fetcher 客户端密钥 | `""` |
|
|
||||||
| `FETCHER_SCOPES` | Fetcher 权限范围 | `public` |
|
|
||||||
|
|
||||||
### 日志设置
|
|
||||||
| 变量名 | 描述 | 默认值 |
|
|
||||||
|--------|------|--------|
|
|
||||||
| `LOG_LEVEL` | 日志级别 | `INFO` |
|
|
||||||
|
|
||||||
### Sentry 设置
|
|
||||||
| 变量名 | 描述 | 默认值 |
|
|
||||||
|--------|------|--------|
|
|
||||||
| `SENTRY_DSN` | Sentry DSN,为空不启用 Sentry | `(null)` |
|
|
||||||
|
|
||||||
### GeoIP 配置
|
|
||||||
| 变量名 | 描述 | 默认值 |
|
|
||||||
|--------|------|--------|
|
|
||||||
| `MAXMIND_LICENSE_KEY` | MaxMind License Key(用于下载离线IP库) | `""` |
|
|
||||||
| `GEOIP_DEST_DIR` | GeoIP 数据库存储目录 | `"./geoip"` |
|
|
||||||
| `GEOIP_UPDATE_DAY` | GeoIP 每周更新的星期几(0=周一,6=周日) | `1` |
|
|
||||||
| `GEOIP_UPDATE_HOUR` | GeoIP 每周更新时间(小时,0-23) | `2` |
|
|
||||||
|
|
||||||
### New Relic 设置
|
|
||||||
| 变量名 | 描述 | 默认值 |
|
|
||||||
|--------|------|--------|
|
|
||||||
| `NEW_RELIC_ENVIRONMENT` | New Relic 环境标识 | `"production"` 或 `"development"` |
|
|
||||||
|
|
||||||
将 `newrelic.ini` 配置文件放入项目根目录即可自动启用 New Relic 监控。如果配置文件不存在或 newrelic 包未安装,将跳过 New Relic 初始化。可通过环境变量 `NEW_RELIC_ENVIRONMENT` 指定运行环境。
|
|
||||||
|
|
||||||
### 游戏设置
|
|
||||||
| 变量名 | 描述 | 默认值 |
|
|
||||||
|--------|------|--------|
|
|
||||||
| `ENABLE_RX` | 启用 RX mod 统计数据 | `false` |
|
|
||||||
| `ENABLE_AP` | 启用 AP mod 统计数据 | `false` |
|
|
||||||
| `ENABLE_ALL_MODS_PP` | 启用所有 Mod 的 PP 计算 | `false` |
|
|
||||||
| `ENABLE_SUPPORTER_FOR_ALL_USERS` | 启用所有新注册用户的支持者状态 | `false` |
|
|
||||||
| `ENABLE_ALL_BEATMAP_LEADERBOARD` | 启用所有谱面的排行榜 | `false` |
|
|
||||||
| `ENABLE_ALL_BEATMAP_PP` | 允许任何谱面获得 PP | `false` |
|
|
||||||
| `SUSPICIOUS_SCORE_CHECK` | 启用可疑分数检查(star>25&acc<80 或 pp>2300) | `true` |
|
|
||||||
| `SEASONAL_BACKGROUNDS` | 季节背景图 URL 列表 | `[]` |
|
|
||||||
| `BANNED_NAME` | 禁止使用的用户名列表 | `["mrekk", "vaxei", "btmc", "cookiezi", "peppy", "saragi", "chocomint"]` |
|
|
||||||
|
|
||||||
### 存储服务设置
|
|
||||||
|
|
||||||
用于存储回放文件、头像等静态资源。
|
|
||||||
|
|
||||||
| 变量名 | 描述 | 默认值 |
|
|
||||||
|--------|------|--------|
|
|
||||||
| `STORAGE_SERVICE` | 存储服务类型:`local`、`r2`、`s3` | `local` |
|
|
||||||
| `STORAGE_SETTINGS` | 存储服务配置 (JSON 格式),配置见下 | `{"local_storage_path": "./storage"}` |
|
|
||||||
|
|
||||||
## 存储服务配置
|
|
||||||
|
|
||||||
### 本地存储 (推荐用于开发环境)
|
|
||||||
|
|
||||||
本地存储将文件保存在服务器的本地文件系统中,适合开发和小规模部署。
|
|
||||||
|
|
||||||
```bash
|
|
||||||
STORAGE_SERVICE="local"
|
|
||||||
STORAGE_SETTINGS='{"local_storage_path": "./storage"}'
|
|
||||||
```
|
|
||||||
|
|
||||||
### Cloudflare R2 存储 (推荐用于生产环境)
|
|
||||||
|
|
||||||
```bash
|
|
||||||
STORAGE_SERVICE="r2"
|
|
||||||
STORAGE_SETTINGS='{
|
|
||||||
"r2_account_id": "your_cloudflare_account_id",
|
|
||||||
"r2_access_key_id": "your_r2_access_key_id",
|
|
||||||
"r2_secret_access_key": "your_r2_secret_access_key",
|
|
||||||
"r2_bucket_name": "your_bucket_name",
|
|
||||||
"r2_public_url_base": "https://your-custom-domain.com"
|
|
||||||
}'
|
|
||||||
```
|
|
||||||
|
|
||||||
### AWS S3 存储
|
|
||||||
|
|
||||||
```bash
|
|
||||||
STORAGE_SERVICE="s3"
|
|
||||||
STORAGE_SETTINGS='{
|
|
||||||
"s3_access_key_id": "your_aws_access_key_id",
|
|
||||||
"s3_secret_access_key": "your_aws_secret_access_key",
|
|
||||||
"s3_bucket_name": "your_s3_bucket_name",
|
|
||||||
"s3_region_name": "us-east-1",
|
|
||||||
"s3_public_url_base": "https://your-custom-domain.com"
|
|
||||||
}'
|
|
||||||
```
|
|
||||||
|
|
||||||
> **注意**: 在生产环境中,请务必更改默认的密钥和密码!
|
|
||||||
|
|
||||||
### 更新数据库
|
|
||||||
|
|
||||||
参考[数据库迁移指南](https://github.com/GooGuTeam/g0v0-server/wiki/Migrate-Database)
|
参考[数据库迁移指南](https://github.com/GooGuTeam/g0v0-server/wiki/Migrate-Database)
|
||||||
|
|
||||||
|
## 安全
|
||||||
|
|
||||||
|
使用 `openssl rand -hex 32` 生成 JWT 密钥,以保证服务器安全和旁观服务器的正常运行
|
||||||
|
|
||||||
|
使用 `openssl rand -hex 40` 生成前端密钥
|
||||||
|
|
||||||
|
**如果是在公网环境下,请屏蔽对 `/_lio` 路径的外部请求**
|
||||||
|
|
||||||
|
## 文档
|
||||||
|
|
||||||
|
前往 [wiki](https://github.com/GooGuTeam/g0v0-server/wiki) 查看
|
||||||
|
|
||||||
## 许可证
|
## 许可证
|
||||||
|
|
||||||
MIT License
|
本项目采用 **GNU Affero General Public License v3.0 (AGPL-3.0-only)** 授权。
|
||||||
|
任何衍生作品、修改或部署 **必须在显著位置清晰署名** 原始作者:
|
||||||
|
**GooGuTeam - https://github.com/GooGuTeam/g0v0-server**
|
||||||
|
|
||||||
## 贡献
|
## 贡献
|
||||||
|
|
||||||
项目目前处于快速迭代状态,欢迎提交 Issue 和 Pull Request!
|
项目目前处于快速迭代状态,欢迎提交 Issue 和 Pull Request!
|
||||||
|
|
||||||
|
查看 [贡献指南](./CONTRIBUTING.md) 获取更多信息。
|
||||||
|
|
||||||
|
## 贡献者
|
||||||
|
|
||||||
|
<!-- ALL-CONTRIBUTORS-BADGE:START - Do not remove or modify this section -->
|
||||||
|
[](#contributors-)
|
||||||
|
<!-- ALL-CONTRIBUTORS-BADGE:END -->
|
||||||
|
|
||||||
|
感谢所有参与此项目的贡献者! ([emoji key](https://allcontributors.org/docs/en/emoji-key))
|
||||||
|
|
||||||
|
<!-- ALL-CONTRIBUTORS-LIST:START - Do not remove or modify this section -->
|
||||||
|
<!-- prettier-ignore-start -->
|
||||||
|
<!-- markdownlint-disable -->
|
||||||
|
<table>
|
||||||
|
<tbody>
|
||||||
|
<tr>
|
||||||
|
<td align="center" valign="top" width="14.28%"><a href="https://github.com/GooGuJiang"><img src="https://avatars.githubusercontent.com/u/74496778?v=4?s=100" width="100px;" alt="咕谷酱"/><br /><sub><b>咕谷酱</b></sub></a><br /><a href="https://github.com/GooGuTeam/g0v0-server/commits?author=GooGuJiang" title="Code">💻</a></td>
|
||||||
|
<td align="center" valign="top" width="14.28%"><a href="https://blog.mxgame.top/"><img src="https://avatars.githubusercontent.com/u/68982190?v=4?s=100" width="100px;" alt="MingxuanGame"/><br /><sub><b>MingxuanGame</b></sub></a><br /><a href="https://github.com/GooGuTeam/g0v0-server/commits?author=MingxuanGame" title="Code">💻</a></td>
|
||||||
|
<td align="center" valign="top" width="14.28%"><a href="https://github.com/chenjintang-shrimp"><img src="https://avatars.githubusercontent.com/u/110657724?v=4?s=100" width="100px;" alt="陈晋瑭"/><br /><sub><b>陈晋瑭</b></sub></a><br /><a href="https://github.com/GooGuTeam/g0v0-server/commits?author=chenjintang-shrimp" title="Code">💻</a></td>
|
||||||
|
<td align="center" valign="top" width="14.28%"><a href="https://4ayo.ovh"><img src="https://avatars.githubusercontent.com/u/115783539?v=4?s=100" width="100px;" alt="4ayo"/><br /><sub><b>4ayo</b></sub></a><br /><a href="#ideas-4aya" title="Ideas, Planning, & Feedback">🤔</a></td>
|
||||||
|
<td align="center" valign="top" width="14.28%"><a href="https://github.com/kyiuu1"><img src="https://avatars.githubusercontent.com/u/188347675?v=4?s=100" width="100px;" alt="kyiuu1"/><br /><sub><b>kyiuu1</b></sub></a><br /><a href="#ideas-kyiuu1" title="Ideas, Planning, & Feedback">🤔</a></td>
|
||||||
|
</tr>
|
||||||
|
</tbody>
|
||||||
|
</table>
|
||||||
|
|
||||||
|
<!-- markdownlint-restore -->
|
||||||
|
<!-- prettier-ignore-end -->
|
||||||
|
|
||||||
|
<!-- ALL-CONTRIBUTORS-LIST:END -->
|
||||||
|
|
||||||
|
本项目遵循 [all-contributors](https://github.com/all-contributors/all-contributors) 规范。欢迎任何形式的贡献!
|
||||||
|
|
||||||
## 参与讨论
|
## 参与讨论
|
||||||
|
|
||||||
- QQ 群:`1059561526`
|
- QQ 群:`1059561526`
|
||||||
|
|||||||
@@ -11,7 +11,7 @@ script_location = %(here)s/migrations
|
|||||||
# Uncomment the line below if you want the files to be prepended with date and time
|
# Uncomment the line below if you want the files to be prepended with date and time
|
||||||
# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
|
# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
|
||||||
# for all available tokens
|
# for all available tokens
|
||||||
# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s
|
file_template = %%(year)d-%%(month).2d-%%(day).2d_%%(rev)s_%%(slug)s
|
||||||
|
|
||||||
# sys.path path, will be prepended to sys.path if present.
|
# sys.path path, will be prepended to sys.path if present.
|
||||||
# defaults to the current working directory. for multiple paths, the path separator
|
# defaults to the current working directory. for multiple paths, the path separator
|
||||||
|
|||||||
@@ -1,5 +1,3 @@
|
|||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
from functools import partial
|
from functools import partial
|
||||||
|
|
||||||
from app.database.daily_challenge import DailyChallengeStats
|
from app.database.daily_challenge import DailyChallengeStats
|
||||||
@@ -32,11 +30,9 @@ async def process_streak(
|
|||||||
).first()
|
).first()
|
||||||
if not stats:
|
if not stats:
|
||||||
return False
|
return False
|
||||||
if streak <= stats.daily_streak_best < next_streak:
|
return bool(
|
||||||
return True
|
streak <= stats.daily_streak_best < next_streak or (next_streak == 0 and stats.daily_streak_best >= streak)
|
||||||
elif next_streak == 0 and stats.daily_streak_best >= streak:
|
)
|
||||||
return True
|
|
||||||
return False
|
|
||||||
|
|
||||||
|
|
||||||
MEDALS = {
|
MEDALS = {
|
||||||
|
|||||||
@@ -1,7 +1,6 @@
|
|||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
|
|
||||||
|
from app.calculator import get_calculator
|
||||||
from app.database.beatmap import calculate_beatmap_attributes
|
from app.database.beatmap import calculate_beatmap_attributes
|
||||||
from app.database.score import Beatmap, Score
|
from app.database.score import Beatmap, Score
|
||||||
from app.dependencies.database import get_redis
|
from app.dependencies.database import get_redis
|
||||||
@@ -9,11 +8,24 @@ from app.dependencies.fetcher import get_fetcher
|
|||||||
from app.models.achievement import Achievement, Medals
|
from app.models.achievement import Achievement, Medals
|
||||||
from app.models.beatmap import BeatmapRankStatus
|
from app.models.beatmap import BeatmapRankStatus
|
||||||
from app.models.mods import get_speed_rate, mod_to_save
|
from app.models.mods import get_speed_rate, mod_to_save
|
||||||
|
from app.models.performance import DifficultyAttributesUnion
|
||||||
from app.models.score import Rank
|
from app.models.score import Rank
|
||||||
|
|
||||||
from sqlmodel.ext.asyncio.session import AsyncSession
|
from sqlmodel.ext.asyncio.session import AsyncSession
|
||||||
|
|
||||||
|
|
||||||
|
async def _calculate_attributes(score: Score, beatmap: Beatmap) -> DifficultyAttributesUnion | None:
|
||||||
|
fetcher = await get_fetcher()
|
||||||
|
redis = get_redis()
|
||||||
|
mods_ = score.mods.copy()
|
||||||
|
|
||||||
|
if await get_calculator().can_calculate_difficulty(score.gamemode) is False:
|
||||||
|
return None
|
||||||
|
|
||||||
|
attribute = await calculate_beatmap_attributes(beatmap.id, score.gamemode, mods_, redis, fetcher)
|
||||||
|
return attribute
|
||||||
|
|
||||||
|
|
||||||
async def jackpot(
|
async def jackpot(
|
||||||
session: AsyncSession,
|
session: AsyncSession,
|
||||||
score: Score,
|
score: Score,
|
||||||
@@ -65,14 +77,10 @@ async def to_the_core(
|
|||||||
# using either of the mods specified: DT, NC
|
# using either of the mods specified: DT, NC
|
||||||
if not score.passed:
|
if not score.passed:
|
||||||
return False
|
return False
|
||||||
if (
|
if ("Nightcore" not in beatmap.beatmapset.title) and "Nightcore" not in beatmap.beatmapset.artist:
|
||||||
"Nightcore" not in beatmap.beatmapset.title
|
|
||||||
) and "Nightcore" not in beatmap.beatmapset.artist:
|
|
||||||
return False
|
return False
|
||||||
mods_ = mod_to_save(score.mods)
|
mods_ = mod_to_save(score.mods)
|
||||||
if "DT" not in mods_ or "NC" not in mods_:
|
return not ("DT" not in mods_ or "NC" not in mods_)
|
||||||
return False
|
|
||||||
return True
|
|
||||||
|
|
||||||
|
|
||||||
async def wysi(
|
async def wysi(
|
||||||
@@ -85,9 +93,7 @@ async def wysi(
|
|||||||
return False
|
return False
|
||||||
if str(round(score.accuracy, ndigits=4))[3:] != "727":
|
if str(round(score.accuracy, ndigits=4))[3:] != "727":
|
||||||
return False
|
return False
|
||||||
if "xi" not in beatmap.beatmapset.artist:
|
return "xi" in beatmap.beatmapset.artist
|
||||||
return False
|
|
||||||
return True
|
|
||||||
|
|
||||||
|
|
||||||
async def prepared(
|
async def prepared(
|
||||||
@@ -99,9 +105,7 @@ async def prepared(
|
|||||||
if score.rank != Rank.X and score.rank != Rank.XH:
|
if score.rank != Rank.X and score.rank != Rank.XH:
|
||||||
return False
|
return False
|
||||||
mods_ = mod_to_save(score.mods)
|
mods_ = mod_to_save(score.mods)
|
||||||
if "NF" not in mods_:
|
return "NF" in mods_
|
||||||
return False
|
|
||||||
return True
|
|
||||||
|
|
||||||
|
|
||||||
async def reckless_adandon(
|
async def reckless_adandon(
|
||||||
@@ -115,15 +119,11 @@ async def reckless_adandon(
|
|||||||
mods_ = mod_to_save(score.mods)
|
mods_ = mod_to_save(score.mods)
|
||||||
if "HR" not in mods_ or "SD" not in mods_:
|
if "HR" not in mods_ or "SD" not in mods_:
|
||||||
return False
|
return False
|
||||||
fetcher = await get_fetcher()
|
|
||||||
redis = get_redis()
|
attribute = await _calculate_attributes(score, beatmap)
|
||||||
mods_ = score.mods.copy()
|
if attribute is None:
|
||||||
attribute = await calculate_beatmap_attributes(
|
|
||||||
beatmap.id, score.gamemode, mods_, redis, fetcher
|
|
||||||
)
|
|
||||||
if attribute.star_rating < 3:
|
|
||||||
return False
|
return False
|
||||||
return True
|
return not attribute.star_rating < 3
|
||||||
|
|
||||||
|
|
||||||
async def lights_out(
|
async def lights_out(
|
||||||
@@ -183,12 +183,10 @@ async def slow_and_steady(
|
|||||||
mods_ = mod_to_save(score.mods)
|
mods_ = mod_to_save(score.mods)
|
||||||
if "HT" not in mods_ or "PF" not in mods_:
|
if "HT" not in mods_ or "PF" not in mods_:
|
||||||
return False
|
return False
|
||||||
fetcher = await get_fetcher()
|
|
||||||
redis = get_redis()
|
attribute = await _calculate_attributes(score, beatmap)
|
||||||
mods_ = score.mods.copy()
|
if attribute is None:
|
||||||
attribute = await calculate_beatmap_attributes(
|
return False
|
||||||
beatmap.id, score.gamemode, mods_, redis, fetcher
|
|
||||||
)
|
|
||||||
return attribute.star_rating >= 3
|
return attribute.star_rating >= 3
|
||||||
|
|
||||||
|
|
||||||
@@ -218,9 +216,7 @@ async def sognare(
|
|||||||
mods_ = mod_to_save(score.mods)
|
mods_ = mod_to_save(score.mods)
|
||||||
if "HT" not in mods_:
|
if "HT" not in mods_:
|
||||||
return False
|
return False
|
||||||
return (
|
return beatmap.beatmapset.artist == "LeaF" and beatmap.beatmapset.title == "Evanescent"
|
||||||
beatmap.beatmapset.artist == "LeaF" and beatmap.beatmapset.title == "Evanescent"
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
async def realtor_extraordinaire(
|
async def realtor_extraordinaire(
|
||||||
@@ -234,10 +230,7 @@ async def realtor_extraordinaire(
|
|||||||
mods_ = mod_to_save(score.mods)
|
mods_ = mod_to_save(score.mods)
|
||||||
if not ("DT" in mods_ or "NC" in mods_) or "HR" not in mods_:
|
if not ("DT" in mods_ or "NC" in mods_) or "HR" not in mods_:
|
||||||
return False
|
return False
|
||||||
return (
|
return beatmap.beatmapset.artist == "cYsmix" and beatmap.beatmapset.title == "House With Legs"
|
||||||
beatmap.beatmapset.artist == "cYsmix"
|
|
||||||
and beatmap.beatmapset.title == "House With Legs"
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
async def impeccable(
|
async def impeccable(
|
||||||
@@ -252,12 +245,10 @@ async def impeccable(
|
|||||||
# DT and NC interchangeable
|
# DT and NC interchangeable
|
||||||
if not ("DT" in mods_ or "NC" in mods_) or "PF" not in mods_:
|
if not ("DT" in mods_ or "NC" in mods_) or "PF" not in mods_:
|
||||||
return False
|
return False
|
||||||
fetcher = await get_fetcher()
|
|
||||||
redis = get_redis()
|
attribute = await _calculate_attributes(score, beatmap)
|
||||||
mods_ = score.mods.copy()
|
if attribute is None:
|
||||||
attribute = await calculate_beatmap_attributes(
|
return False
|
||||||
beatmap.id, score.gamemode, mods_, redis, fetcher
|
|
||||||
)
|
|
||||||
return attribute.star_rating >= 4
|
return attribute.star_rating >= 4
|
||||||
|
|
||||||
|
|
||||||
@@ -274,18 +265,14 @@ async def aeon(
|
|||||||
mods_ = mod_to_save(score.mods)
|
mods_ = mod_to_save(score.mods)
|
||||||
if "FL" not in mods_ or "HD" not in mods_ or "HT" not in mods_:
|
if "FL" not in mods_ or "HD" not in mods_ or "HT" not in mods_:
|
||||||
return False
|
return False
|
||||||
if not beatmap.beatmapset.ranked_date or beatmap.beatmapset.ranked_date > datetime(
|
if not beatmap.beatmapset.ranked_date or beatmap.beatmapset.ranked_date > datetime(2012, 1, 1):
|
||||||
2012, 1, 1
|
|
||||||
):
|
|
||||||
return False
|
return False
|
||||||
if beatmap.total_length < 180:
|
if beatmap.total_length < 180:
|
||||||
return False
|
return False
|
||||||
fetcher = await get_fetcher()
|
|
||||||
redis = get_redis()
|
attribute = await _calculate_attributes(score, beatmap)
|
||||||
mods_ = score.mods.copy()
|
if attribute is None:
|
||||||
attribute = await calculate_beatmap_attributes(
|
return False
|
||||||
beatmap.id, score.gamemode, mods_, redis, fetcher
|
|
||||||
)
|
|
||||||
return attribute.star_rating >= 4
|
return attribute.star_rating >= 4
|
||||||
|
|
||||||
|
|
||||||
@@ -297,10 +284,7 @@ async def quick_maths(
|
|||||||
# Get exactly 34 misses on any difficulty of Function Phantom - Variable.
|
# Get exactly 34 misses on any difficulty of Function Phantom - Variable.
|
||||||
if score.nmiss != 34:
|
if score.nmiss != 34:
|
||||||
return False
|
return False
|
||||||
return (
|
return beatmap.beatmapset.artist == "Function Phantom" and beatmap.beatmapset.title == "Variable"
|
||||||
beatmap.beatmapset.artist == "Function Phantom"
|
|
||||||
and beatmap.beatmapset.title == "Variable"
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
async def kaleidoscope(
|
async def kaleidoscope(
|
||||||
@@ -328,8 +312,7 @@ async def valediction(
|
|||||||
return (
|
return (
|
||||||
score.passed
|
score.passed
|
||||||
and beatmap.beatmapset.artist == "a_hisa"
|
and beatmap.beatmapset.artist == "a_hisa"
|
||||||
and beatmap.beatmapset.title
|
and beatmap.beatmapset.title == "Alexithymia | Lupinus | Tokei no Heya to Seishin Sekai"
|
||||||
== "Alexithymia | Lupinus | Tokei no Heya to Seishin Sekai"
|
|
||||||
and score.accuracy >= 0.9
|
and score.accuracy >= 0.9
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -342,9 +325,7 @@ async def right_on_time(
|
|||||||
# Submit a score on Kola Kid - timer on the first minute of any hour
|
# Submit a score on Kola Kid - timer on the first minute of any hour
|
||||||
if not score.passed:
|
if not score.passed:
|
||||||
return False
|
return False
|
||||||
if not (
|
if not (beatmap.beatmapset.artist == "Kola Kid" and beatmap.beatmapset.title == "timer"):
|
||||||
beatmap.beatmapset.artist == "Kola Kid" and beatmap.beatmapset.title == "timer"
|
|
||||||
):
|
|
||||||
return False
|
return False
|
||||||
return score.ended_at.minute == 0
|
return score.ended_at.minute == 0
|
||||||
|
|
||||||
@@ -361,9 +342,7 @@ async def not_again(
|
|||||||
return False
|
return False
|
||||||
if score.accuracy < 0.99:
|
if score.accuracy < 0.99:
|
||||||
return False
|
return False
|
||||||
return (
|
return beatmap.beatmapset.artist == "ARForest" and beatmap.beatmapset.title == "Regret"
|
||||||
beatmap.beatmapset.artist == "ARForest" and beatmap.beatmapset.title == "Regret"
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
async def deliberation(
|
async def deliberation(
|
||||||
@@ -377,18 +356,12 @@ async def deliberation(
|
|||||||
mods_ = mod_to_save(score.mods)
|
mods_ = mod_to_save(score.mods)
|
||||||
if "HT" not in mods_:
|
if "HT" not in mods_:
|
||||||
return False
|
return False
|
||||||
if (
|
if not beatmap.beatmap_status.has_pp() and beatmap.beatmap_status != BeatmapRankStatus.LOVED:
|
||||||
not beatmap.beatmap_status.has_pp()
|
|
||||||
and beatmap.beatmap_status != BeatmapRankStatus.LOVED
|
|
||||||
):
|
|
||||||
return False
|
return False
|
||||||
|
|
||||||
fetcher = await get_fetcher()
|
attribute = await _calculate_attributes(score, beatmap)
|
||||||
redis = get_redis()
|
if attribute is None:
|
||||||
mods_copy = score.mods.copy()
|
return False
|
||||||
attribute = await calculate_beatmap_attributes(
|
|
||||||
beatmap.id, score.gamemode, mods_copy, redis, fetcher
|
|
||||||
)
|
|
||||||
return attribute.star_rating >= 6
|
return attribute.star_rating >= 6
|
||||||
|
|
||||||
|
|
||||||
@@ -443,11 +416,10 @@ async def by_the_skin_of_the_teeth(
|
|||||||
return False
|
return False
|
||||||
|
|
||||||
for mod in score.mods:
|
for mod in score.mods:
|
||||||
if mod.get("acronym") == "AC":
|
if mod.get("acronym") == "AC" and "settings" in mod and "minimum_accuracy" in mod["settings"]:
|
||||||
if "settings" in mod and "minimum_accuracy" in mod["settings"]:
|
target_accuracy = mod["settings"]["minimum_accuracy"]
|
||||||
target_accuracy = mod["settings"]["minimum_accuracy"]
|
if isinstance(target_accuracy, int | float):
|
||||||
if isinstance(target_accuracy, int | float):
|
return abs(score.accuracy - float(target_accuracy)) < 0.0001
|
||||||
return abs(score.accuracy - float(target_accuracy)) < 0.0001
|
|
||||||
return False
|
return False
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -1,5 +1,3 @@
|
|||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
from functools import partial
|
from functools import partial
|
||||||
|
|
||||||
from app.database.score import Beatmap, Score
|
from app.database.score import Beatmap, Score
|
||||||
@@ -19,9 +17,7 @@ async def process_mod(
|
|||||||
return False
|
return False
|
||||||
if not beatmap.beatmap_status.has_leaderboard():
|
if not beatmap.beatmap_status.has_leaderboard():
|
||||||
return False
|
return False
|
||||||
if len(score.mods) != 1 or score.mods[0]["acronym"] != mod:
|
return not (len(score.mods) != 1 or score.mods[0]["acronym"] != mod)
|
||||||
return False
|
|
||||||
return True
|
|
||||||
|
|
||||||
|
|
||||||
async def process_category_mod(
|
async def process_category_mod(
|
||||||
@@ -72,7 +68,7 @@ MEDALS: Medals = {
|
|||||||
Achievement(
|
Achievement(
|
||||||
id=93,
|
id=93,
|
||||||
name="Sweet Rave Party",
|
name="Sweet Rave Party",
|
||||||
desc="Founded in the fine tradition of changing things that were just fine as they were.", # noqa: E501
|
desc="Founded in the fine tradition of changing things that were just fine as they were.",
|
||||||
assets_id="all-intro-nightcore",
|
assets_id="all-intro-nightcore",
|
||||||
): partial(process_mod, "NC"),
|
): partial(process_mod, "NC"),
|
||||||
Achievement(
|
Achievement(
|
||||||
|
|||||||
@@ -1,5 +1,3 @@
|
|||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
from functools import partial
|
from functools import partial
|
||||||
|
|
||||||
from app.database.score import Beatmap, Score
|
from app.database.score import Beatmap, Score
|
||||||
@@ -16,21 +14,13 @@ async def process_combo(
|
|||||||
score: Score,
|
score: Score,
|
||||||
beatmap: Beatmap,
|
beatmap: Beatmap,
|
||||||
) -> bool:
|
) -> bool:
|
||||||
if (
|
if not score.passed or not beatmap.beatmap_status.has_pp() or score.gamemode != GameMode.OSU:
|
||||||
not score.passed
|
|
||||||
or not beatmap.beatmap_status.has_pp()
|
|
||||||
or score.gamemode != GameMode.OSU
|
|
||||||
):
|
|
||||||
return False
|
return False
|
||||||
if combo < 1:
|
if combo < 1:
|
||||||
return False
|
return False
|
||||||
if next_combo != 0 and combo >= next_combo:
|
if next_combo != 0 and combo >= next_combo:
|
||||||
return False
|
return False
|
||||||
if combo <= score.max_combo < next_combo:
|
return bool(combo <= score.max_combo < next_combo or (next_combo == 0 and score.max_combo >= combo))
|
||||||
return True
|
|
||||||
elif next_combo == 0 and score.max_combo >= combo:
|
|
||||||
return True
|
|
||||||
return False
|
|
||||||
|
|
||||||
|
|
||||||
MEDALS: Medals = {
|
MEDALS: Medals = {
|
||||||
|
|||||||
@@ -1,5 +1,3 @@
|
|||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
from functools import partial
|
from functools import partial
|
||||||
|
|
||||||
from app.database import UserStatistics
|
from app.database import UserStatistics
|
||||||
@@ -35,11 +33,7 @@ async def process_playcount(
|
|||||||
).first()
|
).first()
|
||||||
if not stats:
|
if not stats:
|
||||||
return False
|
return False
|
||||||
if pc <= stats.play_count < next_pc:
|
return bool(pc <= stats.play_count < next_pc or (next_pc == 0 and stats.play_count >= pc))
|
||||||
return True
|
|
||||||
elif next_pc == 0 and stats.play_count >= pc:
|
|
||||||
return True
|
|
||||||
return False
|
|
||||||
|
|
||||||
|
|
||||||
MEDALS: Medals = {
|
MEDALS: Medals = {
|
||||||
|
|||||||
@@ -1,5 +1,3 @@
|
|||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
from functools import partial
|
from functools import partial
|
||||||
from typing import Literal, cast
|
from typing import Literal, cast
|
||||||
|
|
||||||
@@ -44,14 +42,10 @@ async def process_skill(
|
|||||||
redis = get_redis()
|
redis = get_redis()
|
||||||
mods_ = score.mods.copy()
|
mods_ = score.mods.copy()
|
||||||
mods_.sort(key=lambda x: x["acronym"])
|
mods_.sort(key=lambda x: x["acronym"])
|
||||||
attribute = await calculate_beatmap_attributes(
|
attribute = await calculate_beatmap_attributes(beatmap.id, score.gamemode, mods_, redis, fetcher)
|
||||||
beatmap.id, score.gamemode, mods_, redis, fetcher
|
|
||||||
)
|
|
||||||
if attribute.star_rating < star or attribute.star_rating >= star + 1:
|
if attribute.star_rating < star or attribute.star_rating >= star + 1:
|
||||||
return False
|
return False
|
||||||
if type == "fc" and not score.is_perfect_combo:
|
return not (type == "fc" and not score.is_perfect_combo)
|
||||||
return False
|
|
||||||
return True
|
|
||||||
|
|
||||||
|
|
||||||
MEDALS: Medals = {
|
MEDALS: Medals = {
|
||||||
|
|||||||
@@ -1,5 +1,3 @@
|
|||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
from functools import partial
|
from functools import partial
|
||||||
|
|
||||||
from app.database.score import Beatmap, Score
|
from app.database.score import Beatmap, Score
|
||||||
@@ -35,11 +33,7 @@ async def process_tth(
|
|||||||
).first()
|
).first()
|
||||||
if not stats:
|
if not stats:
|
||||||
return False
|
return False
|
||||||
if tth <= stats.total_hits < next_tth:
|
return bool(tth <= stats.total_hits < next_tth or (next_tth == 0 and stats.play_count >= tth))
|
||||||
return True
|
|
||||||
elif next_tth == 0 and stats.play_count >= tth:
|
|
||||||
return True
|
|
||||||
return False
|
|
||||||
|
|
||||||
|
|
||||||
MEDALS: Medals = {
|
MEDALS: Medals = {
|
||||||
|
|||||||
265
app/auth.py
265
app/auth.py
@@ -1,23 +1,26 @@
|
|||||||
from __future__ import annotations
|
from datetime import timedelta
|
||||||
|
|
||||||
from datetime import UTC, datetime, timedelta
|
|
||||||
import hashlib
|
import hashlib
|
||||||
import re
|
import re
|
||||||
import secrets
|
import secrets
|
||||||
import string
|
import string
|
||||||
|
|
||||||
from app.config import settings
|
from app.config import settings
|
||||||
|
from app.const import BACKUP_CODE_LENGTH
|
||||||
from app.database import (
|
from app.database import (
|
||||||
OAuthToken,
|
OAuthToken,
|
||||||
User,
|
User,
|
||||||
)
|
)
|
||||||
from app.log import logger
|
from app.database.auth import TotpKeys
|
||||||
|
from app.log import log
|
||||||
|
from app.models.totp import FinishStatus, StartCreateTotpKeyResp
|
||||||
|
from app.utils import utcnow
|
||||||
|
|
||||||
import bcrypt
|
import bcrypt
|
||||||
from jose import JWTError, jwt
|
from jose import JWTError, jwt
|
||||||
from passlib.context import CryptContext
|
from passlib.context import CryptContext
|
||||||
|
import pyotp
|
||||||
from redis.asyncio import Redis
|
from redis.asyncio import Redis
|
||||||
from sqlmodel import select
|
from sqlmodel import col, select
|
||||||
from sqlmodel.ext.asyncio.session import AsyncSession
|
from sqlmodel.ext.asyncio.session import AsyncSession
|
||||||
|
|
||||||
# 密码哈希上下文
|
# 密码哈希上下文
|
||||||
@@ -26,6 +29,8 @@ pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto")
|
|||||||
# bcrypt 缓存(模拟应用状态缓存)
|
# bcrypt 缓存(模拟应用状态缓存)
|
||||||
bcrypt_cache = {}
|
bcrypt_cache = {}
|
||||||
|
|
||||||
|
logger = log("Auth")
|
||||||
|
|
||||||
|
|
||||||
def validate_username(username: str) -> list[str]:
|
def validate_username(username: str) -> list[str]:
|
||||||
"""验证用户名"""
|
"""验证用户名"""
|
||||||
@@ -43,9 +48,7 @@ def validate_username(username: str) -> list[str]:
|
|||||||
|
|
||||||
# 检查用户名格式(只允许字母、数字、下划线、连字符)
|
# 检查用户名格式(只允许字母、数字、下划线、连字符)
|
||||||
if not re.match(r"^[a-zA-Z0-9_-]+$", username):
|
if not re.match(r"^[a-zA-Z0-9_-]+$", username):
|
||||||
errors.append(
|
errors.append("Username can only contain letters, numbers, underscores, and hyphens")
|
||||||
"Username can only contain letters, numbers, underscores, and hyphens"
|
|
||||||
)
|
|
||||||
|
|
||||||
# 检查是否以数字开头
|
# 检查是否以数字开头
|
||||||
if username[0].isdigit():
|
if username[0].isdigit():
|
||||||
@@ -57,6 +60,20 @@ def validate_username(username: str) -> list[str]:
|
|||||||
return errors
|
return errors
|
||||||
|
|
||||||
|
|
||||||
|
def validate_password(password: str) -> list[str]:
|
||||||
|
"""验证密码"""
|
||||||
|
errors = []
|
||||||
|
|
||||||
|
if not password:
|
||||||
|
errors.append("Password is required")
|
||||||
|
return errors
|
||||||
|
|
||||||
|
if len(password) < 8:
|
||||||
|
errors.append("Password must be at least 8 characters long")
|
||||||
|
|
||||||
|
return errors
|
||||||
|
|
||||||
|
|
||||||
def verify_password_legacy(plain_password: str, bcrypt_hash: str) -> bool:
|
def verify_password_legacy(plain_password: str, bcrypt_hash: str) -> bool:
|
||||||
"""
|
"""
|
||||||
验证密码 - 使用 osu! 的验证方式
|
验证密码 - 使用 osu! 的验证方式
|
||||||
@@ -64,7 +81,7 @@ def verify_password_legacy(plain_password: str, bcrypt_hash: str) -> bool:
|
|||||||
2. MD5哈希 -> bcrypt验证
|
2. MD5哈希 -> bcrypt验证
|
||||||
"""
|
"""
|
||||||
# 1. 明文密码转 MD5
|
# 1. 明文密码转 MD5
|
||||||
pw_md5 = hashlib.md5(plain_password.encode()).hexdigest().encode()
|
pw_md5 = hashlib.md5(plain_password.encode()).hexdigest().encode() # noqa: S324
|
||||||
|
|
||||||
# 2. 检查缓存
|
# 2. 检查缓存
|
||||||
if bcrypt_hash in bcrypt_cache:
|
if bcrypt_hash in bcrypt_cache:
|
||||||
@@ -98,29 +115,29 @@ def verify_password(plain_password: str, hashed_password: str) -> bool:
|
|||||||
def get_password_hash(password: str) -> str:
|
def get_password_hash(password: str) -> str:
|
||||||
"""生成密码哈希 - 使用 osu! 的方式"""
|
"""生成密码哈希 - 使用 osu! 的方式"""
|
||||||
# 1. 明文密码 -> MD5
|
# 1. 明文密码 -> MD5
|
||||||
pw_md5 = hashlib.md5(password.encode()).hexdigest().encode()
|
pw_md5 = hashlib.md5(password.encode()).hexdigest().encode() # noqa: S324
|
||||||
# 2. MD5 -> bcrypt
|
# 2. MD5 -> bcrypt
|
||||||
pw_bcrypt = bcrypt.hashpw(pw_md5, bcrypt.gensalt())
|
pw_bcrypt = bcrypt.hashpw(pw_md5, bcrypt.gensalt())
|
||||||
return pw_bcrypt.decode()
|
return pw_bcrypt.decode()
|
||||||
|
|
||||||
|
|
||||||
async def authenticate_user_legacy(
|
async def authenticate_user_legacy(db: AsyncSession, name: str, password: str) -> User | None:
|
||||||
db: AsyncSession, name: str, password: str
|
|
||||||
) -> User | None:
|
|
||||||
"""
|
"""
|
||||||
验证用户身份 - 使用类似 from_login 的逻辑
|
验证用户身份 - 使用类似 from_login 的逻辑
|
||||||
"""
|
"""
|
||||||
# 1. 明文密码转 MD5
|
# 1. 明文密码转 MD5
|
||||||
pw_md5 = hashlib.md5(password.encode()).hexdigest()
|
pw_md5 = hashlib.md5(password.encode()).hexdigest() # noqa: S324
|
||||||
|
|
||||||
# 2. 根据用户名查找用户
|
# 2. 根据用户名查找用户
|
||||||
statement = select(User).where(User.username == name).options()
|
user = None
|
||||||
user = (await db.exec(statement)).first()
|
user = (await db.exec(select(User).where(User.username == name))).first()
|
||||||
if not user:
|
if user is None:
|
||||||
|
user = (await db.exec(select(User).where(User.email == name))).first()
|
||||||
|
if user is None and name.isdigit():
|
||||||
|
user = (await db.exec(select(User).where(User.id == int(name)))).first()
|
||||||
|
if user is None:
|
||||||
return None
|
return None
|
||||||
|
|
||||||
await db.refresh(user)
|
|
||||||
|
|
||||||
# 3. 验证密码
|
# 3. 验证密码
|
||||||
if user.pw_bcrypt is None or user.pw_bcrypt == "":
|
if user.pw_bcrypt is None or user.pw_bcrypt == "":
|
||||||
return None
|
return None
|
||||||
@@ -145,9 +162,7 @@ async def authenticate_user_legacy(
|
|||||||
return None
|
return None
|
||||||
|
|
||||||
|
|
||||||
async def authenticate_user(
|
async def authenticate_user(db: AsyncSession, username: str, password: str) -> User | None:
|
||||||
db: AsyncSession, username: str, password: str
|
|
||||||
) -> User | None:
|
|
||||||
"""验证用户身份"""
|
"""验证用户身份"""
|
||||||
return await authenticate_user_legacy(db, username, password)
|
return await authenticate_user_legacy(db, username, password)
|
||||||
|
|
||||||
@@ -156,16 +171,18 @@ def create_access_token(data: dict, expires_delta: timedelta | None = None) -> s
|
|||||||
"""创建访问令牌"""
|
"""创建访问令牌"""
|
||||||
to_encode = data.copy()
|
to_encode = data.copy()
|
||||||
if expires_delta:
|
if expires_delta:
|
||||||
expire = datetime.now(UTC) + expires_delta
|
expire = utcnow() + expires_delta
|
||||||
else:
|
else:
|
||||||
expire = datetime.now(UTC) + timedelta(
|
expire = utcnow() + timedelta(minutes=settings.access_token_expire_minutes)
|
||||||
minutes=settings.access_token_expire_minutes
|
|
||||||
)
|
|
||||||
|
|
||||||
to_encode.update({"exp": expire, "random": secrets.token_hex(16)})
|
# 添加标准JWT声明
|
||||||
encoded_jwt = jwt.encode(
|
to_encode.update({"exp": expire, "jti": secrets.token_hex(16)})
|
||||||
to_encode, settings.secret_key, algorithm=settings.algorithm
|
if settings.jwt_audience:
|
||||||
)
|
to_encode["aud"] = settings.jwt_audience
|
||||||
|
to_encode["iss"] = str(settings.server_url)
|
||||||
|
|
||||||
|
# 编码JWT
|
||||||
|
encoded_jwt = jwt.encode(to_encode, settings.secret_key, algorithm=settings.algorithm)
|
||||||
return encoded_jwt
|
return encoded_jwt
|
||||||
|
|
||||||
|
|
||||||
@@ -200,9 +217,7 @@ async def invalidate_user_tokens(db: AsyncSession, user_id: int) -> int:
|
|||||||
def verify_token(token: str) -> dict | None:
|
def verify_token(token: str) -> dict | None:
|
||||||
"""验证访问令牌"""
|
"""验证访问令牌"""
|
||||||
try:
|
try:
|
||||||
payload = jwt.decode(
|
payload = jwt.decode(token, settings.secret_key, algorithms=[settings.algorithm])
|
||||||
token, settings.secret_key, algorithms=[settings.algorithm]
|
|
||||||
)
|
|
||||||
return payload
|
return payload
|
||||||
except JWTError:
|
except JWTError:
|
||||||
return None
|
return None
|
||||||
@@ -216,22 +231,46 @@ async def store_token(
|
|||||||
access_token: str,
|
access_token: str,
|
||||||
refresh_token: str,
|
refresh_token: str,
|
||||||
expires_in: int,
|
expires_in: int,
|
||||||
|
refresh_token_expires_in: int,
|
||||||
|
allow_multiple_devices: bool = True,
|
||||||
) -> OAuthToken:
|
) -> OAuthToken:
|
||||||
"""存储令牌到数据库"""
|
"""存储令牌到数据库(支持多设备)"""
|
||||||
expires_at = datetime.utcnow() + timedelta(seconds=expires_in)
|
expires_at = utcnow() + timedelta(seconds=expires_in)
|
||||||
|
refresh_token_expires_at = utcnow() + timedelta(seconds=refresh_token_expires_in)
|
||||||
|
|
||||||
# 删除用户的旧令牌
|
if not allow_multiple_devices:
|
||||||
statement = select(OAuthToken).where(
|
# 旧的行为:删除用户的旧令牌(单设备模式)
|
||||||
OAuthToken.user_id == user_id, OAuthToken.client_id == client_id
|
statement = select(OAuthToken).where(OAuthToken.user_id == user_id, OAuthToken.client_id == client_id)
|
||||||
)
|
old_tokens = (await db.exec(statement)).all()
|
||||||
old_tokens = (await db.exec(statement)).all()
|
for token in old_tokens:
|
||||||
for token in old_tokens:
|
await db.delete(token)
|
||||||
await db.delete(token)
|
else:
|
||||||
|
# 新的行为:只删除过期的令牌,保留有效的令牌(多设备模式)
|
||||||
|
statement = select(OAuthToken).where(
|
||||||
|
OAuthToken.user_id == user_id, OAuthToken.client_id == client_id, OAuthToken.expires_at <= utcnow()
|
||||||
|
)
|
||||||
|
expired_tokens = (await db.exec(statement)).all()
|
||||||
|
for token in expired_tokens:
|
||||||
|
await db.delete(token)
|
||||||
|
|
||||||
|
# 限制每个用户每个客户端的最大令牌数量(防止无限增长)
|
||||||
|
max_tokens_per_client = settings.max_tokens_per_client
|
||||||
|
statement = (
|
||||||
|
select(OAuthToken)
|
||||||
|
.where(OAuthToken.user_id == user_id, OAuthToken.client_id == client_id, OAuthToken.expires_at > utcnow())
|
||||||
|
.order_by(col(OAuthToken.created_at).desc())
|
||||||
|
)
|
||||||
|
|
||||||
|
active_tokens = (await db.exec(statement)).all()
|
||||||
|
if len(active_tokens) >= max_tokens_per_client:
|
||||||
|
# 删除最旧的令牌
|
||||||
|
tokens_to_delete = active_tokens[max_tokens_per_client - 1 :]
|
||||||
|
for token in tokens_to_delete:
|
||||||
|
await db.delete(token)
|
||||||
|
logger.info(f"Cleaned up {len(tokens_to_delete)} old tokens for user {user_id}")
|
||||||
|
|
||||||
# 检查是否有重复的 access_token
|
# 检查是否有重复的 access_token
|
||||||
duplicate_token = (
|
duplicate_token = (await db.exec(select(OAuthToken).where(OAuthToken.access_token == access_token))).first()
|
||||||
await db.exec(select(OAuthToken).where(OAuthToken.access_token == access_token))
|
|
||||||
).first()
|
|
||||||
if duplicate_token:
|
if duplicate_token:
|
||||||
await db.delete(duplicate_token)
|
await db.delete(duplicate_token)
|
||||||
|
|
||||||
@@ -243,31 +282,30 @@ async def store_token(
|
|||||||
scope=",".join(scopes),
|
scope=",".join(scopes),
|
||||||
refresh_token=refresh_token,
|
refresh_token=refresh_token,
|
||||||
expires_at=expires_at,
|
expires_at=expires_at,
|
||||||
|
refresh_token_expires_at=refresh_token_expires_at,
|
||||||
)
|
)
|
||||||
db.add(token_record)
|
db.add(token_record)
|
||||||
await db.commit()
|
await db.commit()
|
||||||
await db.refresh(token_record)
|
await db.refresh(token_record)
|
||||||
|
|
||||||
|
logger.info(f"Created new token for user {user_id}, client {client_id} (multi-device: {allow_multiple_devices})")
|
||||||
return token_record
|
return token_record
|
||||||
|
|
||||||
|
|
||||||
async def get_token_by_access_token(
|
async def get_token_by_access_token(db: AsyncSession, access_token: str) -> OAuthToken | None:
|
||||||
db: AsyncSession, access_token: str
|
|
||||||
) -> OAuthToken | None:
|
|
||||||
"""根据访问令牌获取令牌记录"""
|
"""根据访问令牌获取令牌记录"""
|
||||||
statement = select(OAuthToken).where(
|
statement = select(OAuthToken).where(
|
||||||
OAuthToken.access_token == access_token,
|
OAuthToken.access_token == access_token,
|
||||||
OAuthToken.expires_at > datetime.utcnow(),
|
OAuthToken.expires_at > utcnow(),
|
||||||
)
|
)
|
||||||
return (await db.exec(statement)).first()
|
return (await db.exec(statement)).first()
|
||||||
|
|
||||||
|
|
||||||
async def get_token_by_refresh_token(
|
async def get_token_by_refresh_token(db: AsyncSession, refresh_token: str) -> OAuthToken | None:
|
||||||
db: AsyncSession, refresh_token: str
|
|
||||||
) -> OAuthToken | None:
|
|
||||||
"""根据刷新令牌获取令牌记录"""
|
"""根据刷新令牌获取令牌记录"""
|
||||||
statement = select(OAuthToken).where(
|
statement = select(OAuthToken).where(
|
||||||
OAuthToken.refresh_token == refresh_token,
|
OAuthToken.refresh_token == refresh_token,
|
||||||
OAuthToken.expires_at > datetime.utcnow(),
|
OAuthToken.refresh_token_expires_at > utcnow(),
|
||||||
)
|
)
|
||||||
return (await db.exec(statement)).first()
|
return (await db.exec(statement)).first()
|
||||||
|
|
||||||
@@ -288,3 +326,124 @@ async def get_user_by_authorization_code(
|
|||||||
await db.refresh(user)
|
await db.refresh(user)
|
||||||
return (user, scopes.split(","))
|
return (user, scopes.split(","))
|
||||||
return None
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
def totp_redis_key(user: User) -> str:
|
||||||
|
return f"totp:setup:{user.email}"
|
||||||
|
|
||||||
|
|
||||||
|
def _generate_totp_account_label(user: User) -> str:
|
||||||
|
"""生成TOTP账户标签
|
||||||
|
|
||||||
|
根据配置选择使用用户名或邮箱,并添加服务器信息使标签更具描述性
|
||||||
|
"""
|
||||||
|
primary_identifier = user.username if settings.totp_use_username_in_label else user.email
|
||||||
|
|
||||||
|
# 如果配置了服务名称,添加到标签中以便在认证器中区分
|
||||||
|
if settings.totp_service_name:
|
||||||
|
return f"{primary_identifier} ({settings.totp_service_name})"
|
||||||
|
else:
|
||||||
|
return primary_identifier
|
||||||
|
|
||||||
|
|
||||||
|
def _generate_totp_issuer_name() -> str:
|
||||||
|
"""生成TOTP发行者名称
|
||||||
|
|
||||||
|
优先使用自定义的totp_issuer,否则使用服务名称
|
||||||
|
"""
|
||||||
|
if settings.totp_issuer:
|
||||||
|
return settings.totp_issuer
|
||||||
|
elif settings.totp_service_name:
|
||||||
|
return settings.totp_service_name
|
||||||
|
else:
|
||||||
|
# 回退到默认值
|
||||||
|
return "osu! Private Server"
|
||||||
|
|
||||||
|
|
||||||
|
async def start_create_totp_key(user: User, redis: Redis) -> StartCreateTotpKeyResp:
|
||||||
|
secret = pyotp.random_base32()
|
||||||
|
await redis.hset(totp_redis_key(user), mapping={"secret": secret, "fails": 0}) # pyright: ignore[reportGeneralTypeIssues]
|
||||||
|
await redis.expire(totp_redis_key(user), 300)
|
||||||
|
|
||||||
|
# 生成更完整的账户标签和issuer信息
|
||||||
|
account_label = _generate_totp_account_label(user)
|
||||||
|
issuer_name = _generate_totp_issuer_name()
|
||||||
|
|
||||||
|
return StartCreateTotpKeyResp(
|
||||||
|
secret=secret,
|
||||||
|
uri=pyotp.totp.TOTP(secret).provisioning_uri(name=account_label, issuer_name=issuer_name),
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def verify_totp_key(secret: str, code: str) -> bool:
|
||||||
|
return pyotp.TOTP(secret).verify(code, valid_window=1)
|
||||||
|
|
||||||
|
|
||||||
|
async def verify_totp_key_with_replay_protection(user_id: int, secret: str, code: str, redis: Redis) -> bool:
|
||||||
|
"""验证TOTP密钥,并防止密钥重放攻击"""
|
||||||
|
if not pyotp.TOTP(secret).verify(code, valid_window=1):
|
||||||
|
return False
|
||||||
|
|
||||||
|
# 防止120秒内重复使用同一密钥(参考osu-web实现)
|
||||||
|
cache_key = f"totp:{user_id}:{code}"
|
||||||
|
if await redis.exists(cache_key):
|
||||||
|
return False
|
||||||
|
|
||||||
|
# 设置120秒过期时间
|
||||||
|
await redis.setex(cache_key, 120, "1")
|
||||||
|
return True
|
||||||
|
|
||||||
|
|
||||||
|
def _generate_backup_codes(count=10, length=BACKUP_CODE_LENGTH) -> list[str]:
|
||||||
|
alphabet = string.ascii_uppercase + string.digits
|
||||||
|
return ["".join(secrets.choice(alphabet) for _ in range(length)) for _ in range(count)]
|
||||||
|
|
||||||
|
|
||||||
|
async def _store_totp_key(user: User, secret: str, db: AsyncSession) -> list[str]:
|
||||||
|
backup_codes = _generate_backup_codes()
|
||||||
|
hashed_codes = [bcrypt.hashpw(code.encode(), bcrypt.gensalt()) for code in backup_codes]
|
||||||
|
totp_secret = TotpKeys(user_id=user.id, secret=secret, backup_keys=[code.decode() for code in hashed_codes])
|
||||||
|
db.add(totp_secret)
|
||||||
|
await db.commit()
|
||||||
|
return backup_codes
|
||||||
|
|
||||||
|
|
||||||
|
async def finish_create_totp_key(
|
||||||
|
user: User, code: str, redis: Redis, db: AsyncSession
|
||||||
|
) -> tuple[FinishStatus, list[str]]:
|
||||||
|
data = await redis.hgetall(totp_redis_key(user)) # pyright: ignore[reportGeneralTypeIssues]
|
||||||
|
if not data or "secret" not in data or "fails" not in data:
|
||||||
|
return FinishStatus.INVALID, []
|
||||||
|
|
||||||
|
secret = data["secret"]
|
||||||
|
fails = int(data["fails"])
|
||||||
|
|
||||||
|
if fails >= 3:
|
||||||
|
await redis.delete(totp_redis_key(user)) # pyright: ignore[reportGeneralTypeIssues]
|
||||||
|
return FinishStatus.TOO_MANY_ATTEMPTS, []
|
||||||
|
|
||||||
|
if verify_totp_key(secret, code):
|
||||||
|
await redis.delete(totp_redis_key(user)) # pyright: ignore[reportGeneralTypeIssues]
|
||||||
|
backup_codes = await _store_totp_key(user, secret, db)
|
||||||
|
return FinishStatus.SUCCESS, backup_codes
|
||||||
|
else:
|
||||||
|
fails += 1
|
||||||
|
await redis.hset(totp_redis_key(user), "fails", str(fails)) # pyright: ignore[reportGeneralTypeIssues]
|
||||||
|
return FinishStatus.FAILED, []
|
||||||
|
|
||||||
|
|
||||||
|
async def disable_totp(user: User, db: AsyncSession) -> None:
|
||||||
|
totp = await db.get(TotpKeys, user.id)
|
||||||
|
if totp:
|
||||||
|
await db.delete(totp)
|
||||||
|
await db.commit()
|
||||||
|
|
||||||
|
|
||||||
|
def check_totp_backup_code(totp: TotpKeys, code: str) -> bool:
|
||||||
|
for hashed_code in totp.backup_keys:
|
||||||
|
if bcrypt.checkpw(code.encode(), hashed_code.encode()):
|
||||||
|
copy = totp.backup_keys[:]
|
||||||
|
copy.remove(hashed_code)
|
||||||
|
totp.backup_keys = copy
|
||||||
|
return True
|
||||||
|
return False
|
||||||
|
|||||||
@@ -1,33 +1,48 @@
|
|||||||
from __future__ import annotations
|
import asyncio
|
||||||
|
|
||||||
from copy import deepcopy
|
|
||||||
from enum import Enum
|
from enum import Enum
|
||||||
|
import importlib
|
||||||
import math
|
import math
|
||||||
from typing import TYPE_CHECKING
|
from typing import TYPE_CHECKING
|
||||||
|
|
||||||
|
from app.calculators.performance import PerformanceCalculator
|
||||||
from app.config import settings
|
from app.config import settings
|
||||||
from app.log import logger
|
from app.const import MAX_SCORE
|
||||||
from app.models.beatmap import BeatmapAttributes
|
from app.log import log
|
||||||
from app.models.mods import APIMod, parse_enum_to_str
|
from app.models.score import GameMode, HitResult, ScoreStatistics
|
||||||
from app.models.score import GameMode
|
from app.models.scoring_mode import ScoringMode
|
||||||
|
|
||||||
from osupyparser import HitObject, OsuFile
|
from osupyparser import HitObject, OsuFile
|
||||||
from osupyparser.osu.objects import Slider
|
from osupyparser.osu.objects import Slider
|
||||||
|
from redis.asyncio import Redis
|
||||||
from sqlmodel import col, exists, select
|
from sqlmodel import col, exists, select
|
||||||
from sqlmodel.ext.asyncio.session import AsyncSession
|
from sqlmodel.ext.asyncio.session import AsyncSession
|
||||||
|
|
||||||
try:
|
|
||||||
import rosu_pp_py as rosu
|
|
||||||
except ImportError:
|
|
||||||
raise ImportError(
|
|
||||||
"rosu-pp-py is not installed. "
|
|
||||||
"Please install it.\n"
|
|
||||||
" Official: uv add rosu-pp-py\n"
|
|
||||||
" ppy-sb: uv add git+https://github.com/ppy-sb/rosu-pp-py.git"
|
|
||||||
)
|
|
||||||
|
|
||||||
if TYPE_CHECKING:
|
if TYPE_CHECKING:
|
||||||
from app.database.score import Score
|
from app.database.score import Score
|
||||||
|
from app.fetcher import Fetcher
|
||||||
|
|
||||||
|
|
||||||
|
logger = log("Calculator")
|
||||||
|
|
||||||
|
CALCULATOR: PerformanceCalculator | None = None
|
||||||
|
|
||||||
|
|
||||||
|
async def init_calculator():
|
||||||
|
global CALCULATOR
|
||||||
|
try:
|
||||||
|
module = importlib.import_module(f"app.calculators.performance.{settings.calculator}")
|
||||||
|
CALCULATOR = module.PerformanceCalculator(**settings.calculator_config)
|
||||||
|
if CALCULATOR is not None:
|
||||||
|
await CALCULATOR.init()
|
||||||
|
except (ImportError, AttributeError) as e:
|
||||||
|
raise ImportError(f"Failed to import performance calculator for {settings.calculator}") from e
|
||||||
|
return CALCULATOR
|
||||||
|
|
||||||
|
|
||||||
|
def get_calculator() -> PerformanceCalculator:
|
||||||
|
if CALCULATOR is None:
|
||||||
|
raise RuntimeError("Performance calculator is not initialized")
|
||||||
|
return CALCULATOR
|
||||||
|
|
||||||
|
|
||||||
def clamp[T: int | float](n: T, min_value: T, max_value: T) -> T:
|
def clamp[T: int | float](n: T, min_value: T, max_value: T) -> T:
|
||||||
@@ -39,39 +54,83 @@ def clamp[T: int | float](n: T, min_value: T, max_value: T) -> T:
|
|||||||
return n
|
return n
|
||||||
|
|
||||||
|
|
||||||
def calculate_beatmap_attribute(
|
def get_display_score(ruleset_id: int, total_score: int, mode: ScoringMode, maximum_statistics: ScoreStatistics) -> int:
|
||||||
beatmap: str,
|
"""
|
||||||
gamemode: GameMode | None = None,
|
Calculate the display score based on the scoring mode.
|
||||||
mods: int | list[APIMod] | list[str] = 0,
|
|
||||||
) -> BeatmapAttributes:
|
Based on: https://github.com/ppy/osu/blob/master/osu.Game/Scoring/Legacy/ScoreInfoExtensions.cs
|
||||||
map = rosu.Beatmap(content=beatmap)
|
|
||||||
if gamemode is not None:
|
Args:
|
||||||
map.convert(gamemode.to_rosu(), mods) # pyright: ignore[reportArgumentType]
|
ruleset_id: The ruleset ID (0=osu!, 1=taiko, 2=catch, 3=mania)
|
||||||
diff = rosu.Difficulty(mods=mods).calculate(map)
|
total_score: The standardised total score
|
||||||
return BeatmapAttributes(
|
mode: The scoring mode (standardised or classic)
|
||||||
star_rating=diff.stars,
|
maximum_statistics: Dictionary of maximum statistics for the score
|
||||||
max_combo=diff.max_combo,
|
|
||||||
aim_difficulty=diff.aim,
|
Returns:
|
||||||
aim_difficult_slider_count=diff.aim_difficult_slider_count,
|
The display score in the requested scoring mode
|
||||||
speed_difficulty=diff.speed,
|
"""
|
||||||
speed_note_count=diff.speed_note_count,
|
if mode == ScoringMode.STANDARDISED:
|
||||||
slider_factor=diff.slider_factor,
|
return total_score
|
||||||
aim_difficult_strain_count=diff.aim_difficult_strain_count,
|
|
||||||
speed_difficult_strain_count=diff.speed_difficult_strain_count,
|
# Calculate max basic judgements
|
||||||
mono_stamina_factor=diff.stamina,
|
max_basic_judgements = sum(
|
||||||
|
count for hit_result, count in maximum_statistics.items() if HitResult(hit_result).is_basic()
|
||||||
)
|
)
|
||||||
|
|
||||||
|
return _convert_standardised_to_classic(ruleset_id, total_score, max_basic_judgements)
|
||||||
|
|
||||||
|
|
||||||
|
def _convert_standardised_to_classic(ruleset_id: int, standardised_total_score: int, object_count: int) -> int:
|
||||||
|
"""
|
||||||
|
Convert a standardised score to classic score.
|
||||||
|
|
||||||
|
The coefficients were determined by a least-squares fit to minimise relative error
|
||||||
|
of maximum possible base score across all beatmaps.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
ruleset_id: The ruleset ID (0=osu!, 1=taiko, 2=catch, 3=mania)
|
||||||
|
standardised_total_score: The standardised total score
|
||||||
|
object_count: The number of basic hit objects
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
The classic score
|
||||||
|
"""
|
||||||
|
if ruleset_id == 0: # osu!
|
||||||
|
return round((object_count**2 * 32.57 + 100000) * standardised_total_score / MAX_SCORE)
|
||||||
|
elif ruleset_id == 1: # taiko
|
||||||
|
return round((object_count * 1109 + 100000) * standardised_total_score / MAX_SCORE)
|
||||||
|
elif ruleset_id == 2: # catch
|
||||||
|
return round((standardised_total_score / MAX_SCORE * object_count) ** 2 * 21.62 + standardised_total_score / 10)
|
||||||
|
else: # mania (ruleset_id == 3) or default
|
||||||
|
return standardised_total_score
|
||||||
|
|
||||||
|
|
||||||
|
def calculate_pp_for_no_calculator(score: "Score", star_rating: float) -> float:
|
||||||
|
# TODO: Improve this algorithm
|
||||||
|
# https://www.desmos.com/calculator/i2aa7qm3o6
|
||||||
|
k = 4.0
|
||||||
|
|
||||||
|
pmax = 1.4 * (star_rating**2.8)
|
||||||
|
b = 0.95 - 0.33 * ((clamp(star_rating, 1, 8) - 1) / 7)
|
||||||
|
|
||||||
|
x = score.total_score / 1000000
|
||||||
|
|
||||||
|
if x < b:
|
||||||
|
# Linear section
|
||||||
|
return pmax * x
|
||||||
|
else:
|
||||||
|
# Exponential reward section
|
||||||
|
x = (x - b) / (1 - b)
|
||||||
|
exp_part = (math.exp(k * x) - 1) / (math.exp(k) - 1)
|
||||||
|
return pmax * (b + (1 - b) * exp_part)
|
||||||
|
|
||||||
|
|
||||||
async def calculate_pp(score: "Score", beatmap: str, session: AsyncSession) -> float:
|
async def calculate_pp(score: "Score", beatmap: str, session: AsyncSession) -> float:
|
||||||
from app.database.beatmap import BannedBeatmaps
|
from app.database.beatmap import BannedBeatmaps
|
||||||
|
|
||||||
if settings.suspicious_score_check:
|
if settings.suspicious_score_check:
|
||||||
beatmap_banned = (
|
beatmap_banned = (
|
||||||
await session.exec(
|
await session.exec(select(exists()).where(col(BannedBeatmaps.beatmap_id) == score.beatmap_id))
|
||||||
select(exists()).where(
|
|
||||||
col(BannedBeatmaps.beatmap_id) == score.beatmap_id
|
|
||||||
)
|
|
||||||
)
|
|
||||||
).first()
|
).first()
|
||||||
if beatmap_banned:
|
if beatmap_banned:
|
||||||
return 0
|
return 0
|
||||||
@@ -82,48 +141,25 @@ async def calculate_pp(score: "Score", beatmap: str, session: AsyncSession) -> f
|
|||||||
logger.warning(f"Beatmap {score.beatmap_id} is suspicious, banned")
|
logger.warning(f"Beatmap {score.beatmap_id} is suspicious, banned")
|
||||||
return 0
|
return 0
|
||||||
except Exception:
|
except Exception:
|
||||||
logger.exception(
|
logger.exception(f"Error checking if beatmap {score.beatmap_id} is suspicious")
|
||||||
f"Error checking if beatmap {score.beatmap_id} is suspicious"
|
|
||||||
)
|
|
||||||
|
|
||||||
# 使用线程池执行计算密集型操作以避免阻塞事件循环
|
if not (await get_calculator().can_calculate_performance(score.gamemode)):
|
||||||
import asyncio
|
if not settings.fallback_no_calculator_pp:
|
||||||
|
return 0
|
||||||
|
star_rating = -1
|
||||||
|
if await get_calculator().can_calculate_difficulty(score.gamemode):
|
||||||
|
star_rating = (await get_calculator().calculate_difficulty(beatmap, score.mods, score.gamemode)).star_rating
|
||||||
|
if star_rating < 0:
|
||||||
|
star_rating = (await score.awaitable_attrs.beatmap).difficulty_rating
|
||||||
|
pp = calculate_pp_for_no_calculator(score, star_rating)
|
||||||
|
else:
|
||||||
|
attrs = await get_calculator().calculate_performance(beatmap, score)
|
||||||
|
pp = attrs.pp
|
||||||
|
|
||||||
loop = asyncio.get_event_loop()
|
if settings.suspicious_score_check and (pp > 3000):
|
||||||
|
|
||||||
def _calculate_pp_sync():
|
|
||||||
map = rosu.Beatmap(content=beatmap)
|
|
||||||
mods = deepcopy(score.mods.copy())
|
|
||||||
parse_enum_to_str(int(score.gamemode), mods)
|
|
||||||
map.convert(score.gamemode.to_rosu(), mods) # pyright: ignore[reportArgumentType]
|
|
||||||
perf = rosu.Performance(
|
|
||||||
mods=mods,
|
|
||||||
lazer=True,
|
|
||||||
accuracy=clamp(score.accuracy * 100, 0, 100),
|
|
||||||
combo=score.max_combo,
|
|
||||||
large_tick_hits=score.nlarge_tick_hit or 0,
|
|
||||||
slider_end_hits=score.nslider_tail_hit or 0,
|
|
||||||
small_tick_hits=score.nsmall_tick_hit or 0,
|
|
||||||
n_geki=score.ngeki,
|
|
||||||
n_katu=score.nkatu,
|
|
||||||
n300=score.n300,
|
|
||||||
n100=score.n100,
|
|
||||||
n50=score.n50,
|
|
||||||
misses=score.nmiss,
|
|
||||||
)
|
|
||||||
return perf.calculate(map)
|
|
||||||
|
|
||||||
# 在线程池中执行计算
|
|
||||||
attrs = await loop.run_in_executor(None, _calculate_pp_sync)
|
|
||||||
pp = attrs.pp
|
|
||||||
|
|
||||||
# mrekk bp1: 2048pp; ppy-sb top1 rxbp1: 2198pp
|
|
||||||
if settings.suspicious_score_check and (
|
|
||||||
(attrs.difficulty.stars > 25 and score.accuracy < 0.8) or pp > 2300
|
|
||||||
):
|
|
||||||
logger.warning(
|
logger.warning(
|
||||||
f"User {score.user_id} played {score.beatmap_id} "
|
f"User {score.user_id} played {score.beatmap_id} "
|
||||||
f"(star={attrs.difficulty.stars}) with {pp=} "
|
f"with {pp=} "
|
||||||
f"acc={score.accuracy}. The score is suspicious and return 0pp"
|
f"acc={score.accuracy}. The score is suspicious and return 0pp"
|
||||||
f"({score.id=})"
|
f"({score.id=})"
|
||||||
)
|
)
|
||||||
@@ -132,31 +168,29 @@ async def calculate_pp(score: "Score", beatmap: str, session: AsyncSession) -> f
|
|||||||
|
|
||||||
|
|
||||||
async def pre_fetch_and_calculate_pp(
|
async def pre_fetch_and_calculate_pp(
|
||||||
score: "Score", beatmap_id: int, session: AsyncSession, redis, fetcher
|
score: "Score", session: AsyncSession, redis: Redis, fetcher: "Fetcher"
|
||||||
) -> float:
|
) -> tuple[float, bool]:
|
||||||
"""
|
"""
|
||||||
优化版PP计算:预先获取beatmap文件并使用缓存
|
优化版PP计算:预先获取beatmap文件并使用缓存
|
||||||
"""
|
"""
|
||||||
import asyncio
|
|
||||||
|
|
||||||
from app.database.beatmap import BannedBeatmaps
|
from app.database.beatmap import BannedBeatmaps
|
||||||
|
|
||||||
|
beatmap_id = score.beatmap_id
|
||||||
|
|
||||||
# 快速检查是否被封禁
|
# 快速检查是否被封禁
|
||||||
if settings.suspicious_score_check:
|
if settings.suspicious_score_check:
|
||||||
beatmap_banned = (
|
beatmap_banned = (
|
||||||
await session.exec(
|
await session.exec(select(exists()).where(col(BannedBeatmaps.beatmap_id) == beatmap_id))
|
||||||
select(exists()).where(col(BannedBeatmaps.beatmap_id) == beatmap_id)
|
|
||||||
)
|
|
||||||
).first()
|
).first()
|
||||||
if beatmap_banned:
|
if beatmap_banned:
|
||||||
return 0
|
return 0, False
|
||||||
|
|
||||||
# 异步获取beatmap原始文件,利用已有的Redis缓存机制
|
# 异步获取beatmap原始文件,利用已有的Redis缓存机制
|
||||||
try:
|
try:
|
||||||
beatmap_raw = await fetcher.get_or_fetch_beatmap_raw(redis, beatmap_id)
|
beatmap_raw = await fetcher.get_or_fetch_beatmap_raw(redis, beatmap_id)
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
logger.error(f"Failed to fetch beatmap {beatmap_id}: {e}")
|
logger.error(f"Failed to fetch beatmap {beatmap_id}: {e}")
|
||||||
return 0
|
return 0, False
|
||||||
|
|
||||||
# 在获取文件的同时,可以检查可疑beatmap
|
# 在获取文件的同时,可以检查可疑beatmap
|
||||||
if settings.suspicious_score_check:
|
if settings.suspicious_score_check:
|
||||||
@@ -170,82 +204,12 @@ async def pre_fetch_and_calculate_pp(
|
|||||||
if is_sus:
|
if is_sus:
|
||||||
session.add(BannedBeatmaps(beatmap_id=beatmap_id))
|
session.add(BannedBeatmaps(beatmap_id=beatmap_id))
|
||||||
logger.warning(f"Beatmap {beatmap_id} is suspicious, banned")
|
logger.warning(f"Beatmap {beatmap_id} is suspicious, banned")
|
||||||
return 0
|
return 0, True
|
||||||
except Exception:
|
except Exception:
|
||||||
logger.exception(f"Error checking if beatmap {beatmap_id} is suspicious")
|
logger.exception(f"Error checking if beatmap {beatmap_id} is suspicious")
|
||||||
|
|
||||||
# 调用已优化的PP计算函数
|
# 调用已优化的PP计算函数
|
||||||
return await calculate_pp(score, beatmap_raw, session)
|
return await calculate_pp(score, beatmap_raw, session), True
|
||||||
|
|
||||||
|
|
||||||
async def batch_calculate_pp(
|
|
||||||
scores_data: list[tuple["Score", int]], session: AsyncSession, redis, fetcher
|
|
||||||
) -> list[float]:
|
|
||||||
"""
|
|
||||||
批量计算PP:适用于重新计算或批量处理场景
|
|
||||||
Args:
|
|
||||||
scores_data: [(score, beatmap_id), ...] 的列表
|
|
||||||
Returns:
|
|
||||||
对应的PP值列表
|
|
||||||
"""
|
|
||||||
import asyncio
|
|
||||||
|
|
||||||
from app.database.beatmap import BannedBeatmaps
|
|
||||||
|
|
||||||
if not scores_data:
|
|
||||||
return []
|
|
||||||
|
|
||||||
# 提取所有唯一的beatmap_id
|
|
||||||
unique_beatmap_ids = list({beatmap_id for _, beatmap_id in scores_data})
|
|
||||||
|
|
||||||
# 批量检查被封禁的beatmap
|
|
||||||
banned_beatmaps = set()
|
|
||||||
if settings.suspicious_score_check:
|
|
||||||
banned_results = await session.exec(
|
|
||||||
select(BannedBeatmaps.beatmap_id).where(
|
|
||||||
col(BannedBeatmaps.beatmap_id).in_(unique_beatmap_ids)
|
|
||||||
)
|
|
||||||
)
|
|
||||||
banned_beatmaps = set(banned_results.all())
|
|
||||||
|
|
||||||
# 并发获取所有需要的beatmap原始文件
|
|
||||||
async def fetch_beatmap_safe(beatmap_id: int) -> tuple[int, str | None]:
|
|
||||||
if beatmap_id in banned_beatmaps:
|
|
||||||
return beatmap_id, None
|
|
||||||
try:
|
|
||||||
content = await fetcher.get_or_fetch_beatmap_raw(redis, beatmap_id)
|
|
||||||
return beatmap_id, content
|
|
||||||
except Exception as e:
|
|
||||||
logger.error(f"Failed to fetch beatmap {beatmap_id}: {e}")
|
|
||||||
return beatmap_id, None
|
|
||||||
|
|
||||||
# 并发获取所有beatmap文件
|
|
||||||
fetch_tasks = [fetch_beatmap_safe(bid) for bid in unique_beatmap_ids]
|
|
||||||
fetch_results = await asyncio.gather(*fetch_tasks, return_exceptions=True)
|
|
||||||
|
|
||||||
# 构建beatmap_id -> content的映射
|
|
||||||
beatmap_contents = {}
|
|
||||||
for result in fetch_results:
|
|
||||||
if isinstance(result, tuple):
|
|
||||||
beatmap_id, content = result
|
|
||||||
beatmap_contents[beatmap_id] = content
|
|
||||||
|
|
||||||
# 为每个score计算PP
|
|
||||||
pp_results = []
|
|
||||||
for score, beatmap_id in scores_data:
|
|
||||||
beatmap_content = beatmap_contents.get(beatmap_id)
|
|
||||||
if beatmap_content is None:
|
|
||||||
pp_results.append(0.0)
|
|
||||||
continue
|
|
||||||
|
|
||||||
try:
|
|
||||||
pp = await calculate_pp(score, beatmap_content, session)
|
|
||||||
pp_results.append(pp)
|
|
||||||
except Exception as e:
|
|
||||||
logger.error(f"Failed to calculate PP for score {score.id}: {e}")
|
|
||||||
pp_results.append(0.0)
|
|
||||||
|
|
||||||
return pp_results
|
|
||||||
|
|
||||||
|
|
||||||
# https://osu.ppy.sh/wiki/Gameplay/Score/Total_score
|
# https://osu.ppy.sh/wiki/Gameplay/Score/Total_score
|
||||||
@@ -380,9 +344,7 @@ def calculate_score_to_level(total_score: int) -> float:
|
|||||||
level = 0.0
|
level = 0.0
|
||||||
|
|
||||||
while remaining_score > 0:
|
while remaining_score > 0:
|
||||||
next_level_requirement = to_next_level[
|
next_level_requirement = to_next_level[min(len(to_next_level) - 1, round(level))]
|
||||||
min(len(to_next_level) - 1, round(level))
|
|
||||||
]
|
|
||||||
level += min(1, remaining_score / next_level_requirement)
|
level += min(1, remaining_score / next_level_requirement)
|
||||||
remaining_score -= next_level_requirement
|
remaining_score -= next_level_requirement
|
||||||
|
|
||||||
@@ -417,9 +379,7 @@ class Threshold(int, Enum):
|
|||||||
NOTE_POSX_THRESHOLD = 512 # x: [-512,512]
|
NOTE_POSX_THRESHOLD = 512 # x: [-512,512]
|
||||||
NOTE_POSY_THRESHOLD = 384 # y: [-384,384]
|
NOTE_POSY_THRESHOLD = 384 # y: [-384,384]
|
||||||
|
|
||||||
POS_ERROR_THRESHOLD = (
|
POS_ERROR_THRESHOLD = 1280 * 50 # 超过这么多个物件(包括滑条控制点)的位置有问题就毙掉
|
||||||
1280 * 50
|
|
||||||
) # 超过这么多个物件(包括滑条控制点)的位置有问题就毙掉
|
|
||||||
|
|
||||||
SLIDER_REPEAT_THRESHOLD = 5000
|
SLIDER_REPEAT_THRESHOLD = 5000
|
||||||
|
|
||||||
@@ -431,9 +391,8 @@ def too_dense(hit_objects: list[HitObject], per_1s: int, per_10s: int) -> bool:
|
|||||||
if len(hit_objects) > i + per_1s:
|
if len(hit_objects) > i + per_1s:
|
||||||
if hit_objects[i + per_1s].start_time - hit_objects[i].start_time < 1000:
|
if hit_objects[i + per_1s].start_time - hit_objects[i].start_time < 1000:
|
||||||
return True
|
return True
|
||||||
elif len(hit_objects) > i + per_10s:
|
elif len(hit_objects) > i + per_10s and hit_objects[i + per_10s].start_time - hit_objects[i].start_time < 10000:
|
||||||
if hit_objects[i + per_10s].start_time - hit_objects[i].start_time < 10000:
|
return True
|
||||||
return True
|
|
||||||
return False
|
return False
|
||||||
|
|
||||||
|
|
||||||
@@ -460,19 +419,13 @@ def slider_is_sus(hit_objects: list[HitObject]) -> bool:
|
|||||||
|
|
||||||
|
|
||||||
def is_2b(hit_objects: list[HitObject]) -> bool:
|
def is_2b(hit_objects: list[HitObject]) -> bool:
|
||||||
for i in range(0, len(hit_objects) - 1):
|
return any(hit_objects[i] == hit_objects[i + 1].start_time for i in range(0, len(hit_objects) - 1))
|
||||||
if hit_objects[i] == hit_objects[i + 1].start_time:
|
|
||||||
return True
|
|
||||||
return False
|
|
||||||
|
|
||||||
|
|
||||||
def is_suspicious_beatmap(content: str) -> bool:
|
def is_suspicious_beatmap(content: str) -> bool:
|
||||||
osufile = OsuFile(content=content.encode("utf-8")).parse_file()
|
osufile = OsuFile(content=content.encode("utf-8")).parse_file()
|
||||||
|
|
||||||
if (
|
if osufile.hit_objects[-1].start_time - osufile.hit_objects[0].start_time > 24 * 60 * 60 * 1000:
|
||||||
osufile.hit_objects[-1].start_time - osufile.hit_objects[0].start_time
|
|
||||||
> 24 * 60 * 60 * 1000
|
|
||||||
):
|
|
||||||
return True
|
return True
|
||||||
if osufile.mode == int(GameMode.TAIKO):
|
if osufile.mode == int(GameMode.TAIKO):
|
||||||
if len(osufile.hit_objects) > Threshold.TAIKO_THRESHOLD:
|
if len(osufile.hit_objects) > Threshold.TAIKO_THRESHOLD:
|
||||||
|
|||||||
3
app/calculators/performance/__init__.py
Normal file
3
app/calculators/performance/__init__.py
Normal file
@@ -0,0 +1,3 @@
|
|||||||
|
from ._base import CalculateError, ConvertError, DifficultyError, PerformanceCalculator, PerformanceError
|
||||||
|
|
||||||
|
__all__ = ["CalculateError", "ConvertError", "DifficultyError", "PerformanceCalculator", "PerformanceError"]
|
||||||
61
app/calculators/performance/_base.py
Normal file
61
app/calculators/performance/_base.py
Normal file
@@ -0,0 +1,61 @@
|
|||||||
|
import abc
|
||||||
|
from typing import TYPE_CHECKING, NamedTuple
|
||||||
|
|
||||||
|
from app.models.mods import APIMod
|
||||||
|
from app.models.performance import DifficultyAttributes, PerformanceAttributes
|
||||||
|
from app.models.score import GameMode
|
||||||
|
|
||||||
|
if TYPE_CHECKING:
|
||||||
|
from app.database.score import Score
|
||||||
|
|
||||||
|
|
||||||
|
class CalculateError(Exception):
|
||||||
|
"""An error occurred during performance calculation."""
|
||||||
|
|
||||||
|
|
||||||
|
class DifficultyError(CalculateError):
|
||||||
|
"""The difficulty could not be calculated."""
|
||||||
|
|
||||||
|
|
||||||
|
class ConvertError(DifficultyError):
|
||||||
|
"""A beatmap cannot be converted to the specified game mode."""
|
||||||
|
|
||||||
|
|
||||||
|
class PerformanceError(CalculateError):
|
||||||
|
"""The performance could not be calculated."""
|
||||||
|
|
||||||
|
|
||||||
|
class AvailableModes(NamedTuple):
|
||||||
|
has_performance_calculator: set[GameMode]
|
||||||
|
has_difficulty_calculator: set[GameMode]
|
||||||
|
|
||||||
|
|
||||||
|
class PerformanceCalculator(abc.ABC):
|
||||||
|
def __init__(self, **kwargs) -> None:
|
||||||
|
pass
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
async def get_available_modes(self) -> AvailableModes:
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
async def calculate_performance(self, beatmap_raw: str, score: "Score") -> PerformanceAttributes:
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
async def calculate_difficulty(
|
||||||
|
self, beatmap_raw: str, mods: list[APIMod] | None = None, gamemode: GameMode | None = None
|
||||||
|
) -> DifficultyAttributes:
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
async def can_calculate_performance(self, gamemode: GameMode) -> bool:
|
||||||
|
modes = await self.get_available_modes()
|
||||||
|
return gamemode in modes.has_performance_calculator
|
||||||
|
|
||||||
|
async def can_calculate_difficulty(self, gamemode: GameMode) -> bool:
|
||||||
|
modes = await self.get_available_modes()
|
||||||
|
return gamemode in modes.has_difficulty_calculator
|
||||||
|
|
||||||
|
async def init(self) -> None:
|
||||||
|
"""Initialize the calculator (if needed)."""
|
||||||
|
pass
|
||||||
143
app/calculators/performance/performance_server.py
Normal file
143
app/calculators/performance/performance_server.py
Normal file
@@ -0,0 +1,143 @@
|
|||||||
|
import asyncio
|
||||||
|
import datetime
|
||||||
|
from typing import TYPE_CHECKING, TypedDict, cast
|
||||||
|
|
||||||
|
from app.models.mods import APIMod
|
||||||
|
from app.models.performance import (
|
||||||
|
DifficultyAttributes,
|
||||||
|
DifficultyAttributesUnion,
|
||||||
|
PerformanceAttributes,
|
||||||
|
PerformanceAttributesUnion,
|
||||||
|
)
|
||||||
|
from app.models.score import GameMode
|
||||||
|
|
||||||
|
from ._base import (
|
||||||
|
AvailableModes,
|
||||||
|
CalculateError,
|
||||||
|
DifficultyError,
|
||||||
|
PerformanceCalculator as BasePerformanceCalculator,
|
||||||
|
PerformanceError,
|
||||||
|
)
|
||||||
|
|
||||||
|
from httpx import AsyncClient, HTTPError
|
||||||
|
from pydantic import TypeAdapter
|
||||||
|
|
||||||
|
if TYPE_CHECKING:
|
||||||
|
from app.database.score import Score
|
||||||
|
|
||||||
|
|
||||||
|
class AvailableRulesetResp(TypedDict):
|
||||||
|
has_performance_calculator: list[str]
|
||||||
|
has_difficulty_calculator: list[str]
|
||||||
|
loaded_rulesets: list[str]
|
||||||
|
|
||||||
|
|
||||||
|
class PerformanceServerPerformanceCalculator(BasePerformanceCalculator):
|
||||||
|
def __init__(self, server_url: str = "http://localhost:5225", **kwargs) -> None: # noqa: ARG002
|
||||||
|
self.server_url = server_url
|
||||||
|
|
||||||
|
self._available_modes: AvailableModes | None = None
|
||||||
|
self._modes_lock = asyncio.Lock()
|
||||||
|
self._today = datetime.date.today()
|
||||||
|
|
||||||
|
async def init(self):
|
||||||
|
await self.get_available_modes()
|
||||||
|
|
||||||
|
def _process_modes(self, modes: AvailableRulesetResp) -> AvailableModes:
|
||||||
|
performance_modes = {
|
||||||
|
m for mode in modes["has_performance_calculator"] if (m := GameMode.parse(mode)) is not None
|
||||||
|
}
|
||||||
|
difficulty_modes = {m for mode in modes["has_difficulty_calculator"] if (m := GameMode.parse(mode)) is not None}
|
||||||
|
if GameMode.OSU in performance_modes:
|
||||||
|
performance_modes.add(GameMode.OSURX)
|
||||||
|
performance_modes.add(GameMode.OSUAP)
|
||||||
|
if GameMode.TAIKO in performance_modes:
|
||||||
|
performance_modes.add(GameMode.TAIKORX)
|
||||||
|
if GameMode.FRUITS in performance_modes:
|
||||||
|
performance_modes.add(GameMode.FRUITSRX)
|
||||||
|
|
||||||
|
return AvailableModes(
|
||||||
|
has_performance_calculator=performance_modes,
|
||||||
|
has_difficulty_calculator=difficulty_modes,
|
||||||
|
)
|
||||||
|
|
||||||
|
async def get_available_modes(self) -> AvailableModes:
|
||||||
|
# https://github.com/GooGuTeam/osu-performance-server#get-available_rulesets
|
||||||
|
if self._available_modes is not None and self._today == datetime.date.today():
|
||||||
|
return self._available_modes
|
||||||
|
async with self._modes_lock, AsyncClient() as client:
|
||||||
|
try:
|
||||||
|
resp = await client.get(f"{self.server_url}/available_rulesets")
|
||||||
|
if resp.status_code != 200:
|
||||||
|
raise CalculateError(f"Failed to get available modes: {resp.text}")
|
||||||
|
modes = cast(AvailableRulesetResp, resp.json())
|
||||||
|
result = self._process_modes(modes)
|
||||||
|
|
||||||
|
self._available_modes = result
|
||||||
|
self._today = datetime.date.today()
|
||||||
|
return result
|
||||||
|
except HTTPError as e:
|
||||||
|
raise CalculateError(f"Failed to get available modes: {e}") from e
|
||||||
|
except Exception as e:
|
||||||
|
raise CalculateError(f"Unknown error: {e}") from e
|
||||||
|
|
||||||
|
async def calculate_performance(self, beatmap_raw: str, score: "Score") -> PerformanceAttributes:
|
||||||
|
# https://github.com/GooGuTeam/osu-performance-server#post-performance
|
||||||
|
async with AsyncClient(timeout=15) as client:
|
||||||
|
try:
|
||||||
|
resp = await client.post(
|
||||||
|
f"{self.server_url}/performance",
|
||||||
|
json={
|
||||||
|
"beatmap_id": score.beatmap_id,
|
||||||
|
"beatmap_file": beatmap_raw,
|
||||||
|
"checksum": score.map_md5,
|
||||||
|
"accuracy": score.accuracy,
|
||||||
|
"combo": score.max_combo,
|
||||||
|
"mods": score.mods,
|
||||||
|
"statistics": {
|
||||||
|
"great": score.n300,
|
||||||
|
"ok": score.n100,
|
||||||
|
"meh": score.n50,
|
||||||
|
"miss": score.nmiss,
|
||||||
|
"perfect": score.ngeki,
|
||||||
|
"good": score.nkatu,
|
||||||
|
"large_tick_hit": score.nlarge_tick_hit or 0,
|
||||||
|
"large_tick_miss": score.nlarge_tick_miss or 0,
|
||||||
|
"small_tick_hit": score.nsmall_tick_hit or 0,
|
||||||
|
"slider_tail_hit": score.nslider_tail_hit or 0,
|
||||||
|
},
|
||||||
|
"ruleset": score.gamemode.to_base_ruleset().value,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
if resp.status_code != 200:
|
||||||
|
raise PerformanceError(f"Failed to calculate performance: {resp.text}")
|
||||||
|
return TypeAdapter(PerformanceAttributesUnion).validate_json(resp.text)
|
||||||
|
except HTTPError as e:
|
||||||
|
raise PerformanceError(f"Failed to calculate performance: {e}") from e
|
||||||
|
except Exception as e:
|
||||||
|
raise CalculateError(f"Unknown error: {e}") from e
|
||||||
|
|
||||||
|
async def calculate_difficulty(
|
||||||
|
self, beatmap_raw: str, mods: list[APIMod] | None = None, gamemode: GameMode | None = None
|
||||||
|
) -> DifficultyAttributes:
|
||||||
|
# https://github.com/GooGuTeam/osu-performance-server#post-difficulty
|
||||||
|
async with AsyncClient(timeout=15) as client:
|
||||||
|
try:
|
||||||
|
resp = await client.post(
|
||||||
|
f"{self.server_url}/difficulty",
|
||||||
|
json={
|
||||||
|
"beatmap_file": beatmap_raw,
|
||||||
|
"mods": mods or [],
|
||||||
|
"ruleset": gamemode.value if gamemode else None,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
if resp.status_code != 200:
|
||||||
|
raise DifficultyError(f"Failed to calculate difficulty: {resp.text}")
|
||||||
|
return TypeAdapter(DifficultyAttributesUnion).validate_json(resp.text)
|
||||||
|
except HTTPError as e:
|
||||||
|
raise DifficultyError(f"Failed to calculate difficulty: {e}") from e
|
||||||
|
except Exception as e:
|
||||||
|
raise DifficultyError(f"Unknown error: {e}") from e
|
||||||
|
|
||||||
|
|
||||||
|
PerformanceCalculator = PerformanceServerPerformanceCalculator
|
||||||
228
app/calculators/performance/rosu.py
Normal file
228
app/calculators/performance/rosu.py
Normal file
@@ -0,0 +1,228 @@
|
|||||||
|
from asyncio import get_event_loop
|
||||||
|
from copy import deepcopy
|
||||||
|
from typing import TYPE_CHECKING, ClassVar
|
||||||
|
|
||||||
|
from app.calculator import clamp
|
||||||
|
from app.models.mods import APIMod
|
||||||
|
from app.models.performance import (
|
||||||
|
DifficultyAttributes,
|
||||||
|
ManiaPerformanceAttributes,
|
||||||
|
OsuDifficultyAttributes,
|
||||||
|
OsuPerformanceAttributes,
|
||||||
|
PerformanceAttributes,
|
||||||
|
TaikoDifficultyAttributes,
|
||||||
|
TaikoPerformanceAttributes,
|
||||||
|
)
|
||||||
|
from app.models.score import GameMode
|
||||||
|
|
||||||
|
from ._base import (
|
||||||
|
AvailableModes,
|
||||||
|
CalculateError,
|
||||||
|
ConvertError,
|
||||||
|
DifficultyError,
|
||||||
|
PerformanceCalculator as BasePerformanceCalculator,
|
||||||
|
PerformanceError,
|
||||||
|
)
|
||||||
|
|
||||||
|
if TYPE_CHECKING:
|
||||||
|
from app.database.score import Score
|
||||||
|
|
||||||
|
try:
|
||||||
|
import rosu_pp_py as rosu
|
||||||
|
except ImportError:
|
||||||
|
raise ImportError(
|
||||||
|
"rosu-pp-py is not installed. "
|
||||||
|
"Please install it.\n"
|
||||||
|
" Official: uv add rosu-pp-py\n"
|
||||||
|
" gu: uv add git+https://github.com/GooGuTeam/gu-pp-py.git"
|
||||||
|
)
|
||||||
|
|
||||||
|
PERFORMANCE_CLASS = {
|
||||||
|
GameMode.OSU: OsuPerformanceAttributes,
|
||||||
|
GameMode.TAIKO: TaikoPerformanceAttributes,
|
||||||
|
GameMode.MANIA: ManiaPerformanceAttributes,
|
||||||
|
}
|
||||||
|
DIFFICULTY_CLASS = {
|
||||||
|
GameMode.OSU: OsuDifficultyAttributes,
|
||||||
|
GameMode.TAIKO: TaikoDifficultyAttributes,
|
||||||
|
}
|
||||||
|
|
||||||
|
_enum_to_str = {
|
||||||
|
0: {
|
||||||
|
"MR": {"reflection"},
|
||||||
|
"AC": {"accuracy_judge_mode"},
|
||||||
|
"BR": {"direction"},
|
||||||
|
"AD": {"style"},
|
||||||
|
},
|
||||||
|
1: {"AC": {"accuracy_judge_mode"}},
|
||||||
|
2: {"AC": {"accuracy_judge_mode"}},
|
||||||
|
3: {"AC": {"accuracy_judge_mode"}},
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
def _parse_enum_to_str(ruleset_id: int, mods: list[APIMod]):
|
||||||
|
for mod in mods:
|
||||||
|
if mod["acronym"] in _enum_to_str.get(ruleset_id, {}):
|
||||||
|
for setting in mod.get("settings", {}):
|
||||||
|
if setting in _enum_to_str[ruleset_id][mod["acronym"]]:
|
||||||
|
mod["settings"][setting] = str(mod["settings"][setting]) # pyright: ignore[reportTypedDictNotRequiredAccess]
|
||||||
|
|
||||||
|
|
||||||
|
class RosuPerformanceCalculator(BasePerformanceCalculator):
    """Performance/difficulty calculator backed by the rosu-pp-py bindings.

    Fixes over the previous revision:
    - ``deepcopy(score.mods.copy())`` dropped the redundant shallow ``.copy()``.
    - Parse/convert error re-raises now chain the original exception
      (``raise ... from e``), consistent with the ``CalculateError`` handlers.
    - Renamed the local ``map`` variable, which shadowed the builtin.
    """

    # Rulesets this calculator handles. The RX/AP variants are calculated by
    # converting onto their base rosu mode (see _to_rosu_mode).
    SUPPORT_MODES: ClassVar[set[GameMode]] = {
        GameMode.OSU,
        GameMode.TAIKO,
        GameMode.FRUITS,
        GameMode.MANIA,
        GameMode.OSURX,
        GameMode.OSUAP,
        GameMode.TAIKORX,
        GameMode.FRUITSRX,
    }

    @classmethod
    def _to_rosu_mode(cls, mode: GameMode) -> rosu.GameMode:
        """Map a server GameMode (including RX/AP variants) to a rosu mode.

        Raises KeyError for modes outside SUPPORT_MODES.
        """
        return {
            GameMode.OSU: rosu.GameMode.Osu,
            GameMode.TAIKO: rosu.GameMode.Taiko,
            GameMode.FRUITS: rosu.GameMode.Catch,
            GameMode.MANIA: rosu.GameMode.Mania,
            # RX/AP variants share the base ruleset's rosu mode.
            GameMode.OSURX: rosu.GameMode.Osu,
            GameMode.OSUAP: rosu.GameMode.Osu,
            GameMode.TAIKORX: rosu.GameMode.Taiko,
            GameMode.FRUITSRX: rosu.GameMode.Catch,
        }[mode]

    @classmethod
    def _from_rosu_mode(cls, mode: rosu.GameMode) -> GameMode:
        """Map a rosu mode back to the base server GameMode (never RX/AP)."""
        return {
            rosu.GameMode.Osu: GameMode.OSU,
            rosu.GameMode.Taiko: GameMode.TAIKO,
            rosu.GameMode.Catch: GameMode.FRUITS,
            rosu.GameMode.Mania: GameMode.MANIA,
        }[mode]

    async def get_available_modes(self) -> AvailableModes:
        """Both performance and difficulty calculation cover SUPPORT_MODES."""
        return AvailableModes(
            has_performance_calculator=self.SUPPORT_MODES,
            has_difficulty_calculator=self.SUPPORT_MODES,
        )

    @classmethod
    def _perf_attr_to_model(cls, attr: rosu.PerformanceAttributes, gamemode: GameMode) -> PerformanceAttributes:
        """Convert rosu performance attributes into the server model for *gamemode*.

        Fields rosu does not expose (estimated miss counts / slider breaks)
        are filled with 0.
        """
        attr_class = PERFORMANCE_CLASS.get(gamemode, PerformanceAttributes)

        if attr_class is OsuPerformanceAttributes:
            return OsuPerformanceAttributes(
                pp=attr.pp,
                aim=attr.pp_aim or 0,
                speed=attr.pp_speed or 0,
                accuracy=attr.pp_accuracy or 0,
                flashlight=attr.pp_flashlight or 0,
                effective_miss_count=attr.effective_miss_count or 0,
                speed_deviation=attr.speed_deviation,
                # Not provided by rosu; defaulted to 0.
                combo_based_estimated_miss_count=0,
                score_based_estimated_miss_count=0,
                aim_estimated_slider_breaks=0,
                speed_estimated_slider_breaks=0,
            )
        elif attr_class is TaikoPerformanceAttributes:
            return TaikoPerformanceAttributes(
                pp=attr.pp,
                difficulty=attr.pp_difficulty or 0,
                accuracy=attr.pp_accuracy or 0,
                estimated_unstable_rate=attr.estimated_unstable_rate,
            )
        elif attr_class is ManiaPerformanceAttributes:
            return ManiaPerformanceAttributes(
                pp=attr.pp,
                difficulty=attr.pp_difficulty or 0,
            )
        else:
            # Catch (and any unregistered mode) only carries total pp.
            return PerformanceAttributes(pp=attr.pp)

    async def calculate_performance(self, beatmap_raw: str, score: "Score") -> PerformanceAttributes:
        """Calculate pp for *score* on the beatmap given as raw .osu text.

        Runs the (CPU-bound) rosu calculation in the default executor so the
        event loop is not blocked.

        Raises:
            PerformanceError: the beatmap content could not be parsed.
            CalculateError: any other failure during calculation.
        """
        try:
            beatmap = rosu.Beatmap(content=beatmap_raw)
            # deepcopy alone is sufficient; _parse_enum_to_str mutates the
            # copy so the score's own mods stay untouched.
            mods = deepcopy(score.mods)
            _parse_enum_to_str(int(score.gamemode), mods)
            beatmap.convert(self._to_rosu_mode(score.gamemode), mods)  # pyright: ignore[reportArgumentType]
            perf = rosu.Performance(
                mods=mods,
                lazer=True,
                accuracy=clamp(score.accuracy * 100, 0, 100),
                combo=score.max_combo,
                large_tick_hits=score.nlarge_tick_hit or 0,
                slider_end_hits=score.nslider_tail_hit or 0,
                small_tick_hits=score.nsmall_tick_hit or 0,
                n_geki=score.ngeki,
                n_katu=score.nkatu,
                n300=score.n300,
                n100=score.n100,
                n50=score.n50,
                misses=score.nmiss,
            )
            attr = await get_event_loop().run_in_executor(None, perf.calculate, beatmap)
            return self._perf_attr_to_model(attr, score.gamemode.to_base_ruleset())
        except rosu.ParseError as e:  # pyright: ignore[reportAttributeAccessIssue]
            raise PerformanceError(f"Beatmap parse error: {e}") from e
        except Exception as e:
            raise CalculateError(f"Unknown error: {e}") from e

    @classmethod
    def _diff_attr_to_model(cls, diff: rosu.DifficultyAttributes, gamemode: GameMode) -> DifficultyAttributes:
        """Convert rosu difficulty attributes into the server model for *gamemode*.

        Fields rosu does not expose are filled with 0.
        """
        attr_class = DIFFICULTY_CLASS.get(gamemode, DifficultyAttributes)

        if attr_class is OsuDifficultyAttributes:
            return OsuDifficultyAttributes(
                star_rating=diff.stars,
                max_combo=diff.max_combo,
                aim_difficulty=diff.aim or 0,
                aim_difficult_slider_count=diff.aim_difficult_slider_count or 0,
                speed_difficulty=diff.speed or 0,
                speed_note_count=diff.speed_note_count or 0,
                slider_factor=diff.slider_factor or 0,
                aim_difficult_strain_count=diff.aim_difficult_strain_count or 0,
                speed_difficult_strain_count=diff.speed_difficult_strain_count or 0,
                flashlight_difficulty=diff.flashlight or 0,
                # Not provided by rosu; defaulted to 0.
                aim_top_weighted_slider_factor=0,
                speed_top_weighted_slider_factor=0,
                nested_score_per_object=0,
                legacy_score_base_multiplier=0,
                maximum_legacy_combo_score=0,
            )
        elif attr_class is TaikoDifficultyAttributes:
            return TaikoDifficultyAttributes(
                star_rating=diff.stars,
                max_combo=diff.max_combo,
                rhythm_difficulty=diff.rhythm or 0,
                mono_stamina_factor=diff.stamina or 0,
                # Not provided by rosu; defaulted to 0.
                consistency_factor=0,
            )
        else:
            # Catch / Mania (and unregistered modes) only carry the basics.
            return DifficultyAttributes(
                star_rating=diff.stars,
                max_combo=diff.max_combo,
            )

    async def calculate_difficulty(
        self, beatmap_raw: str, mods: list[APIMod] | None = None, gamemode: GameMode | None = None
    ) -> DifficultyAttributes:
        """Calculate difficulty attributes for a raw .osu beatmap.

        If *gamemode* is given the beatmap is converted to it first; otherwise
        the mode embedded in the beatmap is used.

        Raises:
            ConvertError: the beatmap could not be converted to *gamemode*.
            DifficultyError: the beatmap content could not be parsed.
            CalculateError: any other failure during calculation.
        """
        try:
            beatmap = rosu.Beatmap(content=beatmap_raw)
            if gamemode is not None:
                beatmap.convert(self._to_rosu_mode(gamemode), mods)  # pyright: ignore[reportArgumentType]
            diff_calculator = rosu.Difficulty(mods=mods)
            diff = await get_event_loop().run_in_executor(None, diff_calculator.calculate, beatmap)
            return self._diff_attr_to_model(
                diff, gamemode.to_base_ruleset() if gamemode else self._from_rosu_mode(diff.mode)
            )
        except rosu.ConvertError as e:  # pyright: ignore[reportAttributeAccessIssue]
            raise ConvertError(f"Beatmap convert error: {e}") from e
        except rosu.ParseError as e:  # pyright: ignore[reportAttributeAccessIssue]
            raise DifficultyError(f"Beatmap parse error: {e}") from e
        except Exception as e:
            raise CalculateError(f"Unknown error: {e}") from e
|
||||||
|
|
||||||
|
|
||||||
|
# Module-level alias: the calculator implementation this module exports.
PerformanceCalculator = RosuPerformanceCalculator
|
||||||
804
app/config.py
804
app/config.py
@@ -1,34 +1,16 @@
|
|||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
from enum import Enum
|
from enum import Enum
|
||||||
from typing import Annotated, Any
|
from typing import Annotated, Any, Literal
|
||||||
|
|
||||||
|
from app.models.scoring_mode import ScoringMode
|
||||||
|
|
||||||
from pydantic import (
|
from pydantic import (
|
||||||
AliasChoices,
|
AliasChoices,
|
||||||
BeforeValidator,
|
|
||||||
Field,
|
Field,
|
||||||
HttpUrl,
|
HttpUrl,
|
||||||
ValidationInfo,
|
ValidationInfo,
|
||||||
field_validator,
|
field_validator,
|
||||||
)
|
)
|
||||||
from pydantic_settings import BaseSettings, NoDecode, SettingsConfigDict
|
from pydantic_settings import BaseSettings, SettingsConfigDict
|
||||||
|
|
||||||
|
|
||||||
def _parse_list(v):
|
|
||||||
if v is None or v == "" or str(v).strip() in ("[]", "{}"):
|
|
||||||
return []
|
|
||||||
if isinstance(v, list):
|
|
||||||
return v
|
|
||||||
s = str(v).strip()
|
|
||||||
try:
|
|
||||||
import json
|
|
||||||
|
|
||||||
parsed = json.loads(s)
|
|
||||||
if isinstance(parsed, list):
|
|
||||||
return parsed
|
|
||||||
except Exception:
|
|
||||||
pass
|
|
||||||
return [x.strip() for x in s.split(",") if x.strip()]
|
|
||||||
|
|
||||||
|
|
||||||
class AWSS3StorageSettings(BaseSettings):
|
class AWSS3StorageSettings(BaseSettings):
|
||||||
@@ -57,40 +39,248 @@ class StorageServiceType(str, Enum):
|
|||||||
AWS_S3 = "s3"
|
AWS_S3 = "s3"
|
||||||
|
|
||||||
|
|
||||||
|
class OldScoreProcessingMode(str, Enum):
|
||||||
|
STRICT = "strict"
|
||||||
|
NORMAL = "normal"
|
||||||
|
|
||||||
|
|
||||||
|
SPECTATOR_DOC = """
|
||||||
|
## 旁观服务器设置
|
||||||
|
| 变量名 | 描述 | 类型 | 默认值 |
|
||||||
|
|--------|------|--------|--------|
|
||||||
|
| `SAVE_REPLAYS` | 是否保存回放,设置为 `1` 为启用 | boolean | `0` |
|
||||||
|
| `REDIS_HOST` | Redis 服务器地址 | string | `localhost` |
|
||||||
|
| `SHARED_INTEROP_DOMAIN` | API 服务器(即本服务)地址 | string (url) | `http://localhost:8000` |
|
||||||
|
| `SERVER_PORT` | 旁观服务器端口 | integer | `8006` |
|
||||||
|
| `SP_SENTRY_DSN` | 旁观服务器的 Sentry DSN | string | `null` |
|
||||||
|
| `MATCHMAKING_ROOM_ROUNDS` | 匹配对战房间的回合数 | integer | 5 |
|
||||||
|
| `MATCHMAKING_ALLOW_SKIP` | 是否允许用户跳过匹配阶段 | boolean | false |
|
||||||
|
| `MATCHMAKING_LOBBY_UPDATE_RATE` | 更新匹配大厅的频率(以秒为单位) | integer | 5 |
|
||||||
|
| `MATCHMAKING_QUEUE_UPDATE_RATE` | 更新匹配队列的频率(以秒为单位) | integer | 1 |
|
||||||
|
| `MATCHMAKING_QUEUE_BAN_DURATION` | 玩家拒绝邀请后暂时禁止进入匹配队列的时间(以秒为单位) | integer | 60 |
|
||||||
|
| `MATCHMAKING_POOL_SIZE` | 每个匹配房间的谱面数量 | integer | 50 |
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
class Settings(BaseSettings):
|
class Settings(BaseSettings):
|
||||||
model_config = SettingsConfigDict(env_file=".env", env_file_encoding="utf-8")
|
model_config = SettingsConfigDict(
|
||||||
|
env_file=".env",
|
||||||
|
env_file_encoding="utf-8",
|
||||||
|
extra="allow",
|
||||||
|
json_schema_extra={
|
||||||
|
"paragraphs_desc": {
|
||||||
|
"Fetcher 设置": "Fetcher 用于从 osu! 官方 API 获取数据,使用 osu! 官方 API 的 OAuth 2.0 认证",
|
||||||
|
"监控设置": (
|
||||||
|
"配置应用的监控选项,如 Sentry 和 New Relic。\n\n"
|
||||||
|
"将 newrelic.ini 配置文件放入项目根目录即可自动启用 New Relic 监控。"
|
||||||
|
"如果配置文件不存在或 newrelic 包未安装,将跳过 New Relic 初始化。"
|
||||||
|
),
|
||||||
|
"存储服务设置": """用于存储回放文件、头像等静态资源。
|
||||||
|
|
||||||
|
### 本地存储 (推荐用于开发环境)
|
||||||
|
|
||||||
|
本地存储将文件保存在服务器的本地文件系统中,适合开发和小规模部署。
|
||||||
|
|
||||||
|
```bash
|
||||||
|
STORAGE_SERVICE="local"
|
||||||
|
STORAGE_SETTINGS='{"local_storage_path": "./storage"}'
|
||||||
|
```
|
||||||
|
|
||||||
|
### Cloudflare R2 存储 (推荐用于生产环境)
|
||||||
|
|
||||||
|
```bash
|
||||||
|
STORAGE_SERVICE="r2"
|
||||||
|
STORAGE_SETTINGS='{
|
||||||
|
"r2_account_id": "your_cloudflare_account_id",
|
||||||
|
"r2_access_key_id": "your_r2_access_key_id",
|
||||||
|
"r2_secret_access_key": "your_r2_secret_access_key",
|
||||||
|
"r2_bucket_name": "your_bucket_name",
|
||||||
|
"r2_public_url_base": "https://your-custom-domain.com"
|
||||||
|
}'
|
||||||
|
```
|
||||||
|
|
||||||
|
### AWS S3 存储
|
||||||
|
|
||||||
|
```bash
|
||||||
|
STORAGE_SERVICE="s3"
|
||||||
|
STORAGE_SETTINGS='{
|
||||||
|
"s3_access_key_id": "your_aws_access_key_id",
|
||||||
|
"s3_secret_access_key": "your_aws_secret_access_key",
|
||||||
|
"s3_bucket_name": "your_s3_bucket_name",
|
||||||
|
"s3_region_name": "us-east-1",
|
||||||
|
"s3_public_url_base": "https://your-custom-domain.com"
|
||||||
|
}'
|
||||||
|
```
|
||||||
|
""",
|
||||||
|
"表现计算设置": """配置表现分计算器及其参数。
|
||||||
|
|
||||||
|
### [osu-performance-server](https://github.com/GooGuTeam/osu-performance-server) (默认)
|
||||||
|
|
||||||
|
```bash
|
||||||
|
CALCULATOR="performance_server"
|
||||||
|
CALCULATOR_CONFIG='{
|
||||||
|
"server_url": "http://localhost:5225"
|
||||||
|
}'
|
||||||
|
```
|
||||||
|
|
||||||
|
### rosu-pp-py
|
||||||
|
|
||||||
|
```bash
|
||||||
|
CALCULATOR="rosu"
|
||||||
|
CALCULATOR_CONFIG='{}'
|
||||||
|
```
|
||||||
|
""",
|
||||||
|
}
|
||||||
|
},
|
||||||
|
)
|
||||||
|
|
||||||
# 数据库设置
|
# 数据库设置
|
||||||
mysql_host: str = "localhost"
|
mysql_host: Annotated[
|
||||||
mysql_port: int = 3306
|
str,
|
||||||
mysql_database: str = "osu_api"
|
Field(default="localhost", description="MySQL 服务器地址"),
|
||||||
mysql_user: str = "osu_api"
|
"数据库设置",
|
||||||
mysql_password: str = "password"
|
]
|
||||||
mysql_root_password: str = "password"
|
mysql_port: Annotated[
|
||||||
redis_url: str = "redis://127.0.0.1:6379/0"
|
int,
|
||||||
|
Field(default=3306, description="MySQL 服务器端口"),
|
||||||
|
"数据库设置",
|
||||||
|
]
|
||||||
|
mysql_database: Annotated[
|
||||||
|
str,
|
||||||
|
Field(default="osu_api", description="MySQL 数据库名称"),
|
||||||
|
"数据库设置",
|
||||||
|
]
|
||||||
|
mysql_user: Annotated[
|
||||||
|
str,
|
||||||
|
Field(default="osu_api", description="MySQL 用户名"),
|
||||||
|
"数据库设置",
|
||||||
|
]
|
||||||
|
mysql_password: Annotated[
|
||||||
|
str,
|
||||||
|
Field(default="password", description="MySQL 密码"),
|
||||||
|
"数据库设置",
|
||||||
|
]
|
||||||
|
mysql_root_password: Annotated[
|
||||||
|
str,
|
||||||
|
Field(default="password", description="MySQL root 密码"),
|
||||||
|
"数据库设置",
|
||||||
|
]
|
||||||
|
redis_url: Annotated[
|
||||||
|
str,
|
||||||
|
Field(default="redis://127.0.0.1:6379", description="Redis 连接 URL"),
|
||||||
|
"数据库设置",
|
||||||
|
]
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def database_url(self) -> str:
|
def database_url(self) -> str:
|
||||||
return f"mysql+aiomysql://{self.mysql_user}:{self.mysql_password}@{self.mysql_host}:{self.mysql_port}/{self.mysql_database}"
|
return f"mysql+aiomysql://{self.mysql_user}:{self.mysql_password}@{self.mysql_host}:{self.mysql_port}/{self.mysql_database}"
|
||||||
|
|
||||||
# JWT 设置
|
# JWT 设置
|
||||||
secret_key: str = Field(default="your_jwt_secret_here", alias="jwt_secret_key")
|
secret_key: Annotated[
|
||||||
algorithm: str = "HS256"
|
str,
|
||||||
access_token_expire_minutes: int = 1440
|
Field(
|
||||||
|
default="your_jwt_secret_here",
|
||||||
|
alias="jwt_secret_key",
|
||||||
|
description="JWT 签名密钥",
|
||||||
|
),
|
||||||
|
"JWT 设置",
|
||||||
|
]
|
||||||
|
algorithm: Annotated[
|
||||||
|
str,
|
||||||
|
Field(default="HS256", alias="jwt_algorithm", description="JWT 算法"),
|
||||||
|
"JWT 设置",
|
||||||
|
]
|
||||||
|
access_token_expire_minutes: Annotated[
|
||||||
|
int,
|
||||||
|
Field(default=1440, description="访问令牌过期时间(分钟)"),
|
||||||
|
"JWT 设置",
|
||||||
|
]
|
||||||
|
refresh_token_expire_minutes: Annotated[
|
||||||
|
int,
|
||||||
|
Field(default=21600, description="刷新令牌过期时间(分钟)"),
|
||||||
|
"JWT 设置",
|
||||||
|
] # 15 days
|
||||||
|
jwt_audience: Annotated[
|
||||||
|
str,
|
||||||
|
Field(default="5", description="JWT 受众"),
|
||||||
|
"JWT 设置",
|
||||||
|
]
|
||||||
|
jwt_issuer: Annotated[
|
||||||
|
str | None,
|
||||||
|
Field(default=None, description="JWT 签发者"),
|
||||||
|
"JWT 设置",
|
||||||
|
]
|
||||||
|
|
||||||
# OAuth 设置
|
# OAuth 设置
|
||||||
osu_client_id: int = 5
|
osu_client_id: Annotated[
|
||||||
osu_client_secret: str = "FGc9GAtyHzeQDshWP5Ah7dega8hJACAJpQtw6OXk"
|
int,
|
||||||
osu_web_client_id: int = 6
|
Field(default=5, description="OAuth 客户端 ID"),
|
||||||
osu_web_client_secret: str = "your_osu_web_client_secret_here"
|
"OAuth 设置",
|
||||||
|
]
|
||||||
|
osu_client_secret: Annotated[
|
||||||
|
str,
|
||||||
|
Field(
|
||||||
|
default="FGc9GAtyHzeQDshWP5Ah7dega8hJACAJpQtw6OXk",
|
||||||
|
description="OAuth 客户端密钥",
|
||||||
|
),
|
||||||
|
"OAuth 设置",
|
||||||
|
]
|
||||||
|
osu_web_client_id: Annotated[
|
||||||
|
int,
|
||||||
|
Field(default=6, description="Web OAuth 客户端 ID"),
|
||||||
|
"OAuth 设置",
|
||||||
|
]
|
||||||
|
osu_web_client_secret: Annotated[
|
||||||
|
str,
|
||||||
|
Field(
|
||||||
|
default="your_osu_web_client_secret_here",
|
||||||
|
description="Web OAuth 客户端密钥",
|
||||||
|
),
|
||||||
|
"OAuth 设置",
|
||||||
|
]
|
||||||
|
|
||||||
# 服务器设置
|
# 服务器设置
|
||||||
host: str = "0.0.0.0"
|
host: Annotated[
|
||||||
port: int = 8000
|
str,
|
||||||
debug: bool = False
|
Field(default="0.0.0.0", description="服务器监听地址"), # noqa: S104
|
||||||
cors_urls: list[HttpUrl] = []
|
"服务器设置",
|
||||||
server_url: HttpUrl = HttpUrl("http://localhost:8000")
|
]
|
||||||
frontend_url: HttpUrl | None = None
|
port: Annotated[
|
||||||
|
int,
|
||||||
|
Field(default=8000, description="服务器监听端口"),
|
||||||
|
"服务器设置",
|
||||||
|
]
|
||||||
|
debug: Annotated[
|
||||||
|
bool,
|
||||||
|
Field(default=False, description="是否启用调试模式"),
|
||||||
|
"服务器设置",
|
||||||
|
]
|
||||||
|
cors_urls: Annotated[
|
||||||
|
list[HttpUrl],
|
||||||
|
Field(default=[], description="额外的 CORS 允许的域名列表 (JSON 格式)"),
|
||||||
|
"服务器设置",
|
||||||
|
]
|
||||||
|
server_url: Annotated[
|
||||||
|
HttpUrl,
|
||||||
|
Field(
|
||||||
|
default=HttpUrl("http://localhost:8000"),
|
||||||
|
description="服务器 URL",
|
||||||
|
),
|
||||||
|
"服务器设置",
|
||||||
|
]
|
||||||
|
frontend_url: Annotated[
|
||||||
|
HttpUrl | None,
|
||||||
|
Field(
|
||||||
|
default=None,
|
||||||
|
description="前端 URL,当访问从游戏打开的 URL 时会重定向到这个 URL,为空表示不重定向",
|
||||||
|
),
|
||||||
|
"服务器设置",
|
||||||
|
]
|
||||||
|
enable_rate_limit: Annotated[
|
||||||
|
bool,
|
||||||
|
Field(default=True, description="是否启用速率限制"),
|
||||||
|
"服务器设置",
|
||||||
|
]
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def web_url(self):
|
def web_url(self):
|
||||||
@@ -101,129 +291,463 @@ class Settings(BaseSettings):
|
|||||||
else:
|
else:
|
||||||
return "/"
|
return "/"
|
||||||
|
|
||||||
# SignalR 设置
|
|
||||||
signalr_negotiate_timeout: int = 30
|
|
||||||
signalr_ping_interval: int = 15
|
|
||||||
|
|
||||||
# Fetcher 设置
|
# Fetcher 设置
|
||||||
fetcher_client_id: str = ""
|
fetcher_client_id: Annotated[
|
||||||
fetcher_client_secret: str = ""
|
str,
|
||||||
fetcher_scopes: Annotated[list[str], NoDecode] = ["public"]
|
Field(default="", description="Fetcher 客户端 ID"),
|
||||||
|
"Fetcher 设置",
|
||||||
|
]
|
||||||
|
fetcher_client_secret: Annotated[
|
||||||
|
str,
|
||||||
|
Field(default="", description="Fetcher 客户端密钥"),
|
||||||
|
"Fetcher 设置",
|
||||||
|
]
|
||||||
|
|
||||||
@property
|
# NOTE: Reserve for user-based-fetcher
|
||||||
def fetcher_callback_url(self) -> str:
|
|
||||||
return f"{self.server_url}fetcher/callback"
|
# fetcher_scopes: Annotated[
|
||||||
|
# list[str],
|
||||||
|
# Field(default=["public"], description="Fetcher 权限范围,以逗号分隔每个权限"),
|
||||||
|
# "Fetcher 设置",
|
||||||
|
# NoDecode,
|
||||||
|
# ]
|
||||||
|
|
||||||
|
# @field_validator("fetcher_scopes", mode="before")
|
||||||
|
# @classmethod
|
||||||
|
# def validate_fetcher_scopes(cls, v: Any) -> list[str]:
|
||||||
|
# if isinstance(v, str):
|
||||||
|
# return v.split(",")
|
||||||
|
# return v
|
||||||
|
|
||||||
|
# @property
|
||||||
|
# def fetcher_callback_url(self) -> str:
|
||||||
|
# return f"{self.server_url}fetcher/callback"
|
||||||
|
|
||||||
# 日志设置
|
# 日志设置
|
||||||
log_level: str = "INFO"
|
log_level: Annotated[
|
||||||
|
str,
|
||||||
|
Field(default="INFO", description="日志级别"),
|
||||||
|
"日志设置",
|
||||||
|
]
|
||||||
|
|
||||||
# 邮件服务设置
|
# 验证服务设置
|
||||||
smtp_server: str = "localhost"
|
enable_totp_verification: Annotated[bool, Field(default=True, description="是否启用TOTP双因素验证"), "验证服务设置"]
|
||||||
smtp_port: int = 587
|
totp_issuer: Annotated[
|
||||||
smtp_username: str = ""
|
str | None,
|
||||||
smtp_password: str = ""
|
Field(default=None, description="TOTP 认证器中的发行者名称"),
|
||||||
from_email: str = "noreply@example.com"
|
"验证服务设置",
|
||||||
from_name: str = "osu! server"
|
]
|
||||||
|
totp_service_name: Annotated[
|
||||||
|
str,
|
||||||
|
Field(default="g0v0! Lazer Server", description="TOTP 认证器中显示的服务名称"),
|
||||||
|
"验证服务设置",
|
||||||
|
]
|
||||||
|
totp_use_username_in_label: Annotated[
|
||||||
|
bool,
|
||||||
|
Field(default=True, description="在TOTP标签中使用用户名而不是邮箱"),
|
||||||
|
"验证服务设置",
|
||||||
|
]
|
||||||
|
enable_turnstile_verification: Annotated[
|
||||||
|
bool,
|
||||||
|
Field(default=False, description="是否启用 Cloudflare Turnstile 验证(仅对非 osu! 客户端)"),
|
||||||
|
"验证服务设置",
|
||||||
|
]
|
||||||
|
turnstile_secret_key: Annotated[
|
||||||
|
str,
|
||||||
|
Field(default="", description="Cloudflare Turnstile Secret Key"),
|
||||||
|
"验证服务设置",
|
||||||
|
]
|
||||||
|
turnstile_dev_mode: Annotated[
|
||||||
|
bool,
|
||||||
|
Field(default=False, description="Turnstile 开发模式(跳过验证,用于本地开发)"),
|
||||||
|
"验证服务设置",
|
||||||
|
]
|
||||||
|
enable_email_verification: Annotated[
|
||||||
|
bool,
|
||||||
|
Field(default=False, description="是否启用邮件验证功能"),
|
||||||
|
"验证服务设置",
|
||||||
|
]
|
||||||
|
enable_session_verification: Annotated[
|
||||||
|
bool,
|
||||||
|
Field(default=True, description="是否启用会话验证中间件"),
|
||||||
|
"验证服务设置",
|
||||||
|
]
|
||||||
|
enable_multi_device_login: Annotated[
|
||||||
|
bool,
|
||||||
|
Field(default=True, description="是否允许多设备同时登录"),
|
||||||
|
"验证服务设置",
|
||||||
|
]
|
||||||
|
max_tokens_per_client: Annotated[
|
||||||
|
int,
|
||||||
|
Field(default=10, description="每个用户每个客户端的最大令牌数量"),
|
||||||
|
"验证服务设置",
|
||||||
|
]
|
||||||
|
device_trust_duration_days: Annotated[
|
||||||
|
int,
|
||||||
|
Field(default=30, description="设备信任持续天数"),
|
||||||
|
"验证服务设置",
|
||||||
|
]
|
||||||
|
email_provider: Annotated[
|
||||||
|
Literal["smtp", "mailersend"],
|
||||||
|
Field(default="smtp", description="邮件发送提供商:smtp(SMTP)或 mailersend(MailerSend)"),
|
||||||
|
"验证服务设置",
|
||||||
|
]
|
||||||
|
smtp_server: Annotated[
|
||||||
|
str,
|
||||||
|
Field(default="localhost", description="SMTP 服务器地址"),
|
||||||
|
"验证服务设置",
|
||||||
|
]
|
||||||
|
smtp_port: Annotated[
|
||||||
|
int,
|
||||||
|
Field(default=587, description="SMTP 服务器端口"),
|
||||||
|
"验证服务设置",
|
||||||
|
]
|
||||||
|
smtp_username: Annotated[
|
||||||
|
str,
|
||||||
|
Field(default="", description="SMTP 用户名"),
|
||||||
|
"验证服务设置",
|
||||||
|
]
|
||||||
|
smtp_password: Annotated[
|
||||||
|
str,
|
||||||
|
Field(default="", description="SMTP 密码"),
|
||||||
|
"验证服务设置",
|
||||||
|
]
|
||||||
|
from_email: Annotated[
|
||||||
|
str,
|
||||||
|
Field(default="noreply@example.com", description="发件人邮箱"),
|
||||||
|
"验证服务设置",
|
||||||
|
]
|
||||||
|
from_name: Annotated[
|
||||||
|
str,
|
||||||
|
Field(default="osu! server", description="发件人名称"),
|
||||||
|
"验证服务设置",
|
||||||
|
]
|
||||||
|
mailersend_api_key: Annotated[
|
||||||
|
str,
|
||||||
|
Field(default="", description="MailerSend API Key"),
|
||||||
|
"验证服务设置",
|
||||||
|
]
|
||||||
|
mailersend_from_email: Annotated[
|
||||||
|
str,
|
||||||
|
Field(default="", description="MailerSend 发件人邮箱(需要在 MailerSend 中验证)"),
|
||||||
|
"验证服务设置",
|
||||||
|
]
|
||||||
|
|
||||||
# 邮件验证功能开关
|
# 监控配置
|
||||||
enable_email_verification: bool = Field(
|
sentry_dsn: Annotated[
|
||||||
default=True, description="是否启用邮件验证功能"
|
HttpUrl | None,
|
||||||
)
|
Field(default=None, description="Sentry DSN,为空不启用 Sentry"),
|
||||||
enable_email_sending: bool = Field(
|
"监控设置",
|
||||||
default=False, description="是否真实发送邮件(False时仅模拟发送)"
|
]
|
||||||
)
|
new_relic_environment: Annotated[
|
||||||
|
str | None,
|
||||||
# Sentry 配置
|
Field(default=None, description='New Relic 环境标识,设置为 "production" 或 "development"'),
|
||||||
sentry_dsn: HttpUrl | None = None
|
"监控设置",
|
||||||
|
]
|
||||||
|
|
||||||
# GeoIP 配置
|
# GeoIP 配置
|
||||||
maxmind_license_key: str = ""
|
maxmind_license_key: Annotated[
|
||||||
geoip_dest_dir: str = "./geoip"
|
str,
|
||||||
geoip_update_day: int = 1 # 每周更新的星期几(0=周一,6=周日)
|
Field(default="", description="MaxMind License Key(用于下载离线IP库)"),
|
||||||
geoip_update_hour: int = 2 # 每周更新的小时数(0-23)
|
"GeoIP 配置",
|
||||||
|
]
|
||||||
|
geoip_dest_dir: Annotated[
|
||||||
|
str,
|
||||||
|
Field(default="./geoip", description="GeoIP 数据库存储目录"),
|
||||||
|
"GeoIP 配置",
|
||||||
|
]
|
||||||
|
geoip_update_day: Annotated[
|
||||||
|
int,
|
||||||
|
Field(default=1, description="GeoIP 每周更新的星期几(0=周一,6=周日)"),
|
||||||
|
"GeoIP 配置",
|
||||||
|
]
|
||||||
|
geoip_update_hour: Annotated[
|
||||||
|
int,
|
||||||
|
Field(default=2, description="GeoIP 每周更新时间(小时,0-23)"),
|
||||||
|
"GeoIP 配置",
|
||||||
|
]
|
||||||
|
|
||||||
# 游戏设置
|
# 游戏设置
|
||||||
enable_rx: bool = Field(
|
enable_rx: Annotated[
|
||||||
default=False, validation_alias=AliasChoices("enable_rx", "enable_osu_rx")
|
bool,
|
||||||
)
|
Field(
|
||||||
enable_ap: bool = Field(
|
default=False,
|
||||||
default=False, validation_alias=AliasChoices("enable_ap", "enable_osu_ap")
|
validation_alias=AliasChoices("enable_rx", "enable_osu_rx"),
|
||||||
)
|
description="启用 RX mod 统计数据",
|
||||||
enable_all_mods_pp: bool = False
|
),
|
||||||
enable_supporter_for_all_users: bool = False
|
"游戏设置",
|
||||||
enable_all_beatmap_leaderboard: bool = False
|
]
|
||||||
enable_all_beatmap_pp: bool = False
|
enable_ap: Annotated[
|
||||||
# 性能优化设置
|
bool,
|
||||||
enable_beatmap_preload: bool = True
|
Field(
|
||||||
beatmap_cache_expire_hours: int = 24
|
default=False,
|
||||||
max_concurrent_pp_calculations: int = 10
|
validation_alias=AliasChoices("enable_ap", "enable_osu_ap"),
|
||||||
enable_pp_calculation_threading: bool = True
|
description="启用 AP mod 统计数据",
|
||||||
|
),
|
||||||
|
"游戏设置",
|
||||||
|
]
|
||||||
|
enable_supporter_for_all_users: Annotated[
|
||||||
|
bool,
|
||||||
|
Field(default=False, description="启用所有新注册用户的支持者状态"),
|
||||||
|
"游戏设置",
|
||||||
|
]
|
||||||
|
enable_all_beatmap_leaderboard: Annotated[
|
||||||
|
bool,
|
||||||
|
Field(default=False, description="启用所有谱面的排行榜"),
|
||||||
|
"游戏设置",
|
||||||
|
]
|
||||||
|
enable_all_beatmap_pp: Annotated[
|
||||||
|
bool,
|
||||||
|
Field(default=False, description="允许任何谱面获得 PP"),
|
||||||
|
"游戏设置",
|
||||||
|
]
|
||||||
|
seasonal_backgrounds: Annotated[
|
||||||
|
list[str],
|
||||||
|
Field(default=[], description="季节背景图 URL 列表"),
|
||||||
|
"游戏设置",
|
||||||
|
]
|
||||||
|
beatmap_tag_top_count: Annotated[
|
||||||
|
int,
|
||||||
|
Field(default=2, description="显示在结算列表的标签所需的最低票数"),
|
||||||
|
"游戏设置",
|
||||||
|
]
|
||||||
|
old_score_processing_mode: Annotated[
|
||||||
|
OldScoreProcessingMode,
|
||||||
|
Field(
|
||||||
|
default=OldScoreProcessingMode.NORMAL,
|
||||||
|
description=(
|
||||||
|
"旧成绩处理模式<br/>strict: 删除所有相关的成绩、pp、统计信息、回放<br/>normal: 删除 pp 和排行榜成绩"
|
||||||
|
),
|
||||||
|
),
|
||||||
|
"游戏设置",
|
||||||
|
]
|
||||||
|
scoring_mode: Annotated[
|
||||||
|
ScoringMode,
|
||||||
|
Field(
|
||||||
|
default=ScoringMode.STANDARDISED,
|
||||||
|
description="分数计算模式:standardised(标准化)或 classic(经典)",
|
||||||
|
),
|
||||||
|
"游戏设置",
|
||||||
|
]
|
||||||
|
|
||||||
|
# 表现计算设置
|
||||||
|
calculator: Annotated[
|
||||||
|
Literal["rosu", "performance_server"],
|
||||||
|
Field(default="performance_server", description="表现分计算器"),
|
||||||
|
"表现计算设置",
|
||||||
|
]
|
||||||
|
calculator_config: Annotated[
|
||||||
|
dict[str, Any],
|
||||||
|
Field(
|
||||||
|
default={"server_url": "http://localhost:5225"},
|
||||||
|
description="表现分计算器配置 (JSON 格式),具体配置项请参考上方",
|
||||||
|
),
|
||||||
|
"表现计算设置",
|
||||||
|
]
|
||||||
|
fallback_no_calculator_pp: Annotated[
|
||||||
|
bool,
|
||||||
|
Field(default=False, description="当计算器不支持某个模式时,使用简化的 pp 计算方法作为后备"),
|
||||||
|
"表现计算设置",
|
||||||
|
]
|
||||||
|
|
||||||
|
# 谱面缓存设置
|
||||||
|
enable_beatmap_preload: Annotated[
|
||||||
|
bool,
|
||||||
|
Field(default=True, description="启用谱面缓存预加载"),
|
||||||
|
"缓存设置",
|
||||||
|
"谱面缓存",
|
||||||
|
]
|
||||||
|
beatmap_cache_expire_hours: Annotated[
|
||||||
|
int,
|
||||||
|
Field(default=24, description="谱面缓存过期时间(小时)"),
|
||||||
|
"缓存设置",
|
||||||
|
"谱面缓存",
|
||||||
|
]
|
||||||
|
beatmapset_cache_expire_seconds: Annotated[
|
||||||
|
int,
|
||||||
|
Field(default=3600, description="Beatmapset 缓存过期时间(秒)"),
|
||||||
|
"缓存设置",
|
||||||
|
"谱面缓存",
|
||||||
|
]
|
||||||
|
|
||||||
# 排行榜缓存设置
|
# 排行榜缓存设置
|
||||||
enable_ranking_cache: bool = True
|
enable_ranking_cache: Annotated[
|
||||||
ranking_cache_expire_minutes: int = 10 # 排行榜缓存过期时间(分钟)
|
bool,
|
||||||
ranking_cache_refresh_interval_minutes: int = 10 # 排行榜缓存刷新间隔(分钟)
|
Field(default=True, description="启用排行榜缓存"),
|
||||||
ranking_cache_max_pages: int = 20 # 最多缓存的页数
|
"缓存设置",
|
||||||
ranking_cache_top_countries: int = 20 # 缓存前N个国家的排行榜
|
"排行榜缓存",
|
||||||
|
]
|
||||||
|
ranking_cache_expire_minutes: Annotated[
|
||||||
|
int,
|
||||||
|
Field(default=10, description="排行榜缓存过期时间(分钟)"),
|
||||||
|
"缓存设置",
|
||||||
|
"排行榜缓存",
|
||||||
|
]
|
||||||
|
ranking_cache_refresh_interval_minutes: Annotated[
|
||||||
|
int,
|
||||||
|
Field(default=10, description="排行榜缓存刷新间隔(分钟)"),
|
||||||
|
"缓存设置",
|
||||||
|
"排行榜缓存",
|
||||||
|
]
|
||||||
|
ranking_cache_max_pages: Annotated[
|
||||||
|
int,
|
||||||
|
Field(default=20, description="最多缓存的页数"),
|
||||||
|
"缓存设置",
|
||||||
|
"排行榜缓存",
|
||||||
|
]
|
||||||
|
ranking_cache_top_countries: Annotated[
|
||||||
|
int,
|
||||||
|
Field(default=20, description="缓存前N个国家的排行榜"),
|
||||||
|
"缓存设置",
|
||||||
|
"排行榜缓存",
|
||||||
|
]
|
||||||
|
|
||||||
# 用户缓存设置
|
# 用户缓存设置
|
||||||
enable_user_cache_preload: bool = True # 启用用户缓存预加载
|
enable_user_cache_preload: Annotated[
|
||||||
user_cache_expire_seconds: int = 300 # 用户信息缓存过期时间(秒)
|
bool,
|
||||||
user_scores_cache_expire_seconds: int = 60 # 用户成绩缓存过期时间(秒)
|
Field(default=True, description="启用用户缓存预加载"),
|
||||||
user_beatmapsets_cache_expire_seconds: int = 600 # 用户谱面集缓存过期时间(秒)
|
"缓存设置",
|
||||||
user_cache_max_preload_users: int = 200 # 最多预加载的用户数量
|
"用户缓存",
|
||||||
user_cache_concurrent_limit: int = 10 # 并发缓存用户的限制
|
]
|
||||||
|
user_cache_expire_seconds: Annotated[
|
||||||
|
int,
|
||||||
|
Field(default=300, description="用户信息缓存过期时间(秒)"),
|
||||||
|
"缓存设置",
|
||||||
|
"用户缓存",
|
||||||
|
]
|
||||||
|
user_scores_cache_expire_seconds: Annotated[
|
||||||
|
int,
|
||||||
|
Field(default=60, description="用户成绩缓存过期时间(秒)"),
|
||||||
|
"缓存设置",
|
||||||
|
"用户缓存",
|
||||||
|
]
|
||||||
|
user_beatmapsets_cache_expire_seconds: Annotated[
|
||||||
|
int,
|
||||||
|
Field(default=600, description="用户谱面集缓存过期时间(秒)"),
|
||||||
|
"缓存设置",
|
||||||
|
"用户缓存",
|
||||||
|
]
|
||||||
|
user_cache_max_preload_users: Annotated[
|
||||||
|
int,
|
||||||
|
Field(default=200, description="最多预加载的用户数量"),
|
||||||
|
"缓存设置",
|
||||||
|
"用户缓存",
|
||||||
|
]
|
||||||
|
|
||||||
|
# 资源代理设置
|
||||||
|
enable_asset_proxy: Annotated[
|
||||||
|
bool,
|
||||||
|
Field(default=False, description="启用资源代理"),
|
||||||
|
"资源代理设置",
|
||||||
|
]
|
||||||
|
custom_asset_domain: Annotated[
|
||||||
|
str,
|
||||||
|
Field(default="g0v0.top", description="自定义资源域名"),
|
||||||
|
"资源代理设置",
|
||||||
|
]
|
||||||
|
asset_proxy_prefix: Annotated[
|
||||||
|
str,
|
||||||
|
Field(default="assets-ppy", description="assets.ppy.sh 的自定义前缀"),
|
||||||
|
"资源代理设置",
|
||||||
|
]
|
||||||
|
avatar_proxy_prefix: Annotated[
|
||||||
|
str,
|
||||||
|
Field(default="a-ppy", description="a.ppy.sh 的自定义前缀"),
|
||||||
|
"资源代理设置",
|
||||||
|
]
|
||||||
|
beatmap_proxy_prefix: Annotated[
|
||||||
|
str,
|
||||||
|
Field(default="b-ppy", description="b.ppy.sh 的自定义前缀"),
|
||||||
|
"资源代理设置",
|
||||||
|
]
|
||||||
|
|
||||||
|
# 谱面同步设置
|
||||||
|
enable_auto_beatmap_sync: Annotated[
|
||||||
|
bool,
|
||||||
|
Field(default=False, description="启用自动谱面同步"),
|
||||||
|
"谱面同步设置",
|
||||||
|
]
|
||||||
|
beatmap_sync_interval_minutes: Annotated[
|
||||||
|
int,
|
||||||
|
Field(default=60, description="自动谱面同步间隔(分钟)"),
|
||||||
|
"谱面同步设置",
|
||||||
|
]
|
||||||
|
|
||||||
# 反作弊设置
|
# 反作弊设置
|
||||||
suspicious_score_check: bool = True
|
suspicious_score_check: Annotated[
|
||||||
seasonal_backgrounds: Annotated[list[str], BeforeValidator(_parse_list)] = []
|
bool,
|
||||||
banned_name: list[str] = [
|
Field(default=True, description="启用可疑分数检查(pp>3000)"),
|
||||||
"mrekk",
|
"反作弊设置",
|
||||||
"vaxei",
|
]
|
||||||
"btmc",
|
banned_name: Annotated[
|
||||||
"cookiezi",
|
list[str],
|
||||||
"peppy",
|
Field(
|
||||||
"saragi",
|
default=[
|
||||||
"chocomint",
|
"mrekk",
|
||||||
|
"vaxei",
|
||||||
|
"btmc",
|
||||||
|
"cookiezi",
|
||||||
|
"peppy",
|
||||||
|
"saragi",
|
||||||
|
"chocomint",
|
||||||
|
],
|
||||||
|
description="禁止使用的用户名列表",
|
||||||
|
),
|
||||||
|
"反作弊设置",
|
||||||
|
]
|
||||||
|
allow_delete_scores: Annotated[
|
||||||
|
bool,
|
||||||
|
Field(default=False, description="允许用户删除自己的成绩"),
|
||||||
|
"反作弊设置",
|
||||||
|
]
|
||||||
|
check_ruleset_version: Annotated[
|
||||||
|
bool,
|
||||||
|
Field(default=True, description="检查自定义 ruleset 版本"),
|
||||||
|
"反作弊设置",
|
||||||
|
]
|
||||||
|
check_client_version: Annotated[
|
||||||
|
bool,
|
||||||
|
Field(default=True, description="检查客户端版本"),
|
||||||
|
"反作弊设置",
|
||||||
|
]
|
||||||
|
client_version_urls: Annotated[
|
||||||
|
list[str],
|
||||||
|
Field(
|
||||||
|
default=["https://raw.githubusercontent.com/GooGuTeam/g0v0-client-versions/main/version_list.json"],
|
||||||
|
description=(
|
||||||
|
"客户端版本列表 URL, 查看 https://github.com/GooGuTeam/g0v0-client-versions 来添加你自己的客户端"
|
||||||
|
),
|
||||||
|
),
|
||||||
|
"反作弊设置",
|
||||||
]
|
]
|
||||||
|
|
||||||
# 存储设置
|
# 存储设置
|
||||||
storage_service: StorageServiceType = StorageServiceType.LOCAL
|
storage_service: Annotated[
|
||||||
storage_settings: (
|
StorageServiceType,
|
||||||
LocalStorageSettings | CloudflareR2Settings | AWSS3StorageSettings
|
Field(default=StorageServiceType.LOCAL, description="存储服务类型:local、r2、s3"),
|
||||||
) = LocalStorageSettings()
|
"存储服务设置",
|
||||||
|
]
|
||||||
@field_validator("fetcher_scopes", mode="before")
|
storage_settings: Annotated[
|
||||||
def validate_fetcher_scopes(cls, v: Any) -> list[str]:
|
LocalStorageSettings | CloudflareR2Settings | AWSS3StorageSettings,
|
||||||
if isinstance(v, str):
|
Field(default=LocalStorageSettings(), description="存储服务配置 (JSON 格式)"),
|
||||||
return v.split(",")
|
"存储服务设置",
|
||||||
return v
|
]
|
||||||
|
|
||||||
@field_validator("storage_settings", mode="after")
|
@field_validator("storage_settings", mode="after")
|
||||||
|
@classmethod
|
||||||
def validate_storage_settings(
|
def validate_storage_settings(
|
||||||
cls,
|
cls,
|
||||||
v: LocalStorageSettings | CloudflareR2Settings | AWSS3StorageSettings,
|
v: LocalStorageSettings | CloudflareR2Settings | AWSS3StorageSettings,
|
||||||
info: ValidationInfo,
|
info: ValidationInfo,
|
||||||
) -> LocalStorageSettings | CloudflareR2Settings | AWSS3StorageSettings:
|
) -> LocalStorageSettings | CloudflareR2Settings | AWSS3StorageSettings:
|
||||||
if info.data.get("storage_service") == StorageServiceType.CLOUDFLARE_R2:
|
service = info.data.get("storage_service")
|
||||||
if not isinstance(v, CloudflareR2Settings):
|
if service == StorageServiceType.CLOUDFLARE_R2 and not isinstance(v, CloudflareR2Settings):
|
||||||
raise ValueError(
|
raise ValueError("When storage_service is 'r2', storage_settings must be CloudflareR2Settings")
|
||||||
"When storage_service is 'r2', "
|
if service == StorageServiceType.LOCAL and not isinstance(v, LocalStorageSettings):
|
||||||
"storage_settings must be CloudflareR2Settings"
|
raise ValueError("When storage_service is 'local', storage_settings must be LocalStorageSettings")
|
||||||
)
|
if service == StorageServiceType.AWS_S3 and not isinstance(v, AWSS3StorageSettings):
|
||||||
elif info.data.get("storage_service") == StorageServiceType.LOCAL:
|
raise ValueError("When storage_service is 's3', storage_settings must be AWSS3StorageSettings")
|
||||||
if not isinstance(v, LocalStorageSettings):
|
|
||||||
raise ValueError(
|
|
||||||
"When storage_service is 'local', "
|
|
||||||
"storage_settings must be LocalStorageSettings"
|
|
||||||
)
|
|
||||||
elif info.data.get("storage_service") == StorageServiceType.AWS_S3:
|
|
||||||
if not isinstance(v, AWSS3StorageSettings):
|
|
||||||
raise ValueError(
|
|
||||||
"When storage_service is 's3', "
|
|
||||||
"storage_settings must be AWSS3StorageSettings"
|
|
||||||
)
|
|
||||||
return v
|
return v
|
||||||
|
|
||||||
|
|
||||||
settings = Settings()
|
settings = Settings() # pyright: ignore[reportCallIssue]
|
||||||
|
|||||||
10
app/const.py
10
app/const.py
@@ -1,3 +1,9 @@
|
|||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
BANCHOBOT_ID = 2
|
BANCHOBOT_ID = 2
|
||||||
|
|
||||||
|
BACKUP_CODE_LENGTH = 10
|
||||||
|
|
||||||
|
SUPPORT_TOTP_VERIFICATION_VER = 20250913
|
||||||
|
|
||||||
|
# Maximum score in standardised scoring mode
|
||||||
|
# https://github.com/ppy/osu/blob/master/osu.Game/Rulesets/Scoring/ScoreProcessor.cs
|
||||||
|
MAX_SCORE = 1000000
|
||||||
|
|||||||
@@ -1,21 +1,32 @@
|
|||||||
from .achievement import UserAchievement, UserAchievementResp
|
from .achievement import UserAchievement, UserAchievementResp
|
||||||
from .auth import OAuthClient, OAuthToken, V1APIKeys
|
from .auth import OAuthClient, OAuthToken, TotpKeys, V1APIKeys
|
||||||
from .beatmap import (
|
from .beatmap import (
|
||||||
Beatmap,
|
Beatmap,
|
||||||
BeatmapResp,
|
BeatmapDict,
|
||||||
|
BeatmapModel,
|
||||||
)
|
)
|
||||||
from .beatmap_playcounts import BeatmapPlaycounts, BeatmapPlaycountsResp
|
from .beatmap_playcounts import (
|
||||||
|
BeatmapPlaycounts,
|
||||||
|
BeatmapPlaycountsDict,
|
||||||
|
BeatmapPlaycountsModel,
|
||||||
|
)
|
||||||
|
from .beatmap_sync import BeatmapSync
|
||||||
|
from .beatmap_tags import BeatmapTagVote
|
||||||
from .beatmapset import (
|
from .beatmapset import (
|
||||||
Beatmapset,
|
Beatmapset,
|
||||||
BeatmapsetResp,
|
BeatmapsetDict,
|
||||||
|
BeatmapsetModel,
|
||||||
)
|
)
|
||||||
from .best_score import BestScore
|
from .beatmapset_ratings import BeatmapRating
|
||||||
|
from .best_scores import BestScore
|
||||||
from .chat import (
|
from .chat import (
|
||||||
ChannelType,
|
ChannelType,
|
||||||
ChatChannel,
|
ChatChannel,
|
||||||
ChatChannelResp,
|
ChatChannelDict,
|
||||||
|
ChatChannelModel,
|
||||||
ChatMessage,
|
ChatMessage,
|
||||||
ChatMessageResp,
|
ChatMessageDict,
|
||||||
|
ChatMessageModel,
|
||||||
)
|
)
|
||||||
from .counts import (
|
from .counts import (
|
||||||
CountResp,
|
CountResp,
|
||||||
@@ -23,70 +34,94 @@ from .counts import (
|
|||||||
ReplayWatchedCount,
|
ReplayWatchedCount,
|
||||||
)
|
)
|
||||||
from .daily_challenge import DailyChallengeStats, DailyChallengeStatsResp
|
from .daily_challenge import DailyChallengeStats, DailyChallengeStatsResp
|
||||||
from .email_verification import EmailVerification, LoginSession
|
from .events import Event
|
||||||
from .favourite_beatmapset import FavouriteBeatmapset
|
from .favourite_beatmapset import FavouriteBeatmapset
|
||||||
from .lazer_user import (
|
from .item_attempts_count import (
|
||||||
User,
|
ItemAttemptsCount,
|
||||||
UserResp,
|
ItemAttemptsCountDict,
|
||||||
|
ItemAttemptsCountModel,
|
||||||
|
)
|
||||||
|
from .matchmaking import (
|
||||||
|
MatchmakingPool,
|
||||||
|
MatchmakingPoolBeatmap,
|
||||||
|
MatchmakingUserStats,
|
||||||
)
|
)
|
||||||
from .multiplayer_event import MultiplayerEvent, MultiplayerEventResp
|
from .multiplayer_event import MultiplayerEvent, MultiplayerEventResp
|
||||||
from .notification import Notification, UserNotification
|
from .notification import Notification, UserNotification
|
||||||
from .password_reset import PasswordReset
|
from .password_reset import PasswordReset
|
||||||
from .playlist_attempts import (
|
|
||||||
ItemAttemptsCount,
|
|
||||||
ItemAttemptsResp,
|
|
||||||
PlaylistAggregateScore,
|
|
||||||
)
|
|
||||||
from .playlist_best_score import PlaylistBestScore
|
from .playlist_best_score import PlaylistBestScore
|
||||||
from .playlists import Playlist, PlaylistResp
|
from .playlists import Playlist, PlaylistDict, PlaylistModel
|
||||||
from .pp_best_score import PPBestScore
|
|
||||||
from .rank_history import RankHistory, RankHistoryResp, RankTop
|
from .rank_history import RankHistory, RankHistoryResp, RankTop
|
||||||
from .relationship import Relationship, RelationshipResp, RelationshipType
|
from .relationship import Relationship, RelationshipDict, RelationshipModel, RelationshipType
|
||||||
from .room import APIUploadedRoom, Room, RoomResp
|
from .room import APIUploadedRoom, Room, RoomDict, RoomModel
|
||||||
from .room_participated_user import RoomParticipatedUser
|
from .room_participated_user import RoomParticipatedUser
|
||||||
from .score import (
|
from .score import (
|
||||||
MultiplayerScores,
|
MultiplayerScores,
|
||||||
Score,
|
Score,
|
||||||
ScoreAround,
|
ScoreAround,
|
||||||
ScoreBase,
|
ScoreDict,
|
||||||
ScoreResp,
|
ScoreModel,
|
||||||
ScoreStatistics,
|
ScoreStatistics,
|
||||||
)
|
)
|
||||||
from .score_token import ScoreToken, ScoreTokenResp
|
from .score_token import ScoreToken, ScoreTokenResp
|
||||||
|
from .search_beatmapset import SearchBeatmapsetsResp
|
||||||
from .statistics import (
|
from .statistics import (
|
||||||
UserStatistics,
|
UserStatistics,
|
||||||
UserStatisticsResp,
|
UserStatisticsDict,
|
||||||
|
UserStatisticsModel,
|
||||||
|
)
|
||||||
|
from .team import Team, TeamMember, TeamRequest, TeamResp
|
||||||
|
from .total_score_best_scores import TotalScoreBestScore
|
||||||
|
from .user import (
|
||||||
|
User,
|
||||||
|
UserDict,
|
||||||
|
UserModel,
|
||||||
)
|
)
|
||||||
from .team import Team, TeamMember, TeamRequest
|
|
||||||
from .user_account_history import (
|
from .user_account_history import (
|
||||||
UserAccountHistory,
|
UserAccountHistory,
|
||||||
UserAccountHistoryResp,
|
UserAccountHistoryResp,
|
||||||
UserAccountHistoryType,
|
UserAccountHistoryType,
|
||||||
)
|
)
|
||||||
from .user_login_log import UserLoginLog
|
from .user_login_log import UserLoginLog
|
||||||
|
from .user_preference import UserPreference
|
||||||
|
from .verification import EmailVerification, LoginSession, LoginSessionResp, TrustedDevice, TrustedDeviceResp
|
||||||
|
|
||||||
__all__ = [
|
__all__ = [
|
||||||
"APIUploadedRoom",
|
"APIUploadedRoom",
|
||||||
"Beatmap",
|
"Beatmap",
|
||||||
|
"BeatmapDict",
|
||||||
|
"BeatmapModel",
|
||||||
"BeatmapPlaycounts",
|
"BeatmapPlaycounts",
|
||||||
"BeatmapPlaycountsResp",
|
"BeatmapPlaycountsDict",
|
||||||
"BeatmapResp",
|
"BeatmapPlaycountsModel",
|
||||||
|
"BeatmapRating",
|
||||||
|
"BeatmapSync",
|
||||||
|
"BeatmapTagVote",
|
||||||
"Beatmapset",
|
"Beatmapset",
|
||||||
"BeatmapsetResp",
|
"BeatmapsetDict",
|
||||||
|
"BeatmapsetModel",
|
||||||
"BestScore",
|
"BestScore",
|
||||||
"ChannelType",
|
"ChannelType",
|
||||||
"ChatChannel",
|
"ChatChannel",
|
||||||
"ChatChannelResp",
|
"ChatChannelDict",
|
||||||
|
"ChatChannelModel",
|
||||||
"ChatMessage",
|
"ChatMessage",
|
||||||
"ChatMessageResp",
|
"ChatMessageDict",
|
||||||
|
"ChatMessageModel",
|
||||||
"CountResp",
|
"CountResp",
|
||||||
"DailyChallengeStats",
|
"DailyChallengeStats",
|
||||||
"DailyChallengeStatsResp",
|
"DailyChallengeStatsResp",
|
||||||
"EmailVerification",
|
"EmailVerification",
|
||||||
|
"Event",
|
||||||
"FavouriteBeatmapset",
|
"FavouriteBeatmapset",
|
||||||
"ItemAttemptsCount",
|
"ItemAttemptsCount",
|
||||||
"ItemAttemptsResp",
|
"ItemAttemptsCountDict",
|
||||||
|
"ItemAttemptsCountModel",
|
||||||
"LoginSession",
|
"LoginSession",
|
||||||
|
"LoginSessionResp",
|
||||||
|
"MatchmakingPool",
|
||||||
|
"MatchmakingPoolBeatmap",
|
||||||
|
"MatchmakingUserStats",
|
||||||
"MonthlyPlaycounts",
|
"MonthlyPlaycounts",
|
||||||
"MultiplayerEvent",
|
"MultiplayerEvent",
|
||||||
"MultiplayerEventResp",
|
"MultiplayerEventResp",
|
||||||
@@ -94,47 +129,56 @@ __all__ = [
|
|||||||
"Notification",
|
"Notification",
|
||||||
"OAuthClient",
|
"OAuthClient",
|
||||||
"OAuthToken",
|
"OAuthToken",
|
||||||
"PPBestScore",
|
|
||||||
"PasswordReset",
|
"PasswordReset",
|
||||||
"Playlist",
|
"Playlist",
|
||||||
"PlaylistAggregateScore",
|
|
||||||
"PlaylistBestScore",
|
"PlaylistBestScore",
|
||||||
"PlaylistResp",
|
"PlaylistDict",
|
||||||
|
"PlaylistModel",
|
||||||
"RankHistory",
|
"RankHistory",
|
||||||
"RankHistoryResp",
|
"RankHistoryResp",
|
||||||
"RankTop",
|
"RankTop",
|
||||||
"Relationship",
|
"Relationship",
|
||||||
"RelationshipResp",
|
"RelationshipDict",
|
||||||
|
"RelationshipModel",
|
||||||
"RelationshipType",
|
"RelationshipType",
|
||||||
"ReplayWatchedCount",
|
"ReplayWatchedCount",
|
||||||
"Room",
|
"Room",
|
||||||
|
"RoomDict",
|
||||||
|
"RoomModel",
|
||||||
"RoomParticipatedUser",
|
"RoomParticipatedUser",
|
||||||
"RoomResp",
|
|
||||||
"Score",
|
"Score",
|
||||||
"ScoreAround",
|
"ScoreAround",
|
||||||
"ScoreBase",
|
"ScoreDict",
|
||||||
"ScoreResp",
|
"ScoreModel",
|
||||||
"ScoreStatistics",
|
"ScoreStatistics",
|
||||||
"ScoreToken",
|
"ScoreToken",
|
||||||
"ScoreTokenResp",
|
"ScoreTokenResp",
|
||||||
|
"SearchBeatmapsetsResp",
|
||||||
"Team",
|
"Team",
|
||||||
"TeamMember",
|
"TeamMember",
|
||||||
"TeamRequest",
|
"TeamRequest",
|
||||||
|
"TeamResp",
|
||||||
|
"TotalScoreBestScore",
|
||||||
|
"TotpKeys",
|
||||||
|
"TrustedDevice",
|
||||||
|
"TrustedDeviceResp",
|
||||||
"User",
|
"User",
|
||||||
"UserAccountHistory",
|
"UserAccountHistory",
|
||||||
"UserAccountHistoryResp",
|
"UserAccountHistoryResp",
|
||||||
"UserAccountHistoryType",
|
"UserAccountHistoryType",
|
||||||
"UserAchievement",
|
"UserAchievement",
|
||||||
"UserAchievement",
|
|
||||||
"UserAchievementResp",
|
"UserAchievementResp",
|
||||||
|
"UserDict",
|
||||||
"UserLoginLog",
|
"UserLoginLog",
|
||||||
|
"UserModel",
|
||||||
"UserNotification",
|
"UserNotification",
|
||||||
"UserResp",
|
"UserPreference",
|
||||||
"UserStatistics",
|
"UserStatistics",
|
||||||
"UserStatisticsResp",
|
"UserStatisticsDict",
|
||||||
|
"UserStatisticsModel",
|
||||||
"V1APIKeys",
|
"V1APIKeys",
|
||||||
]
|
]
|
||||||
|
|
||||||
for i in __all__:
|
for i in __all__:
|
||||||
if i.endswith("Resp"):
|
if i.endswith("Model") or i.endswith("Resp"):
|
||||||
globals()[i].model_rebuild() # type: ignore[call-arg]
|
globals()[i].model_rebuild()
|
||||||
|
|||||||
499
app/database/_base.py
Normal file
499
app/database/_base.py
Normal file
@@ -0,0 +1,499 @@
|
|||||||
|
from collections.abc import Awaitable, Callable, Sequence
|
||||||
|
from functools import lru_cache, wraps
|
||||||
|
import inspect
|
||||||
|
import sys
|
||||||
|
from types import NoneType, get_original_bases
|
||||||
|
from typing import (
|
||||||
|
TYPE_CHECKING,
|
||||||
|
Any,
|
||||||
|
ClassVar,
|
||||||
|
Concatenate,
|
||||||
|
ForwardRef,
|
||||||
|
ParamSpec,
|
||||||
|
TypedDict,
|
||||||
|
cast,
|
||||||
|
get_args,
|
||||||
|
get_origin,
|
||||||
|
overload,
|
||||||
|
)
|
||||||
|
|
||||||
|
from app.models.model import UTCBaseModel
|
||||||
|
from app.utils import type_is_optional
|
||||||
|
|
||||||
|
from sqlalchemy.ext.asyncio import async_object_session
|
||||||
|
from sqlmodel import SQLModel
|
||||||
|
from sqlmodel.ext.asyncio.session import AsyncSession
|
||||||
|
from sqlmodel.main import SQLModelMetaclass
|
||||||
|
|
||||||
|
_dict_to_model: dict[type, type["DatabaseModel"]] = {}
|
||||||
|
|
||||||
|
|
||||||
|
def _safe_evaluate_forwardref(type_: str | ForwardRef, module_name: str) -> Any:
|
||||||
|
"""Safely evaluate a ForwardRef, with fallback to app.database module"""
|
||||||
|
if isinstance(type_, str):
|
||||||
|
type_ = ForwardRef(type_)
|
||||||
|
|
||||||
|
try:
|
||||||
|
return evaluate_forwardref(
|
||||||
|
type_,
|
||||||
|
globalns=vars(sys.modules[module_name]),
|
||||||
|
localns={},
|
||||||
|
)
|
||||||
|
except (NameError, AttributeError, KeyError):
|
||||||
|
# Fallback to app.database module
|
||||||
|
try:
|
||||||
|
import app.database
|
||||||
|
|
||||||
|
return evaluate_forwardref(
|
||||||
|
type_,
|
||||||
|
globalns=vars(app.database),
|
||||||
|
localns={},
|
||||||
|
)
|
||||||
|
except (NameError, AttributeError, KeyError):
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
class OnDemand[T]:
|
||||||
|
if TYPE_CHECKING:
|
||||||
|
|
||||||
|
def __get__(self, instance: object | None, owner: Any) -> T: ...
|
||||||
|
|
||||||
|
def __set__(self, instance: Any, value: T) -> None: ...
|
||||||
|
|
||||||
|
def __delete__(self, instance: Any) -> None: ...
|
||||||
|
|
||||||
|
|
||||||
|
class Exclude[T]:
|
||||||
|
if TYPE_CHECKING:
|
||||||
|
|
||||||
|
def __get__(self, instance: object | None, owner: Any) -> T: ...
|
||||||
|
|
||||||
|
def __set__(self, instance: Any, value: T) -> None: ...
|
||||||
|
|
||||||
|
def __delete__(self, instance: Any) -> None: ...
|
||||||
|
|
||||||
|
|
||||||
|
# https://github.com/fastapi/sqlmodel/blob/main/sqlmodel/_compat.py#L126-L140
|
||||||
|
def _get_annotations(class_dict: dict[str, Any]) -> dict[str, Any]:
|
||||||
|
raw_annotations: dict[str, Any] = class_dict.get("__annotations__", {})
|
||||||
|
if sys.version_info >= (3, 14) and "__annotations__" not in class_dict:
|
||||||
|
# See https://github.com/pydantic/pydantic/pull/11991
|
||||||
|
from annotationlib import (
|
||||||
|
Format,
|
||||||
|
call_annotate_function,
|
||||||
|
get_annotate_from_class_namespace,
|
||||||
|
)
|
||||||
|
|
||||||
|
if annotate := get_annotate_from_class_namespace(class_dict):
|
||||||
|
raw_annotations = call_annotate_function(annotate, format=Format.FORWARDREF)
|
||||||
|
return raw_annotations
|
||||||
|
|
||||||
|
|
||||||
|
# https://github.com/pydantic/pydantic/blob/main/pydantic/v1/typing.py#L58-L77
|
||||||
|
if sys.version_info < (3, 12, 4):
|
||||||
|
|
||||||
|
def evaluate_forwardref(type_: ForwardRef, globalns: Any, localns: Any) -> Any:
|
||||||
|
# Even though it is the right signature for python 3.9, mypy complains with
|
||||||
|
# `error: Too many arguments for "_evaluate" of "ForwardRef"` hence the cast...
|
||||||
|
# Python 3.13/3.12.4+ made `recursive_guard` a kwarg, so name it explicitly to avoid:
|
||||||
|
# TypeError: ForwardRef._evaluate() missing 1 required keyword-only argument: 'recursive_guard'
|
||||||
|
return cast(Any, type_)._evaluate(globalns, localns, recursive_guard=set())
|
||||||
|
|
||||||
|
else:
|
||||||
|
|
||||||
|
def evaluate_forwardref(type_: ForwardRef, globalns: Any, localns: Any) -> Any:
|
||||||
|
# Pydantic 1.x will not support PEP 695 syntax, but provide `type_params` to avoid
|
||||||
|
# warnings:
|
||||||
|
return cast(Any, type_)._evaluate(globalns, localns, type_params=(), recursive_guard=set())
|
||||||
|
|
||||||
|
|
||||||
|
class DatabaseModelMetaclass(SQLModelMetaclass):
|
||||||
|
def __new__(
|
||||||
|
cls,
|
||||||
|
name: str,
|
||||||
|
bases: tuple[type, ...],
|
||||||
|
namespace: dict[str, Any],
|
||||||
|
**kwargs: Any,
|
||||||
|
) -> "DatabaseModelMetaclass":
|
||||||
|
original_annotations = _get_annotations(namespace)
|
||||||
|
new_annotations = {}
|
||||||
|
ondemands = []
|
||||||
|
excludes = []
|
||||||
|
|
||||||
|
for k, v in original_annotations.items():
|
||||||
|
if get_origin(v) is OnDemand:
|
||||||
|
inner_type = v.__args__[0]
|
||||||
|
new_annotations[k] = inner_type
|
||||||
|
ondemands.append(k)
|
||||||
|
elif get_origin(v) is Exclude:
|
||||||
|
inner_type = v.__args__[0]
|
||||||
|
new_annotations[k] = inner_type
|
||||||
|
excludes.append(k)
|
||||||
|
else:
|
||||||
|
new_annotations[k] = v
|
||||||
|
|
||||||
|
new_class = super().__new__(
|
||||||
|
cls,
|
||||||
|
name,
|
||||||
|
bases,
|
||||||
|
{
|
||||||
|
**namespace,
|
||||||
|
"__annotations__": new_annotations,
|
||||||
|
},
|
||||||
|
**kwargs,
|
||||||
|
)
|
||||||
|
|
||||||
|
new_class._CALCULATED_FIELDS = dict(getattr(new_class, "_CALCULATED_FIELDS", {}))
|
||||||
|
new_class._ONDEMAND_DATABASE_FIELDS = list(getattr(new_class, "_ONDEMAND_DATABASE_FIELDS", [])) + list(
|
||||||
|
ondemands
|
||||||
|
)
|
||||||
|
new_class._ONDEMAND_CALCULATED_FIELDS = dict(getattr(new_class, "_ONDEMAND_CALCULATED_FIELDS", {}))
|
||||||
|
new_class._EXCLUDED_DATABASE_FIELDS = list(getattr(new_class, "_EXCLUDED_DATABASE_FIELDS", [])) + list(excludes)
|
||||||
|
|
||||||
|
for attr_name, attr_value in namespace.items():
|
||||||
|
target = _get_callable_target(attr_value)
|
||||||
|
if target is None:
|
||||||
|
continue
|
||||||
|
|
||||||
|
if getattr(target, "__included__", False):
|
||||||
|
new_class._CALCULATED_FIELDS[attr_name] = _get_return_type(target)
|
||||||
|
_pre_calculate_context_params(target, attr_value)
|
||||||
|
|
||||||
|
if getattr(target, "__calculated_ondemand__", False):
|
||||||
|
new_class._ONDEMAND_CALCULATED_FIELDS[attr_name] = _get_return_type(target)
|
||||||
|
_pre_calculate_context_params(target, attr_value)
|
||||||
|
|
||||||
|
# Register TDict to DatabaseModel mapping
|
||||||
|
for base in get_original_bases(new_class):
|
||||||
|
cls_name = base.__name__
|
||||||
|
if "DatabaseModel" in cls_name and "[" in cls_name and "]" in cls_name:
|
||||||
|
generic_type_name = cls_name[cls_name.index("[") : cls_name.rindex("]") + 1]
|
||||||
|
generic_type = evaluate_forwardref(
|
||||||
|
ForwardRef(generic_type_name),
|
||||||
|
globalns=vars(sys.modules[new_class.__module__]),
|
||||||
|
localns={},
|
||||||
|
)
|
||||||
|
_dict_to_model[generic_type[0]] = new_class
|
||||||
|
|
||||||
|
return new_class
|
||||||
|
|
||||||
|
|
||||||
|
def _pre_calculate_context_params(target: Callable, attr_value: Any) -> None:
|
||||||
|
if hasattr(target, "__context_params__"):
|
||||||
|
return
|
||||||
|
|
||||||
|
sig = inspect.signature(target)
|
||||||
|
params = list(sig.parameters.keys())
|
||||||
|
|
||||||
|
start_index = 2
|
||||||
|
if isinstance(attr_value, classmethod):
|
||||||
|
start_index = 3
|
||||||
|
|
||||||
|
context_params = [] if len(params) < start_index else params[start_index:]
|
||||||
|
|
||||||
|
setattr(target, "__context_params__", context_params)
|
||||||
|
|
||||||
|
|
||||||
|
def _get_callable_target(value: Any) -> Callable | None:
|
||||||
|
if isinstance(value, (staticmethod, classmethod)):
|
||||||
|
return value.__func__
|
||||||
|
if inspect.isfunction(value):
|
||||||
|
return value
|
||||||
|
if inspect.ismethod(value):
|
||||||
|
return value.__func__
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
def _mark_callable(value: Any, flag: str) -> Callable | None:
|
||||||
|
target = _get_callable_target(value)
|
||||||
|
if target is None:
|
||||||
|
return None
|
||||||
|
setattr(target, flag, True)
|
||||||
|
return target
|
||||||
|
|
||||||
|
|
||||||
|
def _get_return_type(func: Callable) -> type:
|
||||||
|
sig = inspect.get_annotations(func)
|
||||||
|
return sig.get("return", Any)
|
||||||
|
|
||||||
|
|
||||||
|
P = ParamSpec("P")
|
||||||
|
CalculatedField = Callable[Concatenate[AsyncSession, Any, P], Awaitable[Any]]
|
||||||
|
DecoratorTarget = CalculatedField | staticmethod | classmethod
|
||||||
|
|
||||||
|
|
||||||
|
def included(func: DecoratorTarget) -> DecoratorTarget:
|
||||||
|
marker = _mark_callable(func, "__included__")
|
||||||
|
if marker is None:
|
||||||
|
raise RuntimeError("@included is only usable on callables.")
|
||||||
|
|
||||||
|
@wraps(marker)
|
||||||
|
async def wrapper(*args, **kwargs):
|
||||||
|
return await marker(*args, **kwargs)
|
||||||
|
|
||||||
|
if isinstance(func, staticmethod):
|
||||||
|
return staticmethod(wrapper)
|
||||||
|
if isinstance(func, classmethod):
|
||||||
|
return classmethod(wrapper)
|
||||||
|
return wrapper
|
||||||
|
|
||||||
|
|
||||||
|
def ondemand(func: DecoratorTarget) -> DecoratorTarget:
|
||||||
|
marker = _mark_callable(func, "__calculated_ondemand__")
|
||||||
|
if marker is None:
|
||||||
|
raise RuntimeError("@ondemand is only usable on callables.")
|
||||||
|
|
||||||
|
@wraps(marker)
|
||||||
|
async def wrapper(*args, **kwargs):
|
||||||
|
return await marker(*args, **kwargs)
|
||||||
|
|
||||||
|
if isinstance(func, staticmethod):
|
||||||
|
return staticmethod(wrapper)
|
||||||
|
if isinstance(func, classmethod):
|
||||||
|
return classmethod(wrapper)
|
||||||
|
return wrapper
|
||||||
|
|
||||||
|
|
||||||
|
async def call_awaitable_with_context(
|
||||||
|
func: CalculatedField,
|
||||||
|
session: AsyncSession,
|
||||||
|
instance: Any,
|
||||||
|
context: dict[str, Any],
|
||||||
|
) -> Any:
|
||||||
|
context_params: list[str] | None = getattr(func, "__context_params__", None)
|
||||||
|
|
||||||
|
if context_params is None:
|
||||||
|
# Fallback if not pre-calculated
|
||||||
|
sig = inspect.signature(func)
|
||||||
|
if len(sig.parameters) == 2:
|
||||||
|
return await func(session, instance)
|
||||||
|
else:
|
||||||
|
call_params = {}
|
||||||
|
for param in sig.parameters.values():
|
||||||
|
if param.name in context:
|
||||||
|
call_params[param.name] = context[param.name]
|
||||||
|
return await func(session, instance, **call_params)
|
||||||
|
|
||||||
|
if not context_params:
|
||||||
|
return await func(session, instance)
|
||||||
|
|
||||||
|
call_params = {}
|
||||||
|
for name in context_params:
|
||||||
|
if name in context:
|
||||||
|
call_params[name] = context[name]
|
||||||
|
return await func(session, instance, **call_params)
|
||||||
|
|
||||||
|
|
||||||
|
class DatabaseModel[TDict](SQLModel, UTCBaseModel, metaclass=DatabaseModelMetaclass):
|
||||||
|
_CALCULATED_FIELDS: ClassVar[dict[str, type]] = {}
|
||||||
|
|
||||||
|
_ONDEMAND_DATABASE_FIELDS: ClassVar[list[str]] = []
|
||||||
|
_ONDEMAND_CALCULATED_FIELDS: ClassVar[dict[str, type]] = {}
|
||||||
|
|
||||||
|
_EXCLUDED_DATABASE_FIELDS: ClassVar[list[str]] = []
|
||||||
|
|
||||||
|
@overload
|
||||||
|
@classmethod
|
||||||
|
async def transform(
|
||||||
|
cls,
|
||||||
|
db_instance: "DatabaseModel",
|
||||||
|
*,
|
||||||
|
session: AsyncSession,
|
||||||
|
includes: list[str] | None = None,
|
||||||
|
**context: Any,
|
||||||
|
) -> TDict: ...
|
||||||
|
|
||||||
|
@overload
|
||||||
|
@classmethod
|
||||||
|
async def transform(
|
||||||
|
cls,
|
||||||
|
db_instance: "DatabaseModel",
|
||||||
|
*,
|
||||||
|
includes: list[str] | None = None,
|
||||||
|
**context: Any,
|
||||||
|
) -> TDict: ...
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
async def transform(
|
||||||
|
cls,
|
||||||
|
db_instance: "DatabaseModel",
|
||||||
|
*,
|
||||||
|
session: AsyncSession | None = None,
|
||||||
|
includes: list[str] | None = None,
|
||||||
|
**context: Any,
|
||||||
|
) -> TDict:
|
||||||
|
includes = includes.copy() if includes is not None else []
|
||||||
|
session = cast(AsyncSession | None, async_object_session(db_instance)) if session is None else session
|
||||||
|
if session is None:
|
||||||
|
raise RuntimeError("DatabaseModel.transform requires a session-bound instance.")
|
||||||
|
resp_obj = cls.model_validate(db_instance.model_dump())
|
||||||
|
data = resp_obj.model_dump()
|
||||||
|
|
||||||
|
for field in cls._CALCULATED_FIELDS:
|
||||||
|
func = getattr(cls, field)
|
||||||
|
value = await call_awaitable_with_context(func, session, db_instance, context)
|
||||||
|
data[field] = value
|
||||||
|
|
||||||
|
sub_include_map: dict[str, list[str]] = {}
|
||||||
|
for include in [i for i in includes if "." in i]:
|
||||||
|
parent, sub_include = include.split(".", 1)
|
||||||
|
if parent not in sub_include_map:
|
||||||
|
sub_include_map[parent] = []
|
||||||
|
sub_include_map[parent].append(sub_include)
|
||||||
|
includes.remove(include) # pyright: ignore[reportOptionalMemberAccess]
|
||||||
|
|
||||||
|
for field, sub_includes in sub_include_map.items():
|
||||||
|
if field in cls._ONDEMAND_CALCULATED_FIELDS:
|
||||||
|
func = getattr(cls, field)
|
||||||
|
value = await call_awaitable_with_context(
|
||||||
|
func, session, db_instance, {**context, "includes": sub_includes}
|
||||||
|
)
|
||||||
|
data[field] = value
|
||||||
|
|
||||||
|
for include in includes:
|
||||||
|
if include in data:
|
||||||
|
continue
|
||||||
|
|
||||||
|
if include in cls._ONDEMAND_CALCULATED_FIELDS:
|
||||||
|
func = getattr(cls, include)
|
||||||
|
value = await call_awaitable_with_context(func, session, db_instance, context)
|
||||||
|
data[include] = value
|
||||||
|
|
||||||
|
for field in cls._ONDEMAND_DATABASE_FIELDS:
|
||||||
|
if field not in includes:
|
||||||
|
del data[field]
|
||||||
|
|
||||||
|
for field in cls._EXCLUDED_DATABASE_FIELDS:
|
||||||
|
if field in data:
|
||||||
|
del data[field]
|
||||||
|
|
||||||
|
return cast(TDict, data)
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
async def transform_many(
|
||||||
|
cls,
|
||||||
|
db_instances: Sequence["DatabaseModel"],
|
||||||
|
*,
|
||||||
|
session: AsyncSession | None = None,
|
||||||
|
includes: list[str] | None = None,
|
||||||
|
**context: Any,
|
||||||
|
) -> list[TDict]:
|
||||||
|
if not db_instances:
|
||||||
|
return []
|
||||||
|
|
||||||
|
# SQLAlchemy AsyncSession is not concurrency-safe, so we cannot use asyncio.gather here
|
||||||
|
# if the transform method performs any database operations using the shared session.
|
||||||
|
# Since we don't know if the transform method (or its calculated fields) will use the DB,
|
||||||
|
# we must execute them serially to be safe.
|
||||||
|
results = []
|
||||||
|
for instance in db_instances:
|
||||||
|
results.append(await cls.transform(instance, session=session, includes=includes, **context))
|
||||||
|
return results
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
@lru_cache
|
||||||
|
def generate_typeddict(cls, includes: tuple[str, ...] | None = None) -> type[TypedDict]: # pyright: ignore[reportInvalidTypeForm]
|
||||||
|
def _evaluate_type(field_type: Any, *, resolve_database_model: bool = False, field_name: str = "") -> Any:
|
||||||
|
# Evaluate ForwardRef if present
|
||||||
|
if isinstance(field_type, (str, ForwardRef)):
|
||||||
|
resolved = _safe_evaluate_forwardref(field_type, cls.__module__)
|
||||||
|
if resolved is not None:
|
||||||
|
field_type = resolved
|
||||||
|
|
||||||
|
origin_type = get_origin(field_type)
|
||||||
|
inner_type = field_type
|
||||||
|
args = get_args(field_type)
|
||||||
|
|
||||||
|
is_optional = type_is_optional(field_type) # pyright: ignore[reportArgumentType]
|
||||||
|
if is_optional:
|
||||||
|
inner_type = next((arg for arg in args if arg is not NoneType), field_type)
|
||||||
|
|
||||||
|
is_list = False
|
||||||
|
if origin_type is list:
|
||||||
|
is_list = True
|
||||||
|
inner_type = args[0]
|
||||||
|
|
||||||
|
# Evaluate ForwardRef in inner_type if present
|
||||||
|
if isinstance(inner_type, (str, ForwardRef)):
|
||||||
|
resolved = _safe_evaluate_forwardref(inner_type, cls.__module__)
|
||||||
|
if resolved is not None:
|
||||||
|
inner_type = resolved
|
||||||
|
|
||||||
|
if not resolve_database_model:
|
||||||
|
if is_optional:
|
||||||
|
return inner_type | None # pyright: ignore[reportOperatorIssue]
|
||||||
|
elif is_list:
|
||||||
|
return list[inner_type]
|
||||||
|
return inner_type
|
||||||
|
|
||||||
|
model_class = None
|
||||||
|
|
||||||
|
# First check if inner_type is directly a DatabaseModel subclass
|
||||||
|
try:
|
||||||
|
if inspect.isclass(inner_type) and issubclass(inner_type, DatabaseModel): # type: ignore
|
||||||
|
model_class = inner_type
|
||||||
|
except TypeError:
|
||||||
|
pass
|
||||||
|
|
||||||
|
# If not found, look up in _dict_to_model
|
||||||
|
if model_class is None:
|
||||||
|
model_class = _dict_to_model.get(inner_type) # type: ignore
|
||||||
|
|
||||||
|
if model_class is not None:
|
||||||
|
nested_dict = model_class.generate_typeddict(tuple(sub_include_map.get(field_name, ())))
|
||||||
|
resolved_type = list[nested_dict] if is_list else nested_dict # type: ignore
|
||||||
|
|
||||||
|
if is_optional:
|
||||||
|
resolved_type = resolved_type | None # type: ignore
|
||||||
|
|
||||||
|
return resolved_type
|
||||||
|
|
||||||
|
# Fallback: use the resolved inner_type
|
||||||
|
resolved_type = list[inner_type] if is_list else inner_type # type: ignore
|
||||||
|
if is_optional:
|
||||||
|
resolved_type = resolved_type | None # type: ignore
|
||||||
|
return resolved_type
|
||||||
|
|
||||||
|
if includes is None:
|
||||||
|
includes = ()
|
||||||
|
|
||||||
|
# Parse nested includes
|
||||||
|
direct_includes = []
|
||||||
|
sub_include_map: dict[str, list[str]] = {}
|
||||||
|
for include in includes:
|
||||||
|
if "." in include:
|
||||||
|
parent, sub_include = include.split(".", 1)
|
||||||
|
if parent not in sub_include_map:
|
||||||
|
sub_include_map[parent] = []
|
||||||
|
sub_include_map[parent].append(sub_include)
|
||||||
|
if parent not in direct_includes:
|
||||||
|
direct_includes.append(parent)
|
||||||
|
else:
|
||||||
|
direct_includes.append(include)
|
||||||
|
|
||||||
|
fields = {}
|
||||||
|
|
||||||
|
# Process model fields
|
||||||
|
for field_name, field_info in cls.model_fields.items():
|
||||||
|
field_type = field_info.annotation or Any
|
||||||
|
field_type = _evaluate_type(field_type, field_name=field_name)
|
||||||
|
|
||||||
|
if field_name in cls._ONDEMAND_DATABASE_FIELDS and field_name not in direct_includes:
|
||||||
|
continue
|
||||||
|
else:
|
||||||
|
fields[field_name] = field_type
|
||||||
|
|
||||||
|
# Process calculated fields
|
||||||
|
for field_name, field_type in cls._CALCULATED_FIELDS.items():
|
||||||
|
field_type = _evaluate_type(field_type, resolve_database_model=True, field_name=field_name)
|
||||||
|
fields[field_name] = field_type
|
||||||
|
|
||||||
|
# Process ondemand calculated fields
|
||||||
|
for field_name, field_type in cls._ONDEMAND_CALCULATED_FIELDS.items():
|
||||||
|
if field_name not in direct_includes:
|
||||||
|
continue
|
||||||
|
|
||||||
|
field_type = _evaluate_type(field_type, resolve_database_model=True, field_name=field_name)
|
||||||
|
fields[field_name] = field_type
|
||||||
|
|
||||||
|
return TypedDict(f"{cls.__name__}Dict[{', '.join(includes)}]" if includes else f"{cls.__name__}Dict", fields) # pyright: ignore[reportArgumentType]
|
||||||
@@ -1,10 +1,11 @@
|
|||||||
from datetime import UTC, datetime
|
from datetime import datetime
|
||||||
from typing import TYPE_CHECKING
|
from typing import TYPE_CHECKING
|
||||||
|
|
||||||
from app.config import settings
|
from app.config import settings
|
||||||
from app.models.achievement import MEDALS, Achievement
|
from app.models.achievement import MEDALS, Achievement
|
||||||
from app.models.model import UTCBaseModel
|
from app.models.model import UTCBaseModel
|
||||||
from app.models.notification import UserAchievementUnlock
|
from app.models.notification import UserAchievementUnlock
|
||||||
|
from app.utils import utcnow
|
||||||
|
|
||||||
from .events import Event, EventType
|
from .events import Event, EventType
|
||||||
|
|
||||||
@@ -23,23 +24,19 @@ from sqlmodel import (
|
|||||||
from sqlmodel.ext.asyncio.session import AsyncSession
|
from sqlmodel.ext.asyncio.session import AsyncSession
|
||||||
|
|
||||||
if TYPE_CHECKING:
|
if TYPE_CHECKING:
|
||||||
from .lazer_user import User
|
from .user import User
|
||||||
|
|
||||||
|
|
||||||
class UserAchievementBase(SQLModel, UTCBaseModel):
|
class UserAchievementBase(SQLModel, UTCBaseModel):
|
||||||
achievement_id: int
|
achievement_id: int
|
||||||
achieved_at: datetime = Field(
|
achieved_at: datetime = Field(default_factory=utcnow, sa_column=Column(DateTime(timezone=True)))
|
||||||
default=datetime.now(UTC), sa_column=Column(DateTime(timezone=True))
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class UserAchievement(UserAchievementBase, table=True):
|
class UserAchievement(UserAchievementBase, table=True):
|
||||||
__tablename__ = "lazer_user_achievements" # pyright: ignore[reportAssignmentType]
|
__tablename__: str = "lazer_user_achievements"
|
||||||
|
|
||||||
id: int | None = Field(default=None, primary_key=True, index=True)
|
id: int | None = Field(default=None, primary_key=True, index=True)
|
||||||
user_id: int = Field(
|
user_id: int = Field(sa_column=Column(BigInteger, ForeignKey("lazer_users.id")), exclude=True)
|
||||||
sa_column=Column(BigInteger, ForeignKey("lazer_users.id")), exclude=True
|
|
||||||
)
|
|
||||||
user: "User" = Relationship(back_populates="achievement")
|
user: "User" = Relationship(back_populates="achievement")
|
||||||
|
|
||||||
|
|
||||||
@@ -56,15 +53,11 @@ async def process_achievements(session: AsyncSession, redis: Redis, score_id: in
|
|||||||
if not score:
|
if not score:
|
||||||
return
|
return
|
||||||
achieved = (
|
achieved = (
|
||||||
await session.exec(
|
await session.exec(select(UserAchievement.achievement_id).where(UserAchievement.user_id == score.user_id))
|
||||||
select(UserAchievement.achievement_id).where(
|
|
||||||
UserAchievement.user_id == score.user_id
|
|
||||||
)
|
|
||||||
)
|
|
||||||
).all()
|
).all()
|
||||||
not_achieved = {k: v for k, v in MEDALS.items() if k.id not in achieved}
|
not_achieved = {k: v for k, v in MEDALS.items() if k.id not in achieved}
|
||||||
result: list[Achievement] = []
|
result: list[Achievement] = []
|
||||||
now = datetime.now(UTC)
|
now = utcnow()
|
||||||
for k, v in not_achieved.items():
|
for k, v in not_achieved.items():
|
||||||
if await v(session, score, score.beatmap):
|
if await v(session, score, score.beatmap):
|
||||||
result.append(k)
|
result.append(k)
|
||||||
@@ -78,16 +71,14 @@ async def process_achievements(session: AsyncSession, redis: Redis, score_id: in
|
|||||||
)
|
)
|
||||||
await redis.publish(
|
await redis.publish(
|
||||||
"chat:notification",
|
"chat:notification",
|
||||||
UserAchievementUnlock.init(
|
UserAchievementUnlock.init(r, score.user_id, score.gamemode).model_dump_json(),
|
||||||
r, score.user_id, score.gamemode
|
|
||||||
).model_dump_json(),
|
|
||||||
)
|
)
|
||||||
event = Event(
|
event = Event(
|
||||||
created_at=now,
|
created_at=now,
|
||||||
type=EventType.ACHIEVEMENT,
|
type=EventType.ACHIEVEMENT,
|
||||||
user_id=score.user_id,
|
user_id=score.user_id,
|
||||||
event_payload={
|
event_payload={
|
||||||
"achievement": {"achievement_id": r.id, "achieved_at": now.isoformat()},
|
"achievement": {"slug": r.assets_id, "name": r.name},
|
||||||
"user": {
|
"user": {
|
||||||
"username": score.user.username,
|
"username": score.user.username,
|
||||||
"url": settings.web_url + "users/" + str(score.user.id),
|
"url": settings.web_url + "users/" + str(score.user.id),
|
||||||
|
|||||||
@@ -3,6 +3,9 @@ import secrets
|
|||||||
from typing import TYPE_CHECKING
|
from typing import TYPE_CHECKING
|
||||||
|
|
||||||
from app.models.model import UTCBaseModel
|
from app.models.model import UTCBaseModel
|
||||||
|
from app.utils import utcnow
|
||||||
|
|
||||||
|
from .verification import LoginSession
|
||||||
|
|
||||||
from sqlalchemy import Column, DateTime
|
from sqlalchemy import Column, DateTime
|
||||||
from sqlmodel import (
|
from sqlmodel import (
|
||||||
@@ -13,49 +16,60 @@ from sqlmodel import (
|
|||||||
Relationship,
|
Relationship,
|
||||||
SQLModel,
|
SQLModel,
|
||||||
Text,
|
Text,
|
||||||
|
text,
|
||||||
)
|
)
|
||||||
|
|
||||||
if TYPE_CHECKING:
|
if TYPE_CHECKING:
|
||||||
from .lazer_user import User
|
from .user import User
|
||||||
|
|
||||||
|
|
||||||
class OAuthToken(UTCBaseModel, SQLModel, table=True):
|
class OAuthToken(UTCBaseModel, SQLModel, table=True):
|
||||||
__tablename__ = "oauth_tokens" # pyright: ignore[reportAssignmentType]
|
__tablename__: str = "oauth_tokens"
|
||||||
|
|
||||||
id: int | None = Field(default=None, primary_key=True, index=True)
|
id: int = Field(default=None, primary_key=True, index=True)
|
||||||
user_id: int = Field(
|
user_id: int = Field(sa_column=Column(BigInteger, ForeignKey("lazer_users.id"), index=True))
|
||||||
sa_column=Column(BigInteger, ForeignKey("lazer_users.id"), index=True)
|
|
||||||
)
|
|
||||||
client_id: int = Field(index=True)
|
client_id: int = Field(index=True)
|
||||||
access_token: str = Field(max_length=500, unique=True)
|
access_token: str = Field(max_length=500, unique=True)
|
||||||
refresh_token: str = Field(max_length=500, unique=True)
|
refresh_token: str = Field(max_length=500, unique=True)
|
||||||
token_type: str = Field(default="Bearer", max_length=20)
|
token_type: str = Field(default="Bearer", max_length=20)
|
||||||
scope: str = Field(default="*", max_length=100)
|
scope: str = Field(default="*", max_length=100)
|
||||||
expires_at: datetime = Field(sa_column=Column(DateTime))
|
expires_at: datetime = Field(sa_column=Column(DateTime, index=True))
|
||||||
created_at: datetime = Field(
|
refresh_token_expires_at: datetime = Field(sa_column=Column(DateTime, index=True))
|
||||||
default_factory=datetime.utcnow, sa_column=Column(DateTime)
|
created_at: datetime = Field(default_factory=utcnow, sa_column=Column(DateTime))
|
||||||
)
|
|
||||||
|
|
||||||
user: "User" = Relationship()
|
user: "User" = Relationship()
|
||||||
|
login_session: LoginSession | None = Relationship(back_populates="token", passive_deletes=True)
|
||||||
|
|
||||||
|
|
||||||
class OAuthClient(SQLModel, table=True):
|
class OAuthClient(UTCBaseModel, SQLModel, table=True):
|
||||||
__tablename__ = "oauth_clients" # pyright: ignore[reportAssignmentType]
|
__tablename__: str = "oauth_clients"
|
||||||
name: str = Field(max_length=100, index=True)
|
name: str = Field(max_length=100, index=True)
|
||||||
description: str = Field(sa_column=Column(Text), default="")
|
description: str = Field(sa_column=Column(Text), default="")
|
||||||
client_id: int | None = Field(default=None, primary_key=True, index=True)
|
client_id: int | None = Field(default=None, primary_key=True, index=True)
|
||||||
client_secret: str = Field(default_factory=secrets.token_hex, index=True)
|
client_secret: str = Field(default_factory=secrets.token_hex, index=True, exclude=True)
|
||||||
redirect_uris: list[str] = Field(default_factory=list, sa_column=Column(JSON))
|
redirect_uris: list[str] = Field(default_factory=list, sa_column=Column(JSON))
|
||||||
owner_id: int = Field(
|
owner_id: int = Field(sa_column=Column(BigInteger, ForeignKey("lazer_users.id"), index=True), exclude=True)
|
||||||
sa_column=Column(BigInteger, ForeignKey("lazer_users.id"), index=True)
|
|
||||||
|
created_at: datetime = Field(default_factory=utcnow, sa_column=Column(DateTime))
|
||||||
|
updated_at: datetime = Field(
|
||||||
|
default_factory=utcnow,
|
||||||
|
sa_column=Column(DateTime, onupdate=text("CURRENT_TIMESTAMP")),
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
class V1APIKeys(SQLModel, table=True):
|
class V1APIKeys(SQLModel, table=True):
|
||||||
__tablename__ = "v1_api_keys" # pyright: ignore[reportAssignmentType]
|
__tablename__: str = "v1_api_keys"
|
||||||
id: int | None = Field(default=None, primary_key=True)
|
id: int | None = Field(default=None, primary_key=True)
|
||||||
name: str = Field(max_length=100, index=True)
|
name: str = Field(max_length=100, index=True)
|
||||||
key: str = Field(default_factory=secrets.token_hex, index=True)
|
key: str = Field(default_factory=secrets.token_hex, index=True)
|
||||||
owner_id: int = Field(
|
owner_id: int = Field(sa_column=Column(BigInteger, ForeignKey("lazer_users.id"), index=True))
|
||||||
sa_column=Column(BigInteger, ForeignKey("lazer_users.id"), index=True)
|
|
||||||
)
|
|
||||||
|
class TotpKeys(SQLModel, table=True):
|
||||||
|
__tablename__: str = "totp_keys"
|
||||||
|
user_id: int = Field(sa_column=Column(BigInteger, ForeignKey("lazer_users.id"), primary_key=True))
|
||||||
|
secret: str = Field(max_length=100)
|
||||||
|
backup_keys: list[str] = Field(default_factory=list, sa_column=Column(JSON))
|
||||||
|
created_at: datetime = Field(default_factory=utcnow, sa_column=Column(DateTime))
|
||||||
|
|
||||||
|
user: "User" = Relationship(back_populates="totp_key")
|
||||||
|
|||||||
@@ -1,124 +1,339 @@
|
|||||||
import asyncio
|
|
||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
import hashlib
|
import hashlib
|
||||||
from typing import TYPE_CHECKING
|
from typing import TYPE_CHECKING, ClassVar, NotRequired, TypedDict
|
||||||
|
|
||||||
from app.calculator import calculate_beatmap_attribute
|
from app.calculator import get_calculator
|
||||||
from app.config import settings
|
from app.config import settings
|
||||||
from app.database.failtime import FailTime, FailTimeResp
|
from app.models.beatmap import BeatmapRankStatus
|
||||||
from app.models.beatmap import BeatmapAttributes, BeatmapRankStatus
|
|
||||||
from app.models.mods import APIMod
|
from app.models.mods import APIMod
|
||||||
|
from app.models.performance import DifficultyAttributesUnion
|
||||||
from app.models.score import GameMode
|
from app.models.score import GameMode
|
||||||
|
|
||||||
|
from ._base import DatabaseModel, OnDemand, included, ondemand
|
||||||
from .beatmap_playcounts import BeatmapPlaycounts
|
from .beatmap_playcounts import BeatmapPlaycounts
|
||||||
from .beatmapset import Beatmapset, BeatmapsetResp
|
from .beatmap_tags import BeatmapTagVote
|
||||||
|
from .beatmapset import Beatmapset, BeatmapsetDict, BeatmapsetModel
|
||||||
|
from .failtime import FailTime, FailTimeResp
|
||||||
|
from .user import User, UserDict, UserModel
|
||||||
|
|
||||||
|
from pydantic import BaseModel, TypeAdapter
|
||||||
from redis.asyncio import Redis
|
from redis.asyncio import Redis
|
||||||
from sqlalchemy import Column, DateTime
|
from sqlalchemy import Column, DateTime
|
||||||
|
from sqlalchemy.ext.asyncio import AsyncAttrs
|
||||||
from sqlmodel import VARCHAR, Field, Relationship, SQLModel, col, exists, func, select
|
from sqlmodel import VARCHAR, Field, Relationship, SQLModel, col, exists, func, select
|
||||||
from sqlmodel.ext.asyncio.session import AsyncSession
|
from sqlmodel.ext.asyncio.session import AsyncSession
|
||||||
|
|
||||||
if TYPE_CHECKING:
|
if TYPE_CHECKING:
|
||||||
from app.fetcher import Fetcher
|
from app.fetcher import Fetcher
|
||||||
|
|
||||||
from .lazer_user import User
|
|
||||||
|
|
||||||
|
|
||||||
class BeatmapOwner(SQLModel):
|
class BeatmapOwner(SQLModel):
|
||||||
id: int
|
id: int
|
||||||
username: str
|
username: str
|
||||||
|
|
||||||
|
|
||||||
class BeatmapBase(SQLModel):
|
class BeatmapDict(TypedDict):
|
||||||
# Beatmap
|
beatmapset_id: int
|
||||||
url: str
|
difficulty_rating: float
|
||||||
|
id: int
|
||||||
mode: GameMode
|
mode: GameMode
|
||||||
|
total_length: int
|
||||||
|
user_id: int
|
||||||
|
version: str
|
||||||
|
url: str
|
||||||
|
|
||||||
|
checksum: NotRequired[str]
|
||||||
|
max_combo: NotRequired[int | None]
|
||||||
|
ar: NotRequired[float]
|
||||||
|
cs: NotRequired[float]
|
||||||
|
drain: NotRequired[float]
|
||||||
|
accuracy: NotRequired[float]
|
||||||
|
bpm: NotRequired[float]
|
||||||
|
count_circles: NotRequired[int]
|
||||||
|
count_sliders: NotRequired[int]
|
||||||
|
count_spinners: NotRequired[int]
|
||||||
|
deleted_at: NotRequired[datetime | None]
|
||||||
|
hit_length: NotRequired[int]
|
||||||
|
last_updated: NotRequired[datetime]
|
||||||
|
|
||||||
|
status: NotRequired[str]
|
||||||
|
beatmapset: NotRequired[BeatmapsetDict]
|
||||||
|
current_user_playcount: NotRequired[int]
|
||||||
|
current_user_tag_ids: NotRequired[list[int]]
|
||||||
|
failtimes: NotRequired[FailTimeResp]
|
||||||
|
top_tag_ids: NotRequired[list[dict[str, int]]]
|
||||||
|
user: NotRequired[UserDict]
|
||||||
|
convert: NotRequired[bool]
|
||||||
|
is_scoreable: NotRequired[bool]
|
||||||
|
mode_int: NotRequired[int]
|
||||||
|
ranked: NotRequired[int]
|
||||||
|
playcount: NotRequired[int]
|
||||||
|
passcount: NotRequired[int]
|
||||||
|
|
||||||
|
|
||||||
|
class BeatmapModel(DatabaseModel[BeatmapDict]):
|
||||||
|
BEATMAP_TRANSFORMER_INCLUDES: ClassVar[list[str]] = [
|
||||||
|
"checksum",
|
||||||
|
"accuracy",
|
||||||
|
"ar",
|
||||||
|
"bpm",
|
||||||
|
"convert",
|
||||||
|
"count_circles",
|
||||||
|
"count_sliders",
|
||||||
|
"count_spinners",
|
||||||
|
"cs",
|
||||||
|
"deleted_at",
|
||||||
|
"drain",
|
||||||
|
"hit_length",
|
||||||
|
"is_scoreable",
|
||||||
|
"last_updated",
|
||||||
|
"mode_int",
|
||||||
|
"passcount",
|
||||||
|
"playcount",
|
||||||
|
"ranked",
|
||||||
|
"url",
|
||||||
|
]
|
||||||
|
DEFAULT_API_INCLUDES: ClassVar[list[str]] = [
|
||||||
|
"beatmapset.ratings",
|
||||||
|
"current_user_playcount",
|
||||||
|
"failtimes",
|
||||||
|
"max_combo",
|
||||||
|
"owners",
|
||||||
|
]
|
||||||
|
TRANSFORMER_INCLUDES: ClassVar[list[str]] = [*DEFAULT_API_INCLUDES, *BEATMAP_TRANSFORMER_INCLUDES]
|
||||||
|
|
||||||
|
# Beatmap
|
||||||
beatmapset_id: int = Field(foreign_key="beatmapsets.id", index=True)
|
beatmapset_id: int = Field(foreign_key="beatmapsets.id", index=True)
|
||||||
difficulty_rating: float = Field(default=0.0, index=True)
|
difficulty_rating: float = Field(default=0.0, index=True)
|
||||||
|
id: int = Field(primary_key=True, index=True)
|
||||||
|
mode: GameMode
|
||||||
total_length: int
|
total_length: int
|
||||||
user_id: int = Field(index=True)
|
user_id: int = Field(index=True)
|
||||||
version: str = Field(index=True)
|
version: str = Field(index=True)
|
||||||
|
|
||||||
|
url: OnDemand[str]
|
||||||
# optional
|
# optional
|
||||||
checksum: str = Field(sa_column=Column(VARCHAR(32), index=True))
|
checksum: OnDemand[str] = Field(sa_column=Column(VARCHAR(32), index=True))
|
||||||
current_user_playcount: int = Field(default=0)
|
max_combo: OnDemand[int | None] = Field(default=0)
|
||||||
max_combo: int | None = Field(default=0)
|
# TODO: owners
|
||||||
# TODO: failtimes, owners
|
|
||||||
|
|
||||||
# BeatmapExtended
|
# BeatmapExtended
|
||||||
ar: float = Field(default=0.0)
|
ar: OnDemand[float] = Field(default=0.0)
|
||||||
cs: float = Field(default=0.0)
|
cs: OnDemand[float] = Field(default=0.0)
|
||||||
drain: float = Field(default=0.0) # hp
|
drain: OnDemand[float] = Field(default=0.0) # hp
|
||||||
accuracy: float = Field(default=0.0) # od
|
accuracy: OnDemand[float] = Field(default=0.0) # od
|
||||||
bpm: float = Field(default=0.0)
|
bpm: OnDemand[float] = Field(default=0.0)
|
||||||
count_circles: int = Field(default=0)
|
count_circles: OnDemand[int] = Field(default=0)
|
||||||
count_sliders: int = Field(default=0)
|
count_sliders: OnDemand[int] = Field(default=0)
|
||||||
count_spinners: int = Field(default=0)
|
count_spinners: OnDemand[int] = Field(default=0)
|
||||||
deleted_at: datetime | None = Field(default=None, sa_column=Column(DateTime))
|
deleted_at: OnDemand[datetime | None] = Field(default=None, sa_column=Column(DateTime))
|
||||||
hit_length: int = Field(default=0)
|
hit_length: OnDemand[int] = Field(default=0)
|
||||||
last_updated: datetime = Field(sa_column=Column(DateTime, index=True))
|
last_updated: OnDemand[datetime] = Field(sa_column=Column(DateTime, index=True))
|
||||||
|
|
||||||
|
@included
|
||||||
|
@staticmethod
|
||||||
|
async def status(_session: AsyncSession, beatmap: "Beatmap") -> str:
|
||||||
|
if settings.enable_all_beatmap_leaderboard and not beatmap.beatmap_status.has_leaderboard():
|
||||||
|
return BeatmapRankStatus.APPROVED.name.lower()
|
||||||
|
return beatmap.beatmap_status.name.lower()
|
||||||
|
|
||||||
|
@ondemand
|
||||||
|
@staticmethod
|
||||||
|
async def beatmapset(
|
||||||
|
_session: AsyncSession,
|
||||||
|
beatmap: "Beatmap",
|
||||||
|
includes: list[str] | None = None,
|
||||||
|
) -> BeatmapsetDict | None:
|
||||||
|
if beatmap.beatmapset is not None:
|
||||||
|
return await BeatmapsetModel.transform(
|
||||||
|
beatmap.beatmapset, includes=(includes or []) + Beatmapset.BEATMAPSET_TRANSFORMER_INCLUDES
|
||||||
|
)
|
||||||
|
|
||||||
|
@ondemand
|
||||||
|
@staticmethod
|
||||||
|
async def current_user_playcount(_session: AsyncSession, beatmap: "Beatmap", user: "User") -> int:
|
||||||
|
playcount = (
|
||||||
|
await _session.exec(
|
||||||
|
select(BeatmapPlaycounts.playcount).where(
|
||||||
|
BeatmapPlaycounts.beatmap_id == beatmap.id, BeatmapPlaycounts.user_id == user.id
|
||||||
|
)
|
||||||
|
)
|
||||||
|
).first()
|
||||||
|
return int(playcount or 0)
|
||||||
|
|
||||||
|
@ondemand
|
||||||
|
@staticmethod
|
||||||
|
async def current_user_tag_ids(_session: AsyncSession, beatmap: "Beatmap", user: "User | None" = None) -> list[int]:
|
||||||
|
if user is None:
|
||||||
|
return []
|
||||||
|
tag_ids = (
|
||||||
|
await _session.exec(
|
||||||
|
select(BeatmapTagVote.tag_id).where(
|
||||||
|
BeatmapTagVote.beatmap_id == beatmap.id,
|
||||||
|
BeatmapTagVote.user_id == user.id,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
).all()
|
||||||
|
return list(tag_ids)
|
||||||
|
|
||||||
|
@ondemand
|
||||||
|
@staticmethod
|
||||||
|
async def failtimes(_session: AsyncSession, beatmap: "Beatmap") -> FailTimeResp:
|
||||||
|
if beatmap.failtimes is not None:
|
||||||
|
return FailTimeResp.from_db(beatmap.failtimes)
|
||||||
|
return FailTimeResp()
|
||||||
|
|
||||||
|
@ondemand
|
||||||
|
@staticmethod
|
||||||
|
async def top_tag_ids(_session: AsyncSession, beatmap: "Beatmap") -> list[dict[str, int]]:
|
||||||
|
all_votes = (
|
||||||
|
await _session.exec(
|
||||||
|
select(BeatmapTagVote.tag_id, func.count().label("vote_count"))
|
||||||
|
.where(BeatmapTagVote.beatmap_id == beatmap.id)
|
||||||
|
.group_by(col(BeatmapTagVote.tag_id))
|
||||||
|
.having(func.count() > settings.beatmap_tag_top_count)
|
||||||
|
)
|
||||||
|
).all()
|
||||||
|
top_tag_ids: list[dict[str, int]] = []
|
||||||
|
for id, votes in all_votes:
|
||||||
|
top_tag_ids.append({"tag_id": id, "count": votes})
|
||||||
|
top_tag_ids.sort(key=lambda x: x["count"], reverse=True)
|
||||||
|
return top_tag_ids
|
||||||
|
|
||||||
|
@ondemand
|
||||||
|
@staticmethod
|
||||||
|
async def user(
|
||||||
|
_session: AsyncSession,
|
||||||
|
beatmap: "Beatmap",
|
||||||
|
includes: list[str] | None = None,
|
||||||
|
) -> UserDict | None:
|
||||||
|
from .user import User
|
||||||
|
|
||||||
|
user = await _session.get(User, beatmap.user_id)
|
||||||
|
if user is None:
|
||||||
|
return None
|
||||||
|
return await UserModel.transform(user, includes=includes)
|
||||||
|
|
||||||
|
@ondemand
|
||||||
|
@staticmethod
|
||||||
|
async def convert(_session: AsyncSession, _beatmap: "Beatmap") -> bool:
|
||||||
|
return False
|
||||||
|
|
||||||
|
@ondemand
|
||||||
|
@staticmethod
|
||||||
|
async def is_scoreable(_session: AsyncSession, beatmap: "Beatmap") -> bool:
|
||||||
|
beatmap_status = beatmap.beatmap_status
|
||||||
|
if settings.enable_all_beatmap_leaderboard:
|
||||||
|
return True
|
||||||
|
return beatmap_status.has_leaderboard()
|
||||||
|
|
||||||
|
@ondemand
|
||||||
|
@staticmethod
|
||||||
|
async def mode_int(_session: AsyncSession, beatmap: "Beatmap") -> int:
|
||||||
|
return int(beatmap.mode)
|
||||||
|
|
||||||
|
@ondemand
|
||||||
|
@staticmethod
|
||||||
|
async def ranked(_session: AsyncSession, beatmap: "Beatmap") -> int:
|
||||||
|
beatmap_status = beatmap.beatmap_status
|
||||||
|
if settings.enable_all_beatmap_leaderboard and not beatmap_status.has_leaderboard():
|
||||||
|
return BeatmapRankStatus.APPROVED.value
|
||||||
|
return beatmap_status.value
|
||||||
|
|
||||||
|
@ondemand
|
||||||
|
@staticmethod
|
||||||
|
async def playcount(_session: AsyncSession, beatmap: "Beatmap") -> int:
|
||||||
|
result = (
|
||||||
|
await _session.exec(
|
||||||
|
select(func.sum(BeatmapPlaycounts.playcount)).where(BeatmapPlaycounts.beatmap_id == beatmap.id)
|
||||||
|
)
|
||||||
|
).first()
|
||||||
|
return int(result or 0)
|
||||||
|
|
||||||
|
@ondemand
|
||||||
|
@staticmethod
|
||||||
|
async def passcount(_session: AsyncSession, beatmap: "Beatmap") -> int:
|
||||||
|
from .score import Score
|
||||||
|
|
||||||
|
return (
|
||||||
|
await _session.exec(
|
||||||
|
select(func.count())
|
||||||
|
.select_from(Score)
|
||||||
|
.where(
|
||||||
|
Score.beatmap_id == beatmap.id,
|
||||||
|
col(Score.passed).is_(True),
|
||||||
|
)
|
||||||
|
)
|
||||||
|
).one()
|
||||||
|
|
||||||
|
|
||||||
class Beatmap(BeatmapBase, table=True):
|
class Beatmap(AsyncAttrs, BeatmapModel, table=True):
|
||||||
__tablename__ = "beatmaps" # pyright: ignore[reportAssignmentType]
|
__tablename__: str = "beatmaps"
|
||||||
id: int = Field(primary_key=True, index=True)
|
|
||||||
beatmapset_id: int = Field(foreign_key="beatmapsets.id", index=True)
|
|
||||||
beatmap_status: BeatmapRankStatus = Field(index=True)
|
beatmap_status: BeatmapRankStatus = Field(index=True)
|
||||||
# optional
|
# optional
|
||||||
beatmapset: Beatmapset = Relationship(
|
beatmapset: "Beatmapset" = Relationship(back_populates="beatmaps", sa_relationship_kwargs={"lazy": "joined"})
|
||||||
back_populates="beatmaps", sa_relationship_kwargs={"lazy": "joined"}
|
failtimes: FailTime | None = Relationship(back_populates="beatmap", sa_relationship_kwargs={"lazy": "joined"})
|
||||||
)
|
|
||||||
failtimes: FailTime | None = Relationship(
|
|
||||||
back_populates="beatmap", sa_relationship_kwargs={"lazy": "joined"}
|
|
||||||
)
|
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
async def from_resp(cls, session: AsyncSession, resp: "BeatmapResp") -> "Beatmap":
|
async def from_resp_no_save(cls, _session: AsyncSession, resp: BeatmapDict) -> "Beatmap":
|
||||||
d = resp.model_dump()
|
d = {k: v for k, v in resp.items() if k != "beatmapset"}
|
||||||
del d["beatmapset"]
|
beatmapset_id = resp.get("beatmapset_id")
|
||||||
beatmap = Beatmap.model_validate(
|
bid = resp.get("id")
|
||||||
|
ranked = resp.get("ranked")
|
||||||
|
if beatmapset_id is None or bid is None or ranked is None:
|
||||||
|
raise ValueError("beatmapset_id, id and ranked are required")
|
||||||
|
beatmap = cls.model_validate(
|
||||||
{
|
{
|
||||||
**d,
|
**d,
|
||||||
"beatmapset_id": resp.beatmapset_id,
|
"beatmapset_id": beatmapset_id,
|
||||||
"id": resp.id,
|
"id": bid,
|
||||||
"beatmap_status": BeatmapRankStatus(resp.ranked),
|
"beatmap_status": BeatmapRankStatus(ranked),
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
if not (
|
|
||||||
await session.exec(select(exists()).where(Beatmap.id == resp.id))
|
|
||||||
).first():
|
|
||||||
session.add(beatmap)
|
|
||||||
await session.commit()
|
|
||||||
beatmap = (
|
|
||||||
await session.exec(select(Beatmap).where(Beatmap.id == resp.id))
|
|
||||||
).first()
|
|
||||||
assert beatmap is not None, "Beatmap should not be None after commit"
|
|
||||||
return beatmap
|
return beatmap
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
async def from_resp_batch(
|
async def from_resp(cls, session: AsyncSession, resp: BeatmapDict) -> "Beatmap":
|
||||||
cls, session: AsyncSession, inp: list["BeatmapResp"], from_: int = 0
|
beatmap = await cls.from_resp_no_save(session, resp)
|
||||||
) -> list["Beatmap"]:
|
resp_id = resp.get("id")
|
||||||
|
if resp_id is None:
|
||||||
|
raise ValueError("id is required")
|
||||||
|
if not (await session.exec(select(exists()).where(Beatmap.id == resp_id))).first():
|
||||||
|
session.add(beatmap)
|
||||||
|
await session.commit()
|
||||||
|
return (await session.exec(select(Beatmap).where(Beatmap.id == resp_id))).one()
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
async def from_resp_batch(cls, session: AsyncSession, inp: list[BeatmapDict], from_: int = 0) -> list["Beatmap"]:
|
||||||
beatmaps = []
|
beatmaps = []
|
||||||
for resp in inp:
|
for resp_dict in inp:
|
||||||
if resp.id == from_:
|
bid = resp_dict.get("id")
|
||||||
|
if bid == from_ or bid is None:
|
||||||
continue
|
continue
|
||||||
d = resp.model_dump()
|
|
||||||
del d["beatmapset"]
|
beatmapset_id = resp_dict.get("beatmapset_id")
|
||||||
|
ranked = resp_dict.get("ranked")
|
||||||
|
if beatmapset_id is None or ranked is None:
|
||||||
|
continue
|
||||||
|
|
||||||
|
# 创建 beatmap 字典,移除 beatmapset
|
||||||
|
d = {k: v for k, v in resp_dict.items() if k != "beatmapset"}
|
||||||
|
|
||||||
beatmap = Beatmap.model_validate(
|
beatmap = Beatmap.model_validate(
|
||||||
{
|
{
|
||||||
**d,
|
**d,
|
||||||
"beatmapset_id": resp.beatmapset_id,
|
"beatmapset_id": beatmapset_id,
|
||||||
"id": resp.id,
|
"id": bid,
|
||||||
"beatmap_status": BeatmapRankStatus(resp.ranked),
|
"beatmap_status": BeatmapRankStatus(ranked),
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
if not (
|
if not (await session.exec(select(exists()).where(Beatmap.id == bid))).first():
|
||||||
await session.exec(select(exists()).where(Beatmap.id == resp.id))
|
|
||||||
).first():
|
|
||||||
session.add(beatmap)
|
session.add(beatmap)
|
||||||
beatmaps.append(beatmap)
|
beatmaps.append(beatmap)
|
||||||
await session.commit()
|
await session.commit()
|
||||||
|
for beatmap in beatmaps:
|
||||||
|
await session.refresh(beatmap)
|
||||||
return beatmaps
|
return beatmaps
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
@@ -129,96 +344,35 @@ class Beatmap(BeatmapBase, table=True):
|
|||||||
bid: int | None = None,
|
bid: int | None = None,
|
||||||
md5: str | None = None,
|
md5: str | None = None,
|
||||||
) -> "Beatmap":
|
) -> "Beatmap":
|
||||||
beatmap = (
|
stmt = select(Beatmap)
|
||||||
await session.exec(
|
if bid is not None:
|
||||||
select(Beatmap).where(
|
stmt = stmt.where(Beatmap.id == bid)
|
||||||
Beatmap.id == bid if bid is not None else Beatmap.checksum == md5
|
elif md5 is not None:
|
||||||
)
|
stmt = stmt.where(Beatmap.checksum == md5)
|
||||||
)
|
else:
|
||||||
).first()
|
raise ValueError("Either bid or md5 must be provided")
|
||||||
|
beatmap = (await session.exec(stmt)).first()
|
||||||
if not beatmap:
|
if not beatmap:
|
||||||
resp = await fetcher.get_beatmap(bid, md5)
|
resp = await fetcher.get_beatmap(bid, md5)
|
||||||
r = await session.exec(
|
beatmapset_id = resp.get("beatmapset_id")
|
||||||
select(Beatmapset.id).where(Beatmapset.id == resp.beatmapset_id)
|
if beatmapset_id is None:
|
||||||
)
|
raise ValueError("beatmapset_id is required")
|
||||||
|
r = await session.exec(select(Beatmapset.id).where(Beatmapset.id == beatmapset_id))
|
||||||
if not r.first():
|
if not r.first():
|
||||||
set_resp = await fetcher.get_beatmapset(resp.beatmapset_id)
|
set_resp = await fetcher.get_beatmapset(beatmapset_id)
|
||||||
await Beatmapset.from_resp(session, set_resp, from_=resp.id)
|
resp_id = resp.get("id")
|
||||||
|
await Beatmapset.from_resp(session, set_resp, from_=resp_id or 0)
|
||||||
return await Beatmap.from_resp(session, resp)
|
return await Beatmap.from_resp(session, resp)
|
||||||
return beatmap
|
return beatmap
|
||||||
|
|
||||||
|
|
||||||
class BeatmapResp(BeatmapBase):
|
class APIBeatmapTag(BaseModel):
|
||||||
id: int
|
tag_id: int
|
||||||
beatmapset_id: int
|
count: int
|
||||||
beatmapset: BeatmapsetResp | None = None
|
|
||||||
convert: bool = False
|
|
||||||
is_scoreable: bool
|
|
||||||
status: str
|
|
||||||
mode_int: int
|
|
||||||
ranked: int
|
|
||||||
url: str = ""
|
|
||||||
playcount: int = 0
|
|
||||||
passcount: int = 0
|
|
||||||
failtimes: FailTimeResp | None = None
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
async def from_db(
|
|
||||||
cls,
|
|
||||||
beatmap: Beatmap,
|
|
||||||
query_mode: GameMode | None = None,
|
|
||||||
from_set: bool = False,
|
|
||||||
session: AsyncSession | None = None,
|
|
||||||
user: "User | None" = None,
|
|
||||||
) -> "BeatmapResp":
|
|
||||||
from .score import Score
|
|
||||||
|
|
||||||
beatmap_ = beatmap.model_dump()
|
|
||||||
beatmap_status = beatmap.beatmap_status
|
|
||||||
if query_mode is not None and beatmap.mode != query_mode:
|
|
||||||
beatmap_["convert"] = True
|
|
||||||
beatmap_["is_scoreable"] = beatmap_status.has_leaderboard()
|
|
||||||
if (
|
|
||||||
settings.enable_all_beatmap_leaderboard
|
|
||||||
and not beatmap_status.has_leaderboard()
|
|
||||||
):
|
|
||||||
beatmap_["ranked"] = BeatmapRankStatus.APPROVED.value
|
|
||||||
beatmap_["status"] = BeatmapRankStatus.APPROVED.name.lower()
|
|
||||||
else:
|
|
||||||
beatmap_["status"] = beatmap_status.name.lower()
|
|
||||||
beatmap_["ranked"] = beatmap_status.value
|
|
||||||
beatmap_["mode_int"] = int(beatmap.mode)
|
|
||||||
if not from_set:
|
|
||||||
beatmap_["beatmapset"] = await BeatmapsetResp.from_db(
|
|
||||||
beatmap.beatmapset, session=session, user=user
|
|
||||||
)
|
|
||||||
if beatmap.failtimes is not None:
|
|
||||||
beatmap_["failtimes"] = FailTimeResp.from_db(beatmap.failtimes)
|
|
||||||
else:
|
|
||||||
beatmap_["failtimes"] = FailTimeResp()
|
|
||||||
if session:
|
|
||||||
beatmap_["playcount"] = (
|
|
||||||
await session.exec(
|
|
||||||
select(func.count())
|
|
||||||
.select_from(BeatmapPlaycounts)
|
|
||||||
.where(BeatmapPlaycounts.beatmap_id == beatmap.id)
|
|
||||||
)
|
|
||||||
).one()
|
|
||||||
beatmap_["passcount"] = (
|
|
||||||
await session.exec(
|
|
||||||
select(func.count())
|
|
||||||
.select_from(Score)
|
|
||||||
.where(
|
|
||||||
Score.beatmap_id == beatmap.id,
|
|
||||||
col(Score.passed).is_(True),
|
|
||||||
)
|
|
||||||
)
|
|
||||||
).one()
|
|
||||||
return cls.model_validate(beatmap_)
|
|
||||||
|
|
||||||
|
|
||||||
class BannedBeatmaps(SQLModel, table=True):
|
class BannedBeatmaps(SQLModel, table=True):
|
||||||
__tablename__ = "banned_beatmaps" # pyright: ignore[reportAssignmentType]
|
__tablename__: str = "banned_beatmaps"
|
||||||
id: int | None = Field(primary_key=True, index=True, default=None)
|
id: int | None = Field(primary_key=True, index=True, default=None)
|
||||||
beatmap_id: int = Field(index=True)
|
beatmap_id: int = Field(index=True)
|
||||||
|
|
||||||
@@ -229,16 +383,31 @@ async def calculate_beatmap_attributes(
|
|||||||
mods_: list[APIMod],
|
mods_: list[APIMod],
|
||||||
redis: Redis,
|
redis: Redis,
|
||||||
fetcher: "Fetcher",
|
fetcher: "Fetcher",
|
||||||
):
|
) -> DifficultyAttributesUnion:
|
||||||
key = (
|
key = f"beatmap:{beatmap_id}:{ruleset}:{hashlib.sha256(str(mods_).encode()).hexdigest()}:attributes"
|
||||||
f"beatmap:{beatmap_id}:{ruleset}:"
|
|
||||||
f"{hashlib.md5(str(mods_).encode()).hexdigest()}:attributes"
|
|
||||||
)
|
|
||||||
if await redis.exists(key):
|
if await redis.exists(key):
|
||||||
return BeatmapAttributes.model_validate_json(await redis.get(key)) # pyright: ignore[reportArgumentType]
|
return TypeAdapter(DifficultyAttributesUnion).validate_json(await redis.get(key))
|
||||||
resp = await fetcher.get_or_fetch_beatmap_raw(redis, beatmap_id)
|
resp = await fetcher.get_or_fetch_beatmap_raw(redis, beatmap_id)
|
||||||
attr = await asyncio.get_event_loop().run_in_executor(
|
|
||||||
None, calculate_beatmap_attribute, resp, ruleset, mods_
|
attr = await get_calculator().calculate_difficulty(resp, mods_, ruleset)
|
||||||
)
|
|
||||||
await redis.set(key, attr.model_dump_json())
|
await redis.set(key, attr.model_dump_json())
|
||||||
return attr
|
return attr
|
||||||
|
|
||||||
|
|
||||||
|
async def clear_cached_beatmap_raws(redis: Redis, beatmaps: list[int] = []):
|
||||||
|
"""清理缓存的 beatmap 原始数据,使用非阻塞方式"""
|
||||||
|
if beatmaps:
|
||||||
|
# 分批删除,避免一次删除太多 key 导致阻塞
|
||||||
|
batch_size = 50
|
||||||
|
for i in range(0, len(beatmaps), batch_size):
|
||||||
|
batch = beatmaps[i : i + batch_size]
|
||||||
|
keys = [f"beatmap:{bid}:raw" for bid in batch]
|
||||||
|
# 使用 unlink 而不是 delete(非阻塞,更快)
|
||||||
|
try:
|
||||||
|
await redis.unlink(*keys)
|
||||||
|
except Exception:
|
||||||
|
# 如果 unlink 不支持,回退到 delete
|
||||||
|
await redis.delete(*keys)
|
||||||
|
return
|
||||||
|
|
||||||
|
await redis.delete("beatmap:*:raw")
|
||||||
|
|||||||
@@ -1,67 +1,78 @@
|
|||||||
from datetime import UTC, datetime
|
from typing import TYPE_CHECKING, NotRequired, TypedDict
|
||||||
from typing import TYPE_CHECKING
|
|
||||||
|
|
||||||
from app.database.events import Event, EventType
|
from app.config import settings
|
||||||
|
from app.utils import utcnow
|
||||||
|
|
||||||
|
from ._base import DatabaseModel, included
|
||||||
|
from .events import Event, EventType
|
||||||
|
|
||||||
from pydantic import BaseModel
|
|
||||||
from sqlalchemy.ext.asyncio import AsyncAttrs
|
from sqlalchemy.ext.asyncio import AsyncAttrs
|
||||||
from sqlmodel import (
|
from sqlmodel import (
|
||||||
BigInteger,
|
BigInteger,
|
||||||
Column,
|
Column,
|
||||||
Field,
|
Field,
|
||||||
ForeignKey,
|
ForeignKey,
|
||||||
|
Index,
|
||||||
Relationship,
|
Relationship,
|
||||||
SQLModel,
|
|
||||||
select,
|
select,
|
||||||
)
|
)
|
||||||
from sqlmodel.ext.asyncio.session import AsyncSession
|
from sqlmodel.ext.asyncio.session import AsyncSession
|
||||||
|
|
||||||
if TYPE_CHECKING:
|
if TYPE_CHECKING:
|
||||||
from .beatmap import Beatmap, BeatmapResp
|
from .beatmap import Beatmap, BeatmapDict
|
||||||
from .beatmapset import BeatmapsetResp
|
from .beatmapset import BeatmapsetDict
|
||||||
from .lazer_user import User
|
from .user import User
|
||||||
|
|
||||||
|
|
||||||
class BeatmapPlaycounts(AsyncAttrs, SQLModel, table=True):
|
class BeatmapPlaycountsDict(TypedDict):
|
||||||
__tablename__ = "beatmap_playcounts" # pyright: ignore[reportAssignmentType]
|
user_id: int
|
||||||
|
beatmap_id: int
|
||||||
|
count: NotRequired[int]
|
||||||
|
beatmap: NotRequired["BeatmapDict"]
|
||||||
|
beatmapset: NotRequired["BeatmapsetDict"]
|
||||||
|
|
||||||
id: int | None = Field(
|
|
||||||
default=None,
|
class BeatmapPlaycountsModel(AsyncAttrs, DatabaseModel[BeatmapPlaycountsDict]):
|
||||||
sa_column=Column(BigInteger, primary_key=True, autoincrement=True),
|
id: int = Field(default=None, sa_column=Column(BigInteger, primary_key=True, autoincrement=True), exclude=True)
|
||||||
)
|
user_id: int = Field(sa_column=Column(BigInteger, ForeignKey("lazer_users.id"), index=True))
|
||||||
user_id: int = Field(
|
|
||||||
sa_column=Column(BigInteger, ForeignKey("lazer_users.id"), index=True)
|
|
||||||
)
|
|
||||||
beatmap_id: int = Field(foreign_key="beatmaps.id", index=True)
|
beatmap_id: int = Field(foreign_key="beatmaps.id", index=True)
|
||||||
playcount: int = Field(default=0)
|
playcount: int = Field(default=0, exclude=True)
|
||||||
|
|
||||||
|
@included
|
||||||
|
@staticmethod
|
||||||
|
async def count(_session: AsyncSession, obj: "BeatmapPlaycounts") -> int:
|
||||||
|
return obj.playcount
|
||||||
|
|
||||||
|
@included
|
||||||
|
@staticmethod
|
||||||
|
async def beatmap(
|
||||||
|
_session: AsyncSession, obj: "BeatmapPlaycounts", includes: list[str] | None = None
|
||||||
|
) -> "BeatmapDict":
|
||||||
|
from .beatmap import BeatmapModel
|
||||||
|
|
||||||
|
await obj.awaitable_attrs.beatmap
|
||||||
|
return await BeatmapModel.transform(obj.beatmap, includes=includes)
|
||||||
|
|
||||||
|
@included
|
||||||
|
@staticmethod
|
||||||
|
async def beatmapset(
|
||||||
|
_session: AsyncSession, obj: "BeatmapPlaycounts", includes: list[str] | None = None
|
||||||
|
) -> "BeatmapsetDict":
|
||||||
|
from .beatmap import BeatmapsetModel
|
||||||
|
|
||||||
|
await obj.awaitable_attrs.beatmap
|
||||||
|
return await BeatmapsetModel.transform(obj.beatmap.beatmapset, includes=includes)
|
||||||
|
|
||||||
|
|
||||||
|
class BeatmapPlaycounts(BeatmapPlaycountsModel, table=True):
|
||||||
|
__tablename__: str = "beatmap_playcounts"
|
||||||
|
__table_args__ = (Index("idx_beatmap_playcounts_playcount_id", "playcount", "id"),)
|
||||||
|
|
||||||
user: "User" = Relationship()
|
user: "User" = Relationship()
|
||||||
beatmap: "Beatmap" = Relationship()
|
beatmap: "Beatmap" = Relationship()
|
||||||
|
|
||||||
|
|
||||||
class BeatmapPlaycountsResp(BaseModel):
|
async def process_beatmap_playcount(session: AsyncSession, user_id: int, beatmap_id: int) -> None:
|
||||||
beatmap_id: int
|
|
||||||
beatmap: "BeatmapResp | None" = None
|
|
||||||
beatmapset: "BeatmapsetResp | None" = None
|
|
||||||
count: int
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
async def from_db(cls, db_model: BeatmapPlaycounts) -> "BeatmapPlaycountsResp":
|
|
||||||
from .beatmap import BeatmapResp
|
|
||||||
from .beatmapset import BeatmapsetResp
|
|
||||||
|
|
||||||
await db_model.awaitable_attrs.beatmap
|
|
||||||
return cls(
|
|
||||||
beatmap_id=db_model.beatmap_id,
|
|
||||||
count=db_model.playcount,
|
|
||||||
beatmap=await BeatmapResp.from_db(db_model.beatmap),
|
|
||||||
beatmapset=await BeatmapsetResp.from_db(db_model.beatmap.beatmapset),
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
async def process_beatmap_playcount(
|
|
||||||
session: AsyncSession, user_id: int, beatmap_id: int
|
|
||||||
) -> None:
|
|
||||||
existing_playcount = (
|
existing_playcount = (
|
||||||
await session.exec(
|
await session.exec(
|
||||||
select(BeatmapPlaycounts).where(
|
select(BeatmapPlaycounts).where(
|
||||||
@@ -75,7 +86,7 @@ async def process_beatmap_playcount(
|
|||||||
existing_playcount.playcount += 1
|
existing_playcount.playcount += 1
|
||||||
if existing_playcount.playcount % 100 == 0:
|
if existing_playcount.playcount % 100 == 0:
|
||||||
playcount_event = Event(
|
playcount_event = Event(
|
||||||
created_at=datetime.now(UTC),
|
created_at=utcnow(),
|
||||||
type=EventType.BEATMAP_PLAYCOUNT,
|
type=EventType.BEATMAP_PLAYCOUNT,
|
||||||
user_id=user_id,
|
user_id=user_id,
|
||||||
)
|
)
|
||||||
@@ -84,12 +95,10 @@ async def process_beatmap_playcount(
|
|||||||
"count": existing_playcount.playcount,
|
"count": existing_playcount.playcount,
|
||||||
"beatmap": {
|
"beatmap": {
|
||||||
"title": existing_playcount.beatmap.version,
|
"title": existing_playcount.beatmap.version,
|
||||||
"url": existing_playcount.beatmap.url,
|
"url": existing_playcount.beatmap.url.replace("https://osu.ppy.sh/", settings.web_url),
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
session.add(playcount_event)
|
session.add(playcount_event)
|
||||||
else:
|
else:
|
||||||
new_playcount = BeatmapPlaycounts(
|
new_playcount = BeatmapPlaycounts(user_id=user_id, beatmap_id=beatmap_id, playcount=1)
|
||||||
user_id=user_id, beatmap_id=beatmap_id, playcount=1
|
|
||||||
)
|
|
||||||
session.add(new_playcount)
|
session.add(new_playcount)
|
||||||
|
|||||||
23
app/database/beatmap_sync.py
Normal file
23
app/database/beatmap_sync.py
Normal file
@@ -0,0 +1,23 @@
|
|||||||
|
from datetime import datetime
|
||||||
|
from typing import TypedDict
|
||||||
|
|
||||||
|
from app.models.beatmap import BeatmapRankStatus
|
||||||
|
from app.utils import utcnow
|
||||||
|
|
||||||
|
from sqlmodel import JSON, Column, DateTime, Field, SQLModel
|
||||||
|
|
||||||
|
|
||||||
|
class SavedBeatmapMeta(TypedDict):
|
||||||
|
beatmap_id: int
|
||||||
|
md5: str
|
||||||
|
is_deleted: bool
|
||||||
|
beatmap_status: BeatmapRankStatus
|
||||||
|
|
||||||
|
|
||||||
|
class BeatmapSync(SQLModel, table=True):
|
||||||
|
beatmapset_id: int = Field(primary_key=True, foreign_key="beatmapsets.id")
|
||||||
|
beatmaps: list[SavedBeatmapMeta] = Field(sa_column=Column(JSON))
|
||||||
|
beatmap_status: BeatmapRankStatus = Field(index=True)
|
||||||
|
consecutive_no_change: int = Field(default=0)
|
||||||
|
next_sync_time: datetime = Field(default_factory=utcnow, sa_column=Column(DateTime, index=True))
|
||||||
|
updated_at: datetime = Field(default_factory=utcnow, sa_column=Column(DateTime, index=True))
|
||||||
8
app/database/beatmap_tags.py
Normal file
8
app/database/beatmap_tags.py
Normal file
@@ -0,0 +1,8 @@
|
|||||||
|
from sqlmodel import Field, SQLModel
|
||||||
|
|
||||||
|
|
||||||
|
class BeatmapTagVote(SQLModel, table=True):
|
||||||
|
__tablename__: str = "beatmap_tags"
|
||||||
|
tag_id: int = Field(primary_key=True, index=True, default=None)
|
||||||
|
beatmap_id: int = Field(primary_key=True, index=True, default=None)
|
||||||
|
user_id: int = Field(primary_key=True, index=True, default=None)
|
||||||
@@ -1,13 +1,15 @@
|
|||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
from typing import TYPE_CHECKING, NotRequired, Self, TypedDict
|
from typing import TYPE_CHECKING, Any, ClassVar, NotRequired, TypedDict
|
||||||
|
|
||||||
from app.config import settings
|
from app.config import settings
|
||||||
from app.models.beatmap import BeatmapRankStatus, Genre, Language
|
from app.models.beatmap import BeatmapRankStatus, Genre, Language
|
||||||
from app.models.score import GameMode
|
from app.models.score import GameMode
|
||||||
|
|
||||||
from .lazer_user import BASE_INCLUDES, User, UserResp
|
from ._base import DatabaseModel, OnDemand, included, ondemand
|
||||||
|
from .beatmap_playcounts import BeatmapPlaycounts
|
||||||
|
from .user import User, UserDict
|
||||||
|
|
||||||
from pydantic import BaseModel, field_validator, model_validator
|
from pydantic import BaseModel
|
||||||
from sqlalchemy import JSON, Boolean, Column, DateTime, Text
|
from sqlalchemy import JSON, Boolean, Column, DateTime, Text
|
||||||
from sqlalchemy.ext.asyncio import AsyncAttrs
|
from sqlalchemy.ext.asyncio import AsyncAttrs
|
||||||
from sqlmodel import Field, Relationship, SQLModel, col, exists, func, select
|
from sqlmodel import Field, Relationship, SQLModel, col, exists, func, select
|
||||||
@@ -16,7 +18,7 @@ from sqlmodel.ext.asyncio.session import AsyncSession
|
|||||||
if TYPE_CHECKING:
|
if TYPE_CHECKING:
|
||||||
from app.fetcher import Fetcher
|
from app.fetcher import Fetcher
|
||||||
|
|
||||||
from .beatmap import Beatmap, BeatmapResp
|
from .beatmap import Beatmap, BeatmapDict
|
||||||
from .favourite_beatmapset import FavouriteBeatmapset
|
from .favourite_beatmapset import FavouriteBeatmapset
|
||||||
|
|
||||||
|
|
||||||
@@ -67,60 +69,387 @@ class BeatmapTranslationText(BaseModel):
|
|||||||
id: int | None = None
|
id: int | None = None
|
||||||
|
|
||||||
|
|
||||||
class BeatmapsetBase(SQLModel):
|
class BeatmapsetDict(TypedDict):
|
||||||
|
id: int
|
||||||
|
artist: str
|
||||||
|
artist_unicode: str
|
||||||
|
covers: BeatmapCovers | None
|
||||||
|
creator: str
|
||||||
|
nsfw: bool
|
||||||
|
preview_url: str
|
||||||
|
source: str
|
||||||
|
spotlight: bool
|
||||||
|
title: str
|
||||||
|
title_unicode: str
|
||||||
|
track_id: int | None
|
||||||
|
user_id: int
|
||||||
|
video: bool
|
||||||
|
current_nominations: list[BeatmapNomination] | None
|
||||||
|
description: BeatmapDescription | None
|
||||||
|
pack_tags: list[str]
|
||||||
|
|
||||||
|
bpm: NotRequired[float]
|
||||||
|
can_be_hyped: NotRequired[bool]
|
||||||
|
discussion_locked: NotRequired[bool]
|
||||||
|
last_updated: NotRequired[datetime]
|
||||||
|
ranked_date: NotRequired[datetime | None]
|
||||||
|
storyboard: NotRequired[bool]
|
||||||
|
submitted_date: NotRequired[datetime]
|
||||||
|
tags: NotRequired[str]
|
||||||
|
discussion_enabled: NotRequired[bool]
|
||||||
|
legacy_thread_url: NotRequired[str | None]
|
||||||
|
status: NotRequired[str]
|
||||||
|
ranked: NotRequired[int]
|
||||||
|
is_scoreable: NotRequired[bool]
|
||||||
|
favourite_count: NotRequired[int]
|
||||||
|
genre_id: NotRequired[int]
|
||||||
|
hype: NotRequired[BeatmapHype]
|
||||||
|
language_id: NotRequired[int]
|
||||||
|
play_count: NotRequired[int]
|
||||||
|
availability: NotRequired[BeatmapAvailability]
|
||||||
|
beatmaps: NotRequired[list["BeatmapDict"]]
|
||||||
|
has_favourited: NotRequired[bool]
|
||||||
|
recent_favourites: NotRequired[list[UserDict]]
|
||||||
|
genre: NotRequired[BeatmapTranslationText]
|
||||||
|
language: NotRequired[BeatmapTranslationText]
|
||||||
|
nominations: NotRequired["BeatmapNominations"]
|
||||||
|
ratings: NotRequired[list[int]]
|
||||||
|
|
||||||
|
|
||||||
|
class BeatmapsetModel(DatabaseModel[BeatmapsetDict]):
|
||||||
|
BEATMAPSET_TRANSFORMER_INCLUDES: ClassVar[list[str]] = [
|
||||||
|
"availability",
|
||||||
|
"has_favourited",
|
||||||
|
"bpm",
|
||||||
|
"deleted_atcan_be_hyped",
|
||||||
|
"discussion_locked",
|
||||||
|
"is_scoreable",
|
||||||
|
"last_updated",
|
||||||
|
"legacy_thread_url",
|
||||||
|
"ranked",
|
||||||
|
"ranked_date",
|
||||||
|
"submitted_date",
|
||||||
|
"tags",
|
||||||
|
"rating",
|
||||||
|
"storyboard",
|
||||||
|
]
|
||||||
|
API_INCLUDES: ClassVar[list[str]] = [
|
||||||
|
*BEATMAPSET_TRANSFORMER_INCLUDES,
|
||||||
|
"beatmaps.current_user_playcount",
|
||||||
|
"beatmaps.current_user_tag_ids",
|
||||||
|
"beatmaps.max_combo",
|
||||||
|
"current_nominations",
|
||||||
|
"current_user_attributes",
|
||||||
|
"description",
|
||||||
|
"genre",
|
||||||
|
"language",
|
||||||
|
"pack_tags",
|
||||||
|
"ratings",
|
||||||
|
"recent_favourites",
|
||||||
|
"related_tags",
|
||||||
|
"related_users",
|
||||||
|
"user",
|
||||||
|
"version_count",
|
||||||
|
*[
|
||||||
|
f"beatmaps.{inc}"
|
||||||
|
for inc in {
|
||||||
|
"failtimes",
|
||||||
|
"owners",
|
||||||
|
"top_tag_ids",
|
||||||
|
}
|
||||||
|
],
|
||||||
|
]
|
||||||
|
|
||||||
# Beatmapset
|
# Beatmapset
|
||||||
|
id: int = Field(default=None, primary_key=True, index=True)
|
||||||
artist: str = Field(index=True)
|
artist: str = Field(index=True)
|
||||||
artist_unicode: str = Field(index=True)
|
artist_unicode: str = Field(index=True)
|
||||||
covers: BeatmapCovers | None = Field(sa_column=Column(JSON))
|
covers: BeatmapCovers | None = Field(sa_column=Column(JSON))
|
||||||
creator: str = Field(index=True)
|
creator: str = Field(index=True)
|
||||||
nsfw: bool = Field(default=False, sa_column=Column(Boolean))
|
nsfw: bool = Field(default=False, sa_column=Column(Boolean))
|
||||||
play_count: int = Field(index=True)
|
|
||||||
preview_url: str
|
preview_url: str
|
||||||
source: str = Field(default="")
|
source: str = Field(default="")
|
||||||
|
|
||||||
spotlight: bool = Field(default=False, sa_column=Column(Boolean))
|
spotlight: bool = Field(default=False, sa_column=Column(Boolean))
|
||||||
title: str = Field(index=True)
|
title: str = Field(index=True)
|
||||||
title_unicode: str = Field(index=True)
|
title_unicode: str = Field(index=True)
|
||||||
|
track_id: int | None = Field(default=None, index=True) # feature artist?
|
||||||
user_id: int = Field(index=True)
|
user_id: int = Field(index=True)
|
||||||
video: bool = Field(sa_column=Column(Boolean, index=True))
|
video: bool = Field(sa_column=Column(Boolean, index=True))
|
||||||
|
|
||||||
# optional
|
# optional
|
||||||
# converts: list[Beatmap] = Relationship(back_populates="beatmapset")
|
# converts: list[Beatmap] = Relationship(back_populates="beatmapset")
|
||||||
current_nominations: list[BeatmapNomination] | None = Field(
|
current_nominations: OnDemand[list[BeatmapNomination] | None] = Field(None, sa_column=Column(JSON))
|
||||||
None, sa_column=Column(JSON)
|
description: OnDemand[BeatmapDescription | None] = Field(default=None, sa_column=Column(JSON))
|
||||||
)
|
|
||||||
description: BeatmapDescription | None = Field(default=None, sa_column=Column(JSON))
|
|
||||||
# TODO: discussions: list[BeatmapsetDiscussion] = None
|
# TODO: discussions: list[BeatmapsetDiscussion] = None
|
||||||
# TODO: current_user_attributes: Optional[CurrentUserAttributes] = None
|
# TODO: current_user_attributes: Optional[CurrentUserAttributes] = None
|
||||||
# TODO: events: Optional[list[BeatmapsetEvent]] = None
|
# TODO: events: Optional[list[BeatmapsetEvent]] = None
|
||||||
|
|
||||||
pack_tags: list[str] = Field(default=[], sa_column=Column(JSON))
|
pack_tags: OnDemand[list[str]] = Field(default=[], sa_column=Column(JSON))
|
||||||
ratings: list[int] | None = Field(default=None, sa_column=Column(JSON))
|
|
||||||
# TODO: related_users: Optional[list[User]] = None
|
# TODO: related_users: Optional[list[User]] = None
|
||||||
# TODO: user: Optional[User] = Field(default=None)
|
# TODO: user: Optional[User] = Field(default=None)
|
||||||
track_id: int | None = Field(default=None, index=True) # feature artist?
|
|
||||||
|
|
||||||
# BeatmapsetExtended
|
# BeatmapsetExtended
|
||||||
bpm: float = Field(default=0.0)
|
bpm: OnDemand[float] = Field(default=0.0)
|
||||||
can_be_hyped: bool = Field(default=False, sa_column=Column(Boolean))
|
can_be_hyped: OnDemand[bool] = Field(default=False, sa_column=Column(Boolean))
|
||||||
discussion_locked: bool = Field(default=False, sa_column=Column(Boolean))
|
discussion_locked: OnDemand[bool] = Field(default=False, sa_column=Column(Boolean))
|
||||||
last_updated: datetime = Field(sa_column=Column(DateTime, index=True))
|
last_updated: OnDemand[datetime] = Field(sa_column=Column(DateTime, index=True))
|
||||||
ranked_date: datetime | None = Field(
|
ranked_date: OnDemand[datetime | None] = Field(default=None, sa_column=Column(DateTime, index=True))
|
||||||
default=None, sa_column=Column(DateTime, index=True)
|
storyboard: OnDemand[bool] = Field(default=False, sa_column=Column(Boolean, index=True))
|
||||||
)
|
submitted_date: OnDemand[datetime] = Field(sa_column=Column(DateTime, index=True))
|
||||||
storyboard: bool = Field(default=False, sa_column=Column(Boolean, index=True))
|
tags: OnDemand[str] = Field(default="", sa_column=Column(Text))
|
||||||
submitted_date: datetime = Field(sa_column=Column(DateTime, index=True))
|
|
||||||
tags: str = Field(default="", sa_column=Column(Text))
|
@ondemand
|
||||||
|
@staticmethod
|
||||||
|
async def legacy_thread_url(
|
||||||
|
_session: AsyncSession,
|
||||||
|
_beatmapset: "Beatmapset",
|
||||||
|
) -> str | None:
|
||||||
|
return None
|
||||||
|
|
||||||
|
@included
|
||||||
|
@staticmethod
|
||||||
|
async def discussion_enabled(
|
||||||
|
_session: AsyncSession,
|
||||||
|
_beatmapset: "Beatmapset",
|
||||||
|
) -> bool:
|
||||||
|
return True
|
||||||
|
|
||||||
|
@included
|
||||||
|
@staticmethod
|
||||||
|
async def status(
|
||||||
|
_session: AsyncSession,
|
||||||
|
beatmapset: "Beatmapset",
|
||||||
|
) -> str:
|
||||||
|
beatmap_status = beatmapset.beatmap_status
|
||||||
|
if settings.enable_all_beatmap_leaderboard and not beatmap_status.has_leaderboard():
|
||||||
|
return BeatmapRankStatus.APPROVED.name.lower()
|
||||||
|
return beatmap_status.name.lower()
|
||||||
|
|
||||||
|
@included
|
||||||
|
@staticmethod
|
||||||
|
async def ranked(
|
||||||
|
_session: AsyncSession,
|
||||||
|
beatmapset: "Beatmapset",
|
||||||
|
) -> int:
|
||||||
|
beatmap_status = beatmapset.beatmap_status
|
||||||
|
if settings.enable_all_beatmap_leaderboard and not beatmap_status.has_leaderboard():
|
||||||
|
return BeatmapRankStatus.APPROVED.value
|
||||||
|
return beatmap_status.value
|
||||||
|
|
||||||
|
@ondemand
|
||||||
|
@staticmethod
|
||||||
|
async def is_scoreable(
|
||||||
|
_session: AsyncSession,
|
||||||
|
beatmapset: "Beatmapset",
|
||||||
|
) -> bool:
|
||||||
|
beatmap_status = beatmapset.beatmap_status
|
||||||
|
if settings.enable_all_beatmap_leaderboard:
|
||||||
|
return True
|
||||||
|
return beatmap_status.has_leaderboard()
|
||||||
|
|
||||||
|
@included
|
||||||
|
@staticmethod
|
||||||
|
async def favourite_count(
|
||||||
|
session: AsyncSession,
|
||||||
|
beatmapset: "Beatmapset",
|
||||||
|
) -> int:
|
||||||
|
from .favourite_beatmapset import FavouriteBeatmapset
|
||||||
|
|
||||||
|
count = await session.exec(
|
||||||
|
select(func.count())
|
||||||
|
.select_from(FavouriteBeatmapset)
|
||||||
|
.where(FavouriteBeatmapset.beatmapset_id == beatmapset.id)
|
||||||
|
)
|
||||||
|
return count.one()
|
||||||
|
|
||||||
|
@included
|
||||||
|
@staticmethod
|
||||||
|
async def genre_id(
|
||||||
|
_session: AsyncSession,
|
||||||
|
beatmapset: "Beatmapset",
|
||||||
|
) -> int:
|
||||||
|
return beatmapset.beatmap_genre.value
|
||||||
|
|
||||||
|
@ondemand
|
||||||
|
@staticmethod
|
||||||
|
async def hype(
|
||||||
|
_session: AsyncSession,
|
||||||
|
beatmapset: "Beatmapset",
|
||||||
|
) -> BeatmapHype:
|
||||||
|
return BeatmapHype(current=beatmapset.hype_current, required=beatmapset.hype_required)
|
||||||
|
|
||||||
|
@included
|
||||||
|
@staticmethod
|
||||||
|
async def language_id(
|
||||||
|
_session: AsyncSession,
|
||||||
|
beatmapset: "Beatmapset",
|
||||||
|
) -> int:
|
||||||
|
return beatmapset.beatmap_language.value
|
||||||
|
|
||||||
|
@included
|
||||||
|
@staticmethod
|
||||||
|
async def play_count(
|
||||||
|
session: AsyncSession,
|
||||||
|
beatmapset: "Beatmapset",
|
||||||
|
) -> int:
|
||||||
|
from .beatmap import Beatmap
|
||||||
|
|
||||||
|
playcount = await session.exec(
|
||||||
|
select(func.sum(BeatmapPlaycounts.playcount)).where(
|
||||||
|
col(BeatmapPlaycounts.beatmap).has(col(Beatmap.beatmapset_id) == beatmapset.id)
|
||||||
|
)
|
||||||
|
)
|
||||||
|
return int(playcount.first() or 0)
|
||||||
|
|
||||||
|
@ondemand
|
||||||
|
@staticmethod
|
||||||
|
async def availability(
|
||||||
|
_session: AsyncSession,
|
||||||
|
beatmapset: "Beatmapset",
|
||||||
|
) -> BeatmapAvailability:
|
||||||
|
return BeatmapAvailability(
|
||||||
|
more_information=beatmapset.availability_info,
|
||||||
|
download_disabled=beatmapset.download_disabled,
|
||||||
|
)
|
||||||
|
|
||||||
|
@ondemand
|
||||||
|
@staticmethod
|
||||||
|
async def beatmaps(
|
||||||
|
_session: AsyncSession,
|
||||||
|
beatmapset: "Beatmapset",
|
||||||
|
includes: list[str] | None = None,
|
||||||
|
user: "User | None" = None,
|
||||||
|
) -> list["BeatmapDict"]:
|
||||||
|
from .beatmap import BeatmapModel
|
||||||
|
|
||||||
|
return [
|
||||||
|
await BeatmapModel.transform(
|
||||||
|
beatmap, includes=(includes or []) + BeatmapModel.BEATMAP_TRANSFORMER_INCLUDES, user=user
|
||||||
|
)
|
||||||
|
for beatmap in await beatmapset.awaitable_attrs.beatmaps
|
||||||
|
]
|
||||||
|
|
||||||
|
# @ondemand
|
||||||
|
# @staticmethod
|
||||||
|
# async def current_nominations(
|
||||||
|
# _session: AsyncSession,
|
||||||
|
# beatmapset: "Beatmapset",
|
||||||
|
# ) -> list[BeatmapNomination] | None:
|
||||||
|
# return beatmapset.current_nominations or []
|
||||||
|
|
||||||
|
@ondemand
|
||||||
|
@staticmethod
|
||||||
|
async def has_favourited(
|
||||||
|
session: AsyncSession,
|
||||||
|
beatmapset: "Beatmapset",
|
||||||
|
user: User | None = None,
|
||||||
|
) -> bool:
|
||||||
|
from .favourite_beatmapset import FavouriteBeatmapset
|
||||||
|
|
||||||
|
if session is None:
|
||||||
|
return False
|
||||||
|
query = select(FavouriteBeatmapset).where(FavouriteBeatmapset.beatmapset_id == beatmapset.id)
|
||||||
|
if user is not None:
|
||||||
|
query = query.where(FavouriteBeatmapset.user_id == user.id)
|
||||||
|
existing = (await session.exec(query)).first()
|
||||||
|
return existing is not None
|
||||||
|
|
||||||
|
@ondemand
|
||||||
|
@staticmethod
|
||||||
|
async def recent_favourites(
|
||||||
|
session: AsyncSession,
|
||||||
|
beatmapset: "Beatmapset",
|
||||||
|
includes: list[str] | None = None,
|
||||||
|
) -> list[UserDict]:
|
||||||
|
from .favourite_beatmapset import FavouriteBeatmapset
|
||||||
|
|
||||||
|
recent_favourites = (
|
||||||
|
await session.exec(
|
||||||
|
select(FavouriteBeatmapset)
|
||||||
|
.where(FavouriteBeatmapset.beatmapset_id == beatmapset.id)
|
||||||
|
.order_by(col(FavouriteBeatmapset.date).desc())
|
||||||
|
.limit(50)
|
||||||
|
)
|
||||||
|
).all()
|
||||||
|
return [
|
||||||
|
await User.transform(
|
||||||
|
(await favourite.awaitable_attrs.user),
|
||||||
|
includes=includes,
|
||||||
|
)
|
||||||
|
for favourite in recent_favourites
|
||||||
|
]
|
||||||
|
|
||||||
|
@ondemand
|
||||||
|
@staticmethod
|
||||||
|
async def genre(
|
||||||
|
_session: AsyncSession,
|
||||||
|
beatmapset: "Beatmapset",
|
||||||
|
) -> BeatmapTranslationText:
|
||||||
|
return BeatmapTranslationText(
|
||||||
|
name=beatmapset.beatmap_genre.name,
|
||||||
|
id=beatmapset.beatmap_genre.value,
|
||||||
|
)
|
||||||
|
|
||||||
|
@ondemand
|
||||||
|
@staticmethod
|
||||||
|
async def language(
|
||||||
|
_session: AsyncSession,
|
||||||
|
beatmapset: "Beatmapset",
|
||||||
|
) -> BeatmapTranslationText:
|
||||||
|
return BeatmapTranslationText(
|
||||||
|
name=beatmapset.beatmap_language.name,
|
||||||
|
id=beatmapset.beatmap_language.value,
|
||||||
|
)
|
||||||
|
|
||||||
|
@ondemand
|
||||||
|
@staticmethod
|
||||||
|
async def nominations(
|
||||||
|
_session: AsyncSession,
|
||||||
|
beatmapset: "Beatmapset",
|
||||||
|
) -> BeatmapNominations:
|
||||||
|
return BeatmapNominations(
|
||||||
|
required=beatmapset.nominations_required,
|
||||||
|
current=beatmapset.nominations_current,
|
||||||
|
)
|
||||||
|
|
||||||
|
# @ondemand
|
||||||
|
# @staticmethod
|
||||||
|
# async def user(
|
||||||
|
# session: AsyncSession,
|
||||||
|
# beatmapset: Beatmapset,
|
||||||
|
# includes: list[str] | None = None,
|
||||||
|
# ) -> dict[str, Any] | None:
|
||||||
|
# db_user = await session.get(User, beatmapset.user_id)
|
||||||
|
# if not db_user:
|
||||||
|
# return None
|
||||||
|
# return await UserResp.transform(db_user, includes=includes)
|
||||||
|
|
||||||
|
@ondemand
|
||||||
|
@staticmethod
|
||||||
|
async def ratings(
|
||||||
|
session: AsyncSession,
|
||||||
|
beatmapset: "Beatmapset",
|
||||||
|
) -> list[int]:
|
||||||
|
# Provide a stable default shape if no session is available
|
||||||
|
if session is None:
|
||||||
|
return []
|
||||||
|
|
||||||
|
from .beatmapset_ratings import BeatmapRating
|
||||||
|
|
||||||
|
beatmapset_all_ratings = (
|
||||||
|
await session.exec(select(BeatmapRating).where(BeatmapRating.beatmapset_id == beatmapset.id))
|
||||||
|
).all()
|
||||||
|
ratings_list = [0] * 11
|
||||||
|
for rating in beatmapset_all_ratings:
|
||||||
|
ratings_list[rating.rating] += 1
|
||||||
|
return ratings_list
|
||||||
|
|
||||||
|
|
||||||
class Beatmapset(AsyncAttrs, BeatmapsetBase, table=True):
|
class Beatmapset(AsyncAttrs, BeatmapsetModel, table=True):
|
||||||
__tablename__ = "beatmapsets" # pyright: ignore[reportAssignmentType]
|
__tablename__: str = "beatmapsets"
|
||||||
|
|
||||||
id: int | None = Field(default=None, primary_key=True, index=True)
|
|
||||||
# Beatmapset
|
# Beatmapset
|
||||||
beatmap_status: BeatmapRankStatus = Field(
|
beatmap_status: BeatmapRankStatus = Field(default=BeatmapRankStatus.GRAVEYARD, index=True)
|
||||||
default=BeatmapRankStatus.GRAVEYARD, index=True
|
|
||||||
)
|
|
||||||
|
|
||||||
# optional
|
# optional
|
||||||
beatmaps: list["Beatmap"] = Relationship(back_populates="beatmapset")
|
beatmaps: list["Beatmap"] = Relationship(back_populates="beatmapset")
|
||||||
@@ -137,206 +466,76 @@ class Beatmapset(AsyncAttrs, BeatmapsetBase, table=True):
|
|||||||
favourites: list["FavouriteBeatmapset"] = Relationship(back_populates="beatmapset")
|
favourites: list["FavouriteBeatmapset"] = Relationship(back_populates="beatmapset")
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
async def from_resp(
|
async def from_resp_no_save(cls, resp: BeatmapsetDict) -> "Beatmapset":
|
||||||
cls, session: AsyncSession, resp: "BeatmapsetResp", from_: int = 0
|
# make a shallow copy so we can mutate safely
|
||||||
) -> "Beatmapset":
|
d: dict[str, Any] = dict(resp)
|
||||||
from .beatmap import Beatmap
|
|
||||||
|
# nominations = resp.get("nominations")
|
||||||
|
# if nominations is not None:
|
||||||
|
# d["nominations_required"] = nominations.required
|
||||||
|
# d["nominations_current"] = nominations.current
|
||||||
|
|
||||||
|
hype = resp.get("hype")
|
||||||
|
if hype is not None:
|
||||||
|
d["hype_current"] = hype.current
|
||||||
|
d["hype_required"] = hype.required
|
||||||
|
|
||||||
|
genre_id = resp.get("genre_id")
|
||||||
|
genre = resp.get("genre")
|
||||||
|
if genre_id is not None:
|
||||||
|
d["beatmap_genre"] = Genre(genre_id)
|
||||||
|
elif genre is not None:
|
||||||
|
d["beatmap_genre"] = Genre(genre.id)
|
||||||
|
|
||||||
|
language_id = resp.get("language_id")
|
||||||
|
language = resp.get("language")
|
||||||
|
if language_id is not None:
|
||||||
|
d["beatmap_language"] = Language(language_id)
|
||||||
|
elif language is not None:
|
||||||
|
d["beatmap_language"] = Language(language.id)
|
||||||
|
|
||||||
|
availability = resp.get("availability")
|
||||||
|
ranked = resp.get("ranked")
|
||||||
|
if ranked is None:
|
||||||
|
raise ValueError("ranked field is required")
|
||||||
|
|
||||||
d = resp.model_dump()
|
|
||||||
update = {}
|
|
||||||
if resp.nominations:
|
|
||||||
update["nominations_required"] = resp.nominations.required
|
|
||||||
update["nominations_current"] = resp.nominations.current
|
|
||||||
if resp.hype:
|
|
||||||
update["hype_current"] = resp.hype.current
|
|
||||||
update["hype_required"] = resp.hype.required
|
|
||||||
if resp.genre_id:
|
|
||||||
update["beatmap_genre"] = Genre(resp.genre_id)
|
|
||||||
elif resp.genre:
|
|
||||||
update["beatmap_genre"] = Genre(resp.genre.id)
|
|
||||||
if resp.language_id:
|
|
||||||
update["beatmap_language"] = Language(resp.language_id)
|
|
||||||
elif resp.language:
|
|
||||||
update["beatmap_language"] = Language(resp.language.id)
|
|
||||||
beatmapset = Beatmapset.model_validate(
|
beatmapset = Beatmapset.model_validate(
|
||||||
{
|
{
|
||||||
**d,
|
**d,
|
||||||
"id": resp.id,
|
"beatmap_status": BeatmapRankStatus(ranked),
|
||||||
"beatmap_status": BeatmapRankStatus(resp.ranked),
|
"availability_info": availability.more_information if availability is not None else None,
|
||||||
"availability_info": resp.availability.more_information,
|
"download_disabled": bool(availability.download_disabled) if availability is not None else False,
|
||||||
"download_disabled": resp.availability.download_disabled or False,
|
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
if not (
|
|
||||||
await session.exec(select(exists()).where(Beatmapset.id == resp.id))
|
|
||||||
).first():
|
|
||||||
session.add(beatmapset)
|
|
||||||
await session.commit()
|
|
||||||
await Beatmap.from_resp_batch(session, resp.beatmaps, from_=from_)
|
|
||||||
return beatmapset
|
return beatmapset
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
async def get_or_fetch(
|
async def from_resp(
|
||||||
cls, session: AsyncSession, fetcher: "Fetcher", sid: int
|
cls,
|
||||||
|
session: AsyncSession,
|
||||||
|
resp: BeatmapsetDict,
|
||||||
|
from_: int = 0,
|
||||||
) -> "Beatmapset":
|
) -> "Beatmapset":
|
||||||
|
from .beatmap import Beatmap
|
||||||
|
|
||||||
|
beatmapset_id = resp["id"]
|
||||||
|
beatmapset = await cls.from_resp_no_save(resp)
|
||||||
|
if not (await session.exec(select(exists()).where(Beatmapset.id == beatmapset_id))).first():
|
||||||
|
session.add(beatmapset)
|
||||||
|
await session.commit()
|
||||||
|
beatmaps = resp.get("beatmaps", [])
|
||||||
|
await Beatmap.from_resp_batch(session, beatmaps, from_=from_)
|
||||||
|
beatmapset = (await session.exec(select(Beatmapset).where(Beatmapset.id == beatmapset_id))).one()
|
||||||
|
return beatmapset
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
async def get_or_fetch(cls, session: AsyncSession, fetcher: "Fetcher", sid: int) -> "Beatmapset":
|
||||||
|
from app.service.beatmapset_update_service import get_beatmapset_update_service
|
||||||
|
|
||||||
beatmapset = await session.get(Beatmapset, sid)
|
beatmapset = await session.get(Beatmapset, sid)
|
||||||
if not beatmapset:
|
if not beatmapset:
|
||||||
resp = await fetcher.get_beatmapset(sid)
|
resp = await fetcher.get_beatmapset(sid)
|
||||||
beatmapset = await cls.from_resp(session, resp)
|
beatmapset = await cls.from_resp(session, resp)
|
||||||
|
await get_beatmapset_update_service().add(resp)
|
||||||
|
await session.refresh(beatmapset)
|
||||||
return beatmapset
|
return beatmapset
|
||||||
|
|
||||||
|
|
||||||
class BeatmapsetResp(BeatmapsetBase):
|
|
||||||
id: int
|
|
||||||
beatmaps: list["BeatmapResp"] = Field(default_factory=list)
|
|
||||||
discussion_enabled: bool = True
|
|
||||||
status: str
|
|
||||||
ranked: int
|
|
||||||
legacy_thread_url: str | None = ""
|
|
||||||
is_scoreable: bool
|
|
||||||
hype: BeatmapHype | None = None
|
|
||||||
availability: BeatmapAvailability
|
|
||||||
genre: BeatmapTranslationText | None = None
|
|
||||||
genre_id: int
|
|
||||||
language: BeatmapTranslationText | None = None
|
|
||||||
language_id: int
|
|
||||||
nominations: BeatmapNominations | None = None
|
|
||||||
has_favourited: bool = False
|
|
||||||
favourite_count: int = 0
|
|
||||||
recent_favourites: list[UserResp] = Field(default_factory=list)
|
|
||||||
|
|
||||||
@field_validator(
|
|
||||||
"nsfw",
|
|
||||||
"spotlight",
|
|
||||||
"video",
|
|
||||||
"can_be_hyped",
|
|
||||||
"discussion_locked",
|
|
||||||
"storyboard",
|
|
||||||
"discussion_enabled",
|
|
||||||
"is_scoreable",
|
|
||||||
"has_favourited",
|
|
||||||
mode="before",
|
|
||||||
)
|
|
||||||
@classmethod
|
|
||||||
def validate_bool_fields(cls, v):
|
|
||||||
"""将整数 0/1 转换为布尔值,处理数据库中的布尔字段"""
|
|
||||||
if isinstance(v, int):
|
|
||||||
return bool(v)
|
|
||||||
return v
|
|
||||||
|
|
||||||
@model_validator(mode="after")
|
|
||||||
def fix_genre_language(self) -> Self:
|
|
||||||
if self.genre is None:
|
|
||||||
self.genre = BeatmapTranslationText(
|
|
||||||
name=Genre(self.genre_id).name, id=self.genre_id
|
|
||||||
)
|
|
||||||
if self.language is None:
|
|
||||||
self.language = BeatmapTranslationText(
|
|
||||||
name=Language(self.language_id).name, id=self.language_id
|
|
||||||
)
|
|
||||||
return self
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
async def from_db(
|
|
||||||
cls,
|
|
||||||
beatmapset: Beatmapset,
|
|
||||||
include: list[str] = [],
|
|
||||||
session: AsyncSession | None = None,
|
|
||||||
user: User | None = None,
|
|
||||||
) -> "BeatmapsetResp":
|
|
||||||
from .beatmap import BeatmapResp
|
|
||||||
from .favourite_beatmapset import FavouriteBeatmapset
|
|
||||||
|
|
||||||
update = {
|
|
||||||
"beatmaps": [
|
|
||||||
await BeatmapResp.from_db(beatmap, from_set=True, session=session)
|
|
||||||
for beatmap in await beatmapset.awaitable_attrs.beatmaps
|
|
||||||
],
|
|
||||||
"hype": BeatmapHype(
|
|
||||||
current=beatmapset.hype_current, required=beatmapset.hype_required
|
|
||||||
),
|
|
||||||
"availability": BeatmapAvailability(
|
|
||||||
more_information=beatmapset.availability_info,
|
|
||||||
download_disabled=beatmapset.download_disabled,
|
|
||||||
),
|
|
||||||
"genre": BeatmapTranslationText(
|
|
||||||
name=beatmapset.beatmap_genre.name,
|
|
||||||
id=beatmapset.beatmap_genre.value,
|
|
||||||
),
|
|
||||||
"language": BeatmapTranslationText(
|
|
||||||
name=beatmapset.beatmap_language.name,
|
|
||||||
id=beatmapset.beatmap_language.value,
|
|
||||||
),
|
|
||||||
"genre_id": beatmapset.beatmap_genre.value,
|
|
||||||
"language_id": beatmapset.beatmap_language.value,
|
|
||||||
"nominations": BeatmapNominations(
|
|
||||||
required=beatmapset.nominations_required,
|
|
||||||
current=beatmapset.nominations_current,
|
|
||||||
),
|
|
||||||
"is_scoreable": beatmapset.beatmap_status.has_leaderboard(),
|
|
||||||
**beatmapset.model_dump(),
|
|
||||||
}
|
|
||||||
|
|
||||||
# 确保 ratings 字段不为 null,避免客户端崩溃
|
|
||||||
if update.get("ratings") is None:
|
|
||||||
update["ratings"] = []
|
|
||||||
|
|
||||||
beatmap_status = beatmapset.beatmap_status
|
|
||||||
if (
|
|
||||||
settings.enable_all_beatmap_leaderboard
|
|
||||||
and not beatmap_status.has_leaderboard()
|
|
||||||
):
|
|
||||||
update["status"] = BeatmapRankStatus.APPROVED.name.lower()
|
|
||||||
update["ranked"] = BeatmapRankStatus.APPROVED.value
|
|
||||||
else:
|
|
||||||
update["status"] = beatmap_status.name.lower()
|
|
||||||
update["ranked"] = beatmap_status.value
|
|
||||||
|
|
||||||
if session and user:
|
|
||||||
existing_favourite = (
|
|
||||||
await session.exec(
|
|
||||||
select(FavouriteBeatmapset).where(
|
|
||||||
FavouriteBeatmapset.beatmapset_id == beatmapset.id
|
|
||||||
)
|
|
||||||
)
|
|
||||||
).first()
|
|
||||||
update["has_favourited"] = existing_favourite is not None
|
|
||||||
|
|
||||||
if session and "recent_favourites" in include:
|
|
||||||
recent_favourites = (
|
|
||||||
await session.exec(
|
|
||||||
select(FavouriteBeatmapset)
|
|
||||||
.where(
|
|
||||||
FavouriteBeatmapset.beatmapset_id == beatmapset.id,
|
|
||||||
)
|
|
||||||
.order_by(col(FavouriteBeatmapset.date).desc())
|
|
||||||
.limit(50)
|
|
||||||
)
|
|
||||||
).all()
|
|
||||||
update["recent_favourites"] = [
|
|
||||||
await UserResp.from_db(
|
|
||||||
await favourite.awaitable_attrs.user,
|
|
||||||
session=session,
|
|
||||||
include=BASE_INCLUDES,
|
|
||||||
)
|
|
||||||
for favourite in recent_favourites
|
|
||||||
]
|
|
||||||
|
|
||||||
if session:
|
|
||||||
update["favourite_count"] = (
|
|
||||||
await session.exec(
|
|
||||||
select(func.count())
|
|
||||||
.select_from(FavouriteBeatmapset)
|
|
||||||
.where(FavouriteBeatmapset.beatmapset_id == beatmapset.id)
|
|
||||||
)
|
|
||||||
).one()
|
|
||||||
return cls.model_validate(
|
|
||||||
update,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class SearchBeatmapsetsResp(SQLModel):
|
|
||||||
beatmapsets: list[BeatmapsetResp]
|
|
||||||
total: int
|
|
||||||
cursor: dict[str, int | float] | None = None
|
|
||||||
cursor_string: str | None = None
|
|
||||||
|
|||||||
18
app/database/beatmapset_ratings.py
Normal file
18
app/database/beatmapset_ratings.py
Normal file
@@ -0,0 +1,18 @@
|
|||||||
|
from .beatmapset import Beatmapset
|
||||||
|
from .user import User
|
||||||
|
|
||||||
|
from sqlmodel import BigInteger, Column, Field, ForeignKey, Relationship, SQLModel
|
||||||
|
|
||||||
|
|
||||||
|
class BeatmapRating(SQLModel, table=True):
|
||||||
|
__tablename__: str = "beatmap_ratings"
|
||||||
|
id: int | None = Field(
|
||||||
|
default=None,
|
||||||
|
sa_column=Column(BigInteger, primary_key=True, autoincrement=True),
|
||||||
|
)
|
||||||
|
beatmapset_id: int = Field(foreign_key="beatmapsets.id", index=True)
|
||||||
|
user_id: int = Field(sa_column=Column(BigInteger, ForeignKey("lazer_users.id"), index=True))
|
||||||
|
rating: int
|
||||||
|
|
||||||
|
beatmapset: Beatmapset = Relationship()
|
||||||
|
user: User = Relationship()
|
||||||
@@ -1,46 +0,0 @@
|
|||||||
from typing import TYPE_CHECKING
|
|
||||||
|
|
||||||
from app.models.score import GameMode, Rank
|
|
||||||
|
|
||||||
from .lazer_user import User
|
|
||||||
|
|
||||||
from sqlmodel import (
|
|
||||||
JSON,
|
|
||||||
BigInteger,
|
|
||||||
Column,
|
|
||||||
Field,
|
|
||||||
ForeignKey,
|
|
||||||
Relationship,
|
|
||||||
SQLModel,
|
|
||||||
)
|
|
||||||
|
|
||||||
if TYPE_CHECKING:
|
|
||||||
from .beatmap import Beatmap
|
|
||||||
from .score import Score
|
|
||||||
|
|
||||||
|
|
||||||
class BestScore(SQLModel, table=True):
|
|
||||||
__tablename__ = "total_score_best_scores" # pyright: ignore[reportAssignmentType]
|
|
||||||
user_id: int = Field(
|
|
||||||
sa_column=Column(BigInteger, ForeignKey("lazer_users.id"), index=True)
|
|
||||||
)
|
|
||||||
score_id: int = Field(
|
|
||||||
sa_column=Column(BigInteger, ForeignKey("scores.id"), primary_key=True)
|
|
||||||
)
|
|
||||||
beatmap_id: int = Field(foreign_key="beatmaps.id", index=True)
|
|
||||||
gamemode: GameMode = Field(index=True)
|
|
||||||
total_score: int = Field(default=0, sa_column=Column(BigInteger))
|
|
||||||
mods: list[str] = Field(
|
|
||||||
default_factory=list,
|
|
||||||
sa_column=Column(JSON),
|
|
||||||
)
|
|
||||||
rank: Rank
|
|
||||||
|
|
||||||
user: User = Relationship()
|
|
||||||
score: "Score" = Relationship(
|
|
||||||
sa_relationship_kwargs={
|
|
||||||
"foreign_keys": "[BestScore.score_id]",
|
|
||||||
"lazy": "joined",
|
|
||||||
}
|
|
||||||
)
|
|
||||||
beatmap: "Beatmap" = Relationship()
|
|
||||||
57
app/database/best_scores.py
Normal file
57
app/database/best_scores.py
Normal file
@@ -0,0 +1,57 @@
|
|||||||
|
from typing import TYPE_CHECKING
|
||||||
|
|
||||||
|
from app.models.score import GameMode
|
||||||
|
|
||||||
|
from .statistics import UserStatistics
|
||||||
|
from .user import User
|
||||||
|
|
||||||
|
from sqlmodel import (
|
||||||
|
BigInteger,
|
||||||
|
Column,
|
||||||
|
Field,
|
||||||
|
Float,
|
||||||
|
ForeignKey,
|
||||||
|
Relationship,
|
||||||
|
SQLModel,
|
||||||
|
select,
|
||||||
|
)
|
||||||
|
from sqlmodel.ext.asyncio.session import AsyncSession
|
||||||
|
|
||||||
|
if TYPE_CHECKING:
|
||||||
|
from .beatmap import Beatmap
|
||||||
|
from .score import Score
|
||||||
|
|
||||||
|
|
||||||
|
class BestScore(SQLModel, table=True):
|
||||||
|
__tablename__: str = "best_scores"
|
||||||
|
user_id: int = Field(sa_column=Column(BigInteger, ForeignKey("lazer_users.id"), index=True))
|
||||||
|
score_id: int = Field(sa_column=Column(BigInteger, ForeignKey("scores.id"), primary_key=True))
|
||||||
|
beatmap_id: int = Field(foreign_key="beatmaps.id", index=True)
|
||||||
|
gamemode: GameMode = Field(index=True)
|
||||||
|
pp: float = Field(
|
||||||
|
sa_column=Column(Float, default=0),
|
||||||
|
)
|
||||||
|
acc: float = Field(
|
||||||
|
sa_column=Column(Float, default=0),
|
||||||
|
)
|
||||||
|
|
||||||
|
user: User = Relationship()
|
||||||
|
score: "Score" = Relationship(
|
||||||
|
back_populates="ranked_score",
|
||||||
|
)
|
||||||
|
beatmap: "Beatmap" = Relationship()
|
||||||
|
|
||||||
|
async def delete(self, session: AsyncSession):
|
||||||
|
from .score import calculate_user_pp
|
||||||
|
|
||||||
|
gamemode = self.gamemode
|
||||||
|
user_id = self.user_id
|
||||||
|
await session.delete(self)
|
||||||
|
await session.flush()
|
||||||
|
|
||||||
|
statistics = await session.exec(
|
||||||
|
select(UserStatistics).where(UserStatistics.user_id == user_id, UserStatistics.mode == gamemode)
|
||||||
|
)
|
||||||
|
statistics = statistics.first()
|
||||||
|
if statistics:
|
||||||
|
statistics.pp, statistics.hit_accuracy = await calculate_user_pp(session, statistics.user_id, gamemode)
|
||||||
@@ -1,12 +1,14 @@
|
|||||||
from datetime import UTC, datetime
|
from datetime import datetime
|
||||||
from enum import Enum
|
from enum import Enum
|
||||||
from typing import Self
|
from typing import TYPE_CHECKING, ClassVar, NotRequired, TypedDict
|
||||||
|
|
||||||
from app.database.lazer_user import RANKING_INCLUDES, User, UserResp
|
|
||||||
from app.models.model import UTCBaseModel
|
from app.models.model import UTCBaseModel
|
||||||
|
from app.utils import utcnow
|
||||||
|
|
||||||
|
from ._base import DatabaseModel, included, ondemand
|
||||||
|
from .user import User, UserDict, UserModel
|
||||||
|
|
||||||
from pydantic import BaseModel
|
from pydantic import BaseModel
|
||||||
from redis.asyncio import Redis
|
|
||||||
from sqlmodel import (
|
from sqlmodel import (
|
||||||
VARCHAR,
|
VARCHAR,
|
||||||
BigInteger,
|
BigInteger,
|
||||||
@@ -21,6 +23,8 @@ from sqlmodel import (
|
|||||||
)
|
)
|
||||||
from sqlmodel.ext.asyncio.session import AsyncSession
|
from sqlmodel.ext.asyncio.session import AsyncSession
|
||||||
|
|
||||||
|
if TYPE_CHECKING:
|
||||||
|
from app.router.notification.server import ChatServer
|
||||||
# ChatChannel
|
# ChatChannel
|
||||||
|
|
||||||
|
|
||||||
@@ -43,66 +47,57 @@ class ChannelType(str, Enum):
|
|||||||
TEAM = "TEAM"
|
TEAM = "TEAM"
|
||||||
|
|
||||||
|
|
||||||
class ChatChannelBase(SQLModel):
|
class MessageType(str, Enum):
|
||||||
name: str = Field(sa_column=Column(VARCHAR(50), index=True))
|
ACTION = "action"
|
||||||
|
MARKDOWN = "markdown"
|
||||||
|
PLAIN = "plain"
|
||||||
|
|
||||||
|
|
||||||
|
class ChatChannelDict(TypedDict):
|
||||||
|
channel_id: int
|
||||||
|
description: str
|
||||||
|
name: str
|
||||||
|
icon: str | None
|
||||||
|
type: ChannelType
|
||||||
|
uuid: NotRequired[str | None]
|
||||||
|
message_length_limit: NotRequired[int]
|
||||||
|
moderated: NotRequired[bool]
|
||||||
|
current_user_attributes: NotRequired[ChatUserAttributes]
|
||||||
|
last_read_id: NotRequired[int | None]
|
||||||
|
last_message_id: NotRequired[int | None]
|
||||||
|
recent_messages: NotRequired[list["ChatMessageDict"]]
|
||||||
|
users: NotRequired[list[int]]
|
||||||
|
|
||||||
|
|
||||||
|
class ChatChannelModel(DatabaseModel[ChatChannelDict]):
|
||||||
|
CONVERSATION_INCLUDES: ClassVar[list[str]] = [
|
||||||
|
"last_message_id",
|
||||||
|
"users",
|
||||||
|
]
|
||||||
|
LISTING_INCLUDES: ClassVar[list[str]] = [
|
||||||
|
*CONVERSATION_INCLUDES,
|
||||||
|
"current_user_attributes",
|
||||||
|
"last_read_id",
|
||||||
|
]
|
||||||
|
|
||||||
|
channel_id: int = Field(primary_key=True, index=True, default=None)
|
||||||
description: str = Field(sa_column=Column(VARCHAR(255), index=True))
|
description: str = Field(sa_column=Column(VARCHAR(255), index=True))
|
||||||
icon: str | None = Field(default=None)
|
icon: str | None = Field(default=None)
|
||||||
type: ChannelType = Field(index=True)
|
type: ChannelType = Field(index=True)
|
||||||
|
|
||||||
|
@included
|
||||||
|
@staticmethod
|
||||||
|
async def name(session: AsyncSession, channel: "ChatChannel", user: User, server: "ChatServer") -> str:
|
||||||
|
users = server.channels.get(channel.channel_id, [])
|
||||||
|
if channel.type == ChannelType.PM and users and len(users) == 2:
|
||||||
|
target_user_id = next(u for u in users if u != user.id)
|
||||||
|
target_name = await session.exec(select(User.username).where(User.id == target_user_id))
|
||||||
|
return target_name.one()
|
||||||
|
return channel.channel_name
|
||||||
|
|
||||||
class ChatChannel(ChatChannelBase, table=True):
|
@included
|
||||||
__tablename__ = "chat_channels" # pyright: ignore[reportAssignmentType]
|
@staticmethod
|
||||||
channel_id: int | None = Field(primary_key=True, index=True, default=None)
|
async def moderated(session: AsyncSession, channel: "ChatChannel", user: User) -> bool:
|
||||||
|
|
||||||
@classmethod
|
|
||||||
async def get(
|
|
||||||
cls, channel: str | int, session: AsyncSession
|
|
||||||
) -> "ChatChannel | None":
|
|
||||||
if isinstance(channel, int) or channel.isdigit():
|
|
||||||
# 使用查询而不是 get() 来确保对象完全加载
|
|
||||||
result = await session.exec(
|
|
||||||
select(ChatChannel).where(ChatChannel.channel_id == int(channel))
|
|
||||||
)
|
|
||||||
channel_ = result.first()
|
|
||||||
if channel_ is not None:
|
|
||||||
return channel_
|
|
||||||
result = await session.exec(
|
|
||||||
select(ChatChannel).where(ChatChannel.name == channel)
|
|
||||||
)
|
|
||||||
return result.first()
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
async def get_pm_channel(
|
|
||||||
cls, user1: int, user2: int, session: AsyncSession
|
|
||||||
) -> "ChatChannel | None":
|
|
||||||
channel = await cls.get(f"pm_{user1}_{user2}", session)
|
|
||||||
if channel is None:
|
|
||||||
channel = await cls.get(f"pm_{user2}_{user1}", session)
|
|
||||||
return channel
|
|
||||||
|
|
||||||
|
|
||||||
class ChatChannelResp(ChatChannelBase):
|
|
||||||
channel_id: int
|
|
||||||
moderated: bool = False
|
|
||||||
uuid: str | None = None
|
|
||||||
current_user_attributes: ChatUserAttributes | None = None
|
|
||||||
last_read_id: int | None = None
|
|
||||||
last_message_id: int | None = None
|
|
||||||
recent_messages: list["ChatMessageResp"] = Field(default_factory=list)
|
|
||||||
users: list[int] = Field(default_factory=list)
|
|
||||||
message_length_limit: int = 1000
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
async def from_db(
|
|
||||||
cls,
|
|
||||||
channel: ChatChannel,
|
|
||||||
session: AsyncSession,
|
|
||||||
user: User,
|
|
||||||
redis: Redis,
|
|
||||||
users: list[int] | None = None,
|
|
||||||
include_recent_messages: bool = False,
|
|
||||||
) -> Self:
|
|
||||||
c = cls.model_validate(channel)
|
|
||||||
silence = (
|
silence = (
|
||||||
await session.exec(
|
await session.exec(
|
||||||
select(SilenceUser).where(
|
select(SilenceUser).where(
|
||||||
@@ -112,126 +107,175 @@ class ChatChannelResp(ChatChannelBase):
|
|||||||
)
|
)
|
||||||
).first()
|
).first()
|
||||||
|
|
||||||
last_msg = await redis.get(f"chat:{channel.channel_id}:last_msg")
|
return silence is not None
|
||||||
if last_msg and last_msg.isdigit():
|
|
||||||
last_msg = int(last_msg)
|
|
||||||
else:
|
|
||||||
last_msg = None
|
|
||||||
|
|
||||||
last_read_id = await redis.get(f"chat:{channel.channel_id}:last_read:{user.id}")
|
@ondemand
|
||||||
if last_read_id and last_read_id.isdigit():
|
@staticmethod
|
||||||
last_read_id = int(last_read_id)
|
async def current_user_attributes(
|
||||||
else:
|
session: AsyncSession,
|
||||||
last_read_id = last_msg
|
channel: "ChatChannel",
|
||||||
|
user: User,
|
||||||
|
) -> ChatUserAttributes:
|
||||||
|
from app.dependencies.database import get_redis
|
||||||
|
|
||||||
if silence is not None:
|
silence = (
|
||||||
attribute = ChatUserAttributes(
|
await session.exec(
|
||||||
can_message=False,
|
select(SilenceUser).where(
|
||||||
can_message_error=silence.reason or "You are muted in this channel.",
|
SilenceUser.channel_id == channel.channel_id,
|
||||||
last_read_id=last_read_id or 0,
|
SilenceUser.user_id == user.id,
|
||||||
)
|
|
||||||
c.moderated = True
|
|
||||||
else:
|
|
||||||
attribute = ChatUserAttributes(
|
|
||||||
can_message=True,
|
|
||||||
last_read_id=last_read_id or 0,
|
|
||||||
)
|
|
||||||
c.moderated = False
|
|
||||||
|
|
||||||
c.current_user_attributes = attribute
|
|
||||||
if c.type != ChannelType.PUBLIC and users is not None:
|
|
||||||
c.users = users
|
|
||||||
c.last_message_id = last_msg
|
|
||||||
c.last_read_id = last_read_id
|
|
||||||
|
|
||||||
if include_recent_messages:
|
|
||||||
messages = (
|
|
||||||
await session.exec(
|
|
||||||
select(ChatMessage)
|
|
||||||
.where(ChatMessage.channel_id == channel.channel_id)
|
|
||||||
.order_by(col(ChatMessage.timestamp).desc())
|
|
||||||
.limit(10)
|
|
||||||
)
|
)
|
||||||
).all()
|
|
||||||
c.recent_messages = [
|
|
||||||
await ChatMessageResp.from_db(msg, session, user) for msg in messages
|
|
||||||
]
|
|
||||||
c.recent_messages.reverse()
|
|
||||||
|
|
||||||
if c.type == ChannelType.PM and users and len(users) == 2:
|
|
||||||
target_user_id = next(u for u in users if u != user.id)
|
|
||||||
target_name = await session.exec(
|
|
||||||
select(User.username).where(User.id == target_user_id)
|
|
||||||
)
|
)
|
||||||
c.name = target_name.one()
|
).first()
|
||||||
assert user.id
|
can_message = silence is None
|
||||||
c.users = [target_user_id, user.id]
|
can_message_error = "You are silenced in this channel" if not can_message else None
|
||||||
return c
|
|
||||||
|
redis = get_redis()
|
||||||
|
last_read_id_raw = await redis.get(f"chat:{channel.channel_id}:last_read:{user.id}")
|
||||||
|
last_msg_raw = await redis.get(f"chat:{channel.channel_id}:last_msg")
|
||||||
|
last_msg = int(last_msg_raw) if last_msg_raw and last_msg_raw.isdigit() else None
|
||||||
|
last_read_id = int(last_read_id_raw) if last_read_id_raw and last_read_id_raw.isdigit() else (last_msg or 0)
|
||||||
|
|
||||||
|
return ChatUserAttributes(
|
||||||
|
can_message=can_message,
|
||||||
|
can_message_error=can_message_error,
|
||||||
|
last_read_id=last_read_id,
|
||||||
|
)
|
||||||
|
|
||||||
|
@ondemand
|
||||||
|
@staticmethod
|
||||||
|
async def last_read_id(_session: AsyncSession, channel: "ChatChannel", user: User) -> int | None:
|
||||||
|
from app.dependencies.database import get_redis
|
||||||
|
|
||||||
|
redis = get_redis()
|
||||||
|
last_read_id_raw = await redis.get(f"chat:{channel.channel_id}:last_read:{user.id}")
|
||||||
|
last_msg_raw = await redis.get(f"chat:{channel.channel_id}:last_msg")
|
||||||
|
last_msg = int(last_msg_raw) if last_msg_raw and last_msg_raw.isdigit() else None
|
||||||
|
return int(last_read_id_raw) if last_read_id_raw and last_read_id_raw.isdigit() else last_msg
|
||||||
|
|
||||||
|
@ondemand
|
||||||
|
@staticmethod
|
||||||
|
async def last_message_id(_session: AsyncSession, channel: "ChatChannel") -> int | None:
|
||||||
|
from app.dependencies.database import get_redis
|
||||||
|
|
||||||
|
redis = get_redis()
|
||||||
|
last_msg_raw = await redis.get(f"chat:{channel.channel_id}:last_msg")
|
||||||
|
return int(last_msg_raw) if last_msg_raw and last_msg_raw.isdigit() else None
|
||||||
|
|
||||||
|
@ondemand
|
||||||
|
@staticmethod
|
||||||
|
async def recent_messages(
|
||||||
|
session: AsyncSession,
|
||||||
|
channel: "ChatChannel",
|
||||||
|
) -> list["ChatMessageDict"]:
|
||||||
|
messages = (
|
||||||
|
await session.exec(
|
||||||
|
select(ChatMessage)
|
||||||
|
.where(ChatMessage.channel_id == channel.channel_id)
|
||||||
|
.order_by(col(ChatMessage.message_id).desc())
|
||||||
|
.limit(50)
|
||||||
|
)
|
||||||
|
).all()
|
||||||
|
result = [
|
||||||
|
await ChatMessageModel.transform(
|
||||||
|
msg,
|
||||||
|
)
|
||||||
|
for msg in reversed(messages)
|
||||||
|
]
|
||||||
|
return result
|
||||||
|
|
||||||
|
@ondemand
|
||||||
|
@staticmethod
|
||||||
|
async def users(
|
||||||
|
_session: AsyncSession,
|
||||||
|
channel: "ChatChannel",
|
||||||
|
server: "ChatServer",
|
||||||
|
user: User,
|
||||||
|
) -> list[int]:
|
||||||
|
if channel.type == ChannelType.PUBLIC:
|
||||||
|
return []
|
||||||
|
users = server.channels.get(channel.channel_id, []).copy()
|
||||||
|
if channel.type == ChannelType.PM and users and len(users) == 2:
|
||||||
|
target_user_id = next(u for u in users if u != user.id)
|
||||||
|
users = [target_user_id, user.id]
|
||||||
|
return users
|
||||||
|
|
||||||
|
@included
|
||||||
|
@staticmethod
|
||||||
|
async def message_length_limit(_session: AsyncSession, _channel: "ChatChannel") -> int:
|
||||||
|
return 1000
|
||||||
|
|
||||||
|
|
||||||
|
class ChatChannel(ChatChannelModel, table=True):
|
||||||
|
__tablename__: str = "chat_channels"
|
||||||
|
|
||||||
|
channel_name: str = Field(sa_column=Column(name="name", type_=VARCHAR(50), index=True))
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
async def get(cls, channel: str | int, session: AsyncSession) -> "ChatChannel | None":
|
||||||
|
if isinstance(channel, int) or channel.isdigit():
|
||||||
|
# 使用查询而不是 get() 来确保对象完全加载
|
||||||
|
result = await session.exec(select(ChatChannel).where(ChatChannel.channel_id == int(channel)))
|
||||||
|
channel_ = result.first()
|
||||||
|
if channel_ is not None:
|
||||||
|
return channel_
|
||||||
|
result = await session.exec(select(ChatChannel).where(ChatChannel.channel_name == channel))
|
||||||
|
return result.first()
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
async def get_pm_channel(cls, user1: int, user2: int, session: AsyncSession) -> "ChatChannel | None":
|
||||||
|
channel = await cls.get(f"pm_{user1}_{user2}", session)
|
||||||
|
if channel is None:
|
||||||
|
channel = await cls.get(f"pm_{user2}_{user1}", session)
|
||||||
|
return channel
|
||||||
|
|
||||||
|
|
||||||
# ChatMessage
|
# ChatMessage
|
||||||
|
class ChatMessageDict(TypedDict):
|
||||||
|
channel_id: int
|
||||||
|
content: str
|
||||||
|
message_id: int
|
||||||
|
sender_id: int
|
||||||
|
timestamp: datetime
|
||||||
|
type: MessageType
|
||||||
|
uuid: str | None
|
||||||
|
is_action: NotRequired[bool]
|
||||||
|
sender: NotRequired[UserDict]
|
||||||
|
|
||||||
|
|
||||||
class MessageType(str, Enum):
|
class ChatMessageModel(DatabaseModel[ChatMessageDict]):
|
||||||
ACTION = "action"
|
|
||||||
MARKDOWN = "markdown"
|
|
||||||
PLAIN = "plain"
|
|
||||||
|
|
||||||
|
|
||||||
class ChatMessageBase(UTCBaseModel, SQLModel):
|
|
||||||
channel_id: int = Field(index=True, foreign_key="chat_channels.channel_id")
|
channel_id: int = Field(index=True, foreign_key="chat_channels.channel_id")
|
||||||
content: str = Field(sa_column=Column(VARCHAR(1000)))
|
content: str = Field(sa_column=Column(VARCHAR(1000)))
|
||||||
message_id: int | None = Field(index=True, primary_key=True, default=None)
|
message_id: int = Field(index=True, primary_key=True, default=None)
|
||||||
sender_id: int = Field(
|
sender_id: int = Field(sa_column=Column(BigInteger, ForeignKey("lazer_users.id"), index=True))
|
||||||
sa_column=Column(BigInteger, ForeignKey("lazer_users.id"), index=True)
|
timestamp: datetime = Field(sa_column=Column(DateTime, index=True), default_factory=utcnow)
|
||||||
)
|
|
||||||
timestamp: datetime = Field(
|
|
||||||
sa_column=Column(DateTime, index=True), default=datetime.now(UTC)
|
|
||||||
)
|
|
||||||
type: MessageType = Field(default=MessageType.PLAIN, index=True, exclude=True)
|
type: MessageType = Field(default=MessageType.PLAIN, index=True, exclude=True)
|
||||||
uuid: str | None = Field(default=None)
|
uuid: str | None = Field(default=None)
|
||||||
|
|
||||||
|
@included
|
||||||
|
@staticmethod
|
||||||
|
async def is_action(_session: AsyncSession, db_message: "ChatMessage") -> bool:
|
||||||
|
return db_message.type == MessageType.ACTION
|
||||||
|
|
||||||
class ChatMessage(ChatMessageBase, table=True):
|
@ondemand
|
||||||
__tablename__ = "chat_messages" # pyright: ignore[reportAssignmentType]
|
@staticmethod
|
||||||
|
async def sender(_session: AsyncSession, db_message: "ChatMessage") -> UserDict:
|
||||||
|
return await UserModel.transform(db_message.user)
|
||||||
|
|
||||||
|
|
||||||
|
class ChatMessage(ChatMessageModel, table=True):
|
||||||
|
__tablename__: str = "chat_messages"
|
||||||
user: User = Relationship(sa_relationship_kwargs={"lazy": "joined"})
|
user: User = Relationship(sa_relationship_kwargs={"lazy": "joined"})
|
||||||
channel: ChatChannel = Relationship()
|
channel: "ChatChannel" = Relationship()
|
||||||
|
|
||||||
|
|
||||||
class ChatMessageResp(ChatMessageBase):
|
|
||||||
sender: UserResp | None = None
|
|
||||||
is_action: bool = False
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
async def from_db(
|
|
||||||
cls, db_message: ChatMessage, session: AsyncSession, user: User | None = None
|
|
||||||
) -> "ChatMessageResp":
|
|
||||||
m = cls.model_validate(db_message.model_dump())
|
|
||||||
m.is_action = db_message.type == MessageType.ACTION
|
|
||||||
if user:
|
|
||||||
m.sender = await UserResp.from_db(user, session, RANKING_INCLUDES)
|
|
||||||
else:
|
|
||||||
m.sender = await UserResp.from_db(
|
|
||||||
db_message.user, session, RANKING_INCLUDES
|
|
||||||
)
|
|
||||||
return m
|
|
||||||
|
|
||||||
|
|
||||||
# SilenceUser
|
|
||||||
|
|
||||||
|
|
||||||
class SilenceUser(UTCBaseModel, SQLModel, table=True):
|
class SilenceUser(UTCBaseModel, SQLModel, table=True):
|
||||||
__tablename__ = "chat_silence_users" # pyright: ignore[reportAssignmentType]
|
__tablename__: str = "chat_silence_users"
|
||||||
id: int | None = Field(primary_key=True, default=None, index=True)
|
id: int = Field(primary_key=True, default=None, index=True)
|
||||||
user_id: int = Field(
|
user_id: int = Field(sa_column=Column(BigInteger, ForeignKey("lazer_users.id"), index=True))
|
||||||
sa_column=Column(BigInteger, ForeignKey("lazer_users.id"), index=True)
|
|
||||||
)
|
|
||||||
channel_id: int = Field(foreign_key="chat_channels.channel_id", index=True)
|
channel_id: int = Field(foreign_key="chat_channels.channel_id", index=True)
|
||||||
until: datetime | None = Field(sa_column=Column(DateTime, index=True), default=None)
|
until: datetime | None = Field(sa_column=Column(DateTime, index=True), default=None)
|
||||||
reason: str | None = Field(default=None, sa_column=Column(VARCHAR(255), index=True))
|
reason: str | None = Field(default=None, sa_column=Column(VARCHAR(255), index=True))
|
||||||
banned_at: datetime = Field(
|
banned_at: datetime = Field(sa_column=Column(DateTime, index=True), default_factory=utcnow)
|
||||||
sa_column=Column(DateTime, index=True), default=datetime.now(UTC)
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class UserSilenceResp(SQLModel):
|
class UserSilenceResp(SQLModel):
|
||||||
@@ -240,7 +284,6 @@ class UserSilenceResp(SQLModel):
|
|||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def from_db(cls, db_silence: SilenceUser) -> "UserSilenceResp":
|
def from_db(cls, db_silence: SilenceUser) -> "UserSilenceResp":
|
||||||
assert db_silence.id is not None
|
|
||||||
return cls(
|
return cls(
|
||||||
id=db_silence.id,
|
id=db_silence.id,
|
||||||
user_id=db_silence.user_id,
|
user_id=db_silence.user_id,
|
||||||
|
|||||||
@@ -11,7 +11,7 @@ from sqlmodel import (
|
|||||||
)
|
)
|
||||||
|
|
||||||
if TYPE_CHECKING:
|
if TYPE_CHECKING:
|
||||||
from .lazer_user import User
|
from .user import User
|
||||||
|
|
||||||
|
|
||||||
class CountBase(SQLModel):
|
class CountBase(SQLModel):
|
||||||
@@ -21,28 +21,24 @@ class CountBase(SQLModel):
|
|||||||
|
|
||||||
|
|
||||||
class MonthlyPlaycounts(CountBase, table=True):
|
class MonthlyPlaycounts(CountBase, table=True):
|
||||||
__tablename__ = "monthly_playcounts" # pyright: ignore[reportAssignmentType]
|
__tablename__: str = "monthly_playcounts"
|
||||||
|
|
||||||
id: int | None = Field(
|
id: int | None = Field(
|
||||||
default=None,
|
default=None,
|
||||||
sa_column=Column(BigInteger, primary_key=True, autoincrement=True),
|
sa_column=Column(BigInteger, primary_key=True, autoincrement=True),
|
||||||
)
|
)
|
||||||
user_id: int = Field(
|
user_id: int = Field(sa_column=Column(BigInteger, ForeignKey("lazer_users.id"), index=True))
|
||||||
sa_column=Column(BigInteger, ForeignKey("lazer_users.id"), index=True)
|
|
||||||
)
|
|
||||||
user: "User" = Relationship(back_populates="monthly_playcounts")
|
user: "User" = Relationship(back_populates="monthly_playcounts")
|
||||||
|
|
||||||
|
|
||||||
class ReplayWatchedCount(CountBase, table=True):
|
class ReplayWatchedCount(CountBase, table=True):
|
||||||
__tablename__ = "replays_watched_counts" # pyright: ignore[reportAssignmentType]
|
__tablename__: str = "replays_watched_counts"
|
||||||
|
|
||||||
id: int | None = Field(
|
id: int | None = Field(
|
||||||
default=None,
|
default=None,
|
||||||
sa_column=Column(BigInteger, primary_key=True, autoincrement=True),
|
sa_column=Column(BigInteger, primary_key=True, autoincrement=True),
|
||||||
)
|
)
|
||||||
user_id: int = Field(
|
user_id: int = Field(sa_column=Column(BigInteger, ForeignKey("lazer_users.id"), index=True))
|
||||||
sa_column=Column(BigInteger, ForeignKey("lazer_users.id"), index=True)
|
|
||||||
)
|
|
||||||
user: "User" = Relationship(back_populates="replays_watched_counts")
|
user: "User" = Relationship(back_populates="replays_watched_counts")
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -2,7 +2,7 @@ from datetime import UTC, datetime, timedelta
|
|||||||
from typing import TYPE_CHECKING
|
from typing import TYPE_CHECKING
|
||||||
|
|
||||||
from app.models.model import UTCBaseModel
|
from app.models.model import UTCBaseModel
|
||||||
from app.utils import are_adjacent_weeks
|
from app.utils import are_adjacent_weeks, utcnow
|
||||||
|
|
||||||
from sqlmodel import (
|
from sqlmodel import (
|
||||||
BigInteger,
|
BigInteger,
|
||||||
@@ -17,16 +17,15 @@ from sqlmodel import (
|
|||||||
from sqlmodel.ext.asyncio.session import AsyncSession
|
from sqlmodel.ext.asyncio.session import AsyncSession
|
||||||
|
|
||||||
if TYPE_CHECKING:
|
if TYPE_CHECKING:
|
||||||
from .lazer_user import User
|
from .user import User
|
||||||
|
|
||||||
|
|
||||||
class DailyChallengeStatsBase(SQLModel, UTCBaseModel):
|
class DailyChallengeStatsBase(SQLModel, UTCBaseModel):
|
||||||
daily_streak_best: int = Field(default=0)
|
daily_streak_best: int = Field(default=0)
|
||||||
daily_streak_current: int = Field(default=0)
|
daily_streak_current: int = Field(default=0)
|
||||||
last_update: datetime | None = Field(default=None, sa_column=Column(DateTime))
|
last_update: datetime | None = Field(default=None, sa_column=Column(DateTime))
|
||||||
last_weekly_streak: datetime | None = Field(
|
last_day_streak: datetime | None = Field(default=None, sa_column=Column(DateTime), exclude=True)
|
||||||
default=None, sa_column=Column(DateTime)
|
last_weekly_streak: datetime | None = Field(default=None, sa_column=Column(DateTime))
|
||||||
)
|
|
||||||
playcount: int = Field(default=0)
|
playcount: int = Field(default=0)
|
||||||
top_10p_placements: int = Field(default=0)
|
top_10p_placements: int = Field(default=0)
|
||||||
top_50p_placements: int = Field(default=0)
|
top_50p_placements: int = Field(default=0)
|
||||||
@@ -35,7 +34,7 @@ class DailyChallengeStatsBase(SQLModel, UTCBaseModel):
|
|||||||
|
|
||||||
|
|
||||||
class DailyChallengeStats(DailyChallengeStatsBase, table=True):
|
class DailyChallengeStats(DailyChallengeStatsBase, table=True):
|
||||||
__tablename__ = "daily_challenge_stats" # pyright: ignore[reportAssignmentType]
|
__tablename__: str = "daily_challenge_stats"
|
||||||
|
|
||||||
user_id: int | None = Field(
|
user_id: int | None = Field(
|
||||||
default=None,
|
default=None,
|
||||||
@@ -58,12 +57,12 @@ class DailyChallengeStatsResp(DailyChallengeStatsBase):
|
|||||||
cls,
|
cls,
|
||||||
obj: DailyChallengeStats,
|
obj: DailyChallengeStats,
|
||||||
) -> "DailyChallengeStatsResp":
|
) -> "DailyChallengeStatsResp":
|
||||||
return cls.model_validate(obj)
|
stats = cls.model_validate(obj)
|
||||||
|
stats.last_update = obj.last_day_streak
|
||||||
|
return stats
|
||||||
|
|
||||||
|
|
||||||
async def process_daily_challenge_score(
|
async def process_daily_challenge_score(session: AsyncSession, user_id: int, room_id: int):
|
||||||
session: AsyncSession, user_id: int, room_id: int
|
|
||||||
):
|
|
||||||
from .playlist_best_score import PlaylistBestScore
|
from .playlist_best_score import PlaylistBestScore
|
||||||
|
|
||||||
score = (
|
score = (
|
||||||
@@ -83,7 +82,7 @@ async def process_daily_challenge_score(
|
|||||||
session.add(stats)
|
session.add(stats)
|
||||||
|
|
||||||
stats.playcount += 1
|
stats.playcount += 1
|
||||||
now = datetime.now(UTC)
|
now = utcnow()
|
||||||
if stats.last_update is None:
|
if stats.last_update is None:
|
||||||
stats.daily_streak_best = 1
|
stats.daily_streak_best = 1
|
||||||
stats.daily_streak_current = 1
|
stats.daily_streak_current = 1
|
||||||
@@ -107,4 +106,5 @@ async def process_daily_challenge_score(
|
|||||||
else:
|
else:
|
||||||
stats.weekly_streak_current = 1
|
stats.weekly_streak_current = 1
|
||||||
stats.last_update = now
|
stats.last_update = now
|
||||||
|
stats.last_day_streak = now
|
||||||
stats.last_weekly_streak = now
|
stats.last_weekly_streak = now
|
||||||
|
|||||||
@@ -1,44 +0,0 @@
|
|||||||
"""
|
|
||||||
邮件验证相关数据库模型
|
|
||||||
"""
|
|
||||||
|
|
||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
from datetime import datetime, UTC
|
|
||||||
from sqlmodel import SQLModel, Field
|
|
||||||
from sqlalchemy import Column, BigInteger, ForeignKey
|
|
||||||
|
|
||||||
|
|
||||||
class EmailVerification(SQLModel, table=True):
|
|
||||||
"""邮件验证记录"""
|
|
||||||
|
|
||||||
__tablename__: str = "email_verifications"
|
|
||||||
|
|
||||||
id: int | None = Field(default=None, primary_key=True)
|
|
||||||
user_id: int = Field(sa_column=Column(BigInteger, ForeignKey("lazer_users.id"), nullable=False, index=True))
|
|
||||||
email: str = Field(index=True)
|
|
||||||
verification_code: str = Field(max_length=8) # 8位验证码
|
|
||||||
created_at: datetime = Field(default_factory=lambda: datetime.now(UTC))
|
|
||||||
expires_at: datetime = Field() # 验证码过期时间
|
|
||||||
is_used: bool = Field(default=False) # 是否已使用
|
|
||||||
used_at: datetime | None = Field(default=None)
|
|
||||||
ip_address: str | None = Field(default=None) # 请求IP
|
|
||||||
user_agent: str | None = Field(default=None) # 用户代理
|
|
||||||
|
|
||||||
|
|
||||||
class LoginSession(SQLModel, table=True):
|
|
||||||
"""登录会话记录"""
|
|
||||||
|
|
||||||
__tablename__: str = "login_sessions"
|
|
||||||
|
|
||||||
id: int | None = Field(default=None, primary_key=True)
|
|
||||||
user_id: int = Field(sa_column=Column(BigInteger, ForeignKey("lazer_users.id"), nullable=False, index=True))
|
|
||||||
session_token: str = Field(unique=True, index=True) # 会话令牌
|
|
||||||
ip_address: str = Field() # 登录IP
|
|
||||||
user_agent: str | None = Field(default=None)
|
|
||||||
country_code: str | None = Field(default=None)
|
|
||||||
is_verified: bool = Field(default=False) # 是否已验证
|
|
||||||
created_at: datetime = Field(default_factory=lambda: datetime.now(UTC))
|
|
||||||
verified_at: datetime | None = Field(default=None)
|
|
||||||
expires_at: datetime = Field() # 会话过期时间
|
|
||||||
is_new_location: bool = Field(default=False) # 是否新位置登录
|
|
||||||
@@ -2,6 +2,10 @@ from datetime import UTC, datetime
|
|||||||
from enum import Enum
|
from enum import Enum
|
||||||
from typing import TYPE_CHECKING
|
from typing import TYPE_CHECKING
|
||||||
|
|
||||||
|
from app.models.model import UTCBaseModel
|
||||||
|
from app.utils import utcnow
|
||||||
|
|
||||||
|
from pydantic import model_serializer
|
||||||
from sqlmodel import (
|
from sqlmodel import (
|
||||||
JSON,
|
JSON,
|
||||||
BigInteger,
|
BigInteger,
|
||||||
@@ -14,7 +18,7 @@ from sqlmodel import (
|
|||||||
)
|
)
|
||||||
|
|
||||||
if TYPE_CHECKING:
|
if TYPE_CHECKING:
|
||||||
from .lazer_user import User
|
from .user import User
|
||||||
|
|
||||||
|
|
||||||
class EventType(str, Enum):
|
class EventType(str, Enum):
|
||||||
@@ -34,30 +38,47 @@ class EventType(str, Enum):
|
|||||||
USERNAME_CHANGE = "username_change"
|
USERNAME_CHANGE = "username_change"
|
||||||
|
|
||||||
|
|
||||||
class EventBase(SQLModel):
|
class Event(UTCBaseModel, SQLModel, table=True):
|
||||||
|
__tablename__: str = "user_events"
|
||||||
id: int = Field(default=None, primary_key=True)
|
id: int = Field(default=None, primary_key=True)
|
||||||
created_at: datetime = Field(
|
created_at: datetime = Field(default_factory=utcnow, sa_column=Column(DateTime(timezone=True)))
|
||||||
sa_column=Column(DateTime(timezone=True), default=datetime.now(UTC))
|
|
||||||
)
|
|
||||||
type: EventType
|
type: EventType
|
||||||
event_payload: dict = Field(
|
event_payload: dict = Field(exclude=True, default_factory=dict, sa_column=Column(JSON))
|
||||||
exclude=True, default_factory=dict, sa_column=Column(JSON)
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class Event(EventBase, table=True):
|
|
||||||
__tablename__ = "user_events" # pyright: ignore[reportAssignmentType]
|
|
||||||
user_id: int | None = Field(
|
user_id: int | None = Field(
|
||||||
default=None,
|
default=None,
|
||||||
sa_column=Column(BigInteger, ForeignKey("lazer_users.id"), index=True),
|
sa_column=Column(BigInteger, ForeignKey("lazer_users.id"), index=True),
|
||||||
)
|
)
|
||||||
user: "User" = Relationship(back_populates="events")
|
user: "User" = Relationship(back_populates="events")
|
||||||
|
|
||||||
|
@model_serializer
|
||||||
|
def serialize(self) -> dict:
|
||||||
|
d = {
|
||||||
|
"id": self.id,
|
||||||
|
"createdAt": self.created_at.replace(tzinfo=UTC).isoformat(),
|
||||||
|
"type": self.type.value,
|
||||||
|
}
|
||||||
|
|
||||||
class EventResp(EventBase):
|
# 临时修复:统一成就事件格式 (TODO: 可在数据迁移完成后移除)
|
||||||
def merge_payload(self) -> "EventResp":
|
if self.type == EventType.ACHIEVEMENT and "achievement" in self.event_payload:
|
||||||
for key, value in self.event_payload.items():
|
achievement_data = self.event_payload["achievement"]
|
||||||
setattr(self, key, value)
|
if "achievement_id" in achievement_data and (
|
||||||
return self
|
"name" not in achievement_data or "slug" not in achievement_data
|
||||||
|
):
|
||||||
|
from app.models.achievement import MEDALS
|
||||||
|
|
||||||
pass
|
achievement_id = achievement_data["achievement_id"]
|
||||||
|
for medal in MEDALS:
|
||||||
|
if medal.id == achievement_id:
|
||||||
|
fixed_payload = dict(self.event_payload)
|
||||||
|
fixed_payload["achievement"] = {"name": medal.name, "slug": medal.assets_id}
|
||||||
|
for k, v in fixed_payload.items():
|
||||||
|
d[k] = v
|
||||||
|
return d
|
||||||
|
|
||||||
|
for k, v in self.event_payload.items():
|
||||||
|
d[k] = v
|
||||||
|
else:
|
||||||
|
for k, v in self.event_payload.items():
|
||||||
|
d[k] = v
|
||||||
|
|
||||||
|
return d
|
||||||
|
|||||||
@@ -16,8 +16,8 @@ FAILTIME_STRUCT = Struct("<100i")
|
|||||||
|
|
||||||
|
|
||||||
class FailTime(SQLModel, table=True):
|
class FailTime(SQLModel, table=True):
|
||||||
__tablename__ = "failtime" # pyright: ignore[reportAssignmentType]
|
__tablename__: str = "failtime"
|
||||||
beatmap_id: int = Field(primary_key=True, index=True, foreign_key="beatmaps.id")
|
beatmap_id: int = Field(primary_key=True, foreign_key="beatmaps.id")
|
||||||
exit: bytes = Field(sa_column=Column(VARBINARY(400), nullable=False))
|
exit: bytes = Field(sa_column=Column(VARBINARY(400), nullable=False))
|
||||||
fail: bytes = Field(sa_column=Column(VARBINARY(400), nullable=False))
|
fail: bytes = Field(sa_column=Column(VARBINARY(400), nullable=False))
|
||||||
|
|
||||||
@@ -41,12 +41,8 @@ class FailTime(SQLModel, table=True):
|
|||||||
|
|
||||||
|
|
||||||
class FailTimeResp(BaseModel):
|
class FailTimeResp(BaseModel):
|
||||||
exit: list[int] = Field(
|
exit: list[int] = Field(default_factory=lambda: list(FAILTIME_STRUCT.unpack(b"\x00" * 400)))
|
||||||
default_factory=lambda: list(FAILTIME_STRUCT.unpack(b"\x00" * 400))
|
fail: list[int] = Field(default_factory=lambda: list(FAILTIME_STRUCT.unpack(b"\x00" * 400)))
|
||||||
)
|
|
||||||
fail: list[int] = Field(
|
|
||||||
default_factory=lambda: list(FAILTIME_STRUCT.unpack(b"\x00" * 400))
|
|
||||||
)
|
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def from_db(cls, failtime: FailTime) -> "FailTimeResp":
|
def from_db(cls, failtime: FailTime) -> "FailTimeResp":
|
||||||
|
|||||||
@@ -1,7 +1,7 @@
|
|||||||
import datetime
|
import datetime
|
||||||
|
|
||||||
from app.database.beatmapset import Beatmapset
|
from .beatmapset import Beatmapset
|
||||||
from app.database.lazer_user import User
|
from .user import User
|
||||||
|
|
||||||
from sqlalchemy.ext.asyncio import AsyncAttrs
|
from sqlalchemy.ext.asyncio import AsyncAttrs
|
||||||
from sqlmodel import (
|
from sqlmodel import (
|
||||||
@@ -16,8 +16,9 @@ from sqlmodel import (
|
|||||||
|
|
||||||
|
|
||||||
class FavouriteBeatmapset(AsyncAttrs, SQLModel, table=True):
|
class FavouriteBeatmapset(AsyncAttrs, SQLModel, table=True):
|
||||||
__tablename__ = "favourite_beatmapset" # pyright: ignore[reportAssignmentType]
|
__tablename__: str = "favourite_beatmapset"
|
||||||
id: int | None = Field(
|
|
||||||
|
id: int = Field(
|
||||||
default=None,
|
default=None,
|
||||||
sa_column=Column(BigInteger, autoincrement=True, primary_key=True),
|
sa_column=Column(BigInteger, autoincrement=True, primary_key=True),
|
||||||
exclude=True,
|
exclude=True,
|
||||||
|
|||||||
@@ -1,39 +0,0 @@
|
|||||||
"""
|
|
||||||
数据库字段类型工具
|
|
||||||
提供处理数据库和 Pydantic 之间类型转换的工具
|
|
||||||
"""
|
|
||||||
|
|
||||||
from typing import Any
|
|
||||||
|
|
||||||
from pydantic import field_validator
|
|
||||||
from sqlalchemy import Boolean
|
|
||||||
|
|
||||||
|
|
||||||
def bool_field_validator(field_name: str):
|
|
||||||
"""为特定布尔字段创建验证器,处理数据库中的 0/1 整数"""
|
|
||||||
|
|
||||||
@field_validator(field_name, mode="before")
|
|
||||||
@classmethod
|
|
||||||
def validate_bool_field(cls, v: Any) -> bool:
|
|
||||||
"""将整数 0/1 转换为布尔值"""
|
|
||||||
if isinstance(v, int):
|
|
||||||
return bool(v)
|
|
||||||
return v
|
|
||||||
|
|
||||||
return validate_bool_field
|
|
||||||
|
|
||||||
|
|
||||||
def create_bool_field(**kwargs):
|
|
||||||
"""创建一个带有正确 SQLAlchemy 列定义的布尔字段"""
|
|
||||||
from sqlmodel import Column, Field
|
|
||||||
|
|
||||||
# 如果没有指定 sa_column,则使用 Boolean 类型
|
|
||||||
if "sa_column" not in kwargs:
|
|
||||||
# 处理 index 参数
|
|
||||||
index = kwargs.pop("index", False)
|
|
||||||
if index:
|
|
||||||
kwargs["sa_column"] = Column(Boolean, index=True)
|
|
||||||
else:
|
|
||||||
kwargs["sa_column"] = Column(Boolean)
|
|
||||||
|
|
||||||
return Field(**kwargs)
|
|
||||||
@@ -1,7 +1,9 @@
|
|||||||
from .lazer_user import User, UserResp
|
from typing import Any, NotRequired, TypedDict
|
||||||
from .playlist_best_score import PlaylistBestScore
|
|
||||||
|
from ._base import DatabaseModel, ondemand
|
||||||
|
from .playlist_best_score import PlaylistBestScore
|
||||||
|
from .user import User, UserDict, UserModel
|
||||||
|
|
||||||
from pydantic import BaseModel
|
|
||||||
from sqlalchemy.ext.asyncio import AsyncAttrs
|
from sqlalchemy.ext.asyncio import AsyncAttrs
|
||||||
from sqlmodel import (
|
from sqlmodel import (
|
||||||
BigInteger,
|
BigInteger,
|
||||||
@@ -9,7 +11,6 @@ from sqlmodel import (
|
|||||||
Field,
|
Field,
|
||||||
ForeignKey,
|
ForeignKey,
|
||||||
Relationship,
|
Relationship,
|
||||||
SQLModel,
|
|
||||||
col,
|
col,
|
||||||
func,
|
func,
|
||||||
select,
|
select,
|
||||||
@@ -17,20 +18,67 @@ from sqlmodel import (
|
|||||||
from sqlmodel.ext.asyncio.session import AsyncSession
|
from sqlmodel.ext.asyncio.session import AsyncSession
|
||||||
|
|
||||||
|
|
||||||
class ItemAttemptsCountBase(SQLModel):
|
class ItemAttemptsCountDict(TypedDict):
|
||||||
room_id: int = Field(foreign_key="rooms.id", index=True)
|
accuracy: float
|
||||||
|
attempts: int
|
||||||
|
completed: int
|
||||||
|
pp: float
|
||||||
|
room_id: int
|
||||||
|
total_score: int
|
||||||
|
user_id: int
|
||||||
|
user: NotRequired[UserDict]
|
||||||
|
position: NotRequired[int]
|
||||||
|
playlist_item_attempts: NotRequired[list[dict[str, Any]]]
|
||||||
|
|
||||||
|
|
||||||
|
class ItemAttemptsCountModel(DatabaseModel[ItemAttemptsCountDict]):
|
||||||
|
accuracy: float = 0.0
|
||||||
attempts: int = Field(default=0)
|
attempts: int = Field(default=0)
|
||||||
completed: int = Field(default=0)
|
completed: int = Field(default=0)
|
||||||
user_id: int = Field(
|
|
||||||
sa_column=Column(BigInteger, ForeignKey("lazer_users.id"), index=True)
|
|
||||||
)
|
|
||||||
accuracy: float = 0.0
|
|
||||||
pp: float = 0
|
pp: float = 0
|
||||||
|
room_id: int = Field(foreign_key="rooms.id", index=True)
|
||||||
total_score: int = 0
|
total_score: int = 0
|
||||||
|
user_id: int = Field(sa_column=Column(BigInteger, ForeignKey("lazer_users.id"), index=True))
|
||||||
|
|
||||||
|
@ondemand
|
||||||
|
@staticmethod
|
||||||
|
async def user(_session: AsyncSession, item_attempts: "ItemAttemptsCount") -> UserDict:
|
||||||
|
user_instance = await item_attempts.awaitable_attrs.user
|
||||||
|
return await UserModel.transform(user_instance)
|
||||||
|
|
||||||
|
@ondemand
|
||||||
|
@staticmethod
|
||||||
|
async def position(session: AsyncSession, item_attempts: "ItemAttemptsCount") -> int:
|
||||||
|
return await item_attempts.get_position(session)
|
||||||
|
|
||||||
|
@ondemand
|
||||||
|
@staticmethod
|
||||||
|
async def playlist_item_attempts(
|
||||||
|
session: AsyncSession,
|
||||||
|
item_attempts: "ItemAttemptsCount",
|
||||||
|
) -> list[dict[str, Any]]:
|
||||||
|
playlist_scores = (
|
||||||
|
await session.exec(
|
||||||
|
select(PlaylistBestScore).where(
|
||||||
|
PlaylistBestScore.room_id == item_attempts.room_id,
|
||||||
|
PlaylistBestScore.user_id == item_attempts.user_id,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
).all()
|
||||||
|
result: list[dict[str, Any]] = []
|
||||||
|
for score in playlist_scores:
|
||||||
|
result.append(
|
||||||
|
{
|
||||||
|
"id": score.playlist_id,
|
||||||
|
"attempts": score.attempts,
|
||||||
|
"passed": score.score.passed,
|
||||||
|
}
|
||||||
|
)
|
||||||
|
return result
|
||||||
|
|
||||||
|
|
||||||
class ItemAttemptsCount(AsyncAttrs, ItemAttemptsCountBase, table=True):
|
class ItemAttemptsCount(AsyncAttrs, ItemAttemptsCountModel, table=True):
|
||||||
__tablename__ = "item_attempts_count" # pyright: ignore[reportAssignmentType]
|
__tablename__: str = "item_attempts_count"
|
||||||
id: int | None = Field(default=None, primary_key=True)
|
id: int | None = Field(default=None, primary_key=True)
|
||||||
|
|
||||||
user: User = Relationship()
|
user: User = Relationship()
|
||||||
@@ -39,15 +87,15 @@ class ItemAttemptsCount(AsyncAttrs, ItemAttemptsCountBase, table=True):
|
|||||||
rownum = (
|
rownum = (
|
||||||
func.row_number()
|
func.row_number()
|
||||||
.over(
|
.over(
|
||||||
partition_by=col(ItemAttemptsCountBase.room_id),
|
partition_by=col(ItemAttemptsCount.room_id),
|
||||||
order_by=col(ItemAttemptsCountBase.total_score).desc(),
|
order_by=col(ItemAttemptsCount.total_score).desc(),
|
||||||
)
|
)
|
||||||
.label("rn")
|
.label("rn")
|
||||||
)
|
)
|
||||||
subq = select(ItemAttemptsCountBase, rownum).subquery()
|
subq = select(ItemAttemptsCount, rownum).subquery()
|
||||||
stmt = select(subq.c.rn).where(subq.c.user_id == self.user_id)
|
stmt = select(subq.c.rn).where(subq.c.user_id == self.user_id)
|
||||||
result = await session.exec(stmt)
|
result = await session.exec(stmt)
|
||||||
return result.one()
|
return result.first() or 0
|
||||||
|
|
||||||
async def update(self, session: AsyncSession):
|
async def update(self, session: AsyncSession):
|
||||||
playlist_scores = (
|
playlist_scores = (
|
||||||
@@ -61,11 +109,10 @@ class ItemAttemptsCount(AsyncAttrs, ItemAttemptsCountBase, table=True):
|
|||||||
self.attempts = sum(score.attempts for score in playlist_scores)
|
self.attempts = sum(score.attempts for score in playlist_scores)
|
||||||
self.total_score = sum(score.total_score for score in playlist_scores)
|
self.total_score = sum(score.total_score for score in playlist_scores)
|
||||||
self.pp = sum(score.score.pp for score in playlist_scores)
|
self.pp = sum(score.score.pp for score in playlist_scores)
|
||||||
self.completed = len([score for score in playlist_scores if score.score.passed])
|
passed_scores = [score for score in playlist_scores if score.score.passed]
|
||||||
|
self.completed = len(passed_scores)
|
||||||
self.accuracy = (
|
self.accuracy = (
|
||||||
sum(score.score.accuracy for score in playlist_scores) / self.completed
|
sum(score.score.accuracy for score in passed_scores) / self.completed if self.completed > 0 else 0.0
|
||||||
if self.completed > 0
|
|
||||||
else 0.0
|
|
||||||
)
|
)
|
||||||
await session.commit()
|
await session.commit()
|
||||||
await session.refresh(self)
|
await session.refresh(self)
|
||||||
@@ -91,62 +138,3 @@ class ItemAttemptsCount(AsyncAttrs, ItemAttemptsCountBase, table=True):
|
|||||||
await session.refresh(item_attempts)
|
await session.refresh(item_attempts)
|
||||||
await item_attempts.update(session)
|
await item_attempts.update(session)
|
||||||
return item_attempts
|
return item_attempts
|
||||||
|
|
||||||
|
|
||||||
class ItemAttemptsResp(ItemAttemptsCountBase):
|
|
||||||
user: UserResp | None = None
|
|
||||||
position: int | None = None
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
async def from_db(
|
|
||||||
cls,
|
|
||||||
item_attempts: ItemAttemptsCount,
|
|
||||||
session: AsyncSession,
|
|
||||||
include: list[str] = [],
|
|
||||||
) -> "ItemAttemptsResp":
|
|
||||||
resp = cls.model_validate(item_attempts.model_dump())
|
|
||||||
resp.user = await UserResp.from_db(
|
|
||||||
await item_attempts.awaitable_attrs.user,
|
|
||||||
session=session,
|
|
||||||
include=["statistics", "team", "daily_challenge_user_stats"],
|
|
||||||
)
|
|
||||||
if "position" in include:
|
|
||||||
resp.position = await item_attempts.get_position(session)
|
|
||||||
# resp.accuracy *= 100
|
|
||||||
return resp
|
|
||||||
|
|
||||||
|
|
||||||
class ItemAttemptsCountForItem(BaseModel):
|
|
||||||
id: int
|
|
||||||
attempts: int
|
|
||||||
passed: bool
|
|
||||||
|
|
||||||
|
|
||||||
class PlaylistAggregateScore(BaseModel):
|
|
||||||
playlist_item_attempts: list[ItemAttemptsCountForItem] = Field(default_factory=list)
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
async def from_db(
|
|
||||||
cls,
|
|
||||||
room_id: int,
|
|
||||||
user_id: int,
|
|
||||||
session: AsyncSession,
|
|
||||||
) -> "PlaylistAggregateScore":
|
|
||||||
playlist_scores = (
|
|
||||||
await session.exec(
|
|
||||||
select(PlaylistBestScore).where(
|
|
||||||
PlaylistBestScore.room_id == room_id,
|
|
||||||
PlaylistBestScore.user_id == user_id,
|
|
||||||
)
|
|
||||||
)
|
|
||||||
).all()
|
|
||||||
playlist_item_attempts = []
|
|
||||||
for score in playlist_scores:
|
|
||||||
playlist_item_attempts.append(
|
|
||||||
ItemAttemptsCountForItem(
|
|
||||||
id=score.playlist_id,
|
|
||||||
attempts=score.attempts,
|
|
||||||
passed=score.score.passed,
|
|
||||||
)
|
|
||||||
)
|
|
||||||
return cls(playlist_item_attempts=playlist_item_attempts)
|
|
||||||
@@ -1,534 +0,0 @@
|
|||||||
from datetime import UTC, datetime, timedelta
|
|
||||||
import json
|
|
||||||
from typing import TYPE_CHECKING, NotRequired, TypedDict
|
|
||||||
|
|
||||||
from app.models.model import UTCBaseModel
|
|
||||||
from app.models.score import GameMode
|
|
||||||
from app.models.user import Country, Page
|
|
||||||
from app.path import STATIC_DIR
|
|
||||||
|
|
||||||
from .achievement import UserAchievement, UserAchievementResp
|
|
||||||
from .beatmap_playcounts import BeatmapPlaycounts
|
|
||||||
from .counts import CountResp, MonthlyPlaycounts, ReplayWatchedCount
|
|
||||||
from .daily_challenge import DailyChallengeStats, DailyChallengeStatsResp
|
|
||||||
from .events import Event
|
|
||||||
from .rank_history import RankHistory, RankHistoryResp, RankTop
|
|
||||||
from .statistics import UserStatistics, UserStatisticsResp
|
|
||||||
from .team import Team, TeamMember
|
|
||||||
from .user_account_history import UserAccountHistory, UserAccountHistoryResp
|
|
||||||
|
|
||||||
from pydantic import field_validator
|
|
||||||
from sqlalchemy.ext.asyncio import AsyncAttrs
|
|
||||||
from sqlmodel import (
|
|
||||||
JSON,
|
|
||||||
BigInteger,
|
|
||||||
Column,
|
|
||||||
DateTime,
|
|
||||||
Field,
|
|
||||||
Relationship,
|
|
||||||
SQLModel,
|
|
||||||
col,
|
|
||||||
func,
|
|
||||||
select,
|
|
||||||
)
|
|
||||||
from sqlmodel.ext.asyncio.session import AsyncSession
|
|
||||||
|
|
||||||
if TYPE_CHECKING:
|
|
||||||
from .favourite_beatmapset import FavouriteBeatmapset
|
|
||||||
from .relationship import RelationshipResp
|
|
||||||
|
|
||||||
|
|
||||||
class Kudosu(TypedDict):
|
|
||||||
available: int
|
|
||||||
total: int
|
|
||||||
|
|
||||||
|
|
||||||
class RankHighest(TypedDict):
|
|
||||||
rank: int
|
|
||||||
updated_at: datetime
|
|
||||||
|
|
||||||
|
|
||||||
class UserProfileCover(TypedDict):
|
|
||||||
url: str
|
|
||||||
custom_url: NotRequired[str]
|
|
||||||
id: NotRequired[str]
|
|
||||||
|
|
||||||
|
|
||||||
Badge = TypedDict(
|
|
||||||
"Badge",
|
|
||||||
{
|
|
||||||
"awarded_at": datetime,
|
|
||||||
"description": str,
|
|
||||||
"image@2x_url": str,
|
|
||||||
"image_url": str,
|
|
||||||
"url": str,
|
|
||||||
},
|
|
||||||
)
|
|
||||||
|
|
||||||
COUNTRIES = json.loads((STATIC_DIR / "iso3166.json").read_text())
|
|
||||||
|
|
||||||
|
|
||||||
class UserBase(UTCBaseModel, SQLModel):
|
|
||||||
avatar_url: str = ""
|
|
||||||
country_code: str = Field(default="CN", max_length=2, index=True)
|
|
||||||
# ? default_group: str|None
|
|
||||||
is_active: bool = True
|
|
||||||
is_bot: bool = False
|
|
||||||
is_supporter: bool = False
|
|
||||||
last_visit: datetime | None = Field(
|
|
||||||
default=datetime.now(UTC), sa_column=Column(DateTime(timezone=True))
|
|
||||||
)
|
|
||||||
pm_friends_only: bool = False
|
|
||||||
profile_colour: str | None = None
|
|
||||||
username: str = Field(max_length=32, unique=True, index=True)
|
|
||||||
page: Page = Field(sa_column=Column(JSON), default=Page(html="", raw=""))
|
|
||||||
previous_usernames: list[str] = Field(default_factory=list, sa_column=Column(JSON))
|
|
||||||
support_level: int = 0
|
|
||||||
badges: list[Badge] = Field(default_factory=list, sa_column=Column(JSON))
|
|
||||||
|
|
||||||
# optional
|
|
||||||
is_restricted: bool = False
|
|
||||||
# blocks
|
|
||||||
cover: UserProfileCover = Field(
|
|
||||||
default=UserProfileCover(
|
|
||||||
url="https://assets.ppy.sh/user-profile-covers/default.jpeg"
|
|
||||||
),
|
|
||||||
sa_column=Column(JSON),
|
|
||||||
)
|
|
||||||
beatmap_playcounts_count: int = 0
|
|
||||||
# kudosu
|
|
||||||
|
|
||||||
# UserExtended
|
|
||||||
playmode: GameMode = GameMode.OSU
|
|
||||||
discord: str | None = None
|
|
||||||
has_supported: bool = False
|
|
||||||
interests: str | None = None
|
|
||||||
join_date: datetime = Field(default=datetime.now(UTC))
|
|
||||||
location: str | None = None
|
|
||||||
max_blocks: int = 50
|
|
||||||
max_friends: int = 500
|
|
||||||
occupation: str | None = None
|
|
||||||
playstyle: list[str] = Field(default_factory=list, sa_column=Column(JSON))
|
|
||||||
# TODO: post_count
|
|
||||||
profile_hue: int | None = None
|
|
||||||
profile_order: list[str] = Field(
|
|
||||||
default_factory=lambda: [
|
|
||||||
"me",
|
|
||||||
"recent_activity",
|
|
||||||
"top_ranks",
|
|
||||||
"medals",
|
|
||||||
"historical",
|
|
||||||
"beatmaps",
|
|
||||||
"kudosu",
|
|
||||||
],
|
|
||||||
sa_column=Column(JSON),
|
|
||||||
)
|
|
||||||
title: str | None = None
|
|
||||||
title_url: str | None = None
|
|
||||||
twitter: str | None = None
|
|
||||||
website: str | None = None
|
|
||||||
|
|
||||||
# undocumented
|
|
||||||
comments_count: int = 0
|
|
||||||
post_count: int = 0
|
|
||||||
is_admin: bool = False
|
|
||||||
is_gmt: bool = False
|
|
||||||
is_qat: bool = False
|
|
||||||
is_bng: bool = False
|
|
||||||
|
|
||||||
@field_validator("playmode", mode="before")
|
|
||||||
@classmethod
|
|
||||||
def validate_playmode(cls, v):
|
|
||||||
"""将字符串转换为 GameMode 枚举"""
|
|
||||||
if isinstance(v, str):
|
|
||||||
try:
|
|
||||||
return GameMode(v)
|
|
||||||
except ValueError:
|
|
||||||
# 如果转换失败,返回默认值
|
|
||||||
return GameMode.OSU
|
|
||||||
return v
|
|
||||||
|
|
||||||
|
|
||||||
class User(AsyncAttrs, UserBase, table=True):
|
|
||||||
__tablename__ = "lazer_users" # pyright: ignore[reportAssignmentType]
|
|
||||||
|
|
||||||
id: int | None = Field(
|
|
||||||
default=None,
|
|
||||||
sa_column=Column(BigInteger, primary_key=True, autoincrement=True, index=True),
|
|
||||||
)
|
|
||||||
account_history: list[UserAccountHistory] = Relationship()
|
|
||||||
statistics: list[UserStatistics] = Relationship()
|
|
||||||
achievement: list[UserAchievement] = Relationship(back_populates="user")
|
|
||||||
team_membership: TeamMember | None = Relationship(back_populates="user")
|
|
||||||
daily_challenge_stats: DailyChallengeStats | None = Relationship(
|
|
||||||
back_populates="user"
|
|
||||||
)
|
|
||||||
monthly_playcounts: list[MonthlyPlaycounts] = Relationship(back_populates="user")
|
|
||||||
replays_watched_counts: list[ReplayWatchedCount] = Relationship(
|
|
||||||
back_populates="user"
|
|
||||||
)
|
|
||||||
favourite_beatmapsets: list["FavouriteBeatmapset"] = Relationship(
|
|
||||||
back_populates="user"
|
|
||||||
)
|
|
||||||
rank_history: list[RankHistory] = Relationship(
|
|
||||||
back_populates="user",
|
|
||||||
)
|
|
||||||
events: list[Event] = Relationship(back_populates="user")
|
|
||||||
|
|
||||||
email: str = Field(max_length=254, unique=True, index=True, exclude=True)
|
|
||||||
priv: int = Field(default=1, exclude=True)
|
|
||||||
pw_bcrypt: str = Field(max_length=60, exclude=True)
|
|
||||||
silence_end_at: datetime | None = Field(
|
|
||||||
default=None, sa_column=Column(DateTime(timezone=True)), exclude=True
|
|
||||||
)
|
|
||||||
donor_end_at: datetime | None = Field(
|
|
||||||
default=None, sa_column=Column(DateTime(timezone=True)), exclude=True
|
|
||||||
)
|
|
||||||
|
|
||||||
async def is_user_can_pm(
|
|
||||||
self, from_user: "User", session: AsyncSession
|
|
||||||
) -> tuple[bool, str]:
|
|
||||||
from .relationship import Relationship, RelationshipType
|
|
||||||
|
|
||||||
from_relationship = (
|
|
||||||
await session.exec(
|
|
||||||
select(Relationship).where(
|
|
||||||
Relationship.user_id == from_user.id,
|
|
||||||
Relationship.target_id == self.id,
|
|
||||||
)
|
|
||||||
)
|
|
||||||
).first()
|
|
||||||
if from_relationship and from_relationship.type == RelationshipType.BLOCK:
|
|
||||||
return False, "You have blocked the target user."
|
|
||||||
if from_user.pm_friends_only and (
|
|
||||||
not from_relationship or from_relationship.type != RelationshipType.FOLLOW
|
|
||||||
):
|
|
||||||
return (
|
|
||||||
False,
|
|
||||||
"You have disabled non-friend communications "
|
|
||||||
"and target user is not your friend.",
|
|
||||||
)
|
|
||||||
|
|
||||||
relationship = (
|
|
||||||
await session.exec(
|
|
||||||
select(Relationship).where(
|
|
||||||
Relationship.user_id == self.id,
|
|
||||||
Relationship.target_id == from_user.id,
|
|
||||||
)
|
|
||||||
)
|
|
||||||
).first()
|
|
||||||
if relationship and relationship.type == RelationshipType.BLOCK:
|
|
||||||
return False, "Target user has blocked you."
|
|
||||||
if self.pm_friends_only and (
|
|
||||||
not relationship or relationship.type != RelationshipType.FOLLOW
|
|
||||||
):
|
|
||||||
return False, "Target user has disabled non-friend communications"
|
|
||||||
return True, ""
|
|
||||||
|
|
||||||
|
|
||||||
class UserResp(UserBase):
|
|
||||||
id: int | None = None
|
|
||||||
is_online: bool = False
|
|
||||||
groups: list = [] # TODO
|
|
||||||
country: Country = Field(default_factory=lambda: Country(code="CN", name="China"))
|
|
||||||
favourite_beatmapset_count: int = 0
|
|
||||||
graveyard_beatmapset_count: int = 0 # TODO
|
|
||||||
guest_beatmapset_count: int = 0 # TODO
|
|
||||||
loved_beatmapset_count: int = 0 # TODO
|
|
||||||
mapping_follower_count: int = 0 # TODO
|
|
||||||
nominated_beatmapset_count: int = 0 # TODO
|
|
||||||
pending_beatmapset_count: int = 0 # TODO
|
|
||||||
ranked_beatmapset_count: int = 0 # TODO
|
|
||||||
follow_user_mapping: list[int] = Field(default_factory=list)
|
|
||||||
follower_count: int = 0
|
|
||||||
friends: list["RelationshipResp"] | None = None
|
|
||||||
scores_best_count: int = 0
|
|
||||||
scores_first_count: int = 0 # TODO
|
|
||||||
scores_recent_count: int = 0
|
|
||||||
scores_pinned_count: int = 0
|
|
||||||
beatmap_playcounts_count: int = 0
|
|
||||||
account_history: list[UserAccountHistoryResp] = []
|
|
||||||
active_tournament_banners: list[dict] = [] # TODO
|
|
||||||
kudosu: Kudosu = Field(default_factory=lambda: Kudosu(available=0, total=0)) # TODO
|
|
||||||
monthly_playcounts: list[CountResp] = Field(default_factory=list)
|
|
||||||
replay_watched_counts: list[CountResp] = Field(default_factory=list)
|
|
||||||
unread_pm_count: int = 0 # TODO
|
|
||||||
rank_history: RankHistoryResp | None = None
|
|
||||||
rank_highest: RankHighest | None = None
|
|
||||||
statistics: UserStatisticsResp | None = None
|
|
||||||
statistics_rulesets: dict[str, UserStatisticsResp] | None = None
|
|
||||||
user_achievements: list[UserAchievementResp] = Field(default_factory=list)
|
|
||||||
cover_url: str = "" # deprecated
|
|
||||||
team: Team | None = None
|
|
||||||
session_verified: bool = True
|
|
||||||
daily_challenge_user_stats: DailyChallengeStatsResp | None = None
|
|
||||||
default_group: str = ""
|
|
||||||
is_deleted: bool = False # TODO
|
|
||||||
|
|
||||||
# TODO: monthly_playcounts, unread_pm_count, rank_history, user_preferences
|
|
||||||
|
|
||||||
@classmethod
async def from_db(
    cls,
    obj: User,
    session: AsyncSession,
    include: list[str] | None = None,
    ruleset: GameMode | None = None,
) -> "UserResp":
    """Build a ``UserResp`` from a DB user row, optionally loading extra sections.

    Args:
        obj: the database user row.
        session: async session used for all auxiliary count/lookup queries.
        include: names of optional sections to populate ("friends", "team",
            "statistics", "rank_history", ...); ``None``/empty loads none.
        ruleset: game mode used for mode-dependent counts and statistics;
            falls back to the user's configured playmode.

    Returns:
        The populated response model.
    """
    from app.dependencies.database import get_redis

    from .favourite_beatmapset import FavouriteBeatmapset
    from .pp_best_score import PPBestScore
    from .relationship import Relationship, RelationshipResp, RelationshipType
    from .score import Score

    # BUGFIX: the signature previously used the mutable default `include=[]`;
    # normalise to a fresh list instead.
    include = include or []
    ruleset = ruleset or obj.playmode

    u = cls.model_validate(obj.model_dump())
    u.id = obj.id
    u.default_group = "bot" if u.is_bot else "default"
    u.country = Country(
        code=obj.country_code, name=COUNTRIES.get(obj.country_code, "Unknown")
    )
    u.follower_count = (
        await session.exec(
            select(func.count())
            .select_from(Relationship)
            .where(
                Relationship.target_id == obj.id,
                Relationship.type == RelationshipType.FOLLOW,
            )
        )
    ).one()
    # NOTE: a BestScore-based scores_best_count query used to run here; it was
    # dead code — the value is unconditionally recomputed from PPBestScore below.
    redis = get_redis()
    u.is_online = await redis.exists(f"metadata:online:{obj.id}")
    default_cover = "https://assets.ppy.sh/user-profile-covers/default.jpeg"
    u.cover_url = obj.cover.get("url", default_cover) if obj.cover else default_cover

    if "friends" in include:
        u.friends = [
            await RelationshipResp.from_db(session, r)
            for r in (
                await session.exec(
                    select(Relationship).where(
                        Relationship.user_id == obj.id,
                        Relationship.type == RelationshipType.FOLLOW,
                    )
                )
            ).all()
        ]

    if "team" in include:
        if await obj.awaitable_attrs.team_membership:
            assert obj.team_membership
            u.team = obj.team_membership.team

    if "account_history" in include:
        u.account_history = [
            UserAccountHistoryResp.from_db(ah)
            for ah in await obj.awaitable_attrs.account_history
        ]

    # BUGFIX: was `if "daily_challenge_user_stats":` — a bare non-empty string
    # is always truthy, so the stats were loaded regardless of `include`.
    if "daily_challenge_user_stats" in include:
        if await obj.awaitable_attrs.daily_challenge_stats:
            assert obj.daily_challenge_stats
            u.daily_challenge_user_stats = DailyChallengeStatsResp.from_db(
                obj.daily_challenge_stats
            )

    if "statistics" in include:
        # Pick the statistics row matching the requested ruleset, if any.
        current_statistics = next(
            (i for i in await obj.awaitable_attrs.statistics if i.mode == ruleset),
            None,
        )
        u.statistics = (
            await UserStatisticsResp.from_db(
                current_statistics, session, obj.country_code
            )
            if current_statistics
            else None
        )

    if "statistics_rulesets" in include:
        u.statistics_rulesets = {
            i.mode.value: await UserStatisticsResp.from_db(
                i, session, obj.country_code
            )
            for i in await obj.awaitable_attrs.statistics
        }

    if "monthly_playcounts" in include:
        u.monthly_playcounts = [
            CountResp.from_db(pc)
            for pc in await obj.awaitable_attrs.monthly_playcounts
        ]
        # A single data point cannot render as a graph; prepend a zero entry.
        if len(u.monthly_playcounts) == 1:
            d = u.monthly_playcounts[0].start_date
            u.monthly_playcounts.insert(
                0, CountResp(start_date=d - timedelta(days=20), count=0)
            )

    if "replays_watched_counts" in include:
        u.replay_watched_counts = [
            CountResp.from_db(rwc)
            for rwc in await obj.awaitable_attrs.replays_watched_counts
        ]
        # Same single-point padding as monthly_playcounts above.
        if len(u.replay_watched_counts) == 1:
            d = u.replay_watched_counts[0].start_date
            u.replay_watched_counts.insert(
                0, CountResp(start_date=d - timedelta(days=20), count=0)
            )

    if "achievements" in include:
        u.user_achievements = [
            UserAchievementResp.from_db(ua)
            for ua in await obj.awaitable_attrs.achievement
        ]
    if "rank_history" in include:
        rank_history = await RankHistoryResp.from_db(session, obj.id, ruleset)
        if len(rank_history.data) != 0:
            u.rank_history = rank_history

        rank_top = (
            await session.exec(
                select(RankTop).where(
                    RankTop.user_id == obj.id, RankTop.mode == ruleset
                )
            )
        ).first()
        if rank_top:
            # (dropped a redundant `if rank_top else None` ternary — the
            # surrounding `if` already guarantees rank_top is truthy)
            u.rank_highest = RankHighest(
                rank=rank_top.rank,
                updated_at=datetime.combine(rank_top.date, datetime.min.time()),
            )

    u.favourite_beatmapset_count = (
        await session.exec(
            select(func.count())
            .select_from(FavouriteBeatmapset)
            .where(FavouriteBeatmapset.user_id == obj.id)
        )
    ).one()
    u.scores_pinned_count = (
        await session.exec(
            select(func.count())
            .select_from(Score)
            .where(
                Score.user_id == obj.id,
                Score.pinned_order > 0,
                Score.gamemode == ruleset,
                col(Score.passed).is_(True),
            )
        )
    ).one()
    # BUGFIX: `.limit(200)` applied directly to a COUNT(*) query limits the
    # (single) result row, not the rows being counted, so the cap never took
    # effect. Count a limited subquery instead so the value tops out at 200.
    best_scores_subq = (
        select(PPBestScore.score_id)
        .where(
            PPBestScore.user_id == obj.id,
            PPBestScore.gamemode == ruleset,
        )
        .limit(200)
        .subquery()
    )
    u.scores_best_count = (
        await session.exec(select(func.count()).select_from(best_scores_subq))
    ).one()
    u.scores_recent_count = (
        await session.exec(
            select(func.count())
            .select_from(Score)
            .where(
                Score.user_id == obj.id,
                Score.gamemode == ruleset,
                col(Score.passed).is_(True),
                Score.ended_at > datetime.now(UTC) - timedelta(hours=24),
            )
        )
    ).one()
    u.beatmap_playcounts_count = (
        await session.exec(
            select(func.count())
            .select_from(BeatmapPlaycounts)
            .where(
                BeatmapPlaycounts.user_id == obj.id,
            )
        )
    ).one()

    # Session-verification status: when email verification is disabled,
    # every session counts as verified.
    from app.config import settings

    if not settings.enable_email_verification:
        u.session_verified = True
    else:
        # An outstanding unverified, unexpired login session marks the user
        # as not session-verified.
        from .email_verification import LoginSession

        unverified_session = (
            await session.exec(
                select(LoginSession).where(
                    LoginSession.user_id == obj.id,
                    col(LoginSession.is_verified).is_(False),
                    LoginSession.expires_at > datetime.now(UTC),
                )
            )
        ).first()
        u.session_verified = unverified_session is None

    return u
|
|
||||||
|
|
||||||
|
|
||||||
# Optional-section presets passed as `include` to UserResp.from_db.

# Everything loadable for a full profile page.
ALL_INCLUDED = [
    "friends",
    "team",
    "account_history",
    "daily_challenge_user_stats",
    "statistics",
    "statistics_rulesets",
    "achievements",
    "monthly_playcounts",
    "replays_watched_counts",
    "rank_history",
]

# Search results show everything except the relationship-heavy sections.
SEARCH_INCLUDED = [
    section
    for section in ALL_INCLUDED
    if section not in ("friends", "account_history")
]

# Minimal set for generic user payloads.
BASE_INCLUDES = [
    "team",
    "daily_challenge_user_stats",
    "statistics",
]

# Leaderboard rows only need team + statistics.
RANKING_INCLUDES = [
    "team",
    "statistics",
]
|
|
||||||
109
app/database/matchmaking.py
Normal file
109
app/database/matchmaking.py
Normal file
@@ -0,0 +1,109 @@
|
|||||||
|
from datetime import datetime
|
||||||
|
from typing import TYPE_CHECKING, Any, Optional
|
||||||
|
|
||||||
|
from app.models.model import UTCBaseModel
|
||||||
|
from app.models.mods import APIMod
|
||||||
|
|
||||||
|
from sqlalchemy import Column, DateTime, ForeignKey, Index, SmallInteger
|
||||||
|
from sqlmodel import (
|
||||||
|
JSON,
|
||||||
|
BigInteger,
|
||||||
|
Field,
|
||||||
|
Relationship,
|
||||||
|
SQLModel,
|
||||||
|
func,
|
||||||
|
)
|
||||||
|
|
||||||
|
if TYPE_CHECKING:
|
||||||
|
from .beatmap import Beatmap
|
||||||
|
from .user import User
|
||||||
|
|
||||||
|
|
||||||
|
class MatchmakingUserStatsBase(SQLModel, UTCBaseModel):
    """Per-user, per-pool matchmaking statistics (composite PK: user_id + pool_id)."""

    # FK to lazer_users.id; part of the composite primary key.
    user_id: int = Field(
        default=None,
        sa_column=Column(BigInteger, ForeignKey("lazer_users.id"), primary_key=True),
    )
    # FK to matchmaking_pools.id; part of the composite primary key.
    # NOTE(review): primary_key=True together with nullable=True looks
    # contradictory — most databases forbid NULL in a primary-key column;
    # confirm intent.
    pool_id: int = Field(
        default=None,
        sa_column=Column(ForeignKey("matchmaking_pools.id"), primary_key=True, nullable=True),
    )
    # Number of first-place finishes; never negative.
    first_placements: int = Field(default=0, ge=0)
    # Accumulated points; never negative.
    total_points: int = Field(default=0, ge=0)
    # Opaque rating payload stored as JSON (presumably Elo state — schema not
    # visible here; TODO confirm against the matchmaking service).
    elo_data: dict[str, Any] | None = Field(default=None, sa_column=Column(JSON))
    # Row timestamps maintained by the database (server_default / onupdate).
    created_at: datetime | None = Field(
        default=None,
        sa_column=Column(DateTime(timezone=True), server_default=func.now()),
    )
    updated_at: datetime | None = Field(
        default=None,
        sa_column=Column(DateTime(timezone=True), server_default=func.now(), onupdate=func.now()),
    )
|
||||||
|
|
||||||
|
|
||||||
|
class MatchmakingUserStats(MatchmakingUserStatsBase, table=True):
    """Table-backed matchmaking user stats with composite per-pool indexes."""

    __tablename__: str = "matchmaking_user_stats"
    # Composite indexes on (pool_id, metric) supporting per-pool lookups
    # ordered by first placements / total points.
    __table_args__ = (
        Index("matchmaking_user_stats_pool_first_idx", "pool_id", "first_placements"),
        Index("matchmaking_user_stats_pool_points_idx", "pool_id", "total_points"),
    )

    # Owning user, eagerly joined; pairs with User.matchmaking_stats.
    user: "User" = Relationship(back_populates="matchmaking_stats", sa_relationship_kwargs={"lazy": "joined"})
    # Pool this row belongs to (lazy-loaded by default).
    pool: "MatchmakingPool" = Relationship()
|
||||||
|
|
||||||
|
|
||||||
|
class MatchmakingPoolBase(SQLModel, UTCBaseModel):
    """Configuration for a matchmaking pool (ruleset, lobby sizing, rating search)."""

    id: int | None = Field(default=None, primary_key=True)
    # Ruleset (game mode) identifier, stored as a SMALLINT.
    ruleset_id: int = Field(
        default=0,
        sa_column=Column(SmallInteger, nullable=False),
    )
    # Display name of the pool.
    name: str = Field(max_length=255)
    # Whether the pool is currently active.
    active: bool = Field(default=True)
    # Number of players per lobby.
    lobby_size: int = Field(default=8)
    # Rating-search parameters; exact units/semantics not visible here —
    # presumably an initial search radius and an expansion term. TODO confirm.
    rating_search_radius: int = Field(default=20)
    rating_search_radius_exp: int = Field(default=15)
    # Row timestamps maintained by the database (server_default / onupdate).
    created_at: datetime | None = Field(
        default=None,
        sa_column=Column(DateTime(timezone=True), server_default=func.now()),
    )
    updated_at: datetime | None = Field(
        default=None,
        sa_column=Column(DateTime(timezone=True), server_default=func.now(), onupdate=func.now()),
    )
|
||||||
|
|
||||||
|
|
||||||
|
class MatchmakingPool(MatchmakingPoolBase, table=True):
    """Table-backed matchmaking pool with its beatmap entries."""

    __tablename__: str = "matchmaking_pools"
    # Supports filtering pools by (ruleset_id, active) in a single index scan.
    __table_args__ = (Index("matchmaking_pools_ruleset_active_idx", "ruleset_id", "active"),)

    # Beatmap entries belonging to this pool; pairs with
    # MatchmakingPoolBeatmap.pool.
    beatmaps: list["MatchmakingPoolBeatmap"] = Relationship(
        back_populates="pool",
        # sa_relationship_kwargs={
        #     "lazy": "selectin",
        # },
    )
|
||||||
|
|
||||||
|
|
||||||
|
class MatchmakingPoolBeatmapBase(SQLModel, UTCBaseModel):
    """A beatmap entry inside a matchmaking pool, with optional mods and a rating."""

    id: int | None = Field(default=None, primary_key=True)
    # FK to the owning pool; indexed for per-pool lookups.
    pool_id: int = Field(
        default=None,
        sa_column=Column(ForeignKey("matchmaking_pools.id"), nullable=False, index=True),
    )
    # FK to the beatmap this entry references.
    beatmap_id: int = Field(
        default=None,
        sa_column=Column(ForeignKey("beatmaps.id"), nullable=False),
    )
    # Mods attached to this entry, stored as JSON; None means none recorded.
    mods: list[APIMod] | None = Field(default=None, sa_column=Column(JSON))
    # Entry rating (defaults to 1500; exact semantics not visible here —
    # presumably an Elo-style baseline. TODO confirm).
    rating: int | None = Field(default=1500)
    # How many times this entry has been selected.
    selection_count: int = Field(default=0)
|
||||||
|
|
||||||
|
|
||||||
|
class MatchmakingPoolBeatmap(MatchmakingPoolBeatmapBase, table=True):
    """Table-backed pool/beatmap association row."""

    __tablename__: str = "matchmaking_pool_beatmaps"

    # Owning pool; pairs with MatchmakingPool.beatmaps.
    pool: MatchmakingPool = Relationship(back_populates="beatmaps")
    # Referenced beatmap (lazy-loaded by default).
    beatmap: Optional["Beatmap"] = Relationship(
        # sa_relationship_kwargs={"lazy": "joined"},
    )
|
||||||
@@ -1,7 +1,8 @@
|
|||||||
from datetime import UTC, datetime
|
from datetime import datetime
|
||||||
from typing import Any
|
from typing import Any
|
||||||
|
|
||||||
from app.models.model import UTCBaseModel
|
from app.models.model import UTCBaseModel
|
||||||
|
from app.utils import utcnow
|
||||||
|
|
||||||
from sqlmodel import (
|
from sqlmodel import (
|
||||||
JSON,
|
JSON,
|
||||||
@@ -24,14 +25,14 @@ class MultiplayerEventBase(SQLModel, UTCBaseModel):
|
|||||||
sa_column=Column(
|
sa_column=Column(
|
||||||
DateTime(timezone=True),
|
DateTime(timezone=True),
|
||||||
),
|
),
|
||||||
default=datetime.now(UTC),
|
default_factory=utcnow,
|
||||||
)
|
)
|
||||||
event_type: str = Field(index=True)
|
event_type: str = Field(index=True)
|
||||||
|
|
||||||
|
|
||||||
class MultiplayerEvent(MultiplayerEventBase, table=True):
|
class MultiplayerEvent(MultiplayerEventBase, table=True):
|
||||||
__tablename__ = "multiplayer_events" # pyright: ignore[reportAssignmentType]
|
__tablename__: str = "multiplayer_events"
|
||||||
id: int | None = Field(
|
id: int = Field(
|
||||||
default=None,
|
default=None,
|
||||||
sa_column=Column(BigInteger, primary_key=True, autoincrement=True, index=True),
|
sa_column=Column(BigInteger, primary_key=True, autoincrement=True, index=True),
|
||||||
)
|
)
|
||||||
@@ -40,7 +41,7 @@ class MultiplayerEvent(MultiplayerEventBase, table=True):
|
|||||||
sa_column=Column(
|
sa_column=Column(
|
||||||
DateTime(timezone=True),
|
DateTime(timezone=True),
|
||||||
),
|
),
|
||||||
default=datetime.now(UTC),
|
default_factory=utcnow,
|
||||||
)
|
)
|
||||||
event_detail: dict[str, Any] | None = Field(
|
event_detail: dict[str, Any] | None = Field(
|
||||||
sa_column=Column(JSON),
|
sa_column=Column(JSON),
|
||||||
|
|||||||
@@ -1,7 +1,8 @@
|
|||||||
from datetime import UTC, datetime
|
from datetime import datetime
|
||||||
from typing import Any
|
from typing import Any
|
||||||
|
|
||||||
from app.models.notification import NotificationDetail, NotificationName
|
from app.models.notification import NotificationDetail, NotificationName
|
||||||
|
from app.utils import utcnow
|
||||||
|
|
||||||
from sqlmodel import (
|
from sqlmodel import (
|
||||||
JSON,
|
JSON,
|
||||||
@@ -17,7 +18,7 @@ from sqlmodel.ext.asyncio.session import AsyncSession
|
|||||||
|
|
||||||
|
|
||||||
class Notification(SQLModel, table=True):
|
class Notification(SQLModel, table=True):
|
||||||
__tablename__ = "notifications" # pyright: ignore[reportAssignmentType]
|
__tablename__: str = "notifications"
|
||||||
|
|
||||||
id: int = Field(primary_key=True, index=True, default=None)
|
id: int = Field(primary_key=True, index=True, default=None)
|
||||||
name: NotificationName = Field(index=True)
|
name: NotificationName = Field(index=True)
|
||||||
@@ -30,7 +31,7 @@ class Notification(SQLModel, table=True):
|
|||||||
|
|
||||||
|
|
||||||
class UserNotification(SQLModel, table=True):
|
class UserNotification(SQLModel, table=True):
|
||||||
__tablename__ = "user_notifications" # pyright: ignore[reportAssignmentType]
|
__tablename__: str = "user_notifications"
|
||||||
id: int = Field(
|
id: int = Field(
|
||||||
sa_column=Column(
|
sa_column=Column(
|
||||||
BigInteger,
|
BigInteger,
|
||||||
@@ -40,9 +41,7 @@ class UserNotification(SQLModel, table=True):
|
|||||||
default=None,
|
default=None,
|
||||||
)
|
)
|
||||||
notification_id: int = Field(index=True, foreign_key="notifications.id")
|
notification_id: int = Field(index=True, foreign_key="notifications.id")
|
||||||
user_id: int = Field(
|
user_id: int = Field(sa_column=Column(BigInteger, ForeignKey("lazer_users.id"), index=True))
|
||||||
sa_column=Column(BigInteger, ForeignKey("lazer_users.id"), index=True)
|
|
||||||
)
|
|
||||||
is_read: bool = Field(index=True)
|
is_read: bool = Field(index=True)
|
||||||
|
|
||||||
notification: Notification = Relationship(sa_relationship_kwargs={"lazy": "joined"})
|
notification: Notification = Relationship(sa_relationship_kwargs={"lazy": "joined"})
|
||||||
@@ -56,7 +55,7 @@ async def insert_notification(session: AsyncSession, detail: NotificationDetail)
|
|||||||
object_id=detail.object_id,
|
object_id=detail.object_id,
|
||||||
source_user_id=detail.source_user_id,
|
source_user_id=detail.source_user_id,
|
||||||
details=detail.model_dump(),
|
details=detail.model_dump(),
|
||||||
created_at=datetime.now(UTC),
|
created_at=utcnow(),
|
||||||
)
|
)
|
||||||
session.add(notification)
|
session.add(notification)
|
||||||
await session.commit()
|
await session.commit()
|
||||||
|
|||||||
@@ -2,11 +2,12 @@
|
|||||||
密码重置相关数据库模型
|
密码重置相关数据库模型
|
||||||
"""
|
"""
|
||||||
|
|
||||||
from __future__ import annotations
|
from datetime import datetime
|
||||||
|
|
||||||
from datetime import datetime, UTC
|
from app.utils import utcnow
|
||||||
from sqlmodel import SQLModel, Field
|
|
||||||
from sqlalchemy import Column, BigInteger, ForeignKey
|
from sqlalchemy import BigInteger, Column, ForeignKey
|
||||||
|
from sqlmodel import Field, SQLModel
|
||||||
|
|
||||||
|
|
||||||
class PasswordReset(SQLModel, table=True):
|
class PasswordReset(SQLModel, table=True):
|
||||||
@@ -18,7 +19,7 @@ class PasswordReset(SQLModel, table=True):
|
|||||||
user_id: int = Field(sa_column=Column(BigInteger, ForeignKey("lazer_users.id"), nullable=False, index=True))
|
user_id: int = Field(sa_column=Column(BigInteger, ForeignKey("lazer_users.id"), nullable=False, index=True))
|
||||||
email: str = Field(index=True)
|
email: str = Field(index=True)
|
||||||
reset_code: str = Field(max_length=8) # 8位重置验证码
|
reset_code: str = Field(max_length=8) # 8位重置验证码
|
||||||
created_at: datetime = Field(default_factory=lambda: datetime.now(UTC))
|
created_at: datetime = Field(default_factory=utcnow)
|
||||||
expires_at: datetime = Field() # 验证码过期时间
|
expires_at: datetime = Field() # 验证码过期时间
|
||||||
is_used: bool = Field(default=False) # 是否已使用
|
is_used: bool = Field(default=False) # 是否已使用
|
||||||
used_at: datetime | None = Field(default=None)
|
used_at: datetime | None = Field(default=None)
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
from typing import TYPE_CHECKING
|
from typing import TYPE_CHECKING
|
||||||
|
|
||||||
from .lazer_user import User
|
from .user import User
|
||||||
|
|
||||||
from redis.asyncio import Redis
|
from redis.asyncio import Redis
|
||||||
from sqlmodel import (
|
from sqlmodel import (
|
||||||
@@ -21,14 +21,10 @@ if TYPE_CHECKING:
|
|||||||
|
|
||||||
|
|
||||||
class PlaylistBestScore(SQLModel, table=True):
|
class PlaylistBestScore(SQLModel, table=True):
|
||||||
__tablename__ = "playlist_best_scores" # pyright: ignore[reportAssignmentType]
|
__tablename__: str = "playlist_best_scores"
|
||||||
|
|
||||||
user_id: int = Field(
|
user_id: int = Field(sa_column=Column(BigInteger, ForeignKey("lazer_users.id"), index=True))
|
||||||
sa_column=Column(BigInteger, ForeignKey("lazer_users.id"), index=True)
|
score_id: int = Field(sa_column=Column(BigInteger, ForeignKey("scores.id"), primary_key=True))
|
||||||
)
|
|
||||||
score_id: int = Field(
|
|
||||||
sa_column=Column(BigInteger, ForeignKey("scores.id"), primary_key=True)
|
|
||||||
)
|
|
||||||
room_id: int = Field(foreign_key="rooms.id", index=True)
|
room_id: int = Field(foreign_key="rooms.id", index=True)
|
||||||
playlist_id: int = Field(index=True)
|
playlist_id: int = Field(index=True)
|
||||||
total_score: int = Field(default=0, sa_column=Column(BigInteger))
|
total_score: int = Field(default=0, sa_column=Column(BigInteger))
|
||||||
|
|||||||
@@ -1,11 +1,11 @@
|
|||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
from typing import TYPE_CHECKING
|
from typing import TYPE_CHECKING, Any, NotRequired, TypedDict
|
||||||
|
|
||||||
from app.models.model import UTCBaseModel
|
|
||||||
from app.models.mods import APIMod
|
from app.models.mods import APIMod
|
||||||
from app.models.multiplayer_hub import PlaylistItem
|
from app.models.playlist import PlaylistItem
|
||||||
|
|
||||||
from .beatmap import Beatmap, BeatmapResp
|
from ._base import DatabaseModel, ondemand
|
||||||
|
from .beatmap import Beatmap, BeatmapDict, BeatmapModel
|
||||||
|
|
||||||
from sqlmodel import (
|
from sqlmodel import (
|
||||||
JSON,
|
JSON,
|
||||||
@@ -15,7 +15,6 @@ from sqlmodel import (
|
|||||||
Field,
|
Field,
|
||||||
ForeignKey,
|
ForeignKey,
|
||||||
Relationship,
|
Relationship,
|
||||||
SQLModel,
|
|
||||||
func,
|
func,
|
||||||
select,
|
select,
|
||||||
)
|
)
|
||||||
@@ -23,18 +22,34 @@ from sqlmodel.ext.asyncio.session import AsyncSession
|
|||||||
|
|
||||||
if TYPE_CHECKING:
|
if TYPE_CHECKING:
|
||||||
from .room import Room
|
from .room import Room
|
||||||
|
from .score import ScoreDict
|
||||||
|
|
||||||
|
|
||||||
class PlaylistBase(SQLModel, UTCBaseModel):
|
class PlaylistDict(TypedDict):
|
||||||
|
id: int
|
||||||
|
room_id: int
|
||||||
|
beatmap_id: int
|
||||||
|
created_at: datetime | None
|
||||||
|
ruleset_id: int
|
||||||
|
allowed_mods: list[APIMod]
|
||||||
|
required_mods: list[APIMod]
|
||||||
|
freestyle: bool
|
||||||
|
expired: bool
|
||||||
|
owner_id: int
|
||||||
|
playlist_order: int
|
||||||
|
played_at: datetime | None
|
||||||
|
beatmap: NotRequired["BeatmapDict"]
|
||||||
|
scores: NotRequired[list[dict[str, Any]]]
|
||||||
|
|
||||||
|
|
||||||
|
class PlaylistModel(DatabaseModel[PlaylistDict]):
|
||||||
id: int = Field(index=True)
|
id: int = Field(index=True)
|
||||||
owner_id: int = Field(sa_column=Column(BigInteger, ForeignKey("lazer_users.id")))
|
room_id: int = Field(foreign_key="rooms.id")
|
||||||
ruleset_id: int = Field(ge=0, le=3)
|
beatmap_id: int = Field(
|
||||||
expired: bool = Field(default=False)
|
foreign_key="beatmaps.id",
|
||||||
playlist_order: int = Field(default=0)
|
|
||||||
played_at: datetime | None = Field(
|
|
||||||
sa_column=Column(DateTime(timezone=True)),
|
|
||||||
default=None,
|
|
||||||
)
|
)
|
||||||
|
created_at: datetime | None = Field(default=None, sa_column_kwargs={"server_default": func.now()})
|
||||||
|
ruleset_id: int
|
||||||
allowed_mods: list[APIMod] = Field(
|
allowed_mods: list[APIMod] = Field(
|
||||||
default_factory=list,
|
default_factory=list,
|
||||||
sa_column=Column(JSON),
|
sa_column=Column(JSON),
|
||||||
@@ -43,16 +58,46 @@ class PlaylistBase(SQLModel, UTCBaseModel):
|
|||||||
default_factory=list,
|
default_factory=list,
|
||||||
sa_column=Column(JSON),
|
sa_column=Column(JSON),
|
||||||
)
|
)
|
||||||
beatmap_id: int = Field(
|
|
||||||
foreign_key="beatmaps.id",
|
|
||||||
)
|
|
||||||
freestyle: bool = Field(default=False)
|
freestyle: bool = Field(default=False)
|
||||||
|
expired: bool = Field(default=False)
|
||||||
|
owner_id: int = Field(sa_column=Column(BigInteger, ForeignKey("lazer_users.id")))
|
||||||
|
playlist_order: int = Field(default=0)
|
||||||
|
played_at: datetime | None = Field(
|
||||||
|
sa_column=Column(DateTime(timezone=True)),
|
||||||
|
default=None,
|
||||||
|
)
|
||||||
|
|
||||||
|
@ondemand
|
||||||
|
@staticmethod
|
||||||
|
async def beatmap(_session: AsyncSession, playlist: "Playlist", includes: list[str] | None = None) -> BeatmapDict:
|
||||||
|
return await BeatmapModel.transform(playlist.beatmap, includes=includes)
|
||||||
|
|
||||||
|
@ondemand
|
||||||
|
@staticmethod
|
||||||
|
async def scores(session: AsyncSession, playlist: "Playlist") -> list["ScoreDict"]:
|
||||||
|
from .score import Score, ScoreModel
|
||||||
|
|
||||||
|
scores = (
|
||||||
|
await session.exec(
|
||||||
|
select(Score).where(
|
||||||
|
Score.playlist_item_id == playlist.id,
|
||||||
|
Score.room_id == playlist.room_id,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
).all()
|
||||||
|
result: list[ScoreDict] = []
|
||||||
|
for score in scores:
|
||||||
|
result.append(
|
||||||
|
await ScoreModel.transform(
|
||||||
|
score,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
return result
|
||||||
|
|
||||||
|
|
||||||
class Playlist(PlaylistBase, table=True):
|
class Playlist(PlaylistModel, table=True):
|
||||||
__tablename__ = "room_playlists" # pyright: ignore[reportAssignmentType]
|
__tablename__: str = "room_playlists"
|
||||||
db_id: int = Field(default=None, primary_key=True, index=True, exclude=True)
|
db_id: int = Field(default=None, primary_key=True, index=True, exclude=True)
|
||||||
room_id: int = Field(foreign_key="rooms.id", exclude=True)
|
|
||||||
|
|
||||||
beatmap: Beatmap = Relationship(
|
beatmap: Beatmap = Relationship(
|
||||||
sa_relationship_kwargs={
|
sa_relationship_kwargs={
|
||||||
@@ -60,19 +105,18 @@ class Playlist(PlaylistBase, table=True):
|
|||||||
}
|
}
|
||||||
)
|
)
|
||||||
room: "Room" = Relationship()
|
room: "Room" = Relationship()
|
||||||
|
updated_at: datetime | None = Field(
|
||||||
|
default=None, sa_column_kwargs={"server_default": func.now(), "onupdate": func.now()}
|
||||||
|
)
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
async def get_next_id_for_room(cls, room_id: int, session: AsyncSession) -> int:
|
async def get_next_id_for_room(cls, room_id: int, session: AsyncSession) -> int:
|
||||||
stmt = select(func.coalesce(func.max(cls.id), -1) + 1).where(
|
stmt = select(func.coalesce(func.max(cls.id), -1) + 1).where(cls.room_id == room_id)
|
||||||
cls.room_id == room_id
|
|
||||||
)
|
|
||||||
result = await session.exec(stmt)
|
result = await session.exec(stmt)
|
||||||
return result.one()
|
return result.one()
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
async def from_hub(
|
async def from_model(cls, playlist: PlaylistItem, room_id: int, session: AsyncSession) -> "Playlist":
|
||||||
cls, playlist: PlaylistItem, room_id: int, session: AsyncSession
|
|
||||||
) -> "Playlist":
|
|
||||||
next_id = await cls.get_next_id_for_room(room_id, session=session)
|
next_id = await cls.get_next_id_for_room(room_id, session=session)
|
||||||
return cls(
|
return cls(
|
||||||
id=next_id,
|
id=next_id,
|
||||||
@@ -90,9 +134,7 @@ class Playlist(PlaylistBase, table=True):
|
|||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
async def update(cls, playlist: PlaylistItem, room_id: int, session: AsyncSession):
|
async def update(cls, playlist: PlaylistItem, room_id: int, session: AsyncSession):
|
||||||
db_playlist = await session.exec(
|
db_playlist = await session.exec(select(cls).where(cls.id == playlist.id, cls.room_id == room_id))
|
||||||
select(cls).where(cls.id == playlist.id, cls.room_id == room_id)
|
|
||||||
)
|
|
||||||
db_playlist = db_playlist.first()
|
db_playlist = db_playlist.first()
|
||||||
if db_playlist is None:
|
if db_playlist is None:
|
||||||
raise ValueError("Playlist item not found")
|
raise ValueError("Playlist item not found")
|
||||||
@@ -108,10 +150,8 @@ class Playlist(PlaylistBase, table=True):
|
|||||||
await session.commit()
|
await session.commit()
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
async def add_to_db(
|
async def add_to_db(cls, playlist: PlaylistItem, room_id: int, session: AsyncSession):
|
||||||
cls, playlist: PlaylistItem, room_id: int, session: AsyncSession
|
db_playlist = await cls.from_model(playlist, room_id, session)
|
||||||
):
|
|
||||||
db_playlist = await cls.from_hub(playlist, room_id, session)
|
|
||||||
session.add(db_playlist)
|
session.add(db_playlist)
|
||||||
await session.commit()
|
await session.commit()
|
||||||
await session.refresh(db_playlist)
|
await session.refresh(db_playlist)
|
||||||
@@ -119,25 +159,9 @@ class Playlist(PlaylistBase, table=True):
|
|||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
async def delete_item(cls, item_id: int, room_id: int, session: AsyncSession):
|
async def delete_item(cls, item_id: int, room_id: int, session: AsyncSession):
|
||||||
db_playlist = await session.exec(
|
db_playlist = await session.exec(select(cls).where(cls.id == item_id, cls.room_id == room_id))
|
||||||
select(cls).where(cls.id == item_id, cls.room_id == room_id)
|
|
||||||
)
|
|
||||||
db_playlist = db_playlist.first()
|
db_playlist = db_playlist.first()
|
||||||
if db_playlist is None:
|
if db_playlist is None:
|
||||||
raise ValueError("Playlist item not found")
|
raise ValueError("Playlist item not found")
|
||||||
await session.delete(db_playlist)
|
await session.delete(db_playlist)
|
||||||
await session.commit()
|
await session.commit()
|
||||||
|
|
||||||
|
|
||||||
class PlaylistResp(PlaylistBase):
|
|
||||||
beatmap: BeatmapResp | None = None
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
async def from_db(
|
|
||||||
cls, playlist: Playlist, include: list[str] = []
|
|
||||||
) -> "PlaylistResp":
|
|
||||||
data = playlist.model_dump()
|
|
||||||
if "beatmap" in include:
|
|
||||||
data["beatmap"] = await BeatmapResp.from_db(playlist.beatmap)
|
|
||||||
resp = cls.model_validate(data)
|
|
||||||
return resp
|
|
||||||
|
|||||||
@@ -1,41 +0,0 @@
|
|||||||
from typing import TYPE_CHECKING
|
|
||||||
|
|
||||||
from app.models.score import GameMode
|
|
||||||
|
|
||||||
from .lazer_user import User
|
|
||||||
|
|
||||||
from sqlmodel import (
|
|
||||||
BigInteger,
|
|
||||||
Column,
|
|
||||||
Field,
|
|
||||||
Float,
|
|
||||||
ForeignKey,
|
|
||||||
Relationship,
|
|
||||||
SQLModel,
|
|
||||||
)
|
|
||||||
|
|
||||||
if TYPE_CHECKING:
|
|
||||||
from .beatmap import Beatmap
|
|
||||||
from .score import Score
|
|
||||||
|
|
||||||
|
|
||||||
class PPBestScore(SQLModel, table=True):
|
|
||||||
__tablename__ = "best_scores" # pyright: ignore[reportAssignmentType]
|
|
||||||
user_id: int = Field(
|
|
||||||
sa_column=Column(BigInteger, ForeignKey("lazer_users.id"), index=True)
|
|
||||||
)
|
|
||||||
score_id: int = Field(
|
|
||||||
sa_column=Column(BigInteger, ForeignKey("scores.id"), primary_key=True)
|
|
||||||
)
|
|
||||||
beatmap_id: int = Field(foreign_key="beatmaps.id", index=True)
|
|
||||||
gamemode: GameMode = Field(index=True)
|
|
||||||
pp: float = Field(
|
|
||||||
sa_column=Column(Float, default=0),
|
|
||||||
)
|
|
||||||
acc: float = Field(
|
|
||||||
sa_column=Column(Float, default=0),
|
|
||||||
)
|
|
||||||
|
|
||||||
user: User = Relationship()
|
|
||||||
score: "Score" = Relationship()
|
|
||||||
beatmap: "Beatmap" = Relationship()
|
|
||||||
@@ -1,11 +1,10 @@
|
|||||||
from datetime import (
|
from datetime import (
|
||||||
UTC,
|
|
||||||
date as dt,
|
date as dt,
|
||||||
datetime,
|
|
||||||
)
|
)
|
||||||
from typing import TYPE_CHECKING, Optional
|
from typing import TYPE_CHECKING, Optional
|
||||||
|
|
||||||
from app.models.score import GameMode
|
from app.models.score import GameMode
|
||||||
|
from app.utils import utcnow
|
||||||
|
|
||||||
from pydantic import BaseModel
|
from pydantic import BaseModel
|
||||||
from sqlmodel import (
|
from sqlmodel import (
|
||||||
@@ -22,20 +21,18 @@ from sqlmodel import (
|
|||||||
from sqlmodel.ext.asyncio.session import AsyncSession
|
from sqlmodel.ext.asyncio.session import AsyncSession
|
||||||
|
|
||||||
if TYPE_CHECKING:
|
if TYPE_CHECKING:
|
||||||
from .lazer_user import User
|
from .user import User
|
||||||
|
|
||||||
|
|
||||||
class RankHistory(SQLModel, table=True):
|
class RankHistory(SQLModel, table=True):
|
||||||
__tablename__ = "rank_history" # pyright: ignore[reportAssignmentType]
|
__tablename__: str = "rank_history"
|
||||||
|
|
||||||
id: int | None = Field(default=None, sa_column=Column(BigInteger, primary_key=True))
|
id: int | None = Field(default=None, sa_column=Column(BigInteger, primary_key=True))
|
||||||
user_id: int = Field(
|
user_id: int = Field(sa_column=Column(BigInteger, ForeignKey("lazer_users.id"), index=True))
|
||||||
sa_column=Column(BigInteger, ForeignKey("lazer_users.id"), index=True)
|
|
||||||
)
|
|
||||||
mode: GameMode
|
mode: GameMode
|
||||||
rank: int
|
rank: int
|
||||||
date: dt = Field(
|
date: dt = Field(
|
||||||
default_factory=lambda: datetime.now(UTC).date(),
|
default_factory=lambda: utcnow().date(),
|
||||||
sa_column=Column(Date, index=True),
|
sa_column=Column(Date, index=True),
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -43,16 +40,14 @@ class RankHistory(SQLModel, table=True):
|
|||||||
|
|
||||||
|
|
||||||
class RankTop(SQLModel, table=True):
|
class RankTop(SQLModel, table=True):
|
||||||
__tablename__ = "rank_top" # pyright: ignore[reportAssignmentType]
|
__tablename__: str = "rank_top"
|
||||||
|
|
||||||
id: int | None = Field(default=None, sa_column=Column(BigInteger, primary_key=True))
|
id: int | None = Field(default=None, sa_column=Column(BigInteger, primary_key=True))
|
||||||
user_id: int = Field(
|
user_id: int = Field(sa_column=Column(BigInteger, ForeignKey("lazer_users.id"), index=True))
|
||||||
sa_column=Column(BigInteger, ForeignKey("lazer_users.id"), index=True)
|
|
||||||
)
|
|
||||||
mode: GameMode
|
mode: GameMode
|
||||||
rank: int
|
rank: int
|
||||||
date: dt = Field(
|
date: dt = Field(
|
||||||
default_factory=lambda: datetime.now(UTC).date(),
|
default_factory=lambda: utcnow().date(),
|
||||||
sa_column=Column(Date, index=True),
|
sa_column=Column(Date, index=True),
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -62,9 +57,7 @@ class RankHistoryResp(BaseModel):
|
|||||||
data: list[int]
|
data: list[int]
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
async def from_db(
|
async def from_db(cls, session: AsyncSession, user_id: int, mode: GameMode) -> "RankHistoryResp":
|
||||||
cls, session: AsyncSession, user_id: int, mode: GameMode
|
|
||||||
) -> "RankHistoryResp":
|
|
||||||
results = (
|
results = (
|
||||||
await session.exec(
|
await session.exec(
|
||||||
select(RankHistory)
|
select(RankHistory)
|
||||||
|
|||||||
@@ -1,27 +1,40 @@
|
|||||||
from enum import Enum
|
from enum import Enum
|
||||||
|
from typing import TYPE_CHECKING, NotRequired, TypedDict
|
||||||
|
|
||||||
from .lazer_user import User, UserResp
|
from app.models.score import GameMode
|
||||||
|
|
||||||
|
from ._base import DatabaseModel, included, ondemand
|
||||||
|
|
||||||
from pydantic import BaseModel
|
|
||||||
from sqlmodel import (
|
from sqlmodel import (
|
||||||
BigInteger,
|
BigInteger,
|
||||||
Column,
|
Column,
|
||||||
Field,
|
Field,
|
||||||
ForeignKey,
|
ForeignKey,
|
||||||
Relationship as SQLRelationship,
|
Relationship as SQLRelationship,
|
||||||
SQLModel,
|
|
||||||
select,
|
select,
|
||||||
)
|
)
|
||||||
from sqlmodel.ext.asyncio.session import AsyncSession
|
from sqlmodel.ext.asyncio.session import AsyncSession
|
||||||
|
|
||||||
|
if TYPE_CHECKING:
|
||||||
|
from .user import User, UserDict
|
||||||
|
|
||||||
|
|
||||||
class RelationshipType(str, Enum):
|
class RelationshipType(str, Enum):
|
||||||
FOLLOW = "Friend"
|
FOLLOW = "friend"
|
||||||
BLOCK = "Block"
|
BLOCK = "block"
|
||||||
|
|
||||||
|
|
||||||
class Relationship(SQLModel, table=True):
|
class RelationshipDict(TypedDict):
|
||||||
__tablename__ = "relationship" # pyright: ignore[reportAssignmentType]
|
target_id: int | None
|
||||||
|
type: RelationshipType
|
||||||
|
id: NotRequired[int | None]
|
||||||
|
user_id: NotRequired[int | None]
|
||||||
|
mutual: NotRequired[bool]
|
||||||
|
target: NotRequired["UserDict"]
|
||||||
|
|
||||||
|
|
||||||
|
class RelationshipModel(DatabaseModel[RelationshipDict]):
|
||||||
|
__tablename__: str = "relationship"
|
||||||
id: int | None = Field(
|
id: int | None = Field(
|
||||||
default=None,
|
default=None,
|
||||||
sa_column=Column(BigInteger, autoincrement=True, primary_key=True),
|
sa_column=Column(BigInteger, autoincrement=True, primary_key=True),
|
||||||
@@ -34,6 +47,7 @@ class Relationship(SQLModel, table=True):
|
|||||||
ForeignKey("lazer_users.id"),
|
ForeignKey("lazer_users.id"),
|
||||||
index=True,
|
index=True,
|
||||||
),
|
),
|
||||||
|
exclude=True,
|
||||||
)
|
)
|
||||||
target_id: int = Field(
|
target_id: int = Field(
|
||||||
default=None,
|
default=None,
|
||||||
@@ -44,24 +58,10 @@ class Relationship(SQLModel, table=True):
|
|||||||
),
|
),
|
||||||
)
|
)
|
||||||
type: RelationshipType = Field(default=RelationshipType.FOLLOW, nullable=False)
|
type: RelationshipType = Field(default=RelationshipType.FOLLOW, nullable=False)
|
||||||
target: User = SQLRelationship(
|
|
||||||
sa_relationship_kwargs={
|
|
||||||
"foreign_keys": "[Relationship.target_id]",
|
|
||||||
"lazy": "selectin",
|
|
||||||
}
|
|
||||||
)
|
|
||||||
|
|
||||||
|
@included
|
||||||
class RelationshipResp(BaseModel):
|
@staticmethod
|
||||||
target_id: int
|
async def mutual(session: AsyncSession, relationship: "Relationship") -> bool:
|
||||||
target: UserResp
|
|
||||||
mutual: bool = False
|
|
||||||
type: RelationshipType
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
async def from_db(
|
|
||||||
cls, session: AsyncSession, relationship: Relationship
|
|
||||||
) -> "RelationshipResp":
|
|
||||||
target_relationship = (
|
target_relationship = (
|
||||||
await session.exec(
|
await session.exec(
|
||||||
select(Relationship).where(
|
select(Relationship).where(
|
||||||
@@ -70,23 +70,29 @@ class RelationshipResp(BaseModel):
|
|||||||
)
|
)
|
||||||
)
|
)
|
||||||
).first()
|
).first()
|
||||||
mutual = bool(
|
return bool(
|
||||||
target_relationship is not None
|
target_relationship is not None
|
||||||
and relationship.type == RelationshipType.FOLLOW
|
and relationship.type == RelationshipType.FOLLOW
|
||||||
and target_relationship.type == RelationshipType.FOLLOW
|
and target_relationship.type == RelationshipType.FOLLOW
|
||||||
)
|
)
|
||||||
return cls(
|
|
||||||
target_id=relationship.target_id,
|
@ondemand
|
||||||
target=await UserResp.from_db(
|
@staticmethod
|
||||||
relationship.target,
|
async def target(
|
||||||
session,
|
_session: AsyncSession,
|
||||||
include=[
|
relationship: "Relationship",
|
||||||
"team",
|
ruleset: GameMode | None = None,
|
||||||
"daily_challenge_user_stats",
|
includes: list[str] | None = None,
|
||||||
"statistics",
|
) -> "UserDict":
|
||||||
"statistics_rulesets",
|
from .user import UserModel
|
||||||
],
|
|
||||||
),
|
return await UserModel.transform(relationship.target, ruleset=ruleset, includes=includes)
|
||||||
mutual=mutual,
|
|
||||||
type=relationship.type,
|
|
||||||
)
|
class Relationship(RelationshipModel, table=True):
|
||||||
|
target: "User" = SQLRelationship(
|
||||||
|
sa_relationship_kwargs={
|
||||||
|
"foreign_keys": "[Relationship.target_id]",
|
||||||
|
"lazy": "selectin",
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|||||||
@@ -1,9 +1,6 @@
|
|||||||
from datetime import UTC, datetime
|
from datetime import datetime
|
||||||
|
from typing import ClassVar, NotRequired, TypedDict
|
||||||
|
|
||||||
from app.database.playlist_attempts import PlaylistAggregateScore
|
|
||||||
from app.database.room_participated_user import RoomParticipatedUser
|
|
||||||
from app.models.model import UTCBaseModel
|
|
||||||
from app.models.multiplayer_hub import ServerMultiplayerRoom
|
|
||||||
from app.models.room import (
|
from app.models.room import (
|
||||||
MatchType,
|
MatchType,
|
||||||
QueueMode,
|
QueueMode,
|
||||||
@@ -12,34 +9,66 @@ from app.models.room import (
|
|||||||
RoomPlaylistItemStats,
|
RoomPlaylistItemStats,
|
||||||
RoomStatus,
|
RoomStatus,
|
||||||
)
|
)
|
||||||
|
from app.utils import utcnow
|
||||||
|
|
||||||
from .lazer_user import User, UserResp
|
from ._base import DatabaseModel, included, ondemand
|
||||||
from .playlists import Playlist, PlaylistResp
|
from .item_attempts_count import ItemAttemptsCount, ItemAttemptsCountDict, ItemAttemptsCountModel
|
||||||
|
from .playlists import Playlist, PlaylistDict, PlaylistModel
|
||||||
|
from .room_participated_user import RoomParticipatedUser
|
||||||
|
from .user import User, UserDict, UserModel
|
||||||
|
|
||||||
|
from pydantic import field_validator
|
||||||
from sqlalchemy.ext.asyncio import AsyncAttrs
|
from sqlalchemy.ext.asyncio import AsyncAttrs
|
||||||
from sqlmodel import (
|
from sqlmodel import BigInteger, Column, DateTime, Field, ForeignKey, Relationship, SQLModel, col, select
|
||||||
BigInteger,
|
|
||||||
Column,
|
|
||||||
DateTime,
|
|
||||||
Field,
|
|
||||||
ForeignKey,
|
|
||||||
Relationship,
|
|
||||||
SQLModel,
|
|
||||||
col,
|
|
||||||
select,
|
|
||||||
)
|
|
||||||
from sqlmodel.ext.asyncio.session import AsyncSession
|
from sqlmodel.ext.asyncio.session import AsyncSession
|
||||||
|
|
||||||
|
|
||||||
class RoomBase(SQLModel, UTCBaseModel):
|
class RoomDict(TypedDict):
|
||||||
|
id: int
|
||||||
|
name: str
|
||||||
|
category: RoomCategory
|
||||||
|
status: RoomStatus
|
||||||
|
type: MatchType
|
||||||
|
duration: int | None
|
||||||
|
starts_at: datetime | None
|
||||||
|
ends_at: datetime | None
|
||||||
|
max_attempts: int | None
|
||||||
|
participant_count: int
|
||||||
|
channel_id: int
|
||||||
|
queue_mode: QueueMode
|
||||||
|
auto_skip: bool
|
||||||
|
auto_start_duration: int
|
||||||
|
has_password: NotRequired[bool]
|
||||||
|
current_playlist_item: NotRequired["PlaylistDict | None"]
|
||||||
|
playlist: NotRequired[list["PlaylistDict"]]
|
||||||
|
playlist_item_stats: NotRequired[RoomPlaylistItemStats]
|
||||||
|
difficulty_range: NotRequired[RoomDifficultyRange]
|
||||||
|
host: NotRequired[UserDict]
|
||||||
|
recent_participants: NotRequired[list[UserDict]]
|
||||||
|
current_user_score: NotRequired["ItemAttemptsCountDict | None"]
|
||||||
|
|
||||||
|
|
||||||
|
class RoomModel(DatabaseModel[RoomDict]):
|
||||||
|
SHOW_RESPONSE_INCLUDES: ClassVar[list[str]] = [
|
||||||
|
"current_user_score.playlist_item_attempts",
|
||||||
|
"host.country",
|
||||||
|
"playlist.beatmap.beatmapset",
|
||||||
|
"playlist.beatmap.checksum",
|
||||||
|
"playlist.beatmap.max_combo",
|
||||||
|
"recent_participants",
|
||||||
|
]
|
||||||
|
|
||||||
|
id: int = Field(default=None, primary_key=True, index=True)
|
||||||
name: str = Field(index=True)
|
name: str = Field(index=True)
|
||||||
category: RoomCategory = Field(default=RoomCategory.NORMAL, index=True)
|
category: RoomCategory = Field(default=RoomCategory.NORMAL, index=True)
|
||||||
|
status: RoomStatus
|
||||||
|
type: MatchType
|
||||||
duration: int | None = Field(default=None) # minutes
|
duration: int | None = Field(default=None) # minutes
|
||||||
starts_at: datetime | None = Field(
|
starts_at: datetime | None = Field(
|
||||||
sa_column=Column(
|
sa_column=Column(
|
||||||
DateTime(timezone=True),
|
DateTime(timezone=True),
|
||||||
),
|
),
|
||||||
default=datetime.now(UTC),
|
default_factory=utcnow,
|
||||||
)
|
)
|
||||||
ends_at: datetime | None = Field(
|
ends_at: datetime | None = Field(
|
||||||
sa_column=Column(
|
sa_column=Column(
|
||||||
@@ -47,22 +76,135 @@ class RoomBase(SQLModel, UTCBaseModel):
|
|||||||
),
|
),
|
||||||
default=None,
|
default=None,
|
||||||
)
|
)
|
||||||
participant_count: int = Field(default=0)
|
|
||||||
max_attempts: int | None = Field(default=None) # playlists
|
max_attempts: int | None = Field(default=None) # playlists
|
||||||
type: MatchType
|
participant_count: int = Field(default=0)
|
||||||
|
channel_id: int = 0
|
||||||
queue_mode: QueueMode
|
queue_mode: QueueMode
|
||||||
auto_skip: bool
|
auto_skip: bool
|
||||||
|
|
||||||
auto_start_duration: int
|
auto_start_duration: int
|
||||||
status: RoomStatus
|
|
||||||
channel_id: int | None = None
|
@field_validator("channel_id", mode="before")
|
||||||
|
@classmethod
|
||||||
|
def validate_channel_id(cls, v):
|
||||||
|
"""将 None 转换为 0"""
|
||||||
|
if v is None:
|
||||||
|
return 0
|
||||||
|
return v
|
||||||
|
|
||||||
|
@included
|
||||||
|
@staticmethod
|
||||||
|
async def has_password(_session: AsyncSession, room: "Room") -> bool:
|
||||||
|
return bool(room.password)
|
||||||
|
|
||||||
|
@ondemand
|
||||||
|
@staticmethod
|
||||||
|
async def current_playlist_item(
|
||||||
|
_session: AsyncSession, room: "Room", includes: list[str] | None = None
|
||||||
|
) -> "PlaylistDict | None":
|
||||||
|
playlists = await room.awaitable_attrs.playlist
|
||||||
|
if not playlists:
|
||||||
|
return None
|
||||||
|
return await PlaylistModel.transform(playlists[-1], includes=includes)
|
||||||
|
|
||||||
|
@ondemand
|
||||||
|
@staticmethod
|
||||||
|
async def playlist(_session: AsyncSession, room: "Room", includes: list[str] | None = None) -> list["PlaylistDict"]:
|
||||||
|
playlists = await room.awaitable_attrs.playlist
|
||||||
|
result: list[PlaylistDict] = []
|
||||||
|
for playlist_item in playlists:
|
||||||
|
result.append(await PlaylistModel.transform(playlist_item, includes=includes))
|
||||||
|
return result
|
||||||
|
|
||||||
|
@ondemand
|
||||||
|
@staticmethod
|
||||||
|
async def playlist_item_stats(_session: AsyncSession, room: "Room") -> RoomPlaylistItemStats:
|
||||||
|
playlists = await room.awaitable_attrs.playlist
|
||||||
|
stats = RoomPlaylistItemStats(count_active=0, count_total=0, ruleset_ids=[])
|
||||||
|
rulesets: set[int] = set()
|
||||||
|
for playlist in playlists:
|
||||||
|
stats.count_total += 1
|
||||||
|
if not playlist.expired:
|
||||||
|
stats.count_active += 1
|
||||||
|
rulesets.add(playlist.ruleset_id)
|
||||||
|
stats.ruleset_ids = list(rulesets)
|
||||||
|
return stats
|
||||||
|
|
||||||
|
@ondemand
|
||||||
|
@staticmethod
|
||||||
|
async def difficulty_range(_session: AsyncSession, room: "Room") -> RoomDifficultyRange:
|
||||||
|
playlists = await room.awaitable_attrs.playlist
|
||||||
|
if not playlists:
|
||||||
|
return RoomDifficultyRange(min=0.0, max=0.0)
|
||||||
|
min_diff = float("inf")
|
||||||
|
max_diff = float("-inf")
|
||||||
|
for playlist in playlists:
|
||||||
|
rating = playlist.beatmap.difficulty_rating
|
||||||
|
min_diff = min(min_diff, rating)
|
||||||
|
max_diff = max(max_diff, rating)
|
||||||
|
if min_diff == float("inf"):
|
||||||
|
min_diff = 0.0
|
||||||
|
if max_diff == float("-inf"):
|
||||||
|
max_diff = 0.0
|
||||||
|
return RoomDifficultyRange(min=min_diff, max=max_diff)
|
||||||
|
|
||||||
|
@ondemand
|
||||||
|
@staticmethod
|
||||||
|
async def host(_session: AsyncSession, room: "Room", includes: list[str] | None = None) -> UserDict:
|
||||||
|
host_user = await room.awaitable_attrs.host
|
||||||
|
return await UserModel.transform(host_user, includes=includes)
|
||||||
|
|
||||||
|
@ondemand
|
||||||
|
@staticmethod
|
||||||
|
async def recent_participants(session: AsyncSession, room: "Room") -> list[UserDict]:
|
||||||
|
participants: list[UserDict] = []
|
||||||
|
if room.category == RoomCategory.REALTIME:
|
||||||
|
query = (
|
||||||
|
select(RoomParticipatedUser)
|
||||||
|
.where(
|
||||||
|
RoomParticipatedUser.room_id == room.id,
|
||||||
|
col(RoomParticipatedUser.left_at).is_(None),
|
||||||
|
)
|
||||||
|
.limit(8)
|
||||||
|
.order_by(col(RoomParticipatedUser.joined_at).desc())
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
query = (
|
||||||
|
select(RoomParticipatedUser)
|
||||||
|
.where(
|
||||||
|
RoomParticipatedUser.room_id == room.id,
|
||||||
|
)
|
||||||
|
.limit(8)
|
||||||
|
.order_by(col(RoomParticipatedUser.joined_at).desc())
|
||||||
|
)
|
||||||
|
for recent_participant in await session.exec(query):
|
||||||
|
user_instance = await recent_participant.awaitable_attrs.user
|
||||||
|
participants.append(await UserModel.transform(user_instance))
|
||||||
|
return participants
|
||||||
|
|
||||||
|
@ondemand
|
||||||
|
@staticmethod
|
||||||
|
async def current_user_score(
|
||||||
|
session: AsyncSession, room: "Room", includes: list[str] | None = None
|
||||||
|
) -> "ItemAttemptsCountDict | None":
|
||||||
|
item_attempt = (
|
||||||
|
await session.exec(
|
||||||
|
select(ItemAttemptsCount).where(
|
||||||
|
ItemAttemptsCount.room_id == room.id,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
).first()
|
||||||
|
if item_attempt is None:
|
||||||
|
return None
|
||||||
|
|
||||||
|
return await ItemAttemptsCountModel.transform(item_attempt, includes=includes)
|
||||||
|
|
||||||
|
|
||||||
class Room(AsyncAttrs, RoomBase, table=True):
|
class Room(AsyncAttrs, RoomModel, table=True):
|
||||||
__tablename__ = "rooms" # pyright: ignore[reportAssignmentType]
|
__tablename__: str = "rooms"
|
||||||
id: int = Field(default=None, primary_key=True, index=True)
|
|
||||||
host_id: int = Field(
|
host_id: int = Field(sa_column=Column(BigInteger, ForeignKey("lazer_users.id"), index=True))
|
||||||
sa_column=Column(BigInteger, ForeignKey("lazer_users.id"), index=True)
|
password: str | None = Field(default=None)
|
||||||
)
|
|
||||||
|
|
||||||
host: User = Relationship()
|
host: User = Relationship()
|
||||||
playlist: list[Playlist] = Relationship(
|
playlist: list[Playlist] = Relationship(
|
||||||
@@ -74,108 +216,28 @@ class Room(AsyncAttrs, RoomBase, table=True):
|
|||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
class RoomResp(RoomBase):
|
class APIUploadedRoom(SQLModel):
|
||||||
id: int
|
name: str = Field(index=True)
|
||||||
has_password: bool = False
|
category: RoomCategory = Field(default=RoomCategory.NORMAL, index=True)
|
||||||
host: UserResp | None = None
|
status: RoomStatus
|
||||||
playlist: list[PlaylistResp] = []
|
type: MatchType
|
||||||
playlist_item_stats: RoomPlaylistItemStats | None = None
|
duration: int | None = Field(default=None) # minutes
|
||||||
difficulty_range: RoomDifficultyRange | None = None
|
starts_at: datetime | None = Field(
|
||||||
current_playlist_item: PlaylistResp | None = None
|
sa_column=Column(
|
||||||
current_user_score: PlaylistAggregateScore | None = None
|
DateTime(timezone=True),
|
||||||
recent_participants: list[UserResp] = Field(default_factory=list)
|
),
|
||||||
|
default_factory=utcnow,
|
||||||
|
)
|
||||||
|
ends_at: datetime | None = Field(
|
||||||
|
sa_column=Column(
|
||||||
|
DateTime(timezone=True),
|
||||||
|
),
|
||||||
|
default=None,
|
||||||
|
)
|
||||||
|
max_attempts: int | None = Field(default=None) # playlists
|
||||||
|
participant_count: int = Field(default=0)
|
||||||
channel_id: int = 0
|
channel_id: int = 0
|
||||||
|
queue_mode: QueueMode
|
||||||
@classmethod
|
auto_skip: bool
|
||||||
async def from_db(
|
auto_start_duration: int
|
||||||
cls,
|
|
||||||
room: Room,
|
|
||||||
session: AsyncSession,
|
|
||||||
include: list[str] = [],
|
|
||||||
user: User | None = None,
|
|
||||||
) -> "RoomResp":
|
|
||||||
d = room.model_dump()
|
|
||||||
d["channel_id"] = d.get("channel_id", 0) or 0
|
|
||||||
resp = cls.model_validate(d)
|
|
||||||
|
|
||||||
stats = RoomPlaylistItemStats(count_active=0, count_total=0)
|
|
||||||
difficulty_range = RoomDifficultyRange(
|
|
||||||
min=0,
|
|
||||||
max=0,
|
|
||||||
)
|
|
||||||
rulesets = set()
|
|
||||||
for playlist in room.playlist:
|
|
||||||
stats.count_total += 1
|
|
||||||
if not playlist.expired:
|
|
||||||
stats.count_active += 1
|
|
||||||
rulesets.add(playlist.ruleset_id)
|
|
||||||
difficulty_range.min = min(
|
|
||||||
difficulty_range.min, playlist.beatmap.difficulty_rating
|
|
||||||
)
|
|
||||||
difficulty_range.max = max(
|
|
||||||
difficulty_range.max, playlist.beatmap.difficulty_rating
|
|
||||||
)
|
|
||||||
resp.playlist.append(await PlaylistResp.from_db(playlist, ["beatmap"]))
|
|
||||||
stats.ruleset_ids = list(rulesets)
|
|
||||||
resp.playlist_item_stats = stats
|
|
||||||
resp.difficulty_range = difficulty_range
|
|
||||||
resp.current_playlist_item = resp.playlist[-1] if resp.playlist else None
|
|
||||||
resp.recent_participants = []
|
|
||||||
for recent_participant in await session.exec(
|
|
||||||
select(RoomParticipatedUser)
|
|
||||||
.where(
|
|
||||||
RoomParticipatedUser.room_id == room.id,
|
|
||||||
col(RoomParticipatedUser.left_at).is_(None),
|
|
||||||
)
|
|
||||||
.limit(8)
|
|
||||||
.order_by(col(RoomParticipatedUser.joined_at).desc())
|
|
||||||
):
|
|
||||||
resp.recent_participants.append(
|
|
||||||
await UserResp.from_db(
|
|
||||||
await recent_participant.awaitable_attrs.user,
|
|
||||||
session,
|
|
||||||
include=["statistics"],
|
|
||||||
)
|
|
||||||
)
|
|
||||||
resp.host = await UserResp.from_db(
|
|
||||||
await room.awaitable_attrs.host, session, include=["statistics"]
|
|
||||||
)
|
|
||||||
if "current_user_score" in include and user:
|
|
||||||
resp.current_user_score = await PlaylistAggregateScore.from_db(
|
|
||||||
room.id, user.id, session
|
|
||||||
)
|
|
||||||
return resp
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
async def from_hub(cls, server_room: ServerMultiplayerRoom) -> "RoomResp":
|
|
||||||
room = server_room.room
|
|
||||||
resp = cls(
|
|
||||||
id=room.room_id,
|
|
||||||
name=room.settings.name,
|
|
||||||
type=room.settings.match_type,
|
|
||||||
queue_mode=room.settings.queue_mode,
|
|
||||||
auto_skip=room.settings.auto_skip,
|
|
||||||
auto_start_duration=int(room.settings.auto_start_duration.total_seconds()),
|
|
||||||
status=server_room.status,
|
|
||||||
category=server_room.category,
|
|
||||||
# duration = room.settings.duration,
|
|
||||||
starts_at=server_room.start_at,
|
|
||||||
participant_count=len(room.users),
|
|
||||||
channel_id=server_room.room.channel_id or 0,
|
|
||||||
)
|
|
||||||
return resp
|
|
||||||
|
|
||||||
|
|
||||||
class APIUploadedRoom(RoomBase):
|
|
||||||
def to_room(self) -> Room:
|
|
||||||
"""
|
|
||||||
将 APIUploadedRoom 转换为 Room 对象,playlist 字段需单独处理。
|
|
||||||
"""
|
|
||||||
room_dict = self.model_dump()
|
|
||||||
room_dict.pop("playlist", None)
|
|
||||||
# host_id 已在字段中
|
|
||||||
return Room(**room_dict)
|
|
||||||
|
|
||||||
id: int | None
|
|
||||||
host_id: int | None = None
|
|
||||||
playlist: list[Playlist] = Field(default_factory=list)
|
playlist: list[Playlist] = Field(default_factory=list)
|
||||||
|
|||||||
@@ -1,6 +1,8 @@
|
|||||||
from datetime import UTC, datetime
|
from datetime import datetime
|
||||||
from typing import TYPE_CHECKING
|
from typing import TYPE_CHECKING
|
||||||
|
|
||||||
|
from app.utils import utcnow
|
||||||
|
|
||||||
from sqlalchemy.ext.asyncio import AsyncAttrs
|
from sqlalchemy.ext.asyncio import AsyncAttrs
|
||||||
from sqlmodel import (
|
from sqlmodel import (
|
||||||
BigInteger,
|
BigInteger,
|
||||||
@@ -13,27 +15,21 @@ from sqlmodel import (
|
|||||||
)
|
)
|
||||||
|
|
||||||
if TYPE_CHECKING:
|
if TYPE_CHECKING:
|
||||||
from .lazer_user import User
|
|
||||||
from .room import Room
|
from .room import Room
|
||||||
|
from .user import User
|
||||||
|
|
||||||
|
|
||||||
class RoomParticipatedUser(AsyncAttrs, SQLModel, table=True):
|
class RoomParticipatedUser(AsyncAttrs, SQLModel, table=True):
|
||||||
__tablename__ = "room_participated_users" # pyright: ignore[reportAssignmentType]
|
__tablename__: str = "room_participated_users"
|
||||||
|
|
||||||
id: int | None = Field(
|
id: int | None = Field(default=None, sa_column=Column(BigInteger, primary_key=True, autoincrement=True))
|
||||||
default=None, sa_column=Column(BigInteger, primary_key=True, autoincrement=True)
|
|
||||||
)
|
|
||||||
room_id: int = Field(sa_column=Column(ForeignKey("rooms.id"), nullable=False))
|
room_id: int = Field(sa_column=Column(ForeignKey("rooms.id"), nullable=False))
|
||||||
user_id: int = Field(
|
user_id: int = Field(sa_column=Column(BigInteger, ForeignKey("lazer_users.id"), nullable=False))
|
||||||
sa_column=Column(BigInteger, ForeignKey("lazer_users.id"), nullable=False)
|
|
||||||
)
|
|
||||||
joined_at: datetime = Field(
|
joined_at: datetime = Field(
|
||||||
sa_column=Column(DateTime(timezone=True), nullable=False),
|
sa_column=Column(DateTime(timezone=True), nullable=False),
|
||||||
default=datetime.now(UTC),
|
default_factory=utcnow,
|
||||||
)
|
|
||||||
left_at: datetime | None = Field(
|
|
||||||
sa_column=Column(DateTime(timezone=True), nullable=True), default=None
|
|
||||||
)
|
)
|
||||||
|
left_at: datetime | None = Field(sa_column=Column(DateTime(timezone=True), nullable=True), default=None)
|
||||||
|
|
||||||
room: "Room" = Relationship()
|
room: "Room" = Relationship()
|
||||||
user: "User" = Relationship()
|
user: "User" = Relationship()
|
||||||
|
|||||||
File diff suppressed because it is too large
Load Diff
@@ -2,30 +2,17 @@ from datetime import datetime
|
|||||||
|
|
||||||
from app.models.model import UTCBaseModel
|
from app.models.model import UTCBaseModel
|
||||||
from app.models.score import GameMode
|
from app.models.score import GameMode
|
||||||
|
from app.utils import utcnow
|
||||||
|
|
||||||
from .beatmap import Beatmap
|
from .beatmap import Beatmap
|
||||||
from .lazer_user import User
|
from .user import User
|
||||||
|
|
||||||
from sqlalchemy import Column, DateTime, Index
|
from sqlalchemy import Column, DateTime, Index
|
||||||
|
from sqlalchemy.orm import Mapped
|
||||||
from sqlmodel import BigInteger, Field, ForeignKey, Relationship, SQLModel
|
from sqlmodel import BigInteger, Field, ForeignKey, Relationship, SQLModel
|
||||||
|
|
||||||
|
|
||||||
class ScoreTokenBase(SQLModel, UTCBaseModel):
|
class ScoreTokenBase(SQLModel, UTCBaseModel):
|
||||||
score_id: int | None = Field(sa_column=Column(BigInteger), default=None)
|
|
||||||
ruleset_id: GameMode
|
|
||||||
playlist_item_id: int | None = Field(default=None) # playlist
|
|
||||||
created_at: datetime = Field(
|
|
||||||
default_factory=datetime.utcnow, sa_column=Column(DateTime)
|
|
||||||
)
|
|
||||||
updated_at: datetime = Field(
|
|
||||||
default_factory=datetime.utcnow, sa_column=Column(DateTime)
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class ScoreToken(ScoreTokenBase, table=True):
|
|
||||||
__tablename__ = "score_tokens" # pyright: ignore[reportAssignmentType]
|
|
||||||
__table_args__ = (Index("idx_user_playlist", "user_id", "playlist_item_id"),)
|
|
||||||
|
|
||||||
id: int | None = Field(
|
id: int | None = Field(
|
||||||
default=None,
|
default=None,
|
||||||
sa_column=Column(
|
sa_column=Column(
|
||||||
@@ -35,17 +22,28 @@ class ScoreToken(ScoreTokenBase, table=True):
|
|||||||
autoincrement=True,
|
autoincrement=True,
|
||||||
),
|
),
|
||||||
)
|
)
|
||||||
|
score_id: int | None = Field(sa_column=Column(BigInteger), default=None)
|
||||||
|
ruleset_id: GameMode
|
||||||
user_id: int = Field(sa_column=Column(BigInteger, ForeignKey("lazer_users.id")))
|
user_id: int = Field(sa_column=Column(BigInteger, ForeignKey("lazer_users.id")))
|
||||||
beatmap_id: int = Field(foreign_key="beatmaps.id")
|
beatmap_id: int = Field(foreign_key="beatmaps.id")
|
||||||
user: User = Relationship()
|
room_id: int | None = Field(default=None)
|
||||||
beatmap: Beatmap = Relationship()
|
playlist_item_id: int | None = Field(default=None) # playlist
|
||||||
|
created_at: datetime = Field(default_factory=utcnow, sa_column=Column(DateTime))
|
||||||
|
updated_at: datetime = Field(default_factory=utcnow, sa_column=Column(DateTime))
|
||||||
|
|
||||||
|
|
||||||
|
class ScoreToken(ScoreTokenBase, table=True):
|
||||||
|
__tablename__: str = "score_tokens"
|
||||||
|
__table_args__ = (
|
||||||
|
Index("idx_user_playlist", "user_id", "playlist_item_id"),
|
||||||
|
Index("idx_playlist_room", "playlist_item_id", "room_id"),
|
||||||
|
)
|
||||||
|
|
||||||
|
user: Mapped[User] = Relationship()
|
||||||
|
beatmap: Mapped[Beatmap] = Relationship()
|
||||||
|
|
||||||
|
|
||||||
class ScoreTokenResp(ScoreTokenBase):
|
class ScoreTokenResp(ScoreTokenBase):
|
||||||
id: int
|
|
||||||
user_id: int
|
|
||||||
beatmap_id: int
|
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def from_db(cls, obj: ScoreToken) -> "ScoreTokenResp":
|
def from_db(cls, obj: ScoreToken) -> "ScoreTokenResp":
|
||||||
return cls.model_validate(obj)
|
return cls.model_validate(obj)
|
||||||
|
|||||||
13
app/database/search_beatmapset.py
Normal file
13
app/database/search_beatmapset.py
Normal file
@@ -0,0 +1,13 @@
|
|||||||
|
from . import beatmap # noqa: F401
|
||||||
|
from .beatmapset import BeatmapsetModel
|
||||||
|
|
||||||
|
from sqlmodel import SQLModel
|
||||||
|
|
||||||
|
SearchBeatmapset = BeatmapsetModel.generate_typeddict(("beatmaps.max_combo", "pack_tags"))
|
||||||
|
|
||||||
|
|
||||||
|
class SearchBeatmapsetsResp(SQLModel):
|
||||||
|
beatmapsets: list[SearchBeatmapset] # pyright: ignore[reportInvalidTypeForm]
|
||||||
|
total: int
|
||||||
|
cursor: dict[str, int | float | str] | None = None
|
||||||
|
cursor_string: str | None = None
|
||||||
@@ -1,9 +1,11 @@
|
|||||||
from datetime import UTC, datetime, timedelta
|
from datetime import timedelta
|
||||||
import math
|
import math
|
||||||
from typing import TYPE_CHECKING
|
from typing import TYPE_CHECKING, ClassVar, NotRequired, TypedDict
|
||||||
|
|
||||||
from app.models.score import GameMode
|
from app.models.score import GameMode
|
||||||
|
from app.utils import utcnow
|
||||||
|
|
||||||
|
from ._base import DatabaseModel, included, ondemand
|
||||||
from .rank_history import RankHistory
|
from .rank_history import RankHistory
|
||||||
|
|
||||||
from pydantic import field_validator
|
from pydantic import field_validator
|
||||||
@@ -14,7 +16,6 @@ from sqlmodel import (
|
|||||||
Field,
|
Field,
|
||||||
ForeignKey,
|
ForeignKey,
|
||||||
Relationship,
|
Relationship,
|
||||||
SQLModel,
|
|
||||||
col,
|
col,
|
||||||
func,
|
func,
|
||||||
select,
|
select,
|
||||||
@@ -22,10 +23,40 @@ from sqlmodel import (
|
|||||||
from sqlmodel.ext.asyncio.session import AsyncSession
|
from sqlmodel.ext.asyncio.session import AsyncSession
|
||||||
|
|
||||||
if TYPE_CHECKING:
|
if TYPE_CHECKING:
|
||||||
from .lazer_user import User, UserResp
|
from .user import User, UserDict
|
||||||
|
|
||||||
|
|
||||||
class UserStatisticsBase(SQLModel):
|
class UserStatisticsDict(TypedDict):
|
||||||
|
mode: GameMode
|
||||||
|
count_100: int
|
||||||
|
count_300: int
|
||||||
|
count_50: int
|
||||||
|
count_miss: int
|
||||||
|
pp: float
|
||||||
|
ranked_score: int
|
||||||
|
hit_accuracy: float
|
||||||
|
total_score: int
|
||||||
|
total_hits: int
|
||||||
|
maximum_combo: int
|
||||||
|
play_count: int
|
||||||
|
play_time: int
|
||||||
|
replays_watched_by_others: int
|
||||||
|
is_ranked: bool
|
||||||
|
level: NotRequired[dict[str, int]]
|
||||||
|
global_rank: NotRequired[int | None]
|
||||||
|
grade_counts: NotRequired[dict[str, int]]
|
||||||
|
rank_change_since_30_days: NotRequired[int]
|
||||||
|
country_rank: NotRequired[int | None]
|
||||||
|
user: NotRequired["UserDict"]
|
||||||
|
|
||||||
|
|
||||||
|
class UserStatisticsModel(DatabaseModel[UserStatisticsDict]):
|
||||||
|
RANKING_INCLUDES: ClassVar[list[str]] = [
|
||||||
|
"user.country",
|
||||||
|
"user.cover",
|
||||||
|
"user.team",
|
||||||
|
]
|
||||||
|
|
||||||
mode: GameMode = Field(index=True)
|
mode: GameMode = Field(index=True)
|
||||||
count_100: int = Field(default=0, sa_column=Column(BigInteger))
|
count_100: int = Field(default=0, sa_column=Column(BigInteger))
|
||||||
count_300: int = Field(default=0, sa_column=Column(BigInteger))
|
count_300: int = Field(default=0, sa_column=Column(BigInteger))
|
||||||
@@ -33,7 +64,7 @@ class UserStatisticsBase(SQLModel):
|
|||||||
count_miss: int = Field(default=0, sa_column=Column(BigInteger))
|
count_miss: int = Field(default=0, sa_column=Column(BigInteger))
|
||||||
|
|
||||||
pp: float = Field(default=0.0, index=True)
|
pp: float = Field(default=0.0, index=True)
|
||||||
ranked_score: int = Field(default=0)
|
ranked_score: int = Field(default=0, sa_column=Column(BigInteger))
|
||||||
hit_accuracy: float = Field(default=0.00)
|
hit_accuracy: float = Field(default=0.00)
|
||||||
total_score: int = Field(default=0, sa_column=Column(BigInteger))
|
total_score: int = Field(default=0, sa_column=Column(BigInteger))
|
||||||
total_hits: int = Field(default=0, sa_column=Column(BigInteger))
|
total_hits: int = Field(default=0, sa_column=Column(BigInteger))
|
||||||
@@ -56,9 +87,64 @@ class UserStatisticsBase(SQLModel):
|
|||||||
return GameMode.OSU
|
return GameMode.OSU
|
||||||
return v
|
return v
|
||||||
|
|
||||||
|
@included
|
||||||
|
@staticmethod
|
||||||
|
async def level(_session: AsyncSession, statistics: "UserStatistics") -> dict[str, int]:
|
||||||
|
return {
|
||||||
|
"current": int(statistics.level_current),
|
||||||
|
"progress": int(math.fmod(statistics.level_current, 1) * 100),
|
||||||
|
}
|
||||||
|
|
||||||
class UserStatistics(AsyncAttrs, UserStatisticsBase, table=True):
|
@included
|
||||||
__tablename__ = "lazer_user_statistics" # pyright: ignore[reportAssignmentType]
|
@staticmethod
|
||||||
|
async def global_rank(session: AsyncSession, statistics: "UserStatistics") -> int | None:
|
||||||
|
return await get_rank(session, statistics)
|
||||||
|
|
||||||
|
@included
|
||||||
|
@staticmethod
|
||||||
|
async def grade_counts(_session: AsyncSession, statistics: "UserStatistics") -> dict[str, int]:
|
||||||
|
return {
|
||||||
|
"ss": statistics.grade_ss,
|
||||||
|
"ssh": statistics.grade_ssh,
|
||||||
|
"s": statistics.grade_s,
|
||||||
|
"sh": statistics.grade_sh,
|
||||||
|
"a": statistics.grade_a,
|
||||||
|
}
|
||||||
|
|
||||||
|
@ondemand
|
||||||
|
@staticmethod
|
||||||
|
async def rank_change_since_30_days(session: AsyncSession, statistics: "UserStatistics") -> int:
|
||||||
|
global_rank = await get_rank(session, statistics)
|
||||||
|
rank_best = (
|
||||||
|
await session.exec(
|
||||||
|
select(func.max(RankHistory.rank)).where(
|
||||||
|
RankHistory.date > utcnow() - timedelta(days=30),
|
||||||
|
RankHistory.user_id == statistics.user_id,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
).first()
|
||||||
|
if rank_best is None or global_rank is None:
|
||||||
|
return 0
|
||||||
|
return rank_best - global_rank
|
||||||
|
|
||||||
|
@ondemand
|
||||||
|
@staticmethod
|
||||||
|
async def country_rank(
|
||||||
|
session: AsyncSession, statistics: "UserStatistics", user_country: str | None = None
|
||||||
|
) -> int | None:
|
||||||
|
return await get_rank(session, statistics, user_country)
|
||||||
|
|
||||||
|
@ondemand
|
||||||
|
@staticmethod
|
||||||
|
async def user(_session: AsyncSession, statistics: "UserStatistics") -> "UserDict":
|
||||||
|
from .user import UserModel
|
||||||
|
|
||||||
|
user_instance = await statistics.awaitable_attrs.user
|
||||||
|
return await UserModel.transform(user_instance)
|
||||||
|
|
||||||
|
|
||||||
|
class UserStatistics(AsyncAttrs, UserStatisticsModel, table=True):
|
||||||
|
__tablename__: str = "lazer_user_statistics"
|
||||||
id: int | None = Field(default=None, primary_key=True)
|
id: int | None = Field(default=None, primary_key=True)
|
||||||
user_id: int = Field(
|
user_id: int = Field(
|
||||||
default=None,
|
default=None,
|
||||||
@@ -76,83 +162,11 @@ class UserStatistics(AsyncAttrs, UserStatisticsBase, table=True):
|
|||||||
|
|
||||||
level_current: float = Field(default=1)
|
level_current: float = Field(default=1)
|
||||||
|
|
||||||
user: "User" = Relationship(back_populates="statistics") # type: ignore[valid-type]
|
user: "User" = Relationship(back_populates="statistics")
|
||||||
|
|
||||||
|
|
||||||
class UserStatisticsResp(UserStatisticsBase):
|
async def get_rank(session: AsyncSession, statistics: UserStatistics, country: str | None = None) -> int | None:
|
||||||
user: "UserResp | None" = None
|
from .user import User
|
||||||
rank_change_since_30_days: int | None = 0
|
|
||||||
global_rank: int | None = Field(default=None)
|
|
||||||
country_rank: int | None = Field(default=None)
|
|
||||||
grade_counts: dict[str, int] = Field(
|
|
||||||
default_factory=lambda: {
|
|
||||||
"ss": 0,
|
|
||||||
"ssh": 0,
|
|
||||||
"s": 0,
|
|
||||||
"sh": 0,
|
|
||||||
"a": 0,
|
|
||||||
}
|
|
||||||
)
|
|
||||||
level: dict[str, int] = Field(
|
|
||||||
default_factory=lambda: {
|
|
||||||
"current": 1,
|
|
||||||
"progress": 0,
|
|
||||||
}
|
|
||||||
)
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
async def from_db(
|
|
||||||
cls,
|
|
||||||
obj: UserStatistics,
|
|
||||||
session: AsyncSession,
|
|
||||||
user_country: str | None = None,
|
|
||||||
include: list[str] = [],
|
|
||||||
) -> "UserStatisticsResp":
|
|
||||||
s = cls.model_validate(obj.model_dump())
|
|
||||||
s.grade_counts = {
|
|
||||||
"ss": obj.grade_ss,
|
|
||||||
"ssh": obj.grade_ssh,
|
|
||||||
"s": obj.grade_s,
|
|
||||||
"sh": obj.grade_sh,
|
|
||||||
"a": obj.grade_a,
|
|
||||||
}
|
|
||||||
s.level = {
|
|
||||||
"current": int(obj.level_current),
|
|
||||||
"progress": int(math.fmod(obj.level_current, 1) * 100),
|
|
||||||
}
|
|
||||||
if "user" in include:
|
|
||||||
from .lazer_user import RANKING_INCLUDES, UserResp
|
|
||||||
|
|
||||||
user = await UserResp.from_db(
|
|
||||||
await obj.awaitable_attrs.user, session, include=RANKING_INCLUDES
|
|
||||||
)
|
|
||||||
s.user = user
|
|
||||||
user_country = user.country_code
|
|
||||||
|
|
||||||
s.global_rank = await get_rank(session, obj)
|
|
||||||
s.country_rank = await get_rank(session, obj, user_country)
|
|
||||||
|
|
||||||
if "rank_change_since_30_days" in include:
|
|
||||||
rank_best = (
|
|
||||||
await session.exec(
|
|
||||||
select(func.max(RankHistory.rank)).where(
|
|
||||||
RankHistory.date > datetime.now(UTC) - timedelta(days=30),
|
|
||||||
RankHistory.user_id == obj.user_id,
|
|
||||||
)
|
|
||||||
)
|
|
||||||
).first()
|
|
||||||
if rank_best is None or s.global_rank is None:
|
|
||||||
s.rank_change_since_30_days = 0
|
|
||||||
else:
|
|
||||||
s.rank_change_since_30_days = rank_best - s.global_rank
|
|
||||||
|
|
||||||
return s
|
|
||||||
|
|
||||||
|
|
||||||
async def get_rank(
|
|
||||||
session: AsyncSession, statistics: UserStatistics, country: str | None = None
|
|
||||||
) -> int | None:
|
|
||||||
from .lazer_user import User
|
|
||||||
|
|
||||||
query = select(
|
query = select(
|
||||||
UserStatistics.user_id,
|
UserStatistics.user_id,
|
||||||
@@ -167,17 +181,14 @@ async def get_rank(
|
|||||||
query = query.join(User).where(User.country_code == country)
|
query = query.join(User).where(User.country_code == country)
|
||||||
|
|
||||||
subq = query.subquery()
|
subq = query.subquery()
|
||||||
|
result = await session.exec(select(subq.c.rank).where(subq.c.user_id == statistics.user_id))
|
||||||
result = await session.exec(
|
|
||||||
select(subq.c.rank).where(subq.c.user_id == statistics.user_id)
|
|
||||||
)
|
|
||||||
|
|
||||||
rank = result.first()
|
rank = result.first()
|
||||||
if rank is None:
|
if rank is None:
|
||||||
return None
|
return None
|
||||||
|
|
||||||
if country is None:
|
if country is None:
|
||||||
today = datetime.now(UTC).date()
|
today = utcnow().date()
|
||||||
rank_history = (
|
rank_history = (
|
||||||
await session.exec(
|
await session.exec(
|
||||||
select(RankHistory).where(
|
select(RankHistory).where(
|
||||||
|
|||||||
@@ -1,59 +1,144 @@
|
|||||||
from datetime import UTC, datetime
|
from datetime import datetime
|
||||||
from typing import TYPE_CHECKING
|
from typing import TYPE_CHECKING
|
||||||
|
|
||||||
from app.models.model import UTCBaseModel
|
from app.models.model import UTCBaseModel
|
||||||
|
from app.models.score import GameMode
|
||||||
|
from app.utils import utcnow
|
||||||
|
|
||||||
from sqlalchemy import Column, DateTime
|
from sqlalchemy import Column, DateTime
|
||||||
from sqlmodel import BigInteger, Field, ForeignKey, Relationship, SQLModel
|
from sqlmodel import BigInteger, Field, ForeignKey, Relationship, SQLModel, Text, col, func, select
|
||||||
|
from sqlmodel.ext.asyncio.session import AsyncSession
|
||||||
|
|
||||||
if TYPE_CHECKING:
|
if TYPE_CHECKING:
|
||||||
from .lazer_user import User
|
from .user import User
|
||||||
|
|
||||||
|
|
||||||
class Team(SQLModel, UTCBaseModel, table=True):
|
class TeamBase(SQLModel, UTCBaseModel):
|
||||||
__tablename__ = "teams" # pyright: ignore[reportAssignmentType]
|
id: int = Field(default=None, primary_key=True, index=True)
|
||||||
|
|
||||||
id: int | None = Field(default=None, primary_key=True, index=True)
|
|
||||||
name: str = Field(max_length=100)
|
name: str = Field(max_length=100)
|
||||||
short_name: str = Field(max_length=10)
|
short_name: str = Field(max_length=10)
|
||||||
flag_url: str | None = Field(default=None)
|
flag_url: str | None = Field(default=None)
|
||||||
cover_url: str | None = Field(default=None)
|
cover_url: str | None = Field(default=None)
|
||||||
created_at: datetime = Field(default=datetime.now(UTC), sa_column=Column(DateTime))
|
created_at: datetime = Field(default_factory=utcnow, sa_column=Column(DateTime))
|
||||||
leader_id: int = Field(sa_column=Column(BigInteger, ForeignKey("lazer_users.id")))
|
leader_id: int = Field(sa_column=Column(BigInteger, ForeignKey("lazer_users.id")))
|
||||||
|
description: str | None = Field(default=None, sa_column=Column(Text))
|
||||||
|
playmode: GameMode = Field(default=GameMode.OSU)
|
||||||
|
website: str | None = Field(default=None, sa_column=Column(Text))
|
||||||
|
|
||||||
|
|
||||||
|
class Team(TeamBase, table=True):
|
||||||
|
__tablename__: str = "teams"
|
||||||
|
|
||||||
leader: "User" = Relationship()
|
leader: "User" = Relationship()
|
||||||
members: list["TeamMember"] = Relationship(back_populates="team")
|
members: list["TeamMember"] = Relationship(back_populates="team")
|
||||||
|
|
||||||
|
|
||||||
|
class TeamResp(TeamBase):
|
||||||
|
rank: int = 0
|
||||||
|
pp: float = 0.0
|
||||||
|
ranked_score: int = 0
|
||||||
|
total_play_count: int = 0
|
||||||
|
member_count: int = 0
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
async def from_db(cls, team: Team, session: AsyncSession, gamemode: GameMode | None = None) -> "TeamResp":
|
||||||
|
from .statistics import UserStatistics
|
||||||
|
from .user import User
|
||||||
|
|
||||||
|
playmode = gamemode or team.playmode
|
||||||
|
|
||||||
|
pp_expr = func.coalesce(func.sum(col(UserStatistics.pp)), 0.0)
|
||||||
|
ranked_score_expr = func.coalesce(func.sum(col(UserStatistics.ranked_score)), 0)
|
||||||
|
play_count_expr = func.coalesce(func.sum(col(UserStatistics.play_count)), 0)
|
||||||
|
member_count_expr = func.count(func.distinct(col(UserStatistics.user_id)))
|
||||||
|
|
||||||
|
team_stats_stmt = (
|
||||||
|
select(pp_expr, ranked_score_expr, play_count_expr, member_count_expr)
|
||||||
|
.select_from(UserStatistics)
|
||||||
|
.join(TeamMember, col(TeamMember.user_id) == col(UserStatistics.user_id))
|
||||||
|
.join(User, col(User.id) == col(UserStatistics.user_id))
|
||||||
|
.join(Team, col(Team.id) == col(TeamMember.team_id))
|
||||||
|
.where(
|
||||||
|
col(Team.id) == team.id,
|
||||||
|
col(Team.playmode) == playmode,
|
||||||
|
col(UserStatistics.mode) == playmode,
|
||||||
|
col(UserStatistics.pp) > 0,
|
||||||
|
col(UserStatistics.is_ranked).is_(True),
|
||||||
|
~User.is_restricted_query(col(UserStatistics.user_id)),
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
team_stats_result = await session.exec(team_stats_stmt)
|
||||||
|
stats_row = team_stats_result.one_or_none()
|
||||||
|
if stats_row is None:
|
||||||
|
total_pp = 0.0
|
||||||
|
total_ranked_score = 0
|
||||||
|
total_play_count = 0
|
||||||
|
active_member_count = 0
|
||||||
|
else:
|
||||||
|
total_pp, total_ranked_score, total_play_count, active_member_count = stats_row
|
||||||
|
total_pp = float(total_pp or 0.0)
|
||||||
|
total_ranked_score = int(total_ranked_score or 0)
|
||||||
|
total_play_count = int(total_play_count or 0)
|
||||||
|
active_member_count = int(active_member_count or 0)
|
||||||
|
|
||||||
|
total_pp_ranking_expr = func.coalesce(func.sum(col(UserStatistics.pp)), 0.0)
|
||||||
|
ranking_stmt = (
|
||||||
|
select(Team.id, total_pp_ranking_expr)
|
||||||
|
.select_from(Team)
|
||||||
|
.join(TeamMember, col(TeamMember.team_id) == col(Team.id))
|
||||||
|
.join(UserStatistics, col(UserStatistics.user_id) == col(TeamMember.user_id))
|
||||||
|
.join(User, col(User.id) == col(TeamMember.user_id))
|
||||||
|
.where(
|
||||||
|
col(Team.playmode) == playmode,
|
||||||
|
col(UserStatistics.mode) == playmode,
|
||||||
|
col(UserStatistics.pp) > 0,
|
||||||
|
col(UserStatistics.is_ranked).is_(True),
|
||||||
|
~User.is_restricted_query(col(UserStatistics.user_id)),
|
||||||
|
)
|
||||||
|
.group_by(col(Team.id))
|
||||||
|
.order_by(total_pp_ranking_expr.desc())
|
||||||
|
)
|
||||||
|
|
||||||
|
ranking_result = await session.exec(ranking_stmt)
|
||||||
|
ranking_rows = ranking_result.all()
|
||||||
|
rank = 0
|
||||||
|
for index, (team_id, _) in enumerate(ranking_rows, start=1):
|
||||||
|
if team_id == team.id:
|
||||||
|
rank = index
|
||||||
|
break
|
||||||
|
|
||||||
|
data = team.model_dump()
|
||||||
|
data.update(
|
||||||
|
{
|
||||||
|
"pp": total_pp,
|
||||||
|
"ranked_score": total_ranked_score,
|
||||||
|
"total_play_count": total_play_count,
|
||||||
|
"member_count": active_member_count,
|
||||||
|
"rank": rank,
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
return cls.model_validate(data)
|
||||||
|
|
||||||
|
|
||||||
class TeamMember(SQLModel, UTCBaseModel, table=True):
|
class TeamMember(SQLModel, UTCBaseModel, table=True):
|
||||||
__tablename__ = "team_members" # pyright: ignore[reportAssignmentType]
|
__tablename__: str = "team_members"
|
||||||
|
|
||||||
user_id: int = Field(
|
user_id: int = Field(sa_column=Column(BigInteger, ForeignKey("lazer_users.id"), primary_key=True))
|
||||||
sa_column=Column(BigInteger, ForeignKey("lazer_users.id"), primary_key=True)
|
|
||||||
)
|
|
||||||
team_id: int = Field(foreign_key="teams.id")
|
team_id: int = Field(foreign_key="teams.id")
|
||||||
joined_at: datetime = Field(
|
joined_at: datetime = Field(default_factory=utcnow, sa_column=Column(DateTime))
|
||||||
default_factory=datetime.utcnow, sa_column=Column(DateTime)
|
|
||||||
)
|
|
||||||
|
|
||||||
user: "User" = Relationship(
|
user: "User" = Relationship(back_populates="team_membership", sa_relationship_kwargs={"lazy": "joined"})
|
||||||
back_populates="team_membership", sa_relationship_kwargs={"lazy": "joined"}
|
team: "Team" = Relationship(back_populates="members", sa_relationship_kwargs={"lazy": "joined"})
|
||||||
)
|
|
||||||
team: "Team" = Relationship(
|
|
||||||
back_populates="members", sa_relationship_kwargs={"lazy": "joined"}
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class TeamRequest(SQLModel, UTCBaseModel, table=True):
|
class TeamRequest(SQLModel, UTCBaseModel, table=True):
|
||||||
__tablename__ = "team_requests" # pyright: ignore[reportAssignmentType]
|
__tablename__: str = "team_requests"
|
||||||
|
|
||||||
user_id: int = Field(
|
user_id: int = Field(sa_column=Column(BigInteger, ForeignKey("lazer_users.id"), primary_key=True))
|
||||||
sa_column=Column(BigInteger, ForeignKey("lazer_users.id"), primary_key=True)
|
|
||||||
)
|
|
||||||
team_id: int = Field(foreign_key="teams.id", primary_key=True)
|
team_id: int = Field(foreign_key="teams.id", primary_key=True)
|
||||||
requested_at: datetime = Field(
|
requested_at: datetime = Field(default_factory=utcnow, sa_column=Column(DateTime))
|
||||||
default=datetime.now(UTC), sa_column=Column(DateTime)
|
|
||||||
)
|
|
||||||
|
|
||||||
user: "User" = Relationship(sa_relationship_kwargs={"lazy": "joined"})
|
user: "User" = Relationship(sa_relationship_kwargs={"lazy": "joined"})
|
||||||
team: "Team" = Relationship(sa_relationship_kwargs={"lazy": "joined"})
|
team: "Team" = Relationship(sa_relationship_kwargs={"lazy": "joined"})
|
||||||
|
|||||||
92
app/database/total_score_best_scores.py
Normal file
92
app/database/total_score_best_scores.py
Normal file
@@ -0,0 +1,92 @@
|
|||||||
|
from typing import TYPE_CHECKING
|
||||||
|
|
||||||
|
from app.calculator import calculate_score_to_level
|
||||||
|
from app.models.score import GameMode, Rank
|
||||||
|
|
||||||
|
from .statistics import UserStatistics
|
||||||
|
from .user import User
|
||||||
|
|
||||||
|
from sqlalchemy import Index
|
||||||
|
from sqlmodel import (
|
||||||
|
JSON,
|
||||||
|
BigInteger,
|
||||||
|
Column,
|
||||||
|
Field,
|
||||||
|
ForeignKey,
|
||||||
|
Relationship,
|
||||||
|
SQLModel,
|
||||||
|
col,
|
||||||
|
func,
|
||||||
|
select,
|
||||||
|
)
|
||||||
|
from sqlmodel.ext.asyncio.session import AsyncSession
|
||||||
|
|
||||||
|
if TYPE_CHECKING:
|
||||||
|
from .beatmap import Beatmap
|
||||||
|
from .score import Score
|
||||||
|
|
||||||
|
|
||||||
|
class TotalScoreBestScore(SQLModel, table=True):
|
||||||
|
__tablename__: str = "total_score_best_scores"
|
||||||
|
__table_args__ = (
|
||||||
|
Index("ix_total_score_best_scores_user_mode_score", "user_id", "gamemode", "score_id"),
|
||||||
|
Index("ix_total_score_best_scores_beatmap_mode_score", "beatmap_id", "gamemode", "total_score"),
|
||||||
|
)
|
||||||
|
user_id: int = Field(sa_column=Column(BigInteger, ForeignKey("lazer_users.id"), index=True))
|
||||||
|
score_id: int = Field(sa_column=Column(BigInteger, ForeignKey("scores.id"), primary_key=True))
|
||||||
|
beatmap_id: int = Field(foreign_key="beatmaps.id", index=True)
|
||||||
|
gamemode: GameMode = Field(index=True)
|
||||||
|
total_score: int = Field(default=0, sa_column=Column(BigInteger))
|
||||||
|
mods: list[str] = Field(
|
||||||
|
default_factory=list,
|
||||||
|
sa_column=Column(JSON),
|
||||||
|
)
|
||||||
|
rank: Rank
|
||||||
|
|
||||||
|
user: User = Relationship()
|
||||||
|
score: "Score" = Relationship(
|
||||||
|
sa_relationship_kwargs={
|
||||||
|
"foreign_keys": "[TotalScoreBestScore.score_id]",
|
||||||
|
"lazy": "joined",
|
||||||
|
},
|
||||||
|
back_populates="best_score",
|
||||||
|
)
|
||||||
|
beatmap: "Beatmap" = Relationship()
|
||||||
|
|
||||||
|
async def delete(self, session: AsyncSession):
|
||||||
|
from .score import Score
|
||||||
|
|
||||||
|
statistics = await session.exec(
|
||||||
|
select(UserStatistics).where(UserStatistics.user_id == self.user_id, UserStatistics.mode == self.gamemode)
|
||||||
|
)
|
||||||
|
statistics = statistics.first()
|
||||||
|
if statistics:
|
||||||
|
# Use display score from the referenced score for consistency with current scoring mode
|
||||||
|
display_score = self.score.get_display_score()
|
||||||
|
statistics.total_score -= display_score
|
||||||
|
statistics.ranked_score -= display_score
|
||||||
|
statistics.level_current = calculate_score_to_level(statistics.total_score)
|
||||||
|
match self.rank:
|
||||||
|
case Rank.X:
|
||||||
|
statistics.grade_ss -= 1
|
||||||
|
case Rank.XH:
|
||||||
|
statistics.grade_ssh -= 1
|
||||||
|
case Rank.S:
|
||||||
|
statistics.grade_s -= 1
|
||||||
|
case Rank.SH:
|
||||||
|
statistics.grade_sh -= 1
|
||||||
|
case Rank.A:
|
||||||
|
statistics.grade_a -= 1
|
||||||
|
|
||||||
|
max_combo = (
|
||||||
|
await session.exec(
|
||||||
|
select(func.max(Score.max_combo)).where(
|
||||||
|
Score.user_id == self.user_id,
|
||||||
|
col(Score.id).in_(select(TotalScoreBestScore.score_id)),
|
||||||
|
Score.gamemode == self.gamemode,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
).first()
|
||||||
|
statistics.maximum_combo = max(0, max_combo or 0)
|
||||||
|
|
||||||
|
await session.delete(self)
|
||||||
796
app/database/user.py
Normal file
796
app/database/user.py
Normal file
@@ -0,0 +1,796 @@
|
|||||||
|
from datetime import datetime, timedelta
|
||||||
|
import json
|
||||||
|
from typing import TYPE_CHECKING, ClassVar, Literal, NotRequired, TypedDict, overload
|
||||||
|
|
||||||
|
from app.config import settings
|
||||||
|
from app.models.notification import NotificationName
|
||||||
|
from app.models.score import GameMode
|
||||||
|
from app.models.user import Country, Page
|
||||||
|
from app.path import STATIC_DIR
|
||||||
|
from app.utils import utcnow
|
||||||
|
|
||||||
|
from ._base import DatabaseModel, OnDemand, included, ondemand
|
||||||
|
from .achievement import UserAchievement, UserAchievementResp
|
||||||
|
from .auth import TotpKeys
|
||||||
|
from .beatmap_playcounts import BeatmapPlaycounts
|
||||||
|
from .counts import CountResp, MonthlyPlaycounts, ReplayWatchedCount
|
||||||
|
from .daily_challenge import DailyChallengeStats, DailyChallengeStatsResp
|
||||||
|
from .events import Event
|
||||||
|
from .notification import Notification, UserNotification
|
||||||
|
from .rank_history import RankHistory, RankHistoryResp, RankTop
|
||||||
|
from .relationship import RelationshipModel
|
||||||
|
from .statistics import UserStatistics, UserStatisticsModel
|
||||||
|
from .team import Team, TeamMember
|
||||||
|
from .user_account_history import UserAccountHistory, UserAccountHistoryResp, UserAccountHistoryType
|
||||||
|
from .user_preference import DEFAULT_ORDER, UserPreference
|
||||||
|
|
||||||
|
from pydantic import field_validator
|
||||||
|
from sqlalchemy.ext.asyncio import AsyncAttrs
|
||||||
|
from sqlalchemy.orm import Mapped
|
||||||
|
from sqlmodel import (
|
||||||
|
JSON,
|
||||||
|
BigInteger,
|
||||||
|
Column,
|
||||||
|
DateTime,
|
||||||
|
Field,
|
||||||
|
Relationship,
|
||||||
|
col,
|
||||||
|
exists,
|
||||||
|
func,
|
||||||
|
select,
|
||||||
|
text,
|
||||||
|
)
|
||||||
|
from sqlmodel.ext.asyncio.session import AsyncSession
|
||||||
|
|
||||||
|
if TYPE_CHECKING:
|
||||||
|
from .favourite_beatmapset import FavouriteBeatmapset
|
||||||
|
from .matchmaking import MatchmakingUserStats
|
||||||
|
from .relationship import Relationship, RelationshipDict
|
||||||
|
from .statistics import UserStatisticsDict
|
||||||
|
|
||||||
|
|
||||||
|
class Kudosu(TypedDict):
|
||||||
|
available: int
|
||||||
|
total: int
|
||||||
|
|
||||||
|
|
||||||
|
class RankHighest(TypedDict):
|
||||||
|
rank: int
|
||||||
|
updated_at: datetime
|
||||||
|
|
||||||
|
|
||||||
|
class UserProfileCover(TypedDict):
|
||||||
|
url: str
|
||||||
|
custom_url: NotRequired[str]
|
||||||
|
id: NotRequired[str]
|
||||||
|
|
||||||
|
|
||||||
|
Badge = TypedDict(
|
||||||
|
"Badge",
|
||||||
|
{
|
||||||
|
"awarded_at": datetime,
|
||||||
|
"description": str,
|
||||||
|
"image@2x_url": str,
|
||||||
|
"image_url": str,
|
||||||
|
"url": str,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
|
||||||
|
COUNTRIES = json.loads((STATIC_DIR / "iso3166.json").read_text())
|
||||||
|
|
||||||
|
|
||||||
|
class UserDict(TypedDict):
|
||||||
|
avatar_url: str
|
||||||
|
country_code: str
|
||||||
|
id: int
|
||||||
|
is_active: bool
|
||||||
|
is_bot: bool
|
||||||
|
is_supporter: bool
|
||||||
|
last_visit: datetime | None
|
||||||
|
pm_friends_only: bool
|
||||||
|
profile_colour: str | None
|
||||||
|
username: str
|
||||||
|
is_online: bool
|
||||||
|
g0v0_playmode: GameMode
|
||||||
|
page: NotRequired[Page]
|
||||||
|
previous_usernames: NotRequired[list[str]]
|
||||||
|
support_level: NotRequired[int]
|
||||||
|
badges: NotRequired[list[Badge]]
|
||||||
|
cover: NotRequired[UserProfileCover]
|
||||||
|
beatmap_playcounts_count: NotRequired[int]
|
||||||
|
playmode: NotRequired[GameMode]
|
||||||
|
discord: NotRequired[str | None]
|
||||||
|
has_supported: NotRequired[bool]
|
||||||
|
interests: NotRequired[str | None]
|
||||||
|
join_date: NotRequired[datetime]
|
||||||
|
location: NotRequired[str | None]
|
||||||
|
max_blocks: NotRequired[int]
|
||||||
|
max_friends: NotRequired[int]
|
||||||
|
occupation: NotRequired[str | None]
|
||||||
|
playstyle: NotRequired[list[str]]
|
||||||
|
profile_hue: NotRequired[int | None]
|
||||||
|
title: NotRequired[str | None]
|
||||||
|
title_url: NotRequired[str | None]
|
||||||
|
twitter: NotRequired[str | None]
|
||||||
|
website: NotRequired[str | None]
|
||||||
|
comments_count: NotRequired[int]
|
||||||
|
post_count: NotRequired[int]
|
||||||
|
is_admin: NotRequired[bool]
|
||||||
|
is_gmt: NotRequired[bool]
|
||||||
|
is_qat: NotRequired[bool]
|
||||||
|
is_bng: NotRequired[bool]
|
||||||
|
groups: NotRequired[list[str]]
|
||||||
|
active_tournament_banners: NotRequired[list[dict]]
|
||||||
|
graveyard_beatmapset_count: NotRequired[int]
|
||||||
|
loved_beatmapset_count: NotRequired[int]
|
||||||
|
mapping_follower_count: NotRequired[int]
|
||||||
|
nominated_beatmapset_count: NotRequired[int]
|
||||||
|
guest_beatmapset_count: NotRequired[int]
|
||||||
|
pending_beatmapset_count: NotRequired[int]
|
||||||
|
ranked_beatmapset_count: NotRequired[int]
|
||||||
|
follow_user_mapping: NotRequired[list[int]]
|
||||||
|
is_deleted: NotRequired[bool]
|
||||||
|
country: NotRequired[Country]
|
||||||
|
favourite_beatmapset_count: NotRequired[int]
|
||||||
|
follower_count: NotRequired[int]
|
||||||
|
scores_best_count: NotRequired[int]
|
||||||
|
scores_pinned_count: NotRequired[int]
|
||||||
|
scores_recent_count: NotRequired[int]
|
||||||
|
scores_first_count: NotRequired[int]
|
||||||
|
cover_url: NotRequired[str]
|
||||||
|
profile_order: NotRequired[list[str]]
|
||||||
|
user_preference: NotRequired[UserPreference | None]
|
||||||
|
friends: NotRequired[list["RelationshipDict"]]
|
||||||
|
team: NotRequired[Team | None]
|
||||||
|
account_history: NotRequired[list[UserAccountHistoryResp]]
|
||||||
|
daily_challenge_user_stats: NotRequired[DailyChallengeStatsResp | None]
|
||||||
|
statistics: NotRequired["UserStatisticsDict | None"]
|
||||||
|
statistics_rulesets: NotRequired[dict[str, "UserStatisticsDict"]]
|
||||||
|
monthly_playcounts: NotRequired[list[CountResp]]
|
||||||
|
replay_watched_counts: NotRequired[list[CountResp]]
|
||||||
|
user_achievements: NotRequired[list[UserAchievementResp]]
|
||||||
|
rank_history: NotRequired[RankHistoryResp | None]
|
||||||
|
rank_highest: NotRequired[RankHighest | None]
|
||||||
|
is_restricted: NotRequired[bool]
|
||||||
|
kudosu: NotRequired[Kudosu]
|
||||||
|
unread_pm_count: NotRequired[int]
|
||||||
|
default_group: NotRequired[str]
|
||||||
|
session_verified: NotRequired[bool]
|
||||||
|
session_verification_method: NotRequired[Literal["totp", "mail"] | None]
|
||||||
|
|
||||||
|
|
||||||
|
class UserModel(DatabaseModel[UserDict]):
|
||||||
|
# https://github.com/ppy/osu-web/blob/d0407b1f2846dfd8b85ec0cf20e3fe3028a7b486/app/Transformers/UserCompactTransformer.php#L22-L39
|
||||||
|
CARD_INCLUDES: ClassVar[list[str]] = [
|
||||||
|
"country",
|
||||||
|
"cover",
|
||||||
|
"groups",
|
||||||
|
"team",
|
||||||
|
]
|
||||||
|
LIST_INCLUDES: ClassVar[list[str]] = [
|
||||||
|
*CARD_INCLUDES,
|
||||||
|
"statistics",
|
||||||
|
"support_level",
|
||||||
|
]
|
||||||
|
|
||||||
|
# https://github.com/ppy/osu-web/blob/d0407b1f2846dfd8b85ec0cf20e3fe3028a7b486/app/Transformers/UserTransformer.php#L36-L53
|
||||||
|
USER_TRANSFORMER_INCLUDES: ClassVar[list[str]] = [
|
||||||
|
"cover_url",
|
||||||
|
"discord",
|
||||||
|
"has_supported",
|
||||||
|
"interests",
|
||||||
|
"join_date",
|
||||||
|
"location",
|
||||||
|
"max_blocks",
|
||||||
|
"max_friends",
|
||||||
|
"occupation",
|
||||||
|
"playmode",
|
||||||
|
"playstyle",
|
||||||
|
"post_count",
|
||||||
|
"profile_hue",
|
||||||
|
"profile_order",
|
||||||
|
"title",
|
||||||
|
"title_url",
|
||||||
|
"twitter",
|
||||||
|
"website",
|
||||||
|
# https://github.com/ppy/osu-web/blob/d0407b1f2846dfd8b85ec0cf20e3fe3028a7b486/app/Transformers/UserTransformer.php#L13C22-L25
|
||||||
|
"cover",
|
||||||
|
"country",
|
||||||
|
"is_admin",
|
||||||
|
"is_bng",
|
||||||
|
"is_full_bn",
|
||||||
|
"is_gmt",
|
||||||
|
"is_limited_bn",
|
||||||
|
"is_moderator",
|
||||||
|
"is_nat",
|
||||||
|
"is_restricted",
|
||||||
|
"is_silenced",
|
||||||
|
"kudosu",
|
||||||
|
]
|
||||||
|
|
||||||
|
# https://github.com/ppy/osu-web/blob/d0407b1f2846dfd8b85ec0cf20e3fe3028a7b486/app/Transformers/UserCompactTransformer.php#L41-L51
|
||||||
|
PROFILE_HEADER_INCLUDES: ClassVar[list[str]] = [
|
||||||
|
"active_tournament_banner",
|
||||||
|
"active_tournament_banners",
|
||||||
|
"badges",
|
||||||
|
"comments_count",
|
||||||
|
"follower_count",
|
||||||
|
"groups",
|
||||||
|
"mapping_follower_count",
|
||||||
|
"previous_usernames",
|
||||||
|
"support_level",
|
||||||
|
]
|
||||||
|
|
||||||
|
# https://github.com/ppy/osu-web/blob/3f08fe12d70bcac1e32455c31e984eb6ef589b42/app/Http/Controllers/UsersController.php#L900-L937
|
||||||
|
USER_INCLUDES: ClassVar[list[str]] = [
|
||||||
|
# == apiIncludes ==
|
||||||
|
# historical
|
||||||
|
"beatmap_playcounts_count",
|
||||||
|
"monthly_playcounts",
|
||||||
|
"replays_watched_counts",
|
||||||
|
"scores_recent_count",
|
||||||
|
# beatmapsets
|
||||||
|
"favourite_beatmapset_count",
|
||||||
|
"graveyard_beatmapset_count",
|
||||||
|
"guest_beatmapset_count",
|
||||||
|
"loved_beatmapset_count",
|
||||||
|
"nominated_beatmapset_count",
|
||||||
|
"pending_beatmapset_count",
|
||||||
|
"ranked_beatmapset_count",
|
||||||
|
# top scores
|
||||||
|
"scores_best_count",
|
||||||
|
"scores_first_count",
|
||||||
|
"scores_pinned_count",
|
||||||
|
# others
|
||||||
|
"account_history",
|
||||||
|
"current_season_stats",
|
||||||
|
"daily_challenge_user_stats",
|
||||||
|
"page",
|
||||||
|
"pending_beatmapset_count",
|
||||||
|
"rank_highest",
|
||||||
|
"rank_history",
|
||||||
|
"statistics",
|
||||||
|
"statistics.country_rank",
|
||||||
|
"statistics.rank",
|
||||||
|
"statistics.variants",
|
||||||
|
"team",
|
||||||
|
"user_achievements",
|
||||||
|
*PROFILE_HEADER_INCLUDES,
|
||||||
|
*USER_TRANSFORMER_INCLUDES,
|
||||||
|
]
|
||||||
|
|
||||||
|
# https://github.com/ppy/osu-web/blob/d0407b1f2846dfd8b85ec0cf20e3fe3028a7b486/app/Transformers/UserCompactTransformer.php#L133-L150
|
||||||
|
avatar_url: str = "https://lazer-data.g0v0.top/default.jpg"
|
||||||
|
country_code: str = Field(default="CN", max_length=2, index=True)
|
||||||
|
# ? default_group: str|None
|
||||||
|
id: int = Field(
|
||||||
|
default=None,
|
||||||
|
sa_column=Column(BigInteger, primary_key=True, autoincrement=True, index=True),
|
||||||
|
)
|
||||||
|
is_active: bool = True
|
||||||
|
is_bot: bool = False
|
||||||
|
is_supporter: bool = False
|
||||||
|
is_online: bool = False
|
||||||
|
last_visit: datetime | None = Field(default_factory=utcnow, sa_column=Column(DateTime(timezone=True)))
|
||||||
|
pm_friends_only: bool = False
|
||||||
|
profile_colour: str | None = None
|
||||||
|
username: str = Field(max_length=32, unique=True, index=True)
|
||||||
|
|
||||||
|
page: OnDemand[Page] = Field(sa_column=Column(JSON), default=Page(html="", raw=""))
|
||||||
|
previous_usernames: OnDemand[list[str]] = Field(default_factory=list, sa_column=Column(JSON))
|
||||||
|
support_level: OnDemand[int] = Field(default=0)
|
||||||
|
badges: OnDemand[list[Badge]] = Field(default_factory=list, sa_column=Column(JSON))
|
||||||
|
|
||||||
|
# optional
|
||||||
|
# blocks
|
||||||
|
cover: OnDemand[UserProfileCover] = Field(
|
||||||
|
default=UserProfileCover(url=""),
|
||||||
|
sa_column=Column(JSON),
|
||||||
|
)
|
||||||
|
# kudosu
|
||||||
|
|
||||||
|
# UserExtended
|
||||||
|
playmode: OnDemand[GameMode] = Field(default=GameMode.OSU)
|
||||||
|
discord: OnDemand[str | None] = Field(default=None)
|
||||||
|
has_supported: OnDemand[bool] = Field(default=False)
|
||||||
|
interests: OnDemand[str | None] = Field(default=None)
|
||||||
|
join_date: OnDemand[datetime] = Field(default_factory=utcnow)
|
||||||
|
location: OnDemand[str | None] = Field(default=None)
|
||||||
|
max_blocks: OnDemand[int] = Field(default=50)
|
||||||
|
max_friends: OnDemand[int] = Field(default=500)
|
||||||
|
occupation: OnDemand[str | None] = Field(default=None)
|
||||||
|
playstyle: OnDemand[list[str]] = Field(default_factory=list, sa_column=Column(JSON))
|
||||||
|
# TODO: post_count
|
||||||
|
profile_hue: OnDemand[int | None] = Field(default=None)
|
||||||
|
title: OnDemand[str | None] = Field(default=None)
|
||||||
|
title_url: OnDemand[str | None] = Field(default=None)
|
||||||
|
twitter: OnDemand[str | None] = Field(default=None)
|
||||||
|
website: OnDemand[str | None] = Field(default=None)
|
||||||
|
|
||||||
|
# undocumented
|
||||||
|
comments_count: OnDemand[int] = Field(default=0)
|
||||||
|
post_count: OnDemand[int] = Field(default=0)
|
||||||
|
is_admin: OnDemand[bool] = Field(default=False)
|
||||||
|
is_gmt: OnDemand[bool] = Field(default=False)
|
||||||
|
is_qat: OnDemand[bool] = Field(default=False)
|
||||||
|
is_bng: OnDemand[bool] = Field(default=False)
|
||||||
|
|
||||||
|
# g0v0-extra
|
||||||
|
g0v0_playmode: GameMode = GameMode.OSU
|
||||||
|
|
||||||
|
@field_validator("playmode", mode="before")
@classmethod
def validate_playmode(cls, v):
    """Coerce incoming string values to a GameMode enum member.

    Unknown strings fall back to GameMode.OSU; non-string values pass
    through unchanged.
    """
    if not isinstance(v, str):
        return v
    try:
        return GameMode(v)
    except ValueError:
        # Unrecognised mode string: use the default mode.
        return GameMode.OSU
@ondemand
@staticmethod
async def groups(_session: AsyncSession, _obj: "User") -> list[str]:
    """User group identifiers. Not implemented yet; always empty."""
    group_ids: list[str] = []
    return group_ids
@ondemand
@staticmethod
async def active_tournament_banners(_session: AsyncSession, _obj: "User") -> list[dict]:
    """Tournament banners. Not implemented yet; always empty."""
    banners: list[dict] = []
    return banners
@ondemand
@staticmethod
async def graveyard_beatmapset_count(_session: AsyncSession, _obj: "User") -> int:
    """Graveyarded beatmapset count. Not tracked yet; always 0."""
    count = 0
    return count
@ondemand
@staticmethod
async def loved_beatmapset_count(_session: AsyncSession, _obj: "User") -> int:
    """Loved beatmapset count. Not tracked yet; always 0."""
    count = 0
    return count
@ondemand
@staticmethod
async def mapping_follower_count(_session: AsyncSession, _obj: "User") -> int:
    """Mapping-subscriber count. Not tracked yet; always 0."""
    count = 0
    return count
@ondemand
@staticmethod
async def nominated_beatmapset_count(_session: AsyncSession, _obj: "User") -> int:
    """Nominated beatmapset count. Not tracked yet; always 0."""
    count = 0
    return count
@ondemand
@staticmethod
async def guest_beatmapset_count(_session: AsyncSession, _obj: "User") -> int:
    """Guest-difficulty beatmapset count. Not tracked yet; always 0."""
    count = 0
    return count
@ondemand
@staticmethod
async def pending_beatmapset_count(_session: AsyncSession, _obj: "User") -> int:
    """Pending beatmapset count. Not tracked yet; always 0."""
    count = 0
    return count
@ondemand
@staticmethod
async def ranked_beatmapset_count(_session: AsyncSession, _obj: "User") -> int:
    """Ranked beatmapset count. Not tracked yet; always 0."""
    count = 0
    return count
@ondemand
@staticmethod
async def follow_user_mapping(_session: AsyncSession, _obj: "User") -> list[int]:
    """IDs of mappers the user follows. Not implemented yet; always empty."""
    followed: list[int] = []
    return followed
@ondemand
@staticmethod
async def is_deleted(_session: AsyncSession, _obj: "User") -> bool:
    """Deletion flag. Account deletion is not tracked; always False."""
    deleted = False
    return deleted
@ondemand
@staticmethod
async def country(_session: AsyncSession, obj: "User") -> Country:
    """Resolve the stored country code to a Country ("Unknown" if unmapped)."""
    code = obj.country_code
    return Country(code=code, name=COUNTRIES.get(code, "Unknown"))
@ondemand
@staticmethod
async def favourite_beatmapset_count(session: AsyncSession, obj: "User") -> int:
    """Number of beatmapsets the user has favourited."""
    from .favourite_beatmapset import FavouriteBeatmapset

    stmt = (
        select(func.count())
        .select_from(FavouriteBeatmapset)
        .where(FavouriteBeatmapset.user_id == obj.id)
    )
    return (await session.exec(stmt)).one()
@ondemand
@staticmethod
async def follower_count(session: AsyncSession, obj: "User") -> int:
    """Number of users following this user (FOLLOW relationships targeting them)."""
    from .relationship import Relationship, RelationshipType

    result = await session.exec(
        select(func.count())
        .select_from(Relationship)
        .where(
            Relationship.target_id == obj.id,
            Relationship.type == RelationshipType.FOLLOW,
        )
    )
    return result.one()
@ondemand
@staticmethod
async def scores_best_count(
    session: AsyncSession,
    obj: "User",
    ruleset: GameMode | None = None,
) -> int:
    """Number of best (top) scores for the user in the given ruleset.

    Best scores are capped at 200 per mode.  The previous implementation
    chained ``.limit(200)`` onto ``select(func.count())``, which limits the
    single aggregate result row rather than the rows being counted, so the
    cap never applied.  Counting over a limited subquery enforces it.
    """
    from .best_scores import BestScore

    mode = ruleset or obj.playmode
    # At most 200 best scores may count toward the total.
    capped = (
        select(BestScore.id)
        .where(
            BestScore.user_id == obj.id,
            BestScore.gamemode == mode,
        )
        .limit(200)
        .subquery()
    )
    return (await session.exec(select(func.count()).select_from(capped))).one()
@ondemand
@staticmethod
async def scores_pinned_count(
    session: AsyncSession,
    obj: "User",
    ruleset: GameMode | None = None,
) -> int:
    """Number of passed scores the user has pinned in the given ruleset."""
    from .score import Score

    mode = ruleset or obj.playmode
    conditions = (
        Score.user_id == obj.id,
        Score.gamemode == mode,
        Score.pinned_order > 0,
        col(Score.passed).is_(True),
    )
    stmt = select(func.count()).select_from(Score).where(*conditions)
    return (await session.exec(stmt)).one()
@ondemand
@staticmethod
async def scores_recent_count(
    session: AsyncSession,
    obj: "User",
    ruleset: GameMode | None = None,
) -> int:
    """Number of passed scores set in the last 24 hours for the given ruleset."""
    from .score import Score

    mode = ruleset or obj.playmode
    cutoff = utcnow() - timedelta(hours=24)
    stmt = (
        select(func.count())
        .select_from(Score)
        .where(
            Score.user_id == obj.id,
            Score.gamemode == mode,
            col(Score.passed).is_(True),
            Score.ended_at > cutoff,
        )
    )
    return (await session.exec(stmt)).one()
@ondemand
@staticmethod
async def scores_first_count(
    session: AsyncSession,
    obj: "User",
    ruleset: GameMode | None = None,
) -> int:
    """Number of #1 (first-place) scores for the user in the given ruleset."""
    from .score import get_user_first_score_count

    return await get_user_first_score_count(session, obj.id, ruleset or obj.playmode)
@ondemand
@staticmethod
async def beatmap_playcounts_count(session: AsyncSession, obj: "User") -> int:
    """Number of distinct beatmap playcount rows recorded for the user."""
    stmt = (
        select(func.count())
        .select_from(BeatmapPlaycounts)
        .where(BeatmapPlaycounts.user_id == obj.id)
    )
    return (await session.exec(stmt)).one()
@ondemand
@staticmethod
async def cover_url(_session: AsyncSession, obj: "User") -> str:
    """URL of the user's profile cover, or an empty string if unset."""
    cover = obj.cover
    if not cover:
        return ""
    return cover.get("url", "")
@ondemand
@staticmethod
async def profile_order(_session: AsyncSession, obj: "User") -> list[str]:
    """Ordering of profile page sections; falls back to DEFAULT_ORDER."""
    await obj.awaitable_attrs.user_preference
    pref = obj.user_preference
    return list(pref.extras_order) if pref else list(DEFAULT_ORDER)
@ondemand
@staticmethod
async def user_preference(_session: AsyncSession, obj: "User") -> UserPreference | None:
    """The user's preference row, lazily loaded (None if never created)."""
    pref = await obj.awaitable_attrs.user_preference
    return pref
@ondemand
@staticmethod
async def friends(session: AsyncSession, obj: "User") -> list["RelationshipDict"]:
    """All FOLLOW relationships initiated by the user, transformed for the API."""
    from .relationship import Relationship, RelationshipType

    stmt = select(Relationship).where(
        Relationship.user_id == obj.id,
        Relationship.type == RelationshipType.FOLLOW,
    )
    rows = (await session.exec(stmt)).all()
    transformed = []
    for rel in rows:
        transformed.append(await RelationshipModel.transform(rel, ruleset=obj.playmode))
    return transformed
@ondemand
@staticmethod
async def team(_session: AsyncSession, obj: "User") -> Team | None:
    """The user's team, via their team membership (None when not in a team)."""
    membership = await obj.awaitable_attrs.team_membership
    if not membership:
        return None
    return membership.team
@ondemand
@staticmethod
async def account_history(_session: AsyncSession, obj: "User") -> list[UserAccountHistoryResp]:
    """Moderation history entries (restrictions, silences, …) for the user."""
    entries = await obj.awaitable_attrs.account_history
    return [UserAccountHistoryResp.from_db(entry) for entry in entries]
@ondemand
@staticmethod
async def daily_challenge_user_stats(_session: AsyncSession, obj: "User") -> DailyChallengeStatsResp | None:
    """Daily-challenge statistics, or None if the user has none recorded."""
    stats = await obj.awaitable_attrs.daily_challenge_stats
    if not stats:
        return None
    return DailyChallengeStatsResp.from_db(stats)
@ondemand
@staticmethod
async def statistics(
    _session: AsyncSession,
    obj: "User",
    ruleset: GameMode | None = None,
    includes: list[str] | None = None,
) -> "UserStatisticsDict | None":
    """Statistics for the requested ruleset (defaults to the user's playmode)."""
    mode = ruleset or obj.playmode
    all_stats = await obj.awaitable_attrs.statistics
    matched = next((s for s in all_stats if s.mode == mode), None)
    if matched is None:
        return None
    return await UserStatisticsModel.transform(matched, user_country=obj.country_code, includes=includes)
@ondemand
@staticmethod
async def statistics_rulesets(
    _session: AsyncSession,
    obj: "User",
    includes: list[str] | None = None,
) -> dict[str, "UserStatisticsDict"]:
    """Statistics for every ruleset the user has played, keyed by mode name."""
    all_stats = await obj.awaitable_attrs.statistics
    by_mode: dict[str, UserStatisticsDict] = {}
    for entry in all_stats:
        transformed = await UserStatisticsModel.transform(
            entry, user_country=obj.country_code, includes=includes
        )
        by_mode[entry.mode.value] = transformed
    return by_mode
@ondemand
@staticmethod
async def monthly_playcounts(_session: AsyncSession, obj: "User") -> list[CountResp]:
    """Monthly play counts, padded so charts always have at least two points."""
    counts = [CountResp.from_db(pc) for pc in await obj.awaitable_attrs.monthly_playcounts]
    if len(counts) == 1:
        # A single data point renders poorly; prepend a zero ~20 days earlier.
        anchor = counts[0].start_date
        counts = [CountResp(start_date=anchor - timedelta(days=20), count=0), *counts]
    return counts
@ondemand
@staticmethod
async def replay_watched_counts(_session: AsyncSession, obj: "User") -> list[CountResp]:
    """Monthly replay-watched counts, padded like monthly_playcounts."""
    watched = [CountResp.from_db(rwc) for rwc in await obj.awaitable_attrs.replays_watched_counts]
    if len(watched) == 1:
        # A single data point renders poorly; prepend a zero ~20 days earlier.
        anchor = watched[0].start_date
        watched = [CountResp(start_date=anchor - timedelta(days=20), count=0), *watched]
    return watched
@ondemand
@staticmethod
async def user_achievements(_session: AsyncSession, obj: "User") -> list[UserAchievementResp]:
    """Achievements (medals) the user has unlocked."""
    unlocked = await obj.awaitable_attrs.achievement
    return [UserAchievementResp.from_db(entry) for entry in unlocked]
@ondemand
@staticmethod
async def rank_history(
    session: AsyncSession,
    obj: "User",
    ruleset: GameMode | None = None,
) -> RankHistoryResp | None:
    """Rank history for the given ruleset; None when there is no data."""
    mode = ruleset or obj.playmode
    history = await RankHistoryResp.from_db(session, obj.id, mode)
    if not history.data:
        return None
    return history
@ondemand
@staticmethod
async def rank_highest(
    session: AsyncSession,
    obj: "User",
    ruleset: GameMode | None = None,
) -> RankHighest | None:
    """Best (highest) rank ever reached in the given ruleset, if recorded."""
    mode = ruleset or obj.playmode
    stmt = select(RankTop).where(RankTop.user_id == obj.id, RankTop.mode == mode)
    rank_top = (await session.exec(stmt)).first()
    if not rank_top:
        return None
    # RankTop stores a date; the API expects a datetime — use midnight.
    reached_at = datetime.combine(rank_top.date, datetime.min.time())
    return RankHighest(rank=rank_top.rank, updated_at=reached_at)
@ondemand
@staticmethod
async def is_restricted(session: AsyncSession, obj: "User") -> bool:
    """Whether the user currently has an active restriction."""
    restricted = await obj.is_restricted(session)
    return restricted
@ondemand
@staticmethod
async def kudosu(_session: AsyncSession, _obj: "User") -> Kudosu:
    # TODO: kudosu is not tracked yet; report zeroes.
    """Kudosu totals. Placeholder: always zero until kudosu is implemented."""
    return Kudosu(available=0, total=0)
@ondemand
@staticmethod
async def unread_pm_count(session: AsyncSession, obj: "User") -> int:
    """Count of unread private-message notifications for the user."""
    stmt = (
        select(func.count())
        .join(Notification, col(Notification.id) == UserNotification.notification_id)
        .select_from(UserNotification)
        .where(
            col(UserNotification.is_read).is_(False),
            UserNotification.user_id == obj.id,
            Notification.name == NotificationName.CHANNEL_MESSAGE,
            # Only channel messages whose JSON details mark them as PMs.
            text("details->>'$.type' = 'pm'"),
        )
    )
    return (await session.exec(stmt)).one()
@included
@staticmethod
async def default_group(_session: AsyncSession, obj: "User") -> str:
    """Default display group: "bot" for bot accounts, otherwise "default"."""
    return "bot" if obj.is_bot else "default"
@ondemand
@staticmethod
async def session_verified(
    session: AsyncSession,
    obj: "User",
    token_id: int | None = None,
) -> bool:
    """Whether the current login session is verified (True when no token given)."""
    from app.service.verification_service import LoginSessionService

    if not token_id:
        # No token context: nothing to verify against.
        return True
    needs_verification = await LoginSessionService.check_is_need_verification(
        session, user_id=obj.id, token_id=token_id
    )
    return not needs_verification
@ondemand
@staticmethod
async def session_verification_method(
    session: AsyncSession,
    obj: "User",
    token_id: int | None = None,
) -> Literal["totp", "mail"] | None:
    """Verification method required for this session, or None if not needed."""
    from app.dependencies.database import get_redis
    from app.service.verification_service import LoginSessionService

    verification_enabled = settings.enable_totp_verification or settings.enable_email_verification
    if not (verification_enabled and token_id):
        return None
    redis = get_redis()
    needs_verification = await LoginSessionService.check_is_need_verification(
        session, user_id=obj.id, token_id=token_id
    )
    if not needs_verification:
        return None
    return await LoginSessionService.get_login_method(obj.id, token_id, redis)
|
||||||
|
class User(AsyncAttrs, UserModel, table=True):
    """Database-backed user row (table ``lazer_users``).

    Extends the API-facing UserModel with credential/privilege columns and
    the ORM relationships to per-user detail tables.
    """

    __tablename__: str = "lazer_users"

    # Credentials and account status.
    email: str = Field(max_length=254, unique=True, index=True)
    # Privilege value; default 1 — presumably "normal user", TODO confirm semantics.
    priv: int = Field(default=1)
    # bcrypt hashes are always 60 characters.
    pw_bcrypt: str = Field(max_length=60)
    silence_end_at: datetime | None = Field(default=None, sa_column=Column(DateTime(timezone=True)))
    donor_end_at: datetime | None = Field(default=None, sa_column=Column(DateTime(timezone=True)))

    # ORM relationships (sqlmodel Relationship; note the methods below shadow
    # this name with a local import of the .relationship model).
    account_history: list[UserAccountHistory] = Relationship(back_populates="user")
    statistics: list[UserStatistics] = Relationship(back_populates="user")
    achievement: list[UserAchievement] = Relationship(back_populates="user")
    team_membership: TeamMember | None = Relationship(back_populates="user")
    daily_challenge_stats: DailyChallengeStats | None = Relationship(back_populates="user")
    matchmaking_stats: list["MatchmakingUserStats"] = Relationship(back_populates="user")
    monthly_playcounts: list[MonthlyPlaycounts] = Relationship(back_populates="user")
    replays_watched_counts: list[ReplayWatchedCount] = Relationship(back_populates="user")
    favourite_beatmapsets: list["FavouriteBeatmapset"] = Relationship(back_populates="user")
    rank_history: list[RankHistory] = Relationship(
        back_populates="user",
    )
    events: list[Event] = Relationship(back_populates="user")
    totp_key: TotpKeys | None = Relationship(back_populates="user")
    user_preference: UserPreference | None = Relationship(back_populates="user")

    async def is_user_can_pm(self, from_user: "User", session: AsyncSession) -> tuple[bool, str]:
        """Check whether ``from_user`` may send a private message to ``self``.

        Returns ``(allowed, reason)`` where ``reason`` explains a denial and
        is empty when the message is allowed.  Checks, in order: sender's
        block/friends-only settings, then the recipient's, then whether the
        recipient is restricted.
        """
        from .relationship import Relationship, RelationshipType

        # Relationship as seen from the sender's side (sender -> recipient).
        from_relationship = (
            await session.exec(
                select(Relationship).where(
                    Relationship.user_id == from_user.id,
                    Relationship.target_id == self.id,
                )
            )
        ).first()
        if from_relationship and from_relationship.type == RelationshipType.BLOCK:
            return False, "You have blocked the target user."
        if from_user.pm_friends_only and (not from_relationship or from_relationship.type != RelationshipType.FOLLOW):
            return (
                False,
                "You have disabled non-friend communications and target user is not your friend.",
            )

        # Relationship as seen from the recipient's side (recipient -> sender).
        relationship = (
            await session.exec(
                select(Relationship).where(
                    Relationship.user_id == self.id,
                    Relationship.target_id == from_user.id,
                )
            )
        ).first()
        if relationship and relationship.type == RelationshipType.BLOCK:
            return False, "Target user has blocked you."
        if self.pm_friends_only and (not relationship or relationship.type != RelationshipType.FOLLOW):
            return False, "Target user has disabled non-friend communications"
        if await self.is_restricted(session):
            return False, "Target user is restricted"
        return True, ""

    # Overloads: accept either a concrete user id or a SQLAlchemy column
    # expression so the predicate can be embedded in larger queries.
    @classmethod
    @overload
    def is_restricted_query(cls, user_id: int): ...

    @classmethod
    @overload
    def is_restricted_query(cls, user_id: Mapped[int]): ...

    @classmethod
    def is_restricted_query(cls, user_id: int | Mapped[int]):
        """EXISTS predicate: the user has an active RESTRICTION entry.

        A restriction is active when it is permanent, or when "now" falls
        inside [timestamp, timestamp + length seconds].
        """
        return exists().where(
            (col(UserAccountHistory.user_id) == user_id)
            & (col(UserAccountHistory.type) == UserAccountHistoryType.RESTRICTION)
            & (
                (col(UserAccountHistory.permanent).is_(True))
                | (
                    (
                        func.timestampadd(
                            text("SECOND"),
                            col(UserAccountHistory.length),
                            col(UserAccountHistory.timestamp),
                        )
                        > func.now()
                    )
                    & (func.now() > col(UserAccountHistory.timestamp))
                )
            ),
        )

    async def is_restricted(self, session: AsyncSession) -> bool:
        """Whether this user currently has an active restriction."""
        active_restrictions = (await session.exec(select(self.is_restricted_query(self.id)))).first()
        # .first() may return None when the EXISTS row is absent; coerce to bool.
        return active_restrictions or False
|
||||||
|
# For backward compatibility, use User directly in SQL queries,
# e.g. select(User).where(User.id == 1).
# Type annotations and return values also use User,
# e.g. async def get_user() -> User | None:
#          return (await session.exec(select(User)...)).first()
|
||||||
@@ -1,9 +1,14 @@
|
|||||||
from datetime import UTC, datetime
|
from datetime import datetime
|
||||||
from enum import Enum
|
from enum import Enum
|
||||||
|
from typing import TYPE_CHECKING
|
||||||
|
|
||||||
from app.models.model import UTCBaseModel
|
from app.models.model import UTCBaseModel
|
||||||
|
from app.utils import utcnow
|
||||||
|
|
||||||
from sqlmodel import BigInteger, Column, Field, ForeignKey, Integer, SQLModel
|
from sqlmodel import BigInteger, Column, Field, ForeignKey, Integer, Relationship, SQLModel
|
||||||
|
|
||||||
|
if TYPE_CHECKING:
|
||||||
|
from .user import User
|
||||||
|
|
||||||
|
|
||||||
class UserAccountHistoryType(str, Enum):
|
class UserAccountHistoryType(str, Enum):
|
||||||
@@ -17,12 +22,12 @@ class UserAccountHistoryBase(SQLModel, UTCBaseModel):
|
|||||||
description: str | None = None
|
description: str | None = None
|
||||||
length: int
|
length: int
|
||||||
permanent: bool = False
|
permanent: bool = False
|
||||||
timestamp: datetime = Field(default=datetime.now(UTC))
|
timestamp: datetime = Field(default_factory=utcnow)
|
||||||
type: UserAccountHistoryType
|
type: UserAccountHistoryType
|
||||||
|
|
||||||
|
|
||||||
class UserAccountHistory(UserAccountHistoryBase, table=True):
|
class UserAccountHistory(UserAccountHistoryBase, table=True):
|
||||||
__tablename__ = "user_account_history" # pyright: ignore[reportAssignmentType]
|
__tablename__: str = "user_account_history"
|
||||||
|
|
||||||
id: int | None = Field(
|
id: int | None = Field(
|
||||||
sa_column=Column(
|
sa_column=Column(
|
||||||
@@ -32,9 +37,9 @@ class UserAccountHistory(UserAccountHistoryBase, table=True):
|
|||||||
primary_key=True,
|
primary_key=True,
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
user_id: int = Field(
|
user_id: int = Field(sa_column=Column(BigInteger, ForeignKey("lazer_users.id"), index=True))
|
||||||
sa_column=Column(BigInteger, ForeignKey("lazer_users.id"), index=True)
|
|
||||||
)
|
user: "User" = Relationship(back_populates="account_history")
|
||||||
|
|
||||||
|
|
||||||
class UserAccountHistoryResp(UserAccountHistoryBase):
|
class UserAccountHistoryResp(UserAccountHistoryBase):
|
||||||
|
|||||||
@@ -4,33 +4,25 @@ User Login Log Database Model
|
|||||||
|
|
||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
|
|
||||||
|
from app.utils import utcnow
|
||||||
|
|
||||||
from sqlmodel import Field, SQLModel
|
from sqlmodel import Field, SQLModel
|
||||||
|
|
||||||
|
|
||||||
class UserLoginLog(SQLModel, table=True):
|
class UserLoginLog(SQLModel, table=True):
|
||||||
"""User login log table"""
|
"""User login log table"""
|
||||||
|
|
||||||
__tablename__ = "user_login_log" # pyright: ignore[reportAssignmentType]
|
__tablename__: str = "user_login_log"
|
||||||
|
|
||||||
id: int | None = Field(default=None, primary_key=True, description="Record ID")
|
id: int | None = Field(default=None, primary_key=True, description="Record ID")
|
||||||
user_id: int = Field(index=True, description="User ID")
|
user_id: int = Field(index=True, description="User ID")
|
||||||
ip_address: str = Field(
|
ip_address: str = Field(max_length=45, index=True, description="IP address (supports IPv4 and IPv6)")
|
||||||
max_length=45, index=True, description="IP address (supports IPv4 and IPv6)"
|
user_agent: str | None = Field(default=None, max_length=500, description="User agent information")
|
||||||
)
|
login_time: datetime = Field(default_factory=utcnow, description="Login time")
|
||||||
user_agent: str | None = Field(
|
|
||||||
default=None, max_length=500, description="User agent information"
|
|
||||||
)
|
|
||||||
login_time: datetime = Field(
|
|
||||||
default_factory=datetime.utcnow, description="Login time"
|
|
||||||
)
|
|
||||||
|
|
||||||
# GeoIP information
|
# GeoIP information
|
||||||
country_code: str | None = Field(
|
country_code: str | None = Field(default=None, max_length=2, description="Country code")
|
||||||
default=None, max_length=2, description="Country code"
|
country_name: str | None = Field(default=None, max_length=100, description="Country name")
|
||||||
)
|
|
||||||
country_name: str | None = Field(
|
|
||||||
default=None, max_length=100, description="Country name"
|
|
||||||
)
|
|
||||||
city_name: str | None = Field(default=None, max_length=100, description="City name")
|
city_name: str | None = Field(default=None, max_length=100, description="City name")
|
||||||
latitude: str | None = Field(default=None, max_length=20, description="Latitude")
|
latitude: str | None = Field(default=None, max_length=20, description="Latitude")
|
||||||
longitude: str | None = Field(default=None, max_length=20, description="Longitude")
|
longitude: str | None = Field(default=None, max_length=20, description="Longitude")
|
||||||
@@ -38,22 +30,14 @@ class UserLoginLog(SQLModel, table=True):
|
|||||||
|
|
||||||
# ASN information
|
# ASN information
|
||||||
asn: int | None = Field(default=None, description="Autonomous System Number")
|
asn: int | None = Field(default=None, description="Autonomous System Number")
|
||||||
organization: str | None = Field(
|
organization: str | None = Field(default=None, max_length=200, description="Organization name")
|
||||||
default=None, max_length=200, description="Organization name"
|
|
||||||
)
|
|
||||||
|
|
||||||
# Login status
|
# Login status
|
||||||
login_success: bool = Field(
|
login_success: bool = Field(default=True, description="Whether the login was successful")
|
||||||
default=True, description="Whether the login was successful"
|
login_method: str = Field(max_length=50, description="Login method (password/oauth/etc.)")
|
||||||
)
|
|
||||||
login_method: str = Field(
|
|
||||||
max_length=50, description="Login method (password/oauth/etc.)"
|
|
||||||
)
|
|
||||||
|
|
||||||
# Additional information
|
# Additional information
|
||||||
notes: str | None = Field(
|
notes: str | None = Field(default=None, max_length=500, description="Additional notes")
|
||||||
default=None, max_length=500, description="Additional notes"
|
|
||||||
)
|
|
||||||
|
|
||||||
class Config:
|
class Config:
|
||||||
from_attributes = True
|
from_attributes = True
|
||||||
|
|||||||
88
app/database/user_preference.py
Normal file
88
app/database/user_preference.py
Normal file
@@ -0,0 +1,88 @@
|
|||||||
|
from enum import Enum
|
||||||
|
from typing import TYPE_CHECKING, Any
|
||||||
|
|
||||||
|
from sqlmodel import JSON, BigInteger, Column, Field, ForeignKey, Relationship, SQLModel
|
||||||
|
|
||||||
|
if TYPE_CHECKING:
|
||||||
|
from .user import User
|
||||||
|
|
||||||
|
# Default ordering of profile-page sections when the user has not customised it.
DEFAULT_ORDER = [
    "me",
    "recent_activity",
    "top_ranks",
    "medals",
    "historical",
    "beatmaps",
    "kudosu",
]
|
|
||||||
|
|
||||||
|
class BeatmapCardSize(str, Enum):
    """Display size of beatmapset cards in listings."""

    NORMAL = "normal"
    EXTRA = "extra"
||||||
|
|
||||||
|
class BeatmapDownload(str, Enum):
    """Preferred beatmap download variant."""

    ALL = "all"
    NO_VIDEO = "no_video"
    # NOTE(review): lowercase member name is inconsistent with the others;
    # renaming would break existing references/stored values, so it stays.
    direct = "direct"
||||||
|
|
||||||
|
class ScoringMode(str, Enum):
    """Score display mode (standardised lazer scoring vs classic)."""

    STANDARDISED = "standardised"
    CLASSIC = "classic"
||||||
|
|
||||||
|
class UserListFilter(str, Enum):
    """Online-status filter for user lists."""

    ALL = "all"
    ONLINE = "online"
    OFFLINE = "offline"
||||||
|
|
||||||
|
class UserListSort(str, Enum):
    """Sort key for user lists."""

    LAST_VISIT = "last_visit"
    RANK = "rank"
    USERNAME = "username"
||||||
|
|
||||||
|
class UserListView(str, Enum):
    """Layout style for user lists."""

    CARD = "card"
    LIST = "list"
    BRICK = "brick"
||||||
|
|
||||||
|
class UserPreference(SQLModel, table=True):
    """Per-user site preferences (one row per user, cascade-deleted with it)."""

    user_id: int = Field(
        exclude=True, sa_column=Column(BigInteger, ForeignKey("lazer_users.id", ondelete="CASCADE"), primary_key=True)
    )

    theme: str = "light"
    # refer to https://github.com/ppy/osu/blob/30fd40efd16a651a6c00b5c89289a85ffcbe546b/osu.Game/Localisation/Language.cs
    # zh_hant -> zh-tw
    language: str = "en"
    # Free-form extension bag for preferences without a dedicated column.
    extra: dict[str, Any] = Field(default_factory=dict, sa_column=Column(JSON))

    # https://github.com/ppy/osu-web/blob/cae2fdf03cfb8c30c8e332cfb142e03188ceffef/app/Models/UserProfileCustomization.php#L20-L38
    audio_autoplay: bool = False
    audio_muted: bool = False
    audio_volume: float = 0.45
    beatmapset_card_size: BeatmapCardSize = BeatmapCardSize.NORMAL
    beatmap_download: BeatmapDownload = BeatmapDownload.ALL
    beatmapset_show_nsfw: bool = False

    # comments_show_deleted: bool = False
    # forum_posts_show_deleted: bool = False

    extras_order: list[str] = Field(
        # Copy the default: `lambda: DEFAULT_ORDER` handed every instance the
        # same shared list object, so mutating one user's section order would
        # silently change the module-level default (and other instances).
        default_factory=lambda: list(DEFAULT_ORDER),
        sa_column=Column(JSON),
        exclude=True,
    )
    legacy_score_only: bool = False  # lazer mode
    profile_cover_expanded: bool = True
    scoring_mode: ScoringMode = ScoringMode.STANDARDISED
    user_list_filter: UserListFilter = UserListFilter.ALL
    user_list_sort: UserListSort = UserListSort.LAST_VISIT
    user_list_view: UserListView = UserListView.CARD

    user: "User" = Relationship(back_populates="user_preference")
||||||
129
app/database/verification.py
Normal file
129
app/database/verification.py
Normal file
@@ -0,0 +1,129 @@
|
|||||||
|
"""
|
||||||
|
邮件验证相关数据库模型
|
||||||
|
"""
|
||||||
|
|
||||||
|
from datetime import datetime
|
||||||
|
from typing import TYPE_CHECKING, Literal, Optional
|
||||||
|
|
||||||
|
from app.helpers.geoip_helper import GeoIPHelper
|
||||||
|
from app.models.model import UserAgentInfo, UTCBaseModel
|
||||||
|
from app.utils import extract_user_agent, utcnow
|
||||||
|
|
||||||
|
from pydantic import BaseModel
|
||||||
|
from sqlalchemy import BigInteger, Column, ForeignKey
|
||||||
|
from sqlmodel import VARCHAR, DateTime, Field, Integer, Relationship, SQLModel, Text
|
||||||
|
|
||||||
|
if TYPE_CHECKING:
|
||||||
|
from .auth import OAuthToken
|
||||||
|
|
||||||
|
|
||||||
|
class Location(BaseModel):
    """GeoIP lookup result attached to session/device responses."""

    country: str = ""
    city: str = ""
    country_code: str = ""
||||||
|
|
||||||
|
class EmailVerification(SQLModel, table=True):
    """Email verification record (one row per issued verification code)."""

    __tablename__: str = "email_verifications"

    id: int | None = Field(default=None, primary_key=True)
    user_id: int = Field(sa_column=Column(BigInteger, ForeignKey("lazer_users.id"), nullable=False, index=True))
    email: str = Field(index=True)
    verification_code: str = Field(max_length=8)  # 8-character verification code
    created_at: datetime = Field(default_factory=utcnow)
    expires_at: datetime = Field()  # when the code expires
    is_used: bool = Field(default=False)  # whether the code has been consumed
    used_at: datetime | None = Field(default=None)
    ip_address: str | None = Field(default=None)  # requesting IP
    user_agent: str | None = Field(default=None)  # requesting user agent
||||||
|
|
||||||
|
class LoginSessionBase(SQLModel):
    """Login session record — base fields shared by the table and response models."""

    id: int = Field(default=None, primary_key=True)
    user_id: int = Field(sa_column=Column(BigInteger, ForeignKey("lazer_users.id"), nullable=False, index=True))
    ip_address: str = Field(sa_column=Column(VARCHAR(45), nullable=False), default="127.0.0.1", exclude=True)
    user_agent: str | None = Field(default=None, sa_column=Column(Text))
    is_verified: bool = Field(default=False)  # whether the session passed verification
    # Pass utcnow directly: the `lambda: utcnow()` wrapper was redundant and
    # inconsistent with the sibling models in this module.
    created_at: datetime = Field(default_factory=utcnow)
    verified_at: datetime | None = Field(default=None)
    expires_at: datetime = Field()  # when the session expires
    device_id: int | None = Field(
        sa_column=Column(BigInteger, ForeignKey("trusted_devices.id", ondelete="SET NULL"), nullable=True, index=True),
        default=None,
    )
|
||||||
|
|
||||||
|
class LoginSession(LoginSessionBase, table=True):
    """Concrete login session table, linked to its OAuth token and trusted device."""

    __tablename__: str = "login_sessions"
    token_id: int | None = Field(
        sa_column=Column(Integer, ForeignKey("oauth_tokens.id", ondelete="SET NULL"), nullable=True, index=True),
        exclude=True,
    )
    is_new_device: bool = Field(default=False, exclude=True)  # login came from a new device/location
    web_uuid: str | None = Field(sa_column=Column(VARCHAR(36), nullable=True), default=None, exclude=True)
    verification_method: str | None = Field(default=None, max_length=20, exclude=True)  # verification method (totp/mail)

    device: Optional["TrustedDevice"] = Relationship(back_populates="sessions")
    token: Optional["OAuthToken"] = Relationship(back_populates="login_session")
|
||||||
|
class LoginSessionResp(UTCBaseModel, LoginSessionBase):
    """API response model for a login session, enriched with UA and GeoIP info."""

    user_agent_info: UserAgentInfo | None = None
    location: Location | None = None

    @classmethod
    def from_db(cls, obj: LoginSession, get_geoip_helper: GeoIPHelper) -> "LoginSessionResp":
        """Build a response from a DB row, resolving user agent and IP location."""
        resp = cls.model_validate(obj.model_dump())
        resp.user_agent_info = extract_user_agent(resp.user_agent)
        if not obj.ip_address:
            resp.location = None
            return resp
        geo = get_geoip_helper.lookup(obj.ip_address)
        resp.location = Location(
            country=geo.get("country_name", ""),
            city=geo.get("city_name", ""),
            country_code=geo.get("country_code", ""),
        )
        return resp
|
|
||||||
|
class TrustedDeviceBase(SQLModel):
|
||||||
|
id: int = Field(default=None, primary_key=True)
|
||||||
|
user_id: int = Field(sa_column=Column(BigInteger, ForeignKey("lazer_users.id"), nullable=False, index=True))
|
||||||
|
ip_address: str = Field(sa_column=Column(VARCHAR(45), nullable=False), default="127.0.0.1", exclude=True)
|
||||||
|
user_agent: str = Field(sa_column=Column(Text, nullable=False))
|
||||||
|
client_type: Literal["web", "client"] = Field(sa_column=Column(VARCHAR(10), nullable=False), default="web")
|
||||||
|
created_at: datetime = Field(default_factory=utcnow)
|
||||||
|
last_used_at: datetime = Field(default_factory=utcnow)
|
||||||
|
expires_at: datetime = Field(sa_column=Column(DateTime))
|
||||||
|
|
||||||
|
|
||||||
|
class TrustedDevice(TrustedDeviceBase, table=True):
|
||||||
|
__tablename__: str = "trusted_devices"
|
||||||
|
web_uuid: str | None = Field(sa_column=Column(VARCHAR(36), nullable=True), default=None)
|
||||||
|
|
||||||
|
sessions: list["LoginSession"] = Relationship(back_populates="device", passive_deletes=True)
|
||||||
|
|
||||||
|
|
||||||
|
class TrustedDeviceResp(UTCBaseModel, TrustedDeviceBase):
|
||||||
|
user_agent_info: UserAgentInfo | None = None
|
||||||
|
location: Location | None = None
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def from_db(cls, device: TrustedDevice, get_geoip_helper: GeoIPHelper) -> "TrustedDeviceResp":
|
||||||
|
device_ = cls.model_validate(device.model_dump())
|
||||||
|
device_.user_agent_info = extract_user_agent(device_.user_agent)
|
||||||
|
if device.ip_address:
|
||||||
|
loc = get_geoip_helper.lookup(device.ip_address)
|
||||||
|
device_.location = Location(
|
||||||
|
country=loc.get("country_name", ""),
|
||||||
|
city=loc.get("city_name", ""),
|
||||||
|
country_code=loc.get("country_code", ""),
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
device_.location = None
|
||||||
|
return device_
|
||||||
@@ -1,4 +1 @@
|
|||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
from .database import get_db as get_db
|
|
||||||
from .user import get_current_user as get_current_user
|
|
||||||
|
|||||||
14
app/dependencies/api_version.py
Normal file
14
app/dependencies/api_version.py
Normal file
@@ -0,0 +1,14 @@
|
|||||||
|
from typing import Annotated
|
||||||
|
|
||||||
|
from fastapi import Depends, Header
|
||||||
|
|
||||||
|
|
||||||
|
def get_api_version(version: int | None = Header(None, alias="x-api-version", include_in_schema=False)) -> int:
|
||||||
|
if version is None:
|
||||||
|
return 0
|
||||||
|
if version < 1:
|
||||||
|
raise ValueError
|
||||||
|
return version
|
||||||
|
|
||||||
|
|
||||||
|
APIVersion = Annotated[int, Depends(get_api_version)]
|
||||||
@@ -1,8 +1,13 @@
|
|||||||
from __future__ import annotations
|
from typing import Annotated
|
||||||
|
|
||||||
from app.service.beatmap_download_service import download_service
|
from app.service.beatmap_download_service import BeatmapDownloadService, download_service
|
||||||
|
|
||||||
|
from fastapi import Depends
|
||||||
|
|
||||||
|
|
||||||
def get_beatmap_download_service():
|
def get_beatmap_download_service():
|
||||||
"""获取谱面下载服务实例"""
|
"""获取谱面下载服务实例"""
|
||||||
return download_service
|
return download_service
|
||||||
|
|
||||||
|
|
||||||
|
DownloadService = Annotated[BeatmapDownloadService, Depends(get_beatmap_download_service)]
|
||||||
|
|||||||
26
app/dependencies/cache.py
Normal file
26
app/dependencies/cache.py
Normal file
@@ -0,0 +1,26 @@
|
|||||||
|
from typing import Annotated
|
||||||
|
|
||||||
|
from app.dependencies.database import Redis
|
||||||
|
from app.service.beatmapset_cache_service import (
|
||||||
|
BeatmapsetCacheService as OriginBeatmapsetCacheService,
|
||||||
|
get_beatmapset_cache_service,
|
||||||
|
)
|
||||||
|
from app.service.user_cache_service import (
|
||||||
|
UserCacheService as OriginUserCacheService,
|
||||||
|
get_user_cache_service,
|
||||||
|
)
|
||||||
|
|
||||||
|
from fastapi import Depends
|
||||||
|
|
||||||
|
|
||||||
|
def get_beatmapset_cache_dependency(redis: Redis) -> OriginBeatmapsetCacheService:
|
||||||
|
"""获取beatmapset缓存服务依赖"""
|
||||||
|
return get_beatmapset_cache_service(redis)
|
||||||
|
|
||||||
|
|
||||||
|
def get_user_cache_dependency(redis: Redis) -> OriginUserCacheService:
|
||||||
|
return get_user_cache_service(redis)
|
||||||
|
|
||||||
|
|
||||||
|
BeatmapsetCacheService = Annotated[OriginBeatmapsetCacheService, Depends(get_beatmapset_cache_dependency)]
|
||||||
|
UserCacheService = Annotated[OriginUserCacheService, Depends(get_user_cache_dependency)]
|
||||||
10
app/dependencies/client_verification.py
Normal file
10
app/dependencies/client_verification.py
Normal file
@@ -0,0 +1,10 @@
|
|||||||
|
from typing import Annotated
|
||||||
|
|
||||||
|
from app.service.client_verification_service import (
|
||||||
|
ClientVerificationService as OriginalClientVerificationService,
|
||||||
|
get_client_verification_service,
|
||||||
|
)
|
||||||
|
|
||||||
|
from fastapi import Depends
|
||||||
|
|
||||||
|
ClientVerificationService = Annotated[OriginalClientVerificationService, Depends(get_client_verification_service)]
|
||||||
@@ -1,6 +1,5 @@
|
|||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
from collections.abc import AsyncIterator, Callable
|
from collections.abc import AsyncIterator, Callable
|
||||||
|
from contextlib import asynccontextmanager
|
||||||
from contextvars import ContextVar
|
from contextvars import ContextVar
|
||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
import json
|
import json
|
||||||
@@ -10,7 +9,6 @@ from app.config import settings
|
|||||||
|
|
||||||
from fastapi import Depends
|
from fastapi import Depends
|
||||||
from pydantic import BaseModel
|
from pydantic import BaseModel
|
||||||
import redis as sync_redis
|
|
||||||
import redis.asyncio as redis
|
import redis.asyncio as redis
|
||||||
from sqlalchemy.ext.asyncio import create_async_engine
|
from sqlalchemy.ext.asyncio import create_async_engine
|
||||||
from sqlmodel import SQLModel
|
from sqlmodel import SQLModel
|
||||||
@@ -37,18 +35,20 @@ engine = create_async_engine(
|
|||||||
)
|
)
|
||||||
|
|
||||||
# Redis 连接
|
# Redis 连接
|
||||||
redis_client = redis.from_url(settings.redis_url, decode_responses=True)
|
redis_client = redis.from_url(settings.redis_url, decode_responses=True, db=0)
|
||||||
|
|
||||||
# Redis 消息缓存连接 (db1) - 使用同步客户端在线程池中执行
|
# Redis 消息缓存连接 (db1)
|
||||||
redis_message_client = sync_redis.from_url(
|
redis_message_client = redis.from_url(settings.redis_url, decode_responses=True, db=1)
|
||||||
settings.redis_url, decode_responses=True, db=1
|
|
||||||
)
|
# Redis 二进制数据连接 (不自动解码响应,用于存储音频等二进制数据,db2)
|
||||||
|
redis_binary_client = redis.from_url(settings.redis_url, decode_responses=False, db=2)
|
||||||
|
|
||||||
|
# Redis 限流连接 (db3)
|
||||||
|
redis_rate_limit_client = redis.from_url(settings.redis_url, decode_responses=True, db=3)
|
||||||
|
|
||||||
|
|
||||||
# 数据库依赖
|
# 数据库依赖
|
||||||
db_session_context: ContextVar[AsyncSession | None] = ContextVar(
|
db_session_context: ContextVar[AsyncSession | None] = ContextVar("db_session_context", default=None)
|
||||||
"db_session_context", default=None
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
async def get_db():
|
async def get_db():
|
||||||
@@ -65,8 +65,13 @@ async def get_db():
|
|||||||
yield session
|
yield session
|
||||||
|
|
||||||
|
|
||||||
def with_db():
|
@asynccontextmanager
|
||||||
return AsyncSession(engine)
|
async def with_db():
|
||||||
|
async with AsyncSession(engine) as session:
|
||||||
|
try:
|
||||||
|
yield session
|
||||||
|
finally:
|
||||||
|
await session.close()
|
||||||
|
|
||||||
|
|
||||||
DBFactory = Callable[[], AsyncIterator[AsyncSession]]
|
DBFactory = Callable[[], AsyncIterator[AsyncSession]]
|
||||||
@@ -86,7 +91,15 @@ def get_redis():
|
|||||||
return redis_client
|
return redis_client
|
||||||
|
|
||||||
|
|
||||||
def get_redis_message():
|
Redis = Annotated[redis.Redis, Depends(get_redis)]
|
||||||
|
|
||||||
|
|
||||||
|
def get_redis_binary():
|
||||||
|
"""获取二进制数据专用的 Redis 客户端 (不自动解码响应)"""
|
||||||
|
return redis_binary_client
|
||||||
|
|
||||||
|
|
||||||
|
def get_redis_message() -> redis.Redis:
|
||||||
"""获取消息专用的 Redis 客户端 (db1)"""
|
"""获取消息专用的 Redis 客户端 (db1)"""
|
||||||
return redis_message_client
|
return redis_message_client
|
||||||
|
|
||||||
|
|||||||
@@ -1,31 +1,39 @@
|
|||||||
from __future__ import annotations
|
from typing import Annotated
|
||||||
|
|
||||||
from app.config import settings
|
from app.config import settings
|
||||||
from app.dependencies.database import get_redis
|
from app.dependencies.database import get_redis
|
||||||
from app.fetcher import Fetcher
|
from app.fetcher import Fetcher as OriginFetcher
|
||||||
from app.log import logger
|
from app.fetcher._base import TokenAuthError
|
||||||
|
from app.log import fetcher_logger
|
||||||
|
|
||||||
fetcher: Fetcher | None = None
|
from fastapi import Depends
|
||||||
|
|
||||||
|
fetcher: OriginFetcher | None = None
|
||||||
|
logger = fetcher_logger("FetcherDependency")
|
||||||
|
|
||||||
|
|
||||||
async def get_fetcher() -> Fetcher:
|
async def get_fetcher() -> OriginFetcher:
|
||||||
global fetcher
|
global fetcher
|
||||||
if fetcher is None:
|
if fetcher is None:
|
||||||
fetcher = Fetcher(
|
fetcher = OriginFetcher(
|
||||||
settings.fetcher_client_id,
|
settings.fetcher_client_id,
|
||||||
settings.fetcher_client_secret,
|
settings.fetcher_client_secret,
|
||||||
settings.fetcher_scopes,
|
|
||||||
settings.fetcher_callback_url,
|
|
||||||
)
|
)
|
||||||
redis = get_redis()
|
redis = get_redis()
|
||||||
access_token = await redis.get(f"fetcher:access_token:{fetcher.client_id}")
|
access_token = await redis.get(f"fetcher:access_token:{fetcher.client_id}")
|
||||||
|
expire_at = await redis.get(f"fetcher:expire_at:{fetcher.client_id}")
|
||||||
|
if expire_at:
|
||||||
|
fetcher.token_expiry = int(float(expire_at))
|
||||||
if access_token:
|
if access_token:
|
||||||
fetcher.access_token = str(access_token)
|
fetcher.access_token = str(access_token)
|
||||||
refresh_token = await redis.get(f"fetcher:refresh_token:{fetcher.client_id}")
|
# Always ensure the access token is valid, regardless of initial state
|
||||||
if refresh_token:
|
try:
|
||||||
fetcher.refresh_token = str(refresh_token)
|
await fetcher.ensure_valid_access_token()
|
||||||
if not fetcher.access_token or not fetcher.refresh_token:
|
except TokenAuthError as exc:
|
||||||
logger.opt(colors=True).info(
|
logger.warning(f"Failed to refresh fetcher access token during startup: {exc}. Will retry on demand.")
|
||||||
f"Login to initialize fetcher: <y>{fetcher.authorize_url}</y>"
|
except Exception as exc:
|
||||||
)
|
logger.exception("Unexpected error while initializing fetcher access token", exc_info=exc)
|
||||||
return fetcher
|
return fetcher
|
||||||
|
|
||||||
|
|
||||||
|
Fetcher = Annotated[OriginFetcher, Depends(get_fetcher)]
|
||||||
|
|||||||
@@ -2,14 +2,15 @@
|
|||||||
GeoIP dependency for FastAPI
|
GeoIP dependency for FastAPI
|
||||||
"""
|
"""
|
||||||
|
|
||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
from functools import lru_cache
|
from functools import lru_cache
|
||||||
import ipaddress
|
import ipaddress
|
||||||
|
from typing import Annotated
|
||||||
|
|
||||||
from app.config import settings
|
from app.config import settings
|
||||||
from app.helpers.geoip_helper import GeoIPHelper
|
from app.helpers.geoip_helper import GeoIPHelper
|
||||||
|
|
||||||
|
from fastapi import Depends, Request
|
||||||
|
|
||||||
|
|
||||||
@lru_cache
|
@lru_cache
|
||||||
def get_geoip_helper() -> GeoIPHelper:
|
def get_geoip_helper() -> GeoIPHelper:
|
||||||
@@ -26,7 +27,7 @@ def get_geoip_helper() -> GeoIPHelper:
|
|||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
def get_client_ip(request) -> str:
|
def get_client_ip(request: Request) -> str:
|
||||||
"""
|
"""
|
||||||
获取客户端真实 IP 地址
|
获取客户端真实 IP 地址
|
||||||
支持 IPv4 和 IPv6,考虑代理、负载均衡器等情况
|
支持 IPv4 和 IPv6,考虑代理、负载均衡器等情况
|
||||||
@@ -66,6 +67,10 @@ def get_client_ip(request) -> str:
|
|||||||
return client_ip if is_valid_ip(client_ip) else "127.0.0.1"
|
return client_ip if is_valid_ip(client_ip) else "127.0.0.1"
|
||||||
|
|
||||||
|
|
||||||
|
IPAddress = Annotated[str, Depends(get_client_ip)]
|
||||||
|
GeoIPService = Annotated[GeoIPHelper, Depends(get_geoip_helper)]
|
||||||
|
|
||||||
|
|
||||||
def is_valid_ip(ip_str: str) -> bool:
|
def is_valid_ip(ip_str: str) -> bool:
|
||||||
"""
|
"""
|
||||||
验证 IP 地址是否有效(支持 IPv4 和 IPv6)
|
验证 IP 地址是否有效(支持 IPv4 和 IPv6)
|
||||||
|
|||||||
@@ -1,5 +1,3 @@
|
|||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
from typing import Any
|
from typing import Any
|
||||||
|
|
||||||
from fastapi import Request
|
from fastapi import Request
|
||||||
@@ -7,7 +5,7 @@ from fastapi.exceptions import RequestValidationError
|
|||||||
from pydantic import BaseModel, ValidationError
|
from pydantic import BaseModel, ValidationError
|
||||||
|
|
||||||
|
|
||||||
def BodyOrForm[T: BaseModel](model: type[T]):
|
def BodyOrForm[T: BaseModel](model: type[T]): # noqa: N802
|
||||||
async def dependency(
|
async def dependency(
|
||||||
request: Request,
|
request: Request,
|
||||||
) -> T:
|
) -> T:
|
||||||
|
|||||||
12
app/dependencies/rate_limit.py
Normal file
12
app/dependencies/rate_limit.py
Normal file
@@ -0,0 +1,12 @@
|
|||||||
|
from app.config import settings
|
||||||
|
|
||||||
|
from fastapi import Depends
|
||||||
|
from fastapi_limiter.depends import RateLimiter
|
||||||
|
|
||||||
|
if settings.enable_rate_limit:
|
||||||
|
LIMITERS = [
|
||||||
|
Depends(RateLimiter(times=1200, minutes=1)),
|
||||||
|
Depends(RateLimiter(times=200, seconds=1)),
|
||||||
|
]
|
||||||
|
else:
|
||||||
|
LIMITERS = []
|
||||||
@@ -1,6 +1,5 @@
|
|||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
from datetime import UTC
|
from datetime import UTC
|
||||||
|
from typing import cast
|
||||||
|
|
||||||
from apscheduler.schedulers.asyncio import AsyncIOScheduler
|
from apscheduler.schedulers.asyncio import AsyncIOScheduler
|
||||||
|
|
||||||
@@ -16,7 +15,7 @@ def get_scheduler() -> AsyncIOScheduler:
|
|||||||
global scheduler
|
global scheduler
|
||||||
if scheduler is None:
|
if scheduler is None:
|
||||||
init_scheduler()
|
init_scheduler()
|
||||||
return scheduler # pyright: ignore[reportReturnType]
|
return cast(AsyncIOScheduler, scheduler)
|
||||||
|
|
||||||
|
|
||||||
def start_scheduler():
|
def start_scheduler():
|
||||||
|
|||||||
@@ -1,6 +1,4 @@
|
|||||||
from __future__ import annotations
|
from typing import Annotated, cast
|
||||||
|
|
||||||
from typing import cast
|
|
||||||
|
|
||||||
from app.config import (
|
from app.config import (
|
||||||
AWSS3StorageSettings,
|
AWSS3StorageSettings,
|
||||||
@@ -9,11 +7,13 @@ from app.config import (
|
|||||||
StorageServiceType,
|
StorageServiceType,
|
||||||
settings,
|
settings,
|
||||||
)
|
)
|
||||||
from app.storage import StorageService
|
from app.storage import StorageService as OriginStorageService
|
||||||
from app.storage.cloudflare_r2 import AWSS3StorageService, CloudflareR2StorageService
|
from app.storage.cloudflare_r2 import AWSS3StorageService, CloudflareR2StorageService
|
||||||
from app.storage.local import LocalStorageService
|
from app.storage.local import LocalStorageService
|
||||||
|
|
||||||
storage: StorageService | None = None
|
from fastapi import Depends
|
||||||
|
|
||||||
|
storage: OriginStorageService | None = None
|
||||||
|
|
||||||
|
|
||||||
def init_storage_service():
|
def init_storage_service():
|
||||||
@@ -50,3 +50,6 @@ def get_storage_service():
|
|||||||
if storage is None:
|
if storage is None:
|
||||||
return init_storage_service()
|
return init_storage_service()
|
||||||
return storage
|
return storage
|
||||||
|
|
||||||
|
|
||||||
|
StorageService = Annotated[OriginStorageService, Depends(get_storage_service)]
|
||||||
|
|||||||
@@ -1,16 +1,16 @@
|
|||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
from typing import Annotated
|
from typing import Annotated
|
||||||
|
|
||||||
from app.auth import get_token_by_access_token
|
from app.auth import get_token_by_access_token
|
||||||
from app.config import settings
|
from app.config import settings
|
||||||
|
from app.const import SUPPORT_TOTP_VERIFICATION_VER
|
||||||
from app.database import User
|
from app.database import User
|
||||||
from app.database.auth import V1APIKeys
|
from app.database.auth import OAuthToken, V1APIKeys
|
||||||
from app.models.oauth import OAuth2ClientCredentialsBearer
|
from app.models.oauth import OAuth2ClientCredentialsBearer
|
||||||
|
|
||||||
from .database import Database
|
from .api_version import APIVersion
|
||||||
|
from .database import Database, get_redis
|
||||||
|
|
||||||
from fastapi import Depends, HTTPException
|
from fastapi import Depends, HTTPException, Security
|
||||||
from fastapi.security import (
|
from fastapi.security import (
|
||||||
APIKeyQuery,
|
APIKeyQuery,
|
||||||
HTTPBearer,
|
HTTPBearer,
|
||||||
@@ -18,6 +18,7 @@ from fastapi.security import (
|
|||||||
OAuth2PasswordBearer,
|
OAuth2PasswordBearer,
|
||||||
SecurityScopes,
|
SecurityScopes,
|
||||||
)
|
)
|
||||||
|
from redis.asyncio import Redis
|
||||||
from sqlmodel import select
|
from sqlmodel import select
|
||||||
|
|
||||||
security = HTTPBearer()
|
security = HTTPBearer()
|
||||||
@@ -29,6 +30,7 @@ oauth2_password = OAuth2PasswordBearer(
|
|||||||
scopes={"*": "允许访问全部 API。"},
|
scopes={"*": "允许访问全部 API。"},
|
||||||
description="osu!lazer 或网页客户端密码登录认证,具有全部权限",
|
description="osu!lazer 或网页客户端密码登录认证,具有全部权限",
|
||||||
scheme_name="Password Grant",
|
scheme_name="Password Grant",
|
||||||
|
auto_error=False,
|
||||||
)
|
)
|
||||||
|
|
||||||
oauth2_code = OAuth2AuthorizationCodeBearer(
|
oauth2_code = OAuth2AuthorizationCodeBearer(
|
||||||
@@ -47,6 +49,7 @@ oauth2_code = OAuth2AuthorizationCodeBearer(
|
|||||||
},
|
},
|
||||||
description="osu! OAuth 认证 (授权码认证)",
|
description="osu! OAuth 认证 (授权码认证)",
|
||||||
scheme_name="Authorization Code Grant",
|
scheme_name="Authorization Code Grant",
|
||||||
|
auto_error=False,
|
||||||
)
|
)
|
||||||
|
|
||||||
oauth2_client_credentials = OAuth2ClientCredentialsBearer(
|
oauth2_client_credentials = OAuth2ClientCredentialsBearer(
|
||||||
@@ -57,6 +60,7 @@ oauth2_client_credentials = OAuth2ClientCredentialsBearer(
|
|||||||
},
|
},
|
||||||
description="osu! OAuth 认证 (客户端凭证流)",
|
description="osu! OAuth 认证 (客户端凭证流)",
|
||||||
scheme_name="Client Credentials Grant",
|
scheme_name="Client Credentials Grant",
|
||||||
|
auto_error=False,
|
||||||
)
|
)
|
||||||
|
|
||||||
v1_api_key = APIKeyQuery(name="k", scheme_name="V1 API Key", description="v1 API 密钥")
|
v1_api_key = APIKeyQuery(name="k", scheme_name="V1 API Key", description="v1 API 密钥")
|
||||||
@@ -70,17 +74,18 @@ async def v1_authorize(
|
|||||||
if not api_key:
|
if not api_key:
|
||||||
raise HTTPException(status_code=401, detail="Missing API key")
|
raise HTTPException(status_code=401, detail="Missing API key")
|
||||||
|
|
||||||
api_key_record = (
|
api_key_record = (await db.exec(select(V1APIKeys).where(V1APIKeys.key == api_key))).first()
|
||||||
await db.exec(select(V1APIKeys).where(V1APIKeys.key == api_key))
|
|
||||||
).first()
|
|
||||||
if not api_key_record:
|
if not api_key_record:
|
||||||
raise HTTPException(status_code=401, detail="Invalid API key")
|
raise HTTPException(status_code=401, detail="Invalid API key")
|
||||||
|
|
||||||
|
|
||||||
async def get_client_user(
|
async def get_client_user_and_token(
|
||||||
db: Database,
|
db: Database,
|
||||||
token: Annotated[str, Depends(oauth2_password)],
|
token: Annotated[str | None, Depends(oauth2_password)],
|
||||||
):
|
) -> tuple[User, OAuthToken]:
|
||||||
|
if token is None:
|
||||||
|
raise HTTPException(status_code=401, detail="Not authenticated")
|
||||||
|
|
||||||
token_record = await get_token_by_access_token(db, token)
|
token_record = await get_token_by_access_token(db, token)
|
||||||
if not token_record:
|
if not token_record:
|
||||||
raise HTTPException(status_code=401, detail="Invalid or expired token")
|
raise HTTPException(status_code=401, detail="Invalid or expired token")
|
||||||
@@ -89,24 +94,52 @@ async def get_client_user(
|
|||||||
if not user:
|
if not user:
|
||||||
raise HTTPException(status_code=401, detail="Invalid or expired token")
|
raise HTTPException(status_code=401, detail="Invalid or expired token")
|
||||||
|
|
||||||
await db.refresh(user)
|
return user, token_record
|
||||||
|
|
||||||
|
|
||||||
|
UserAndToken = tuple[User, OAuthToken]
|
||||||
|
|
||||||
|
|
||||||
|
async def get_client_user_no_verified(user_and_token: UserAndToken = Depends(get_client_user_and_token)):
|
||||||
|
return user_and_token[0]
|
||||||
|
|
||||||
|
|
||||||
|
async def get_client_user(
|
||||||
|
db: Database,
|
||||||
|
redis: Annotated[Redis, Depends(get_redis)],
|
||||||
|
api_version: APIVersion,
|
||||||
|
user_and_token: UserAndToken = Depends(get_client_user_and_token),
|
||||||
|
):
|
||||||
|
from app.service.verification_service import LoginSessionService
|
||||||
|
|
||||||
|
user, token = user_and_token
|
||||||
|
|
||||||
|
if await LoginSessionService.check_is_need_verification(db, user.id, token.id):
|
||||||
|
# 获取当前验证方式
|
||||||
|
verify_method = None
|
||||||
|
if api_version >= SUPPORT_TOTP_VERIFICATION_VER:
|
||||||
|
verify_method = await LoginSessionService.get_login_method(user.id, token.id, redis)
|
||||||
|
|
||||||
|
if verify_method is None:
|
||||||
|
# 智能选择验证方式(参考 osu-web State.php:36)
|
||||||
|
totp_key = await user.awaitable_attrs.totp_key
|
||||||
|
verify_method = "totp" if totp_key is not None and api_version >= SUPPORT_TOTP_VERIFICATION_VER else "mail"
|
||||||
|
|
||||||
|
# 设置选择的验证方法到Redis中,避免重复选择
|
||||||
|
if api_version >= SUPPORT_TOTP_VERIFICATION_VER:
|
||||||
|
await LoginSessionService.set_login_method(user.id, token.id, verify_method, redis)
|
||||||
|
|
||||||
|
# 返回符合 osu! API 标准的错误响应
|
||||||
|
error_response = {"error": "User not verified", "method": verify_method}
|
||||||
|
raise HTTPException(status_code=401, detail=error_response)
|
||||||
return user
|
return user
|
||||||
|
|
||||||
|
|
||||||
async def get_current_user(
|
async def _validate_token(
|
||||||
db: Database,
|
db: Database,
|
||||||
|
token: str,
|
||||||
security_scopes: SecurityScopes,
|
security_scopes: SecurityScopes,
|
||||||
token_pw: Annotated[str | None, Depends(oauth2_password)] = None,
|
) -> UserAndToken:
|
||||||
token_code: Annotated[str | None, Depends(oauth2_code)] = None,
|
|
||||||
token_client_credentials: Annotated[
|
|
||||||
str | None, Depends(oauth2_client_credentials)
|
|
||||||
] = None,
|
|
||||||
) -> User:
|
|
||||||
"""获取当前认证用户"""
|
|
||||||
token = token_pw or token_code or token_client_credentials
|
|
||||||
if not token:
|
|
||||||
raise HTTPException(status_code=401, detail="Not authenticated")
|
|
||||||
|
|
||||||
token_record = await get_token_by_access_token(db, token)
|
token_record = await get_token_by_access_token(db, token)
|
||||||
if not token_record:
|
if not token_record:
|
||||||
raise HTTPException(status_code=401, detail="Invalid or expired token")
|
raise HTTPException(status_code=401, detail="Invalid or expired token")
|
||||||
@@ -119,13 +152,47 @@ async def get_current_user(
|
|||||||
if not is_client:
|
if not is_client:
|
||||||
for scope in security_scopes.scopes:
|
for scope in security_scopes.scopes:
|
||||||
if scope not in token_record.scope.split(","):
|
if scope not in token_record.scope.split(","):
|
||||||
raise HTTPException(
|
raise HTTPException(status_code=403, detail=f"Insufficient scope: {scope}")
|
||||||
status_code=403, detail=f"Insufficient scope: {scope}"
|
|
||||||
)
|
|
||||||
|
|
||||||
user = (await db.exec(select(User).where(User.id == token_record.user_id))).first()
|
user = (await db.exec(select(User).where(User.id == token_record.user_id))).first()
|
||||||
if not user:
|
if not user:
|
||||||
raise HTTPException(status_code=401, detail="Invalid or expired token")
|
raise HTTPException(status_code=401, detail="Invalid or expired token")
|
||||||
|
return user, token_record
|
||||||
|
|
||||||
await db.refresh(user)
|
|
||||||
return user
|
async def get_current_user_and_token(
|
||||||
|
db: Database,
|
||||||
|
security_scopes: SecurityScopes,
|
||||||
|
token_pw: Annotated[str | None, Depends(oauth2_password)] = None,
|
||||||
|
token_code: Annotated[str | None, Depends(oauth2_code)] = None,
|
||||||
|
token_client_credentials: Annotated[str | None, Depends(oauth2_client_credentials)] = None,
|
||||||
|
) -> UserAndToken:
|
||||||
|
"""获取当前认证用户"""
|
||||||
|
token = token_pw or token_code or token_client_credentials
|
||||||
|
if not token:
|
||||||
|
raise HTTPException(status_code=401, detail="Not authenticated")
|
||||||
|
|
||||||
|
return await _validate_token(db, token, security_scopes)
|
||||||
|
|
||||||
|
|
||||||
|
async def get_current_user(
|
||||||
|
user_and_token: UserAndToken = Depends(get_current_user_and_token),
|
||||||
|
) -> User:
|
||||||
|
return user_and_token[0]
|
||||||
|
|
||||||
|
|
||||||
|
async def get_optional_user(
|
||||||
|
db: Database,
|
||||||
|
security_scopes: SecurityScopes,
|
||||||
|
token_pw: Annotated[str | None, Depends(oauth2_password)] = None,
|
||||||
|
token_code: Annotated[str | None, Depends(oauth2_code)] = None,
|
||||||
|
token_client_credentials: Annotated[str | None, Depends(oauth2_client_credentials)] = None,
|
||||||
|
) -> User | None:
|
||||||
|
token = token_pw or token_code or token_client_credentials
|
||||||
|
if not token:
|
||||||
|
return None
|
||||||
|
|
||||||
|
return (await _validate_token(db, token, security_scopes))[0]
|
||||||
|
|
||||||
|
|
||||||
|
ClientUser = Annotated[User, Security(get_client_user, scopes=["*"])]
|
||||||
|
|||||||
13
app/dependencies/user_agent.py
Normal file
13
app/dependencies/user_agent.py
Normal file
@@ -0,0 +1,13 @@
|
|||||||
|
from typing import Annotated
|
||||||
|
|
||||||
|
from app.models.model import UserAgentInfo as UserAgentInfoModel
|
||||||
|
from app.utils import extract_user_agent
|
||||||
|
|
||||||
|
from fastapi import Depends, Header
|
||||||
|
|
||||||
|
|
||||||
|
def get_user_agent_info(user_agent: str | None = Header(None, include_in_schema=False)) -> UserAgentInfoModel:
|
||||||
|
return extract_user_agent(user_agent)
|
||||||
|
|
||||||
|
|
||||||
|
UserAgentInfo = Annotated[UserAgentInfoModel, Depends(get_user_agent_info)]
|
||||||
@@ -1,10 +0,0 @@
|
|||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
|
|
||||||
class SignalRException(Exception):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
class InvokeException(SignalRException):
|
|
||||||
def __init__(self, message: str) -> None:
|
|
||||||
self.message = message
|
|
||||||
@@ -1,5 +1,3 @@
|
|||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
from .beatmap import BeatmapFetcher
|
from .beatmap import BeatmapFetcher
|
||||||
from .beatmap_raw import BeatmapRawFetcher
|
from .beatmap_raw import BeatmapRawFetcher
|
||||||
from .beatmapset import BeatmapsetFetcher
|
from .beatmapset import BeatmapsetFetcher
|
||||||
|
|||||||
@@ -1,11 +1,11 @@
|
|||||||
from __future__ import annotations
|
import asyncio
|
||||||
|
from datetime import datetime
|
||||||
import time
|
import time
|
||||||
|
|
||||||
from app.dependencies.database import get_redis
|
from app.dependencies.database import get_redis
|
||||||
from app.log import logger
|
from app.log import fetcher_logger
|
||||||
|
|
||||||
from httpx import AsyncClient
|
from httpx import AsyncClient, HTTPStatusError, TimeoutException
|
||||||
|
|
||||||
|
|
||||||
class TokenAuthError(Exception):
|
class TokenAuthError(Exception):
|
||||||
@@ -14,14 +14,69 @@ class TokenAuthError(Exception):
|
|||||||
pass
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class PassiveRateLimiter:
|
||||||
|
"""
|
||||||
|
被动速率限制器
|
||||||
|
当收到 429 响应时,读取 Retry-After 头并暂停所有请求
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self):
|
||||||
|
self._lock = asyncio.Lock()
|
||||||
|
self._retry_after_time: float | None = None
|
||||||
|
self._waiting_tasks: set[asyncio.Task] = set()
|
||||||
|
|
||||||
|
async def wait_if_limited(self) -> None:
|
||||||
|
"""如果正在限流中,等待限流解除"""
|
||||||
|
async with self._lock:
|
||||||
|
if self._retry_after_time is not None:
|
||||||
|
current_time = time.time()
|
||||||
|
if current_time < self._retry_after_time:
|
||||||
|
wait_seconds = self._retry_after_time - current_time
|
||||||
|
logger.warning(f"Rate limited, waiting {wait_seconds:.2f} seconds")
|
||||||
|
await asyncio.sleep(wait_seconds)
|
||||||
|
self._retry_after_time = None
|
||||||
|
|
||||||
|
async def handle_rate_limit(self, retry_after: str | int | None) -> None:
|
||||||
|
"""
|
||||||
|
处理 429 响应,设置限流时间
|
||||||
|
|
||||||
|
Args:
|
||||||
|
retry_after: Retry-After 头的值,可以是秒数或 HTTP 日期
|
||||||
|
"""
|
||||||
|
async with self._lock:
|
||||||
|
if retry_after is None:
|
||||||
|
# 如果没有 Retry-After 头,默认等待 60 秒
|
||||||
|
wait_seconds = 60
|
||||||
|
elif isinstance(retry_after, int):
|
||||||
|
wait_seconds = retry_after
|
||||||
|
elif retry_after.isdigit():
|
||||||
|
wait_seconds = int(retry_after)
|
||||||
|
else:
|
||||||
|
# 尝试解析 HTTP 日期格式
|
||||||
|
try:
|
||||||
|
retry_time = datetime.strptime(retry_after, "%a, %d %b %Y %H:%M:%S %Z")
|
||||||
|
wait_seconds = max(0, (retry_time - datetime.utcnow()).total_seconds())
|
||||||
|
except ValueError:
|
||||||
|
# 解析失败,默认等待 60 秒
|
||||||
|
wait_seconds = 60
|
||||||
|
|
||||||
|
self._retry_after_time = time.time() + wait_seconds
|
||||||
|
logger.warning(f"Rate limit triggered, will retry after {wait_seconds} seconds")
|
||||||
|
|
||||||
|
|
||||||
|
logger = fetcher_logger("Fetcher")
|
||||||
|
|
||||||
|
|
||||||
class BaseFetcher:
|
class BaseFetcher:
|
||||||
|
# 类级别的 rate limiter,所有实例共享
|
||||||
|
_rate_limiter = PassiveRateLimiter()
|
||||||
|
|
||||||
def __init__(
|
def __init__(
|
||||||
self,
|
self,
|
||||||
client_id: str,
|
client_id: str,
|
||||||
client_secret: str,
|
client_secret: str,
|
||||||
scope: list[str] = ["public"],
|
scope: list[str] = ["public"],
|
||||||
callback_url: str = "",
|
callback_url: str = "",
|
||||||
max_retries: int = 3,
|
|
||||||
):
|
):
|
||||||
self.client_id = client_id
|
self.client_id = client_id
|
||||||
self.client_secret = client_secret
|
self.client_secret = client_secret
|
||||||
@@ -30,16 +85,16 @@ class BaseFetcher:
|
|||||||
self.token_expiry: int = 0
|
self.token_expiry: int = 0
|
||||||
self.callback_url: str = callback_url
|
self.callback_url: str = callback_url
|
||||||
self.scope = scope
|
self.scope = scope
|
||||||
self.max_retries = max_retries
|
self._token_lock = asyncio.Lock()
|
||||||
self._auth_retry_count = 0 # 授权重试计数器
|
|
||||||
|
|
||||||
@property
|
# NOTE: Reserve for user-based fetchers
|
||||||
def authorize_url(self) -> str:
|
# @property
|
||||||
return (
|
# def authorize_url(self) -> str:
|
||||||
f"https://osu.ppy.sh/oauth/authorize?client_id={self.client_id}"
|
# return (
|
||||||
f"&response_type=code&scope={' '.join(self.scope)}"
|
# f"https://osu.ppy.sh/oauth/authorize?client_id={self.client_id}"
|
||||||
f"&redirect_uri={self.callback_url}"
|
# f"&response_type=code&scope={quote(' '.join(self.scope))}"
|
||||||
)
|
# f"&redirect_uri={self.callback_url}"
|
||||||
|
# )
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def header(self) -> dict[str, str]:
|
def header(self) -> dict[str, str]:
|
||||||
@@ -50,216 +105,132 @@ class BaseFetcher:
|
|||||||
|
|
||||||
async def request_api(self, url: str, method: str = "GET", **kwargs) -> dict:
|
async def request_api(self, url: str, method: str = "GET", **kwargs) -> dict:
|
||||||
"""
|
"""
|
||||||
发送 API 请求,具有智能重试和自动重新授权机制
|
发送 API 请求,支持被动速率限制
|
||||||
"""
|
"""
|
||||||
return await self._request_with_retry(url, method, **kwargs)
|
await self.ensure_valid_access_token()
|
||||||
|
|
||||||
async def _request_with_retry(
|
headers = kwargs.pop("headers", {}).copy()
|
||||||
self, url: str, method: str = "GET", max_retries: int | None = None, **kwargs
|
attempt = 0
|
||||||
) -> dict:
|
|
||||||
"""
|
|
||||||
带重试机制的请求方法
|
|
||||||
"""
|
|
||||||
if max_retries is None:
|
|
||||||
max_retries = self.max_retries
|
|
||||||
|
|
||||||
last_error = None
|
while attempt < 2:
|
||||||
|
# 在发送请求前等待速率限制
|
||||||
|
await self._rate_limiter.wait_if_limited()
|
||||||
|
|
||||||
for attempt in range(max_retries + 1):
|
request_headers = {**headers, **self.header}
|
||||||
try:
|
request_kwargs = kwargs.copy()
|
||||||
# 检查 token 是否过期
|
|
||||||
if self.is_token_expired():
|
|
||||||
await self.refresh_access_token()
|
|
||||||
|
|
||||||
header = kwargs.pop("headers", {})
|
async with AsyncClient() as client:
|
||||||
header.update(self.header)
|
try:
|
||||||
|
|
||||||
async with AsyncClient() as client:
|
|
||||||
response = await client.request(
|
response = await client.request(
|
||||||
method,
|
method,
|
||||||
url,
|
url,
|
||||||
headers=header,
|
headers=request_headers,
|
||||||
**kwargs,
|
**request_kwargs,
|
||||||
)
|
)
|
||||||
|
|
||||||
# 处理 401 错误
|
|
||||||
if response.status_code == 401:
|
|
||||||
self._auth_retry_count += 1
|
|
||||||
logger.warning(
|
|
||||||
f"Received 401 error (attempt {attempt + 1}/{max_retries + 1}) "
|
|
||||||
f"for {url}, auth retry count: {self._auth_retry_count}"
|
|
||||||
)
|
|
||||||
|
|
||||||
# 如果达到最大重试次数,触发重新授权
|
|
||||||
if self._auth_retry_count >= self.max_retries:
|
|
||||||
await self._trigger_reauthorization()
|
|
||||||
raise TokenAuthError(
|
|
||||||
f"Authentication failed after {self._auth_retry_count} attempts. "
|
|
||||||
f"Please re-authorize using: {self.authorize_url}"
|
|
||||||
)
|
|
||||||
|
|
||||||
# 如果还有重试机会,刷新 token 后继续
|
|
||||||
if attempt < max_retries:
|
|
||||||
await self.refresh_access_token()
|
|
||||||
continue
|
|
||||||
else:
|
|
||||||
# 最后一次重试也失败了
|
|
||||||
await self._trigger_reauthorization()
|
|
||||||
raise TokenAuthError(
|
|
||||||
f"Max retries ({max_retries}) exceeded for authentication. "
|
|
||||||
f"Please re-authorize using: {self.authorize_url}"
|
|
||||||
)
|
|
||||||
|
|
||||||
# 请求成功,重置重试计数器
|
|
||||||
self._auth_retry_count = 0
|
|
||||||
response.raise_for_status()
|
response.raise_for_status()
|
||||||
return response.json()
|
return response.json()
|
||||||
|
|
||||||
except TokenAuthError:
|
except HTTPStatusError as e:
|
||||||
# 重新抛出授权错误
|
# 处理 429 速率限制响应
|
||||||
raise
|
if e.response.status_code == 429:
|
||||||
except Exception as e:
|
retry_after = e.response.headers.get("Retry-After")
|
||||||
last_error = e
|
logger.warning(f"Rate limited for {url}, Retry-After: {retry_after}")
|
||||||
if attempt < max_retries:
|
await self._rate_limiter.handle_rate_limit(retry_after)
|
||||||
logger.warning(
|
# 速率限制后重试当前请求(不增加 attempt)
|
||||||
f"Request failed (attempt {attempt + 1}/{max_retries + 1}): {e}, retrying..."
|
continue
|
||||||
)
|
|
||||||
continue
|
|
||||||
else:
|
|
||||||
logger.error(
|
|
||||||
f"Request failed after {max_retries + 1} attempts: {e}"
|
|
||||||
)
|
|
||||||
break
|
|
||||||
|
|
||||||
# 如果所有重试都失败了
|
# 处理 401 未授权响应
|
||||||
if last_error:
|
if e.response.status_code == 401:
|
||||||
raise last_error
|
attempt += 1
|
||||||
else:
|
logger.warning(f"Received 401 error for {url}, attempt {attempt}")
|
||||||
raise Exception(f"Request to {url} failed after {max_retries + 1} attempts")
|
await self._handle_unauthorized()
|
||||||
|
continue
|
||||||
|
|
||||||
|
# 其他 HTTP 错误直接抛出
|
||||||
|
raise
|
||||||
|
|
||||||
|
await self._clear_access_token()
|
||||||
|
logger.warning(f"Failed to authorize after retries for {url}, cleaned up tokens")
|
||||||
|
await self.grant_access_token()
|
||||||
|
raise TokenAuthError(f"Failed to authorize after retries for {url}")
|
||||||
|
|
||||||
def is_token_expired(self) -> bool:
|
def is_token_expired(self) -> bool:
|
||||||
return self.token_expiry <= int(time.time())
|
if not isinstance(self.token_expiry, int):
|
||||||
|
return True
|
||||||
|
return self.token_expiry <= int(time.time()) or not self.access_token
|
||||||
|
|
||||||
async def grant_access_token(self, code: str) -> None:
|
async def grant_access_token(self, retries: int = 3, backoff: float = 1.0) -> None:
|
||||||
async with AsyncClient() as client:
|
last_error: Exception | None = None
|
||||||
response = await client.post(
|
async with AsyncClient(timeout=30.0) as client:
|
||||||
"https://osu.ppy.sh/oauth/token",
|
for attempt in range(1, retries + 1):
|
||||||
data={
|
try:
|
||||||
"client_id": self.client_id,
|
response = await client.post(
|
||||||
"client_secret": self.client_secret,
|
"https://osu.ppy.sh/oauth/token",
|
||||||
"grant_type": "authorization_code",
|
data={
|
||||||
"redirect_uri": self.callback_url,
|
"client_id": self.client_id,
|
||||||
"code": code,
|
"client_secret": self.client_secret,
|
||||||
},
|
"grant_type": "client_credentials",
|
||||||
)
|
"scope": "public",
|
||||||
response.raise_for_status()
|
},
|
||||||
token_data = response.json()
|
)
|
||||||
self.access_token = token_data["access_token"]
|
response.raise_for_status()
|
||||||
self.refresh_token = token_data.get("refresh_token", "")
|
token_data = response.json()
|
||||||
self.token_expiry = int(time.time()) + token_data["expires_in"]
|
self.access_token = token_data["access_token"]
|
||||||
redis = get_redis()
|
self.token_expiry = int(time.time()) + token_data["expires_in"]
|
||||||
await redis.set(
|
redis = get_redis()
|
||||||
f"fetcher:access_token:{self.client_id}",
|
await redis.set(
|
||||||
self.access_token,
|
f"fetcher:access_token:{self.client_id}",
|
||||||
ex=token_data["expires_in"],
|
self.access_token,
|
||||||
)
|
ex=token_data["expires_in"],
|
||||||
await redis.set(
|
)
|
||||||
f"fetcher:refresh_token:{self.client_id}",
|
await redis.set(
|
||||||
self.refresh_token,
|
f"fetcher:expire_at:{self.client_id}",
|
||||||
)
|
self.token_expiry,
|
||||||
|
ex=token_data["expires_in"],
|
||||||
|
)
|
||||||
|
logger.success(
|
||||||
|
f"Granted new access token for client {self.client_id}, "
|
||||||
|
f"expires in {token_data['expires_in']} seconds"
|
||||||
|
)
|
||||||
|
return
|
||||||
|
|
||||||
async def refresh_access_token(self) -> None:
|
except TimeoutException as exc:
|
||||||
try:
|
last_error = exc
|
||||||
logger.info(f"Refreshing access token for client {self.client_id}")
|
logger.warning(
|
||||||
async with AsyncClient() as client:
|
f"Timed out while requesting access token for "
|
||||||
response = await client.post(
|
f"client {self.client_id} (attempt {attempt}/{retries})"
|
||||||
"https://osu.ppy.sh/oauth/token",
|
)
|
||||||
data={
|
except HTTPStatusError as exc:
|
||||||
"client_id": self.client_id,
|
last_error = exc
|
||||||
"client_secret": self.client_secret,
|
logger.warning(
|
||||||
"grant_type": "refresh_token",
|
f"HTTP error while requesting access token for client {self.client_id}"
|
||||||
"refresh_token": self.refresh_token,
|
f" (status: {exc.response.status_code}, attempt {attempt}/{retries})"
|
||||||
},
|
)
|
||||||
)
|
except Exception as exc:
|
||||||
response.raise_for_status()
|
last_error = exc
|
||||||
token_data = response.json()
|
logger.exception(
|
||||||
self.access_token = token_data["access_token"]
|
f"Unexpected error while requesting access token for client {self.client_id}"
|
||||||
self.refresh_token = token_data.get("refresh_token", "")
|
f" (attempt {attempt}/{retries})"
|
||||||
self.token_expiry = int(time.time()) + token_data["expires_in"]
|
)
|
||||||
redis = get_redis()
|
|
||||||
await redis.set(
|
|
||||||
f"fetcher:access_token:{self.client_id}",
|
|
||||||
self.access_token,
|
|
||||||
ex=token_data["expires_in"],
|
|
||||||
)
|
|
||||||
await redis.set(
|
|
||||||
f"fetcher:refresh_token:{self.client_id}",
|
|
||||||
self.refresh_token,
|
|
||||||
)
|
|
||||||
logger.info(
|
|
||||||
f"Successfully refreshed access token for client {self.client_id}"
|
|
||||||
)
|
|
||||||
except Exception as e:
|
|
||||||
logger.error(
|
|
||||||
f"Failed to refresh access token for client {self.client_id}: {e}"
|
|
||||||
)
|
|
||||||
# 清除无效的 token,要求重新授权
|
|
||||||
self.access_token = ""
|
|
||||||
self.refresh_token = ""
|
|
||||||
self.token_expiry = 0
|
|
||||||
redis = get_redis()
|
|
||||||
await redis.delete(f"fetcher:access_token:{self.client_id}")
|
|
||||||
await redis.delete(f"fetcher:refresh_token:{self.client_id}")
|
|
||||||
logger.warning(
|
|
||||||
f"Cleared invalid tokens. Please re-authorize: {self.authorize_url}"
|
|
||||||
)
|
|
||||||
raise
|
|
||||||
|
|
||||||
async def _trigger_reauthorization(self) -> None:
|
if attempt < retries:
|
||||||
"""
|
await asyncio.sleep(backoff * attempt)
|
||||||
触发重新授权流程
|
|
||||||
清除所有 token 并重置重试计数器
|
raise TokenAuthError("Failed to grant access token after retries") from last_error
|
||||||
"""
|
|
||||||
logger.error(
|
async def ensure_valid_access_token(self) -> None:
|
||||||
f"Authentication failed after {self._auth_retry_count} attempts. "
|
if self.is_token_expired():
|
||||||
f"Triggering reauthorization for client {self.client_id}"
|
await self.grant_access_token()
|
||||||
)
|
|
||||||
|
async def _handle_unauthorized(self) -> None:
|
||||||
|
await self.grant_access_token()
|
||||||
|
|
||||||
|
async def _clear_access_token(self) -> None:
|
||||||
|
logger.warning(f"Clearing access token for client {self.client_id}")
|
||||||
|
|
||||||
# 清除内存中的 token
|
|
||||||
self.access_token = ""
|
self.access_token = ""
|
||||||
self.refresh_token = ""
|
|
||||||
self.token_expiry = 0
|
self.token_expiry = 0
|
||||||
self._auth_retry_count = 0 # 重置重试计数器
|
|
||||||
|
|
||||||
# 清除 Redis 中的 token
|
|
||||||
redis = get_redis()
|
redis = get_redis()
|
||||||
await redis.delete(f"fetcher:access_token:{self.client_id}")
|
await redis.delete(f"fetcher:access_token:{self.client_id}")
|
||||||
await redis.delete(f"fetcher:refresh_token:{self.client_id}")
|
await redis.delete(f"fetcher:expire_at:{self.client_id}")
|
||||||
|
|
||||||
logger.warning(
|
|
||||||
f"All tokens cleared for client {self.client_id}. "
|
|
||||||
f"Please re-authorize using: {self.authorize_url}"
|
|
||||||
)
|
|
||||||
|
|
||||||
def reset_auth_retry_count(self) -> None:
|
|
||||||
"""
|
|
||||||
重置授权重试计数器
|
|
||||||
可以在手动重新授权后调用
|
|
||||||
"""
|
|
||||||
self._auth_retry_count = 0
|
|
||||||
logger.info(f"Auth retry count reset for client {self.client_id}")
|
|
||||||
|
|
||||||
def get_auth_status(self) -> dict:
|
|
||||||
"""
|
|
||||||
获取当前授权状态信息
|
|
||||||
"""
|
|
||||||
return {
|
|
||||||
"client_id": self.client_id,
|
|
||||||
"has_access_token": bool(self.access_token),
|
|
||||||
"has_refresh_token": bool(self.refresh_token),
|
|
||||||
"token_expired": self.is_token_expired(),
|
|
||||||
"auth_retry_count": self._auth_retry_count,
|
|
||||||
"max_retries": self.max_retries,
|
|
||||||
"authorize_url": self.authorize_url,
|
|
||||||
"needs_reauth": self._auth_retry_count >= self.max_retries,
|
|
||||||
}
|
|
||||||
|
|||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user