Merge branch 'main' into feat/multiplayer-api
422 .gitignore vendored
@@ -1,209 +1,213 @@
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[codz]
*$py.class

# C extensions
*.so

# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST

# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py.cover
.hypothesis/
.pytest_cache/
cover/

# Translations
*.mo
*.pot

# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal

# Flask stuff:
instance/
.webassets-cache

# Scrapy stuff:
.scrapy

# Sphinx documentation
docs/_build/

# PyBuilder
.pybuilder/
target/

# Jupyter Notebook
.ipynb_checkpoints

# IPython
profile_default/
ipython_config.py

# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version

# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock

# UV
# Similar to Pipfile.lock, it is generally recommended to include uv.lock in version control.
# This is especially recommended for binary packages to ensure reproducibility, and is more
# commonly ignored for libraries.
#uv.lock

# poetry
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
# This is especially recommended for binary packages to ensure reproducibility, and is more
# commonly ignored for libraries.
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
#poetry.lock
#poetry.toml

# pdm
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
# pdm recommends including project-wide configuration in pdm.toml, but excluding .pdm-python.
# https://pdm-project.org/en/latest/usage/project/#working-with-version-control
#pdm.lock
#pdm.toml
.pdm-python
.pdm-build/

# pixi
# Similar to Pipfile.lock, it is generally recommended to include pixi.lock in version control.
#pixi.lock
# Pixi creates a virtual environment in the .pixi directory, just like venv module creates one
# in the .venv directory. It is recommended not to include this directory in version control.
.pixi

# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
__pypackages__/

# Celery stuff
celerybeat-schedule
celerybeat.pid

# SageMath parsed files
*.sage.py

# Environments
.env
.envrc
.venv
env/
venv/
ENV/
env.bak/
venv.bak/

# Spyder project settings
.spyderproject
.spyproject

# Rope project settings
.ropeproject

# mkdocs documentation
/site

# mypy
.mypy_cache/
.dmypy.json
dmypy.json

# Pyre type checker
.pyre/

# pytype static type analyzer
.pytype/

# Cython debug symbols
cython_debug/

# PyCharm
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
# and can be added to the global gitignore or merged into this file. For a more nuclear
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
#.idea/

# Abstra
# Abstra is an AI-powered process automation framework.
# Ignore directories containing user credentials, local state, and settings.
# Learn more at https://abstra.io/docs
.abstra/

# Visual Studio Code
# Visual Studio Code specific template is maintained in a separate VisualStudioCode.gitignore
# that can be found at https://github.com/github/gitignore/blob/main/Global/VisualStudioCode.gitignore
# and can be added to the global gitignore or merged into this file. However, if you prefer,
# you could uncomment the following to ignore the entire vscode folder
# .vscode/

# Ruff stuff:
.ruff_cache/

# PyPI configuration file
.pypirc

# Cursor
# Cursor is an AI-powered code editor. `.cursorignore` specifies files/directories to
# exclude from AI features like autocomplete and code analysis. Recommended for sensitive data
# refer to https://docs.cursor.com/context/ignore-files
.cursorignore
.cursorindexingignore

# Marimo
marimo/_static/
marimo/_lsp/
__marimo__/
bancho.py-master/*
.vscode/settings.json
+
+# runtime file
+replays/
+osu-master/*
@@ -4,8 +4,8 @@

## File overview

-1. **`migrations/add_missing_fields.sql`** - Creates the Lazer-API-specific table structure
-2. **`migrations/sync_legacy_data.sql`** - Data synchronization script
+1. **`migrations_old/add_missing_fields.sql`** - Creates the Lazer-API-specific table structure
+2. **`migrations_old/sync_legacy_data.sql`** - Data synchronization script
3. **`sync_data.py`** - Interactive data synchronization tool
4. **`quick_sync.py`** - Quick sync script (uses the project configuration)

@@ -43,10 +43,10 @@ python sync_data.py

```bash
# 1. Create the table structure
-mysql -u username -p database_name < migrations/add_missing_fields.sql
+mysql -u username -p database_name < migrations_old/add_missing_fields.sql

# 2. Synchronize the data
-mysql -u username -p database_name < migrations/sync_legacy_data.sql
+mysql -u username -p database_name < migrations_old/sync_legacy_data.sql
```

## What gets synced
42 MIGRATE_GUIDE.md Normal file
@@ -0,0 +1,42 @@
# Database Migration Guide

## Connection

Connects using the default environment variable (`DATABASE_URL`); if it is not set, `sqlalchemy.url` is read from `alembic.ini`.

## Creating a migration

After changing the database model definitions, create a new migration script with:

```bash
alembic revision --autogenerate -m "describe your migration"
```

Note that the following kinds of changes cannot be autogenerated; edit the generated migration file by hand (a sketch follows the list below):

- Renaming tables
- Renaming columns
- Anonymously named constraints
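As a purely illustrative example of such a hand-edited migration (the revision identifiers, table names, and column names below are placeholders, not taken from this repository), a rename can be written directly with Alembic's `op` helpers:

```python
"""Hypothetical hand-written Alembic migration for renames.

All identifiers here are placeholders used only for illustration.
"""
from alembic import op

# Placeholder revision identifiers; alembic fills these in for real revisions.
revision = "abc123"
down_revision = "def456"


def upgrade() -> None:
    # Autogenerate cannot detect renames, so state them explicitly.
    op.rename_table("old_table", "new_table")
    op.alter_column("new_table", "old_col", new_column_name="new_col")


def downgrade() -> None:
    # Reverse the operations in the opposite order.
    op.alter_column("new_table", "new_col", new_column_name="old_col")
    op.rename_table("new_table", "old_table")
```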
## Upgrading / rolling back migrations

To apply all migration scripts that have not been applied yet, run:

```bash
alembic upgrade head
```

To upgrade or roll back by a number of versions, the following commands can be used:

```bash
# Roll back one revision
alembic downgrade -1
# Upgrade two revisions
alembic upgrade +2
# Roll back to the base revision
alembic downgrade base
# Upgrade to a specific revision
alembic upgrade <revision>
```

For details, see the [alembic documentation](https://alembic.sqlalchemy.org/en/latest/tutorial.html).
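The connection rule described under "Connection" above (prefer `DATABASE_URL`, otherwise fall back to `sqlalchemy.url` from `alembic.ini`) would normally be implemented in `migrations/env.py`, which this diff does not show. The snippet below is only a rough sketch of what that resolution could look like; every name in it other than `DATABASE_URL` and `sqlalchemy.url` is an assumption, not taken from the repository.

```python
# Hypothetical excerpt of migrations/env.py; not part of this diff.
# Runs only under "alembic ..." commands, where alembic provides `context`.
import os

from alembic import context

config = context.config  # wraps the values parsed from alembic.ini


def resolve_database_url() -> str:
    # Prefer the environment variable, as described in the guide above.
    url = os.getenv("DATABASE_URL")
    if url:
        return url
    # Otherwise fall back to the sqlalchemy.url entry in alembic.ini.
    url = config.get_main_option("sqlalchemy.url")
    if not url:
        raise RuntimeError("Neither DATABASE_URL nor sqlalchemy.url is configured")
    return url


config.set_main_option("sqlalchemy.url", resolve_database_url())
```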
@@ -205,6 +205,10 @@ curl -X POST http://localhost:8000/oauth/token \

- Add an admin panel
- Implement data import/export functionality

+### Migrating the database
+
+See the [database migration guide](./MIGRATE_GUIDE.md)
+
## License

MIT License

147 alembic.ini Normal file
@@ -0,0 +1,147 @@
# A generic, single database configuration.

[alembic]
# path to migration scripts.
# this is typically a path given in POSIX (e.g. forward slashes)
# format, relative to the token %(here)s which refers to the location of this
# ini file
script_location = %(here)s/migrations

# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
# Uncomment the line below if you want the files to be prepended with date and time
# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
# for all available tokens
# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s

# sys.path path, will be prepended to sys.path if present.
# defaults to the current working directory. for multiple paths, the path separator
# is defined by "path_separator" below.
prepend_sys_path = .


# timezone to use when rendering the date within the migration file
# as well as the filename.
# If specified, requires the python>=3.9 or backports.zoneinfo library and tzdata library.
# Any required deps can installed by adding `alembic[tz]` to the pip requirements
# string value is passed to ZoneInfo()
# leave blank for localtime
# timezone =

# max length of characters to apply to the "slug" field
# truncate_slug_length = 40

# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false

# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false

# version location specification; This defaults
# to <script_location>/versions. When using multiple version
# directories, initial revisions must be specified with --version-path.
# The path separator used here should be the separator specified by "path_separator"
# below.
# version_locations = %(here)s/bar:%(here)s/bat:%(here)s/alembic/versions

# path_separator; This indicates what character is used to split lists of file
# paths, including version_locations and prepend_sys_path within configparser
# files such as alembic.ini.
# The default rendered in new alembic.ini files is "os", which uses os.pathsep
# to provide os-dependent path splitting.
#
# Note that in order to support legacy alembic.ini files, this default does NOT
# take place if path_separator is not present in alembic.ini. If this
# option is omitted entirely, fallback logic is as follows:
#
# 1. Parsing of the version_locations option falls back to using the legacy
#    "version_path_separator" key, which if absent then falls back to the legacy
#    behavior of splitting on spaces and/or commas.
# 2. Parsing of the prepend_sys_path option falls back to the legacy
#    behavior of splitting on spaces, commas, or colons.
#
# Valid values for path_separator are:
#
# path_separator = :
# path_separator = ;
# path_separator = space
# path_separator = newline
#
# Use os.pathsep. Default configuration used for new projects.
path_separator = os

# set to 'true' to search source files recursively
# in each "version_locations" directory
# new in Alembic version 1.10
# recursive_version_locations = false

# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8

# database URL. This is consumed by the user-maintained env.py script only.
# other means of configuring database URLs may be customized within the env.py
# file.
sqlalchemy.url = mysql+aiomysql://root:password@127.0.0.1:3306/osu_api


[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts. See the documentation for further
# detail and examples

# format using "black" - use the console_scripts runner, against the "black" entrypoint
# hooks = black
# black.type = console_scripts
# black.entrypoint = black
# black.options = -l 79 REVISION_SCRIPT_FILENAME

# lint with attempts to fix using "ruff" - use the module runner, against the "ruff" module
# hooks = ruff
# ruff.type = module
# ruff.module = ruff
# ruff.options = check --fix REVISION_SCRIPT_FILENAME

# Alternatively, use the exec runner to execute a binary found on your PATH
hooks = ruff
ruff.type = exec
ruff.executable = ruff
ruff.options = check --fix REVISION_SCRIPT_FILENAME

# Logging configuration. This is also consumed by the user-maintained
# env.py script only.
[loggers]
keys = root,sqlalchemy,alembic

[handlers]
keys = console

[formatters]
keys = generic

[logger_root]
level = WARNING
handlers = console
qualname =

[logger_sqlalchemy]
level = WARNING
handlers =
qualname = sqlalchemy.engine

[logger_alembic]
level = INFO
handlers =
qualname = alembic

[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic

[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S
@@ -10,6 +10,7 @@ from app.database import (
    OAuthToken,
    User as DBUser,
)
+from app.log import logger

import bcrypt
from jose import JWTError, jwt

@@ -47,8 +48,8 @@ def verify_password_legacy(plain_password: str, bcrypt_hash: str) -> bool:
            bcrypt_cache[bcrypt_hash] = pw_md5

        return is_valid
-    except Exception as e:
-        print(f"Password verification error: {e}")
+    except Exception:
+        logger.exception("Password verification error")
        return False


@@ -104,8 +105,8 @@ async def authenticate_user_legacy(
            # Cache the verification result
            bcrypt_cache[user.pw_bcrypt] = pw_md5.encode()
        return user
-    except Exception as e:
-        print(f"Authentication error for user {name}: {e}")
+    except Exception:
+        logger.exception(f"Authentication error for user {name}")

    return None

@@ -34,7 +34,7 @@ class Settings:

    # SignalR settings
    SIGNALR_NEGOTIATE_TIMEOUT: int = int(os.getenv("SIGNALR_NEGOTIATE_TIMEOUT", "30"))
-    SIGNALR_PING_INTERVAL: int = int(os.getenv("SIGNALR_PING_INTERVAL", "120"))
+    SIGNALR_PING_INTERVAL: int = int(os.getenv("SIGNALR_PING_INTERVAL", "15"))

    # Fetcher settings
    FETCHER_CLIENT_ID: str = os.getenv("FETCHER_CLIENT_ID", "")

@@ -44,5 +44,8 @@ class Settings:
        "FETCHER_CALLBACK_URL", "http://localhost:8000/fetcher/callback"
    )

+    # Logging settings
+    LOG_LEVEL: str = os.getenv("LOG_LEVEL", "INFO").upper()


settings = Settings()

@@ -9,6 +9,13 @@ from .beatmapset import (
)
from .legacy import LegacyOAuthToken, LegacyUserStatistics
from .relationship import Relationship, RelationshipResp, RelationshipType
from .score import (
    Score,
    ScoreBase,
    ScoreResp,
    ScoreStatistics,
)
from .score_token import ScoreToken, ScoreTokenResp
from .team import Team, TeamMember
from .user import (
    DailyChallengeStats,

@@ -57,6 +64,12 @@ __all__ = [
    "Relationship",
    "RelationshipResp",
    "RelationshipType",
    "Score",
    "ScoreBase",
    "ScoreResp",
    "ScoreStatistics",
    "ScoreToken",
    "ScoreTokenResp",
    "Team",
    "TeamMember",
    "User",

@@ -2,7 +2,7 @@ from datetime import datetime
from typing import TYPE_CHECKING

from sqlalchemy import Column, DateTime
-from sqlmodel import Field, Relationship, SQLModel
+from sqlmodel import BigInteger, Field, ForeignKey, Relationship, SQLModel

if TYPE_CHECKING:
    from .user import User

@@ -12,7 +12,9 @@ class OAuthToken(SQLModel, table=True):
    __tablename__ = "oauth_tokens"  # pyright: ignore[reportAssignmentType]

    id: int | None = Field(default=None, primary_key=True, index=True)
-    user_id: int = Field(foreign_key="users.id")
+    user_id: int = Field(
+        sa_column=Column(BigInteger, ForeignKey("users.id"), index=True)
+    )
    access_token: str = Field(max_length=500, unique=True)
    refresh_token: str = Field(max_length=500, unique=True)
    token_type: str = Field(default="Bearer", max_length=20)

@@ -1,6 +1,6 @@
from datetime import datetime
from typing import TYPE_CHECKING

from app.fetcher import Fetcher
from app.models.beatmap import BeatmapRankStatus
from app.models.score import MODE_TO_INT, GameMode

@@ -11,6 +11,9 @@ from sqlalchemy.orm import joinedload
from sqlmodel import VARCHAR, Field, Relationship, SQLModel, select
from sqlmodel.ext.asyncio.session import AsyncSession

+if TYPE_CHECKING:
+    from app.fetcher import Fetcher
+

class BeatmapOwner(SQLModel):
    id: int

@@ -65,6 +68,10 @@ class Beatmap(BeatmapBase, table=True):
    # optional
    beatmapset: Beatmapset = Relationship(back_populates="beatmaps")

+    @property
+    def can_ranked(self) -> bool:
+        return self.beatmap_status > BeatmapRankStatus.PENDING
+
    @classmethod
    async def from_resp(cls, session: AsyncSession, resp: "BeatmapResp") -> "Beatmap":
        d = resp.model_dump()

@@ -79,7 +86,16 @@ class Beatmap(BeatmapBase, table=True):
        )
        session.add(beatmap)
        await session.commit()
-        await session.refresh(beatmap)
+        beatmap = (
+            await session.exec(
+                select(Beatmap)
+                .options(
+                    joinedload(Beatmap.beatmapset).selectinload(Beatmapset.beatmaps)  # pyright: ignore[reportArgumentType]
+                )
+                .where(Beatmap.id == resp.id)
+            )
+        ).first()
+        assert beatmap is not None, "Beatmap should not be None after commit"
        return beatmap

    @classmethod

@@ -107,19 +123,25 @@ class Beatmap(BeatmapBase, table=True):

    @classmethod
    async def get_or_fetch(
-        cls, session: AsyncSession, bid: int, fetcher: Fetcher
+        cls,
+        session: AsyncSession,
+        fetcher: "Fetcher",
+        bid: int | None = None,
+        md5: str | None = None,
    ) -> "Beatmap":
        beatmap = (
            await session.exec(
                select(Beatmap)
-                .where(Beatmap.id == bid)
+                .where(
+                    Beatmap.id == bid if bid is not None else Beatmap.checksum == md5
+                )
                .options(
                    joinedload(Beatmap.beatmapset).selectinload(Beatmapset.beatmaps)  # pyright: ignore[reportArgumentType]
                )
            )
        ).first()
        if not beatmap:
-            resp = await fetcher.get_beatmap(bid)
+            resp = await fetcher.get_beatmap(bid, md5)
            r = await session.exec(
                select(Beatmapset.id).where(Beatmapset.id == resp.beatmapset_id)
            )

@@ -2,6 +2,7 @@ from datetime import datetime
from typing import TYPE_CHECKING, TypedDict, cast

from app.models.beatmap import BeatmapRankStatus, Genre, Language
+from app.models.score import GameMode

from pydantic import BaseModel, model_serializer
from sqlalchemy import DECIMAL, JSON, Column, DateTime, Text

@@ -68,7 +69,7 @@ class BeatmapNomination(TypedDict):
    beatmapset_id: int
    reset: bool
    user_id: int
-    rulesets: list[str] | None
+    rulesets: list[GameMode] | None


class BeatmapDescription(SQLModel):

@@ -3,7 +3,7 @@ from typing import TYPE_CHECKING

from sqlalchemy import JSON, Column, DateTime
from sqlalchemy.orm import Mapped
-from sqlmodel import Field, Relationship, SQLModel
+from sqlmodel import BigInteger, Field, ForeignKey, Relationship, SQLModel

if TYPE_CHECKING:
    from .user import User

@@ -16,7 +16,7 @@ class LegacyUserStatistics(SQLModel, table=True):
    __tablename__ = "user_statistics"  # pyright: ignore[reportAssignmentType]

    id: int | None = Field(default=None, primary_key=True, index=True)
-    user_id: int = Field(foreign_key="users.id")
+    user_id: int = Field(sa_column=Column(BigInteger, ForeignKey("users.id")))
    mode: str = Field(max_length=10)  # osu, taiko, fruits, mania

    # Basic statistics

@@ -77,7 +77,7 @@ class LegacyOAuthToken(SQLModel, table=True):
    __tablename__ = "legacy_oauth_tokens"  # pyright: ignore[reportAssignmentType]

    id: int | None = Field(default=None, primary_key=True)
-    user_id: int = Field(foreign_key="users.id")
+    user_id: int = Field(sa_column=Column(BigInteger, ForeignKey("users.id")))
    access_token: str = Field(max_length=255, index=True)
    refresh_token: str = Field(max_length=255, index=True)
    expires_at: datetime = Field(sa_column=Column(DateTime))

@@ -4,7 +4,10 @@ from .user import User

from pydantic import BaseModel
from sqlmodel import (
+    BigInteger,
+    Column,
    Field,
+    ForeignKey,
    Relationship as SQLRelationship,
    SQLModel,
    select,

@@ -20,10 +23,22 @@ class RelationshipType(str, Enum):
class Relationship(SQLModel, table=True):
    __tablename__ = "relationship"  # pyright: ignore[reportAssignmentType]
    user_id: int = Field(
-        default=None, foreign_key="users.id", primary_key=True, index=True
+        default=None,
+        sa_column=Column(
+            BigInteger,
+            ForeignKey("users.id"),
+            primary_key=True,
+            index=True,
+        ),
    )
    target_id: int = Field(
-        default=None, foreign_key="users.id", primary_key=True, index=True
+        default=None,
+        sa_column=Column(
+            BigInteger,
+            ForeignKey("users.id"),
+            primary_key=True,
+            index=True,
+        ),
    )
    type: RelationshipType = Field(default=RelationshipType.FOLLOW, nullable=False)
    target: "User" = SQLRelationship(

@@ -2,15 +2,36 @@ from datetime import datetime
import math

from app.database.user import User
from app.models.beatmap import BeatmapRankStatus
from app.models.mods import APIMod
-from app.models.score import MODE_TO_INT, GameMode, Rank
+from app.models.score import (
+    MODE_TO_INT,
+    GameMode,
+    HitResult,
+    LeaderboardType,
+    Rank,
+    ScoreStatistics,
+)

from .beatmap import Beatmap, BeatmapResp
-from .beatmapset import BeatmapsetResp
+from .beatmapset import Beatmapset, BeatmapsetResp

from pydantic import BaseModel
-from sqlalchemy import Column, DateTime
-from sqlmodel import JSON, BigInteger, Field, Relationship, SQLModel
+from sqlalchemy import Column, ColumnExpressionArgument, DateTime
+from sqlalchemy.orm import aliased, joinedload
+from sqlmodel import (
+    JSON,
+    BigInteger,
+    Field,
+    ForeignKey,
+    Relationship,
+    SQLModel,
+    col,
+    false,
+    func,
+    select,
+)
+from sqlmodel.ext.asyncio.session import AsyncSession
+from sqlmodel.sql._expression_select_cls import SelectOfScalar


class ScoreBase(SQLModel):

@@ -34,6 +55,9 @@ class ScoreBase(SQLModel):
    room_id: int | None = Field(default=None)  # multiplayer
    started_at: datetime = Field(sa_column=Column(DateTime))
    total_score: int = Field(default=0, sa_column=Column(BigInteger))
+    total_score_without_mods: int = Field(
+        default=0, sa_column=Column(BigInteger), exclude=True
+    )
    type: str

    # optional

@@ -41,22 +65,20 @@ class ScoreBase(SQLModel):
    position: int | None = Field(default=None)  # multiplayer


-class ScoreStatistics(BaseModel):
-    count_miss: int
-    count_50: int
-    count_100: int
-    count_300: int
-    count_geki: int
-    count_katu: int
-    count_large_tick_miss: int | None = None
-    count_slider_tail_hit: int | None = None


class Score(ScoreBase, table=True):
    __tablename__ = "scores"  # pyright: ignore[reportAssignmentType]
-    id: int = Field(primary_key=True)
+    id: int | None = Field(
+        default=None, sa_column=Column(BigInteger, autoincrement=True, primary_key=True)
+    )
    beatmap_id: int = Field(index=True, foreign_key="beatmaps.id")
-    user_id: int = Field(foreign_key="users.id", index=True)
+    user_id: int = Field(
+        default=None,
+        sa_column=Column(
+            BigInteger,
+            ForeignKey("users.id"),
+            index=True,
+        ),
+    )
    # ScoreStatistics
    n300: int = Field(exclude=True)
    n100: int = Field(exclude=True)

@@ -72,9 +94,51 @@ class Score(ScoreBase, table=True):
    beatmap: "Beatmap" = Relationship()
    user: "User" = Relationship()

    @property
    def is_perfect_combo(self) -> bool:
        return self.max_combo == self.beatmap.max_combo

    @staticmethod
    def select_clause() -> SelectOfScalar["Score"]:
        return select(Score).options(
            joinedload(Score.beatmap)  # pyright: ignore[reportArgumentType]
            .joinedload(Beatmap.beatmapset)  # pyright: ignore[reportArgumentType]
            .selectinload(
                Beatmapset.beatmaps  # pyright: ignore[reportArgumentType]
            ),
            joinedload(Score.user).joinedload(User.lazer_profile),  # pyright: ignore[reportArgumentType]
        )

    @staticmethod
    def select_clause_unique(
        *where_clauses: ColumnExpressionArgument[bool] | bool,
    ) -> SelectOfScalar["Score"]:
        rownum = (
            func.row_number()
            .over(
                partition_by=col(Score.user_id), order_by=col(Score.total_score).desc()
            )
            .label("rn")
        )
        subq = select(Score, rownum).where(*where_clauses).subquery()
        best = aliased(Score, subq, adapt_on_names=True)
        return (
            select(best)
            .where(subq.c.rn == 1)
            .options(
                joinedload(best.beatmap)  # pyright: ignore[reportArgumentType]
                .joinedload(Beatmap.beatmapset)  # pyright: ignore[reportArgumentType]
                .selectinload(
                    Beatmapset.beatmaps  # pyright: ignore[reportArgumentType]
                ),
                joinedload(best.user).joinedload(User.lazer_profile),  # pyright: ignore[reportArgumentType]
            )
        )


class ScoreResp(ScoreBase):
    id: int
    user_id: int
    is_perfect_combo: bool = False
    legacy_perfect: bool = False
    legacy_total_score: int = 0  # FIXME

@@ -85,10 +149,13 @@ class ScoreResp(ScoreBase):
    beatmapset: BeatmapsetResp | None = None
    # FIXME: user: APIUser | None = None
    statistics: ScoreStatistics | None = None
+    rank_global: int | None = None
+    rank_country: int | None = None

    @classmethod
-    def from_db(cls, score: Score) -> "ScoreResp":
+    async def from_db(cls, session: AsyncSession, score: Score) -> "ScoreResp":
        s = cls.model_validate(score.model_dump())
+        assert score.id
        s.beatmap = BeatmapResp.from_db(score.beatmap)
        s.beatmapset = BeatmapsetResp.from_db(score.beatmap.beatmapset)
        s.is_perfect_combo = s.max_combo == s.beatmap.max_combo

@@ -97,14 +164,220 @@ class ScoreResp(ScoreBase):
        if score.best_id:
            # https://osu.ppy.sh/wiki/Performance_points/Weighting_system
            s.weight = math.pow(0.95, score.best_id)
-        s.statistics = ScoreStatistics(
-            count_miss=score.nmiss,
-            count_50=score.n50,
-            count_100=score.n100,
-            count_300=score.n300,
-            count_geki=score.ngeki,
-            count_katu=score.nkatu,
-            count_large_tick_miss=score.nlarge_tick_miss,
-            count_slider_tail_hit=score.nslider_tail_hit,
+        s.statistics = {
+            HitResult.MISS: score.nmiss,
+            HitResult.MEH: score.n50,
+            HitResult.OK: score.n100,
+            HitResult.GREAT: score.n300,
+            HitResult.PERFECT: score.ngeki,
+            HitResult.GOOD: score.nkatu,
+        }
+        if score.nlarge_tick_miss is not None:
+            s.statistics[HitResult.LARGE_TICK_MISS] = score.nlarge_tick_miss
+        if score.nslider_tail_hit is not None:
+            s.statistics[HitResult.SLIDER_TAIL_HIT] = score.nslider_tail_hit
        # s.user = await convert_db_user_to_api_user(score.user)
        s.rank_global = (
            await get_score_position_by_id(
                session,
                score.map_md5,
                score.id,
                mode=score.gamemode,
                user=score.user,
            )
            or None
        )
        s.rank_country = (
            await get_score_position_by_id(
                session,
                score.map_md5,
                score.id,
                score.gamemode,
                score.user,
            )
            or None
        )
        return s


async def get_leaderboard(
    session: AsyncSession,
    beatmap_md5: str,
    mode: GameMode,
    type: LeaderboardType = LeaderboardType.GLOBAL,
    mods: list[APIMod] | None = None,
    user: User | None = None,
    limit: int = 50,
) -> list[Score]:
    scores = []
    if type == LeaderboardType.GLOBAL:
        query = (
            select(Score)
            .where(
                col(Beatmap.beatmap_status).in_(
                    [
                        BeatmapRankStatus.RANKED,
                        BeatmapRankStatus.LOVED,
                        BeatmapRankStatus.QUALIFIED,
                        BeatmapRankStatus.APPROVED,
                    ]
                ),
                Score.map_md5 == beatmap_md5,
                Score.gamemode == mode,
                col(Score.passed).is_(True),
                Score.mods == mods if user and user.is_supporter else false(),
            )
            .limit(limit)
            .order_by(
                col(Score.total_score).desc(),
            )
        )
        result = await session.exec(query)
        scores = list[Score](result.all())
    elif type == LeaderboardType.FRIENDS and user and user.is_supporter:
        # TODO
        ...
    elif type == LeaderboardType.TEAM and user and user.team_membership:
        team_id = user.team_membership.team_id
        query = (
            select(Score)
            .join(Beatmap)
            .options(joinedload(Score.user))  # pyright: ignore[reportArgumentType]
            .where(
                Score.map_md5 == beatmap_md5,
                Score.gamemode == mode,
                col(Score.passed).is_(True),
                col(Score.user.team_membership).is_not(None),
                Score.user.team_membership.team_id == team_id,  # pyright: ignore[reportOptionalMemberAccess]
                Score.mods == mods if user and user.is_supporter else false(),
            )
            .limit(limit)
            .order_by(
                col(Score.total_score).desc(),
            )
        )
        result = await session.exec(query)
        scores = list[Score](result.all())
    if user:
        user_score = (
            await session.exec(
                select(Score).where(
                    Score.map_md5 == beatmap_md5,
                    Score.gamemode == mode,
                    Score.user_id == user.id,
                    col(Score.passed).is_(True),
                )
            )
        ).first()
        if user_score and user_score not in scores:
            scores.append(user_score)
    return scores


async def get_score_position_by_user(
    session: AsyncSession,
    beatmap_md5: str,
    user: User,
    mode: GameMode,
    type: LeaderboardType = LeaderboardType.GLOBAL,
    mods: list[APIMod] | None = None,
) -> int:
    where_clause = [
        Score.map_md5 == beatmap_md5,
        Score.gamemode == mode,
        col(Score.passed).is_(True),
        col(Beatmap.beatmap_status).in_(
            [
                BeatmapRankStatus.RANKED,
                BeatmapRankStatus.LOVED,
                BeatmapRankStatus.QUALIFIED,
                BeatmapRankStatus.APPROVED,
            ]
        ),
    ]
    if mods and user.is_supporter:
        where_clause.append(Score.mods == mods)
    else:
        where_clause.append(false())
    if type == LeaderboardType.FRIENDS and user.is_supporter:
        # TODO
        ...
    elif type == LeaderboardType.TEAM and user.team_membership:
        team_id = user.team_membership.team_id
        where_clause.append(
            col(Score.user.team_membership).is_not(None),
        )
        where_clause.append(
            Score.user.team_membership.team_id == team_id,  # pyright: ignore[reportOptionalMemberAccess]
        )
    rownum = (
        func.row_number()
        .over(
            partition_by=Score.map_md5,
            order_by=col(Score.total_score).desc(),
        )
        .label("row_number")
    )
    subq = select(Score, rownum).join(Beatmap).where(*where_clause).subquery()
    stmt = select(subq.c.row_number).where(subq.c.user == user)
    result = await session.exec(stmt)
    s = result.one_or_none()
    return s if s else 0


async def get_score_position_by_id(
    session: AsyncSession,
    beatmap_md5: str,
    score_id: int,
    mode: GameMode,
    user: User | None = None,
    type: LeaderboardType = LeaderboardType.GLOBAL,
    mods: list[APIMod] | None = None,
) -> int:
    where_clause = [
        Score.map_md5 == beatmap_md5,
        Score.id == score_id,
        Score.gamemode == mode,
        col(Score.passed).is_(True),
        col(Beatmap.beatmap_status).in_(
            [
                BeatmapRankStatus.RANKED,
                BeatmapRankStatus.LOVED,
                BeatmapRankStatus.QUALIFIED,
                BeatmapRankStatus.APPROVED,
            ]
        ),
    ]
    if mods and user and user.is_supporter:
        where_clause.append(Score.mods == mods)
    elif mods:
        where_clause.append(false())
    rownum = (
        func.row_number()
        .over(
            partition_by=[col(Score.user_id), col(Score.map_md5)],
            order_by=col(Score.total_score).desc(),
        )
        .label("rownum")
    )
    subq = (
        select(Score.user_id, Score.id, Score.total_score, rownum)
        .join(Beatmap)
        .where(*where_clause)
        .subquery()
    )
    best_scores = aliased(subq)
    overall_rank = (
        func.rank().over(order_by=best_scores.c.total_score.desc()).label("global_rank")
    )
    final_q = (
        select(best_scores.c.id, overall_rank)
        .select_from(best_scores)
        .where(best_scores.c.rownum == 1)
        .subquery()
    )

    stmt = select(final_q.c.global_rank).where(final_q.c.id == score_id)
    result = await session.exec(stmt)
    s = result.one_or_none()
    return s if s else 0

50 app/database/score_token.py Normal file
@@ -0,0 +1,50 @@
from datetime import datetime

from app.models.score import GameMode

from .beatmap import Beatmap
from .user import User

from sqlalchemy import Column, DateTime, Index
from sqlmodel import BigInteger, Field, ForeignKey, Relationship, SQLModel


class ScoreTokenBase(SQLModel):
    score_id: int | None = Field(sa_column=Column(BigInteger), default=None)
    ruleset_id: GameMode
    playlist_item_id: int | None = Field(default=None)  # playlist
    created_at: datetime = Field(
        default_factory=datetime.utcnow, sa_column=Column(DateTime)
    )
    updated_at: datetime = Field(
        default_factory=datetime.utcnow, sa_column=Column(DateTime)
    )


class ScoreToken(ScoreTokenBase, table=True):
    __tablename__ = "score_tokens"  # pyright: ignore[reportAssignmentType]
    __table_args__ = (Index("idx_user_playlist", "user_id", "playlist_item_id"),)

    id: int | None = Field(
        default=None,
        sa_column=Column(
            BigInteger,
            primary_key=True,
            index=True,
            autoincrement=True,
        ),
    )
    user_id: int = Field(sa_column=Column(BigInteger, ForeignKey("users.id")))
    beatmap_id: int = Field(foreign_key="beatmaps.id")
    user: "User" = Relationship()
    beatmap: "Beatmap" = Relationship()


class ScoreTokenResp(ScoreTokenBase):
    id: int
    user_id: int
    beatmap_id: int

    @classmethod
    def from_db(cls, obj: ScoreToken) -> "ScoreTokenResp":
        return cls.model_validate(obj)
@@ -2,8 +2,7 @@ from datetime import datetime
from typing import TYPE_CHECKING

from sqlalchemy import Column, DateTime
-from sqlalchemy.orm import Mapped
-from sqlmodel import Field, Relationship, SQLModel
+from sqlmodel import BigInteger, Field, ForeignKey, Relationship, SQLModel

if TYPE_CHECKING:
    from .user import User

@@ -20,18 +19,18 @@ class Team(SQLModel, table=True):
        default_factory=datetime.utcnow, sa_column=Column(DateTime)
    )

-    members: Mapped[list["TeamMember"]] = Relationship(back_populates="team")
+    members: list["TeamMember"] = Relationship(back_populates="team")


class TeamMember(SQLModel, table=True):
    __tablename__ = "team_members"  # pyright: ignore[reportAssignmentType]

    id: int | None = Field(default=None, primary_key=True, index=True)
-    user_id: int = Field(foreign_key="users.id")
+    user_id: int = Field(sa_column=Column(BigInteger, ForeignKey("users.id")))
    team_id: int = Field(foreign_key="teams.id")
    joined_at: datetime = Field(
        default_factory=datetime.utcnow, sa_column=Column(DateTime)
    )

-    user: Mapped["User"] = Relationship(back_populates="team_membership")
-    team: Mapped["Team"] = Relationship(back_populates="members")
+    user: "User" = Relationship(back_populates="team_membership")
+    team: "Team" = Relationship(back_populates="members")

@@ -7,16 +7,19 @@ from .team import TeamMember

from sqlalchemy import DECIMAL, JSON, Column, Date, DateTime, Text
from sqlalchemy.dialects.mysql import VARCHAR
-from sqlmodel import BigInteger, Field, Relationship, SQLModel
+from sqlalchemy.orm import joinedload, selectinload
+from sqlmodel import BigInteger, Field, ForeignKey, Relationship, SQLModel, select


class User(SQLModel, table=True):
    __tablename__ = "users"  # pyright: ignore[reportAssignmentType]

    # Primary key
-    id: int = Field(default=None, primary_key=True, index=True, nullable=False)
+    id: int = Field(
+        default=None, sa_column=Column(BigInteger, primary_key=True, index=True)
+    )

-    # Basic information (matches the structure in migrations)
+    # Basic information (matches the structure in migrations_old)
    name: str = Field(max_length=32, unique=True, index=True)  # username
    safe_name: str = Field(max_length=32, unique=True, index=True)  # sanitized username
    email: str = Field(max_length=254, unique=True, index=True)

@@ -65,6 +68,10 @@ class User(SQLModel, table=True):
        latest_activity = getattr(self, "latest_activity", 0)
        return datetime.fromtimestamp(latest_activity) if latest_activity > 0 else None

+    @property
+    def is_supporter(self):
+        return self.lazer_profile.is_supporter if self.lazer_profile else False
+
    # Relationships
    lazer_profile: Optional["LazerUserProfile"] = Relationship(back_populates="user")
    lazer_statistics: list["LazerUserStatistics"] = Relationship(back_populates="user")

@@ -76,7 +83,7 @@ class User(SQLModel, table=True):
        back_populates="user"
    )
    statistics: list["LegacyUserStatistics"] = Relationship(back_populates="user")
-    team_membership: list["TeamMember"] = Relationship(back_populates="user")
+    team_membership: Optional["TeamMember"] = Relationship(back_populates="user")
    daily_challenge_stats: Optional["DailyChallengeStats"] = Relationship(
        back_populates="user"
    )

@@ -94,6 +101,26 @@ class User(SQLModel, table=True):
        back_populates="user"
    )

+    @classmethod
+    def all_select_clause(cls):
+        return select(cls).options(
+            joinedload(cls.lazer_profile),  # pyright: ignore[reportArgumentType]
+            joinedload(cls.lazer_counts),  # pyright: ignore[reportArgumentType]
+            joinedload(cls.daily_challenge_stats),  # pyright: ignore[reportArgumentType]
+            joinedload(cls.avatar),  # pyright: ignore[reportArgumentType]
+            selectinload(cls.lazer_statistics),  # pyright: ignore[reportArgumentType]
+            selectinload(cls.lazer_achievements),  # pyright: ignore[reportArgumentType]
+            selectinload(cls.lazer_profile_sections),  # pyright: ignore[reportArgumentType]
+            selectinload(cls.statistics),  # pyright: ignore[reportArgumentType]
+            joinedload(cls.team_membership),  # pyright: ignore[reportArgumentType]
+            selectinload(cls.rank_history),  # pyright: ignore[reportArgumentType]
+            selectinload(cls.active_banners),  # pyright: ignore[reportArgumentType]
+            selectinload(cls.lazer_badges),  # pyright: ignore[reportArgumentType]
+            selectinload(cls.lazer_monthly_playcounts),  # pyright: ignore[reportArgumentType]
+            selectinload(cls.lazer_previous_usernames),  # pyright: ignore[reportArgumentType]
+            selectinload(cls.lazer_replays_watched),  # pyright: ignore[reportArgumentType]
+        )
+

# ============================================
# Lazer-API-specific table models

@@ -103,7 +130,14 @@ class User(SQLModel, table=True):
class LazerUserProfile(SQLModel, table=True):
    __tablename__ = "lazer_user_profiles"  # pyright: ignore[reportAssignmentType]

-    user_id: int = Field(foreign_key="users.id", primary_key=True)
+    user_id: int = Field(
+        default=None,
+        sa_column=Column(
+            BigInteger,
+            ForeignKey("users.id"),
+            primary_key=True,
+        ),
+    )

    # Basic status fields
    is_active: bool = Field(default=True)

@@ -159,7 +193,7 @@ class LazerUserProfileSections(SQLModel, table=True):
    __tablename__ = "lazer_user_profile_sections"  # pyright: ignore[reportAssignmentType]

    id: int | None = Field(default=None, primary_key=True)
-    user_id: int = Field(foreign_key="users.id")
+    user_id: int = Field(sa_column=Column(BigInteger, ForeignKey("users.id")))
    section_name: str = Field(sa_column=Column(VARCHAR(50)))
    display_order: int | None = Field(default=None)

@@ -176,7 +210,14 @@ class LazerUserProfileSections(SQLModel, table=True):
class LazerUserCountry(SQLModel, table=True):
    __tablename__ = "lazer_user_countries"  # pyright: ignore[reportAssignmentType]

-    user_id: int = Field(foreign_key="users.id", primary_key=True)
+    user_id: int = Field(
+        default=None,
+        sa_column=Column(
+            BigInteger,
+            ForeignKey("users.id"),
+            primary_key=True,
+        ),
+    )
    code: str = Field(max_length=2)
    name: str = Field(max_length=100)

@@ -191,7 +232,14 @@ class LazerUserCountry(SQLModel, table=True):
class LazerUserKudosu(SQLModel, table=True):
    __tablename__ = "lazer_user_kudosu"  # pyright: ignore[reportAssignmentType]

-    user_id: int = Field(foreign_key="users.id", primary_key=True)
+    user_id: int = Field(
+        default=None,
+        sa_column=Column(
+            BigInteger,
+            ForeignKey("users.id"),
+            primary_key=True,
+        ),
+    )
    available: int = Field(default=0)
    total: int = Field(default=0)

@@ -206,7 +254,14 @@ class LazerUserKudosu(SQLModel, table=True):
class LazerUserCounts(SQLModel, table=True):
    __tablename__ = "lazer_user_counts"  # pyright: ignore[reportAssignmentType]

-    user_id: int = Field(foreign_key="users.id", primary_key=True)
+    user_id: int = Field(
+        default=None,
+        sa_column=Column(
+            BigInteger,
+            ForeignKey("users.id"),
+            primary_key=True,
+        ),
+    )

    # Count statistics fields
    beatmap_playcounts_count: int = Field(default=0)

@@ -241,7 +296,14 @@ class LazerUserCounts(SQLModel, table=True):
class LazerUserStatistics(SQLModel, table=True):
    __tablename__ = "lazer_user_statistics"  # pyright: ignore[reportAssignmentType]

-    user_id: int = Field(foreign_key="users.id", primary_key=True)
+    user_id: int = Field(
+        default=None,
+        sa_column=Column(
+            BigInteger,
+            ForeignKey("users.id"),
+            primary_key=True,
+        ),
+    )
    mode: str = Field(default="osu", max_length=10, primary_key=True)

    # Basic hit statistics

@@ -302,7 +364,7 @@ class LazerUserBanners(SQLModel, table=True):
    __tablename__ = "lazer_user_tournament_banners"  # pyright: ignore[reportAssignmentType]

    id: int | None = Field(default=None, primary_key=True)
-    user_id: int = Field(foreign_key="users.id")
+    user_id: int = Field(sa_column=Column(BigInteger, ForeignKey("users.id")))
    tournament_id: int
    image_url: str = Field(sa_column=Column(VARCHAR(500)))
    is_active: bool | None = Field(default=None)

@@ -315,7 +377,7 @@ class LazerUserAchievement(SQLModel, table=True):
    __tablename__ = "lazer_user_achievements"  # pyright: ignore[reportAssignmentType]

    id: int | None = Field(default=None, primary_key=True, index=True)
-    user_id: int = Field(foreign_key="users.id")
+    user_id: int = Field(sa_column=Column(BigInteger, ForeignKey("users.id")))
    achievement_id: int
    achieved_at: datetime = Field(
        default_factory=datetime.utcnow, sa_column=Column(DateTime)

@@ -328,7 +390,7 @@ class LazerUserBadge(SQLModel, table=True):
    __tablename__ = "lazer_user_badges"  # pyright: ignore[reportAssignmentType]

    id: int | None = Field(default=None, primary_key=True, index=True)
-    user_id: int = Field(foreign_key="users.id")
+    user_id: int = Field(sa_column=Column(BigInteger, ForeignKey("users.id")))
    badge_id: int
    awarded_at: datetime | None = Field(default=None, sa_column=Column(DateTime))
    description: str | None = Field(default=None, sa_column=Column(Text))

@@ -349,7 +411,7 @@ class LazerUserMonthlyPlaycounts(SQLModel, table=True):
    __tablename__ = "lazer_user_monthly_playcounts"  # pyright: ignore[reportAssignmentType]

    id: int | None = Field(default=None, primary_key=True, index=True)
-    user_id: int = Field(foreign_key="users.id")
+    user_id: int = Field(sa_column=Column(BigInteger, ForeignKey("users.id")))
    start_date: datetime = Field(sa_column=Column(Date))
    play_count: int = Field(default=0)

@@ -367,7 +429,7 @@ class LazerUserPreviousUsername(SQLModel, table=True):
    __tablename__ = "lazer_user_previous_usernames"  # pyright: ignore[reportAssignmentType]

    id: int | None = Field(default=None, primary_key=True, index=True)
-    user_id: int = Field(foreign_key="users.id")
+    user_id: int = Field(sa_column=Column(BigInteger, ForeignKey("users.id")))
    username: str = Field(max_length=32)
    changed_at: datetime = Field(sa_column=Column(DateTime))

@@ -385,7 +447,7 @@ class LazerUserReplaysWatched(SQLModel, table=True):
    __tablename__ = "lazer_user_replays_watched"  # pyright: ignore[reportAssignmentType]

    id: int | None = Field(default=None, primary_key=True, index=True)
-    user_id: int = Field(foreign_key="users.id")
+    user_id: int = Field(sa_column=Column(BigInteger, ForeignKey("users.id")))
    start_date: datetime = Field(sa_column=Column(Date))
    count: int = Field(default=0)

@@ -410,7 +472,9 @@ class DailyChallengeStats(SQLModel, table=True):
    __tablename__ = "daily_challenge_stats"  # pyright: ignore[reportAssignmentType]

    id: int | None = Field(default=None, primary_key=True, index=True)
-    user_id: int = Field(foreign_key="users.id", unique=True)
+    user_id: int = Field(
+        sa_column=Column(BigInteger, ForeignKey("users.id"), unique=True)
+    )

    daily_streak_best: int = Field(default=0)
    daily_streak_current: int = Field(default=0)

@@ -431,7 +495,7 @@ class RankHistory(SQLModel, table=True):
    __tablename__ = "rank_history"  # pyright: ignore[reportAssignmentType]

    id: int | None = Field(default=None, primary_key=True, index=True)
-    user_id: int = Field(foreign_key="users.id")
+    user_id: int = Field(sa_column=Column(BigInteger, ForeignKey("users.id")))
    mode: str = Field(max_length=10)
    rank_data: list = Field(sa_column=Column(JSON))  # Array of ranks
    date_recorded: datetime = Field(

@@ -445,7 +509,7 @@ class UserAvatar(SQLModel, table=True):
    __tablename__ = "user_avatars"  # pyright: ignore[reportAssignmentType]

    id: int | None = Field(default=None, primary_key=True, index=True)
-    user_id: int = Field(foreign_key="users.id")
+    user_id: int = Field(sa_column=Column(BigInteger, ForeignKey("users.id")))
    filename: str = Field(max_length=255)
    original_filename: str = Field(max_length=255)
    file_size: int

@@ -3,6 +3,7 @@ from __future__ import annotations
from app.config import settings
from app.dependencies.database import get_redis
from app.fetcher import Fetcher
+from app.log import logger

fetcher: Fetcher | None = None

@@ -25,5 +26,7 @@ def get_fetcher() -> Fetcher:
    if refresh_token:
        fetcher.refresh_token = str(refresh_token)
    if not fetcher.access_token or not fetcher.refresh_token:
-        print("Login to initialize fetcher:", fetcher.authorize_url)
+        logger.opt(colors=True).info(
+            f"Login to initialize fetcher: <y>{fetcher.authorize_url}</y>"
+        )
    return fetcher

@@ -9,8 +9,6 @@ from .database import get_db
|
||||
|
||||
from fastapi import Depends, HTTPException
|
||||
from fastapi.security import HTTPAuthorizationCredentials, HTTPBearer
|
||||
from sqlalchemy.orm import joinedload, selectinload
|
||||
from sqlmodel import select
|
||||
from sqlmodel.ext.asyncio.session import AsyncSession
|
||||
|
||||
security = HTTPBearer()
|
||||
@@ -35,25 +33,7 @@ async def get_current_user_by_token(token: str, db: AsyncSession) -> DBUser | No
|
||||
return None
|
||||
user = (
|
||||
await db.exec(
|
||||
select(DBUser)
|
||||
.options(
|
||||
joinedload(DBUser.lazer_profile), # pyright: ignore[reportArgumentType]
|
||||
joinedload(DBUser.lazer_counts), # pyright: ignore[reportArgumentType]
|
||||
joinedload(DBUser.daily_challenge_stats), # pyright: ignore[reportArgumentType]
|
||||
joinedload(DBUser.avatar), # pyright: ignore[reportArgumentType]
|
||||
selectinload(DBUser.lazer_statistics), # pyright: ignore[reportArgumentType]
|
||||
selectinload(DBUser.lazer_achievements), # pyright: ignore[reportArgumentType]
|
||||
selectinload(DBUser.lazer_profile_sections), # pyright: ignore[reportArgumentType]
|
||||
selectinload(DBUser.statistics), # pyright: ignore[reportArgumentType]
|
||||
selectinload(DBUser.team_membership), # pyright: ignore[reportArgumentType]
|
||||
selectinload(DBUser.rank_history), # pyright: ignore[reportArgumentType]
|
||||
selectinload(DBUser.active_banners), # pyright: ignore[reportArgumentType]
|
||||
selectinload(DBUser.lazer_badges), # pyright: ignore[reportArgumentType]
|
||||
selectinload(DBUser.lazer_monthly_playcounts), # pyright: ignore[reportArgumentType]
|
||||
selectinload(DBUser.lazer_previous_usernames), # pyright: ignore[reportArgumentType]
|
||||
selectinload(DBUser.lazer_replays_watched), # pyright: ignore[reportArgumentType]
|
||||
)
|
||||
.where(DBUser.id == token_record.user_id)
|
||||
DBUser.all_select_clause().where(DBUser.id == token_record.user_id)
|
||||
)
|
||||
).first()
|
||||
return user
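The replacement line relies on a DBUser.all_select_clause() helper that is not shown in this diff. A hedged reconstruction of what it likely does, bundling the same eager-loading options the old query listed inline (the exact relationship list may differ):

from sqlalchemy.orm import joinedload, selectinload
from sqlmodel import select


def all_select_clause(user_cls):
    """Hypothetical sketch of the helper; in the codebase it is a classmethod
    on the User model and may eager-load additional relationships."""
    return select(user_cls).options(
        joinedload(user_cls.lazer_profile),
        joinedload(user_cls.lazer_counts),
        joinedload(user_cls.daily_challenge_stats),
        joinedload(user_cls.avatar),
        selectinload(user_cls.lazer_statistics),
        selectinload(user_cls.statistics),
        selectinload(user_cls.team_membership),
        # ...the remaining to-many relationships via selectinload, as before
    )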
|
||||
|
||||
@@ -1,23 +1,31 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import TYPE_CHECKING
|
||||
from app.database.beatmap import BeatmapResp
|
||||
from app.log import logger
|
||||
|
||||
from ._base import BaseFetcher
|
||||
|
||||
from httpx import AsyncClient
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from app.database.beatmap import BeatmapResp
|
||||
|
||||
|
||||
class BeatmapFetcher(BaseFetcher):
|
||||
async def get_beatmap(self, beatmap_id: int) -> "BeatmapResp":
|
||||
from app.database.beatmap import BeatmapResp
|
||||
|
||||
async def get_beatmap(
|
||||
self, beatmap_id: int | None = None, beatmap_checksum: str | None = None
|
||||
) -> BeatmapResp:
|
||||
if beatmap_id:
|
||||
params = {"id": beatmap_id}
|
||||
elif beatmap_checksum:
|
||||
params = {"checksum": beatmap_checksum}
|
||||
else:
|
||||
raise ValueError("Either beatmap_id or beatmap_checksum must be provided.")
|
||||
logger.opt(colors=True).debug(
|
||||
f"<blue>[BeatmapFetcher]</blue> get_beatmap: <y>{params}</y>"
|
||||
)
|
||||
async with AsyncClient() as client:
|
||||
response = await client.get(
|
||||
f"https://osu.ppy.sh/api/v2/beatmaps/{beatmap_id}",
|
||||
"https://osu.ppy.sh/api/v2/beatmaps/lookup",
|
||||
headers=self.header,
|
||||
params=params,
|
||||
)
|
||||
response.raise_for_status()
|
||||
return BeatmapResp.model_validate(response.json())
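A usage sketch for the reworked method, assuming an authorized BeatmapFetcher instance and that BeatmapResp exposes the checksum field returned by the API:

async def demo(fetcher):
    # Resolve by ID, then by checksum; both now go through the lookup endpoint.
    beatmap = await fetcher.get_beatmap(beatmap_id=75)
    same_map = await fetcher.get_beatmap(beatmap_checksum=beatmap.checksum)
    # Calling it with neither argument raises ValueError.
    return beatmap, same_map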
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from app.database.beatmapset import BeatmapsetResp
|
||||
from app.log import logger
|
||||
|
||||
from ._base import BaseFetcher
|
||||
|
||||
@@ -9,6 +10,9 @@ from httpx import AsyncClient
|
||||
|
||||
class BeatmapsetFetcher(BaseFetcher):
|
||||
async def get_beatmapset(self, beatmap_set_id: int) -> BeatmapsetResp:
|
||||
logger.opt(colors=True).debug(
|
||||
f"<blue>[BeatmapsetFetcher]</blue> get_beatmapset: <y>{beatmap_set_id}</y>"
|
||||
)
|
||||
async with AsyncClient() as client:
|
||||
response = await client.get(
|
||||
f"https://osu.ppy.sh/api/v2/beatmapsets/{beatmap_set_id}",
|
||||
|
||||
@@ -3,10 +3,14 @@ from __future__ import annotations
|
||||
from ._base import BaseFetcher
|
||||
|
||||
from httpx import AsyncClient
|
||||
from loguru import logger
|
||||
|
||||
|
||||
class OsuDotDirectFetcher(BaseFetcher):
|
||||
async def get_beatmap_raw(self, beatmap_id: int) -> str:
|
||||
logger.opt(colors=True).debug(
|
||||
f"<blue>[OsuDotDirectFetcher]</blue> get_beatmap_raw: <y>{beatmap_id}</y>"
|
||||
)
|
||||
async with AsyncClient() as client:
|
||||
response = await client.get(
|
||||
f"https://osu.direct/api/osu/{beatmap_id}/raw",
|
||||
|
||||
138
app/log.py
Normal file
@@ -0,0 +1,138 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import http
|
||||
import inspect
|
||||
import logging
|
||||
import re
|
||||
from sys import stdout
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from app.config import settings
|
||||
|
||||
import loguru
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from loguru import Logger
|
||||
|
||||
logger: "Logger" = loguru.logger
|
||||
|
||||
|
||||
class InterceptHandler(logging.Handler):
|
||||
def emit(self, record: logging.LogRecord) -> None:
|
||||
# Get corresponding Loguru level if it exists.
|
||||
try:
|
||||
level: str | int = logger.level(record.levelname).name
|
||||
except ValueError:
|
||||
level = record.levelno
|
||||
|
||||
# Find caller from where originated the logged message.
|
||||
frame, depth = inspect.currentframe(), 0
|
||||
while frame:
|
||||
filename = frame.f_code.co_filename
|
||||
is_logging = filename == logging.__file__
|
||||
is_frozen = "importlib" in filename and "_bootstrap" in filename
|
||||
if depth > 0 and not (is_logging or is_frozen):
|
||||
break
|
||||
frame = frame.f_back
|
||||
depth += 1
|
||||
|
||||
message = record.getMessage()
|
||||
|
||||
if record.name == "uvicorn.access":
|
||||
message = self._format_uvicorn_access_log(message)
|
||||
elif record.name == "uvicorn.error":
|
||||
message = self._format_uvicorn_error_log(message)
|
||||
logger.opt(depth=depth, exception=record.exc_info, colors=True).log(
|
||||
level, message
|
||||
)
|
||||
|
||||
def _format_uvicorn_error_log(self, message: str) -> str:
|
||||
websocket_pattern = (
|
||||
r'(\d+\.\d+\.\d+\.\d+:\d+)\s*-\s*"WebSocket\s+([^"]+)"\s+([\w\[\]]+)'
|
||||
)
|
||||
websocket_match = re.search(websocket_pattern, message)
|
||||
|
||||
if websocket_match:
|
||||
ip, path, status = websocket_match.groups()
|
||||
|
||||
colored_ip = f"<cyan>{ip}</cyan>"
|
||||
status_colors = {
|
||||
"[accepted]": "<green>[accepted]</green>",
|
||||
"403": "<red>403 [rejected]</red>",
|
||||
}
|
||||
colored_status = status_colors.get(
|
||||
status.lower(), f"<white>{status}</white>"
|
||||
)
|
||||
return (
|
||||
f'{colored_ip} - "<bold><magenta>WebSocket</magenta> '
|
||||
f'{path}</bold>" '
|
||||
f"{colored_status}"
|
||||
)
|
||||
else:
|
||||
return message
|
||||
|
||||
def _format_uvicorn_access_log(self, message: str) -> str:
|
||||
http_pattern = r'(\d+\.\d+\.\d+\.\d+:\d+)\s*-\s*"(\w+)\s+([^"]+)"\s+(\d+)'
|
||||
|
||||
http_match = re.search(http_pattern, message)
|
||||
if http_match:
|
||||
ip, method, path, status_code = http_match.groups()
|
||||
try:
|
||||
status_phrase = http.HTTPStatus(int(status_code)).phrase
|
||||
except ValueError:
|
||||
status_phrase = ""
|
||||
|
||||
colored_ip = f"<cyan>{ip}</cyan>"
|
||||
method_colors = {
|
||||
"GET": "<green>GET</green>",
|
||||
"POST": "<blue>POST</blue>",
|
||||
"PUT": "<yellow>PUT</yellow>",
|
||||
"DELETE": "<red>DELETE</red>",
|
||||
"PATCH": "<magenta>PATCH</magenta>",
|
||||
"OPTIONS": "<white>OPTIONS</white>",
|
||||
"HEAD": "<white>HEAD</white>",
|
||||
}
|
||||
colored_method = method_colors.get(method, f"<white>{method}</white>")
|
||||
status = int(status_code)
|
||||
status_color = "white"
|
||||
if 200 <= status < 300:
|
||||
status_color = "green"
|
||||
elif 300 <= status < 400:
|
||||
status_color = "yellow"
|
||||
elif 400 <= status < 500:
|
||||
status_color = "red"
|
||||
elif 500 <= status < 600:
|
||||
status_color = "red"
|
||||
|
||||
return (
|
||||
f'{colored_ip} - "<bold>{colored_method} '
|
||||
f'{path}</bold>" '
|
||||
f"<{status_color}>{status_code} {status_phrase}</{status_color}>"
|
||||
)
|
||||
|
||||
return message
|
||||
|
||||
|
||||
logger.remove()
|
||||
logger.add(
|
||||
stdout,
|
||||
colorize=True,
|
||||
format=(
|
||||
"<green>{time:YYYY-MM-DD HH:mm:ss}</green> [<level>{level}</level>] | {message}"
|
||||
),
|
||||
level=settings.LOG_LEVEL,
|
||||
diagnose=settings.DEBUG,
|
||||
)
|
||||
logging.basicConfig(handlers=[InterceptHandler()], level=settings.LOG_LEVEL, force=True)
|
||||
|
||||
uvicorn_loggers = [
|
||||
"uvicorn",
|
||||
"uvicorn.error",
|
||||
"uvicorn.access",
|
||||
"fastapi",
|
||||
]
|
||||
|
||||
for logger_name in uvicorn_loggers:
|
||||
uvicorn_logger = logging.getLogger(logger_name)
|
||||
uvicorn_logger.handlers = [InterceptHandler()]
|
||||
uvicorn_logger.propagate = False
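With InterceptHandler installed on the root logger and on the uvicorn loggers, plain stdlib logging calls end up in the loguru sink configured above. A small illustration (the access line is made up, and it assumes LOG_LEVEL permits INFO):

import logging

# Routed through InterceptHandler into loguru; uvicorn.access messages also get
# method/status colouring from _format_uvicorn_access_log before being printed.
logging.getLogger("uvicorn.access").info(
    '127.0.0.1:54321 - "GET /api/v2/me HTTP/1.1" 200'
)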
|
||||
152
app/models/metadata_hub.py
Normal file
@@ -0,0 +1,152 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from enum import IntEnum
|
||||
from typing import Any, Literal
|
||||
|
||||
from app.models.signalr import UserState
|
||||
|
||||
from pydantic import BaseModel, ConfigDict, Field
|
||||
|
||||
|
||||
class _UserActivity(BaseModel):
|
||||
model_config = ConfigDict(serialize_by_alias=True)
|
||||
type: Literal[
|
||||
"ChoosingBeatmap",
|
||||
"InSoloGame",
|
||||
"WatchingReplay",
|
||||
"SpectatingUser",
|
||||
"SearchingForLobby",
|
||||
"InLobby",
|
||||
"InMultiplayerGame",
|
||||
"SpectatingMultiplayerGame",
|
||||
"InPlaylistGame",
|
||||
"EditingBeatmap",
|
||||
"ModdingBeatmap",
|
||||
"TestingBeatmap",
|
||||
"InDailyChallengeLobby",
|
||||
"PlayingDailyChallenge",
|
||||
] = Field(alias="$dtype")
|
||||
value: Any | None = Field(alias="$value")
|
||||
|
||||
|
||||
class ChoosingBeatmap(_UserActivity):
|
||||
type: Literal["ChoosingBeatmap"] = Field(alias="$dtype")
|
||||
|
||||
|
||||
class InGameValue(BaseModel):
|
||||
beatmap_id: int = Field(alias="BeatmapID")
|
||||
beatmap_display_title: str = Field(alias="BeatmapDisplayTitle")
|
||||
ruleset_id: int = Field(alias="RulesetID")
|
||||
ruleset_playing_verb: str = Field(alias="RulesetPlayingVerb")
|
||||
|
||||
|
||||
class _InGame(_UserActivity):
|
||||
value: InGameValue = Field(alias="$value")
|
||||
|
||||
|
||||
class InSoloGame(_InGame):
|
||||
type: Literal["InSoloGame"] = Field(alias="$dtype")
|
||||
|
||||
|
||||
class InMultiplayerGame(_InGame):
|
||||
type: Literal["InMultiplayerGame"] = Field(alias="$dtype")
|
||||
|
||||
|
||||
class SpectatingMultiplayerGame(_InGame):
|
||||
type: Literal["SpectatingMultiplayerGame"] = Field(alias="$dtype")
|
||||
|
||||
|
||||
class InPlaylistGame(_InGame):
|
||||
type: Literal["InPlaylistGame"] = Field(alias="$dtype")
|
||||
|
||||
|
||||
class EditingBeatmapValue(BaseModel):
|
||||
beatmap_id: int = Field(alias="BeatmapID")
|
||||
beatmap_display_title: str = Field(alias="BeatmapDisplayTitle")
|
||||
|
||||
|
||||
class EditingBeatmap(_UserActivity):
|
||||
type: Literal["EditingBeatmap"] = Field(alias="$dtype")
|
||||
value: EditingBeatmapValue = Field(alias="$value")
|
||||
|
||||
|
||||
class TestingBeatmap(_UserActivity):
|
||||
type: Literal["TestingBeatmap"] = Field(alias="$dtype")
|
||||
|
||||
|
||||
class ModdingBeatmap(_UserActivity):
|
||||
type: Literal["ModdingBeatmap"] = Field(alias="$dtype")
|
||||
|
||||
|
||||
class WatchingReplayValue(BaseModel):
|
||||
score_id: int = Field(alias="ScoreID")
|
||||
player_name: str = Field(alias="PlayerName")
|
||||
beatmap_id: int = Field(alias="BeatmapID")
|
||||
beatmap_display_title: str = Field(alias="BeatmapDisplayTitle")
|
||||
|
||||
|
||||
class WatchingReplay(_UserActivity):
|
||||
type: Literal["WatchingReplay"] = Field(alias="$dtype")
|
||||
value: int | None = Field(alias="$value") # Replay ID
|
||||
|
||||
|
||||
class SpectatingUser(WatchingReplay):
|
||||
type: Literal["SpectatingUser"] = Field(alias="$dtype")
|
||||
|
||||
|
||||
class SearchingForLobby(_UserActivity):
|
||||
type: Literal["SearchingForLobby"] = Field(alias="$dtype")
|
||||
|
||||
|
||||
class InLobbyValue(BaseModel):
|
||||
room_id: int = Field(alias="RoomID")
|
||||
room_name: str = Field(alias="RoomName")
|
||||
|
||||
|
||||
class InLobby(_UserActivity):
|
||||
type: Literal["InLobby"] = "InLobby"
|
||||
|
||||
|
||||
class InDailyChallengeLobby(_UserActivity):
|
||||
type: Literal["InDailyChallengeLobby"] = Field(alias="$dtype")
|
||||
|
||||
|
||||
UserActivity = (
|
||||
ChoosingBeatmap
|
||||
| InSoloGame
|
||||
| WatchingReplay
|
||||
| SpectatingUser
|
||||
| SearchingForLobby
|
||||
| InLobby
|
||||
| InMultiplayerGame
|
||||
| SpectatingMultiplayerGame
|
||||
| InPlaylistGame
|
||||
| EditingBeatmap
|
||||
| ModdingBeatmap
|
||||
| TestingBeatmap
|
||||
| InDailyChallengeLobby
|
||||
)
|
||||
|
||||
|
||||
class MetadataClientState(UserState):
|
||||
user_activity: UserActivity | None = None
|
||||
status: OnlineStatus | None = None
|
||||
|
||||
def to_dict(self) -> dict[str, Any] | None:
|
||||
if self.status is None or self.status == OnlineStatus.OFFLINE:
|
||||
return None
|
||||
dumped = self.model_dump(by_alias=True, exclude_none=True)
|
||||
return {
|
||||
"Activity": dumped.get("user_activity"),
|
||||
"Status": dumped.get("status"),
|
||||
}
|
||||
|
||||
@property
|
||||
def pushable(self) -> bool:
|
||||
return self.status is not None and self.status != OnlineStatus.OFFLINE
|
||||
|
||||
|
||||
class OnlineStatus(IntEnum):
|
||||
OFFLINE = 0  # invisible (appear offline)
|
||||
DO_NOT_DISTURB = 1
|
||||
ONLINE = 2
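The activity payloads are discriminated by the $dtype alias, so a raw hub message can be validated straight into the UserActivity union. A hedged sketch with made-up field values:

from pydantic import TypeAdapter

from app.models.metadata_hub import InSoloGame, UserActivity

payload = {
    "$dtype": "InSoloGame",
    "$value": {
        "BeatmapID": 75,
        "BeatmapDisplayTitle": "Kenji Ninuma - DISCO PRINCE [Normal]",
        "RulesetID": 0,
        "RulesetPlayingVerb": "Playing",
    },
}
activity = TypeAdapter(UserActivity).validate_python(payload)
assert isinstance(activity, InSoloGame)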
|
||||
@@ -1,47 +1,91 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import TypedDict
|
||||
import json
|
||||
from typing import Literal, NotRequired, TypedDict
|
||||
|
||||
from app.path import STATIC_DIR
|
||||
|
||||
|
||||
class APIMod(TypedDict):
|
||||
acronym: str
|
||||
settings: dict[str, bool | float | str]
|
||||
settings: NotRequired[dict[str, bool | float | str]]
|
||||
|
||||
|
||||
# https://github.com/ppy/osu-api/wiki#mods
|
||||
LEGACY_MOD_TO_API_MOD = {
|
||||
(1 << 0): APIMod(acronym="NF", settings={}), # No Fail
|
||||
(1 << 1): APIMod(acronym="EZ", settings={}),
|
||||
(1 << 2): APIMod(acronym="TD", settings={}), # Touch Device
|
||||
(1 << 3): APIMod(acronym="HD", settings={}), # Hidden
|
||||
(1 << 4): APIMod(acronym="HR", settings={}), # Hard Rock
|
||||
(1 << 5): APIMod(acronym="SD", settings={}), # Sudden Death
|
||||
(1 << 6): APIMod(acronym="DT", settings={}), # Double Time
|
||||
(1 << 7): APIMod(acronym="RX", settings={}), # Relax
|
||||
(1 << 8): APIMod(acronym="HT", settings={}), # Half Time
|
||||
(1 << 9): APIMod(acronym="NC", settings={}), # Nightcore
|
||||
(1 << 10): APIMod(acronym="FL", settings={}), # Flashlight
|
||||
(1 << 11): APIMod(acronym="AT", settings={}), # Auto Play
|
||||
(1 << 12): APIMod(acronym="SO", settings={}), # Spun Out
|
||||
(1 << 13): APIMod(acronym="AP", settings={}), # Autopilot
|
||||
(1 << 14): APIMod(acronym="PF", settings={}), # Perfect
|
||||
(1 << 15): APIMod(acronym="4K", settings={}), # 4K
|
||||
(1 << 16): APIMod(acronym="5K", settings={}), # 5K
|
||||
(1 << 17): APIMod(acronym="6K", settings={}), # 6K
|
||||
(1 << 18): APIMod(acronym="7K", settings={}), # 7K
|
||||
(1 << 19): APIMod(acronym="8K", settings={}), # 8K
|
||||
(1 << 20): APIMod(acronym="FI", settings={}), # Fade In
|
||||
(1 << 21): APIMod(acronym="RD", settings={}), # Random
|
||||
(1 << 22): APIMod(acronym="CN", settings={}), # Cinema
|
||||
(1 << 23): APIMod(acronym="TP", settings={}), # Target Practice
|
||||
(1 << 24): APIMod(acronym="9K", settings={}), # 9K
|
||||
(1 << 25): APIMod(acronym="CO", settings={}), # Key Co-op
|
||||
(1 << 26): APIMod(acronym="1K", settings={}), # 1K
|
||||
(1 << 27): APIMod(acronym="2K", settings={}), # 2K
|
||||
(1 << 28): APIMod(acronym="3K", settings={}), # 3K
|
||||
(1 << 29): APIMod(acronym="SV2", settings={}), # Score V2
|
||||
(1 << 30): APIMod(acronym="MR", settings={}), # Mirror
|
||||
API_MOD_TO_LEGACY: dict[str, int] = {
|
||||
"NF": 1 << 0, # No Fail
|
||||
"EZ": 1 << 1, # Easy
|
||||
"TD": 1 << 2, # Touch Device
|
||||
"HD": 1 << 3, # Hidden
|
||||
"HR": 1 << 4, # Hard Rock
|
||||
"SD": 1 << 5, # Sudden Death
|
||||
"DT": 1 << 6, # Double Time
|
||||
"RX": 1 << 7, # Relax
|
||||
"HT": 1 << 8, # Half Time
|
||||
"NC": 1 << 9, # Nightcore
|
||||
"FL": 1 << 10, # Flashlight
|
||||
"AT": 1 << 11, # Autoplay
|
||||
"SO": 1 << 12, # Spun Out
|
||||
"AP": 1 << 13, # Auto Pilot
|
||||
"PF": 1 << 14, # Perfect
|
||||
"4K": 1 << 15, # 4K
|
||||
"5K": 1 << 16, # 5K
|
||||
"6K": 1 << 17, # 6K
|
||||
"7K": 1 << 18, # 7K
|
||||
"8K": 1 << 19, # 8K
|
||||
"FI": 1 << 20, # Fade In
|
||||
"RD": 1 << 21, # Random
|
||||
"CN": 1 << 22, # Cinema
|
||||
"TP": 1 << 23, # Target Practice
|
||||
"9K": 1 << 24, # 9K
|
||||
"CO": 1 << 25, # Key Co-op
|
||||
"1K": 1 << 26, # 1K
|
||||
"3K": 1 << 27, # 3K
|
||||
"2K": 1 << 28, # 2K
|
||||
"SV2": 1 << 29, # ScoreV2
|
||||
"MR": 1 << 30, # Mirror
|
||||
}
|
||||
LEGACY_MOD_TO_API_MOD = {}
|
||||
for k, v in API_MOD_TO_LEGACY.items():
|
||||
LEGACY_MOD_TO_API_MOD[v] = APIMod(acronym=k, settings={})
|
||||
API_MOD_TO_LEGACY["NC"] |= API_MOD_TO_LEGACY["DT"]
|
||||
API_MOD_TO_LEGACY["PF"] |= API_MOD_TO_LEGACY["SD"]
|
||||
|
||||
|
||||
# see static/mods.json
|
||||
class Settings(TypedDict):
|
||||
Name: str
|
||||
Type: str
|
||||
Label: str
|
||||
Description: str
|
||||
|
||||
|
||||
class Mod(TypedDict):
|
||||
Acronym: str
|
||||
Name: str
|
||||
Description: str
|
||||
Type: str
|
||||
Settings: list[Settings]
|
||||
IncompatibleMods: list[str]
|
||||
RequiresConfiguration: bool
|
||||
UserPlayable: bool
|
||||
ValidForMultiplayer: bool
|
||||
ValidForFreestyleAsRequiredMod: bool
|
||||
ValidForMultiplayerAsFreeMod: bool
|
||||
AlwaysValidForSubmission: bool
|
||||
|
||||
|
||||
API_MODS: dict[Literal[0, 1, 2, 3], dict[str, Mod]] = {}
|
||||
|
||||
|
||||
def init_mods():
|
||||
mods_file = STATIC_DIR / "mods.json"
|
||||
raw_mods = json.loads(mods_file.read_text())
|
||||
for ruleset in raw_mods:
|
||||
ruleset_mods = {}
|
||||
for mod in ruleset["Mods"]:
|
||||
ruleset_mods[mod["Acronym"]] = mod
|
||||
API_MODS[ruleset["RulesetID"]] = ruleset_mods
|
||||
|
||||
|
||||
def int_to_mods(mods: int) -> list[APIMod]:
|
||||
@@ -54,3 +98,10 @@ def int_to_mods(mods: int) -> list[APIMod]:
|
||||
if mods & (1 << 9):
|
||||
mod_list.remove(LEGACY_MOD_TO_API_MOD[(1 << 6)])
|
||||
return mod_list
|
||||
|
||||
|
||||
def mods_to_int(mods: list[APIMod]) -> int:
|
||||
sum_ = 0
|
||||
for mod in mods:
|
||||
sum_ |= API_MOD_TO_LEGACY.get(mod["acronym"], 0)
|
||||
return sum_
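A quick round-trip example for the legacy-mod helpers above; note that NC carries the DT bit and PF carries the SD bit because of the |= lines:

from app.models.mods import APIMod, int_to_mods, mods_to_int

# HDDT as a legacy bitmask: HD (1 << 3) | DT (1 << 6).
print(int_to_mods((1 << 3) | (1 << 6)))
# -> [{'acronym': 'HD', 'settings': {}}, {'acronym': 'DT', 'settings': {}}]
# (assuming int_to_mods walks LEGACY_MOD_TO_API_MOD in insertion order)

# Converting back: NC folds in the DT bit, and unknown acronyms contribute 0.
assert mods_to_int([APIMod(acronym="NC")]) == (1 << 9) | (1 << 6)
assert mods_to_int([APIMod(acronym="XX")]) == 0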
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
# OAuth-related models
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import List
|
||||
from pydantic import BaseModel
|
||||
|
||||
|
||||
@@ -34,3 +35,22 @@ class OAuthErrorResponse(BaseModel):
|
||||
error_description: str
|
||||
hint: str
|
||||
message: str
|
||||
|
||||
|
||||
class RegistrationErrorResponse(BaseModel):
|
||||
"""注册错误响应模型"""
|
||||
form_error: dict
|
||||
|
||||
|
||||
class UserRegistrationErrors(BaseModel):
|
||||
"""用户注册错误模型"""
|
||||
username: List[str] = []
|
||||
user_email: List[str] = []
|
||||
password: List[str] = []
|
||||
|
||||
|
||||
class RegistrationRequestErrors(BaseModel):
|
||||
"""注册请求错误模型"""
|
||||
message: str | None = None
|
||||
redirect: str | None = None
|
||||
user: UserRegistrationErrors | None = None
|
||||
|
||||
@@ -1,7 +1,11 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from enum import Enum, IntEnum
|
||||
from typing import Literal, TypedDict
|
||||
|
||||
from .mods import API_MODS, APIMod, init_mods
|
||||
|
||||
from pydantic import BaseModel, Field, ValidationInfo, field_validator
|
||||
import rosu_pp_py as rosu
|
||||
|
||||
|
||||
@@ -30,40 +34,141 @@ INT_TO_MODE = {v: k for k, v in MODE_TO_INT.items()}
|
||||
|
||||
|
||||
class Rank(str, Enum):
|
||||
X = "ss"
|
||||
XH = "ssh"
|
||||
S = "s"
|
||||
SH = "sh"
|
||||
A = "a"
|
||||
B = "b"
|
||||
C = "c"
|
||||
D = "d"
|
||||
F = "f"
|
||||
X = "X"
|
||||
XH = "XH"
|
||||
S = "S"
|
||||
SH = "SH"
|
||||
A = "A"
|
||||
B = "B"
|
||||
C = "C"
|
||||
D = "D"
|
||||
F = "F"
|
||||
|
||||
|
||||
# https://github.com/ppy/osu/blob/master/osu.Game/Rulesets/Scoring/HitResult.cs
|
||||
class HitResult(IntEnum):
|
||||
PERFECT = 0 # [Order(0)]
|
||||
GREAT = 1 # [Order(1)]
|
||||
GOOD = 2 # [Order(2)]
|
||||
OK = 3 # [Order(3)]
|
||||
MEH = 4 # [Order(4)]
|
||||
MISS = 5 # [Order(5)]
|
||||
class HitResult(str, Enum):
|
||||
PERFECT = "perfect" # [Order(0)]
|
||||
GREAT = "great" # [Order(1)]
|
||||
GOOD = "good" # [Order(2)]
|
||||
OK = "ok" # [Order(3)]
|
||||
MEH = "meh" # [Order(4)]
|
||||
MISS = "miss" # [Order(5)]
|
||||
|
||||
LARGE_TICK_HIT = 6 # [Order(6)]
|
||||
SMALL_TICK_HIT = 7 # [Order(7)]
|
||||
SLIDER_TAIL_HIT = 8 # [Order(8)]
|
||||
LARGE_TICK_HIT = "large_tick_hit" # [Order(6)]
|
||||
SMALL_TICK_HIT = "small_tick_hit" # [Order(7)]
|
||||
SLIDER_TAIL_HIT = "slider_tail_hit" # [Order(8)]
|
||||
|
||||
LARGE_BONUS = 9 # [Order(9)]
|
||||
SMALL_BONUS = 10 # [Order(10)]
|
||||
LARGE_BONUS = "large_bonus" # [Order(9)]
|
||||
SMALL_BONUS = "small_bonus" # [Order(10)]
|
||||
|
||||
LARGE_TICK_MISS = 11 # [Order(11)]
|
||||
SMALL_TICK_MISS = 12 # [Order(12)]
|
||||
LARGE_TICK_MISS = "large_tick_miss" # [Order(11)]
|
||||
SMALL_TICK_MISS = "small_tick_miss" # [Order(12)]
|
||||
|
||||
IGNORE_HIT = 13 # [Order(13)]
|
||||
IGNORE_MISS = 14 # [Order(14)]
|
||||
IGNORE_HIT = "ignore_hit" # [Order(13)]
|
||||
IGNORE_MISS = "ignore_miss" # [Order(14)]
|
||||
|
||||
NONE = 15 # [Order(15)]
|
||||
COMBO_BREAK = 16 # [Order(16)]
|
||||
NONE = "none" # [Order(15)]
|
||||
COMBO_BREAK = "combo_break" # [Order(16)]
|
||||
|
||||
LEGACY_COMBO_INCREASE = 99 # [Order(99)] @deprecated
|
||||
LEGACY_COMBO_INCREASE = "legacy_combo_increase" # [Order(99)] @deprecated
|
||||
|
||||
def is_hit(self) -> bool:
|
||||
return self not in (
|
||||
HitResult.NONE,
|
||||
HitResult.IGNORE_MISS,
|
||||
HitResult.COMBO_BREAK,
|
||||
HitResult.LARGE_TICK_MISS,
|
||||
HitResult.SMALL_TICK_MISS,
|
||||
HitResult.MISS,
|
||||
)
|
||||
|
||||
|
||||
class HitResultInt(IntEnum):
|
||||
PERFECT = 0
|
||||
GREAT = 1
|
||||
GOOD = 2
|
||||
OK = 3
|
||||
MEH = 4
|
||||
MISS = 5
|
||||
|
||||
LARGE_TICK_HIT = 6
|
||||
SMALL_TICK_HIT = 7
|
||||
SLIDER_TAIL_HIT = 8
|
||||
|
||||
LARGE_BONUS = 9
|
||||
SMALL_BONUS = 10
|
||||
|
||||
LARGE_TICK_MISS = 11
|
||||
SMALL_TICK_MISS = 12
|
||||
|
||||
IGNORE_HIT = 13
|
||||
IGNORE_MISS = 14
|
||||
|
||||
NONE = 15
|
||||
COMBO_BREAK = 16
|
||||
|
||||
LEGACY_COMBO_INCREASE = 99
|
||||
|
||||
def is_hit(self) -> bool:
|
||||
return self not in (
|
||||
HitResultInt.NONE,
|
||||
HitResultInt.IGNORE_MISS,
|
||||
HitResultInt.COMBO_BREAK,
|
||||
HitResultInt.LARGE_TICK_MISS,
|
||||
HitResultInt.SMALL_TICK_MISS,
|
||||
HitResultInt.MISS,
|
||||
)
|
||||
|
||||
|
||||
class LeaderboardType(Enum):
|
||||
GLOBAL = "global"
|
||||
FRIENDS = "friends"
|
||||
COUNTRY = "country"
|
||||
TEAM = "team"
|
||||
|
||||
|
||||
ScoreStatistics = dict[HitResult, int]
|
||||
ScoreStatisticsInt = dict[HitResultInt, int]
|
||||
|
||||
|
||||
class SoloScoreSubmissionInfo(BaseModel):
|
||||
rank: Rank
|
||||
total_score: int = Field(ge=0, le=2**31 - 1)
|
||||
total_score_without_mods: int = Field(ge=0, le=2**31 - 1)
|
||||
accuracy: float = Field(ge=0, le=1)
|
||||
pp: float = Field(default=0, ge=0, le=2**31 - 1)
|
||||
max_combo: int = 0
|
||||
ruleset_id: Literal[0, 1, 2, 3]
|
||||
passed: bool = False
|
||||
mods: list[APIMod] = Field(default_factory=list)
|
||||
statistics: ScoreStatistics = Field(default_factory=dict)
|
||||
maximum_statistics: ScoreStatistics = Field(default_factory=dict)
|
||||
|
||||
@field_validator("mods", mode="after")
|
||||
@classmethod
|
||||
def validate_mods(cls, mods: list[APIMod], info: ValidationInfo):
|
||||
if not API_MODS:
|
||||
init_mods()
|
||||
incompatible_mods = set()
|
||||
# check incompatible mods
|
||||
for mod in mods:
|
||||
if mod["acronym"] in incompatible_mods:
|
||||
raise ValueError(
|
||||
f"Mod {mod['acronym']} is incompatible with other mods"
|
||||
)
|
||||
setting_mods = API_MODS[info.data["ruleset_id"]].get(mod["acronym"])
|
||||
if not setting_mods:
|
||||
raise ValueError(f"Invalid mod: {mod['acronym']}")
|
||||
incompatible_mods.update(setting_mods["IncompatibleMods"])
|
||||
return mods
|
||||
|
||||
|
||||
class LegacyReplaySoloScoreInfo(TypedDict):
|
||||
online_id: int
|
||||
mods: list[APIMod]
|
||||
statistics: ScoreStatisticsInt
|
||||
maximum_statistics: ScoreStatisticsInt
|
||||
client_version: str
|
||||
rank: Rank
|
||||
user_id: int
|
||||
total_score_without_mods: int
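A hedged construction example for the submission model above; it assumes static/mods.json is available so the mod validator can populate API_MODS:

from app.models.score import HitResult, Rank, SoloScoreSubmissionInfo

# Minimal passing osu! (ruleset 0) score; statistics are keyed by HitResult.
info = SoloScoreSubmissionInfo(
    rank=Rank.S,
    total_score=123_456,
    total_score_without_mods=123_456,
    accuracy=0.98,
    ruleset_id=0,
    passed=True,
    mods=[{"acronym": "HD"}],
    statistics={HitResult.GREAT: 200, HitResult.OK: 4},
)
assert info.pp == 0  # defaults apply for the fields that were not supplied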
|
||||
|
||||
@@ -1,11 +1,42 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
import datetime
|
||||
from typing import Any, get_origin
|
||||
|
||||
from pydantic import BaseModel, Field, model_validator
|
||||
import msgpack
|
||||
from pydantic import (
|
||||
BaseModel,
|
||||
ConfigDict,
|
||||
Field,
|
||||
TypeAdapter,
|
||||
model_serializer,
|
||||
model_validator,
|
||||
)
|
||||
|
||||
|
||||
def serialize_to_list(value: BaseModel) -> list[Any]:
|
||||
data = []
|
||||
for field, info in value.__class__.model_fields.items():
|
||||
v = getattr(value, field)
|
||||
anno = get_origin(info.annotation)
|
||||
if anno and issubclass(anno, BaseModel):
|
||||
data.append(serialize_to_list(v))
|
||||
elif anno and issubclass(anno, list):
|
||||
data.append(
|
||||
TypeAdapter(
|
||||
info.annotation,
|
||||
).dump_python(v)
|
||||
)
|
||||
elif isinstance(v, datetime.datetime):
|
||||
data.append([msgpack.ext.Timestamp.from_datetime(v), 0])
|
||||
else:
|
||||
data.append(v)
|
||||
return data
|
||||
|
||||
|
||||
class MessagePackArrayModel(BaseModel):
|
||||
model_config = ConfigDict(arbitrary_types_allowed=True)
|
||||
|
||||
@model_validator(mode="before")
|
||||
@classmethod
|
||||
def unpack(cls, v: Any) -> Any:
|
||||
@@ -16,11 +47,15 @@ class MessagePackArrayModel(BaseModel):
|
||||
return dict(zip(fields, v))
|
||||
return v
|
||||
|
||||
@model_serializer
|
||||
def serialize(self) -> list[Any]:
|
||||
return serialize_to_list(self)
|
||||
|
||||
|
||||
class Transport(BaseModel):
|
||||
transport: str
|
||||
transfer_formats: list[str] = Field(
|
||||
default_factory=lambda: ["Binary"], alias="transferFormats"
|
||||
default_factory=lambda: ["Binary", "Text"], alias="transferFormats"
|
||||
)
|
||||
|
||||
|
||||
@@ -29,3 +64,8 @@ class NegotiateResponse(BaseModel):
|
||||
connectionToken: str
|
||||
negotiateVersion: int = 1
|
||||
availableTransports: list[Transport]
|
||||
|
||||
|
||||
class UserState(BaseModel):
|
||||
connection_id: str
|
||||
connection_token: str
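MessagePackArrayModel packs fields positionally, matching how SignalR's MessagePack protocol encodes hub objects. A hedged round-trip sketch with a made-up model:

import msgpack

from app.models.signalr import MessagePackArrayModel


class PingState(MessagePackArrayModel):  # hypothetical model, for illustration only
    connection_id: str
    sequence: int


state = PingState(connection_id="abc", sequence=3)
packed = msgpack.packb(state.model_dump())  # serialized as the array ["abc", 3]
restored = PingState.model_validate(msgpack.unpackb(packed))
assert restored == state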
|
||||
|
||||
@@ -4,18 +4,22 @@ import datetime
|
||||
from enum import IntEnum
|
||||
from typing import Any
|
||||
|
||||
from app.models.beatmap import BeatmapRankStatus
|
||||
|
||||
from .score import (
|
||||
HitResult,
|
||||
ScoreStatisticsInt,
|
||||
)
|
||||
from .signalr import MessagePackArrayModel
|
||||
from .signalr import MessagePackArrayModel, UserState
|
||||
|
||||
import msgpack
|
||||
from pydantic import Field, field_validator
|
||||
from pydantic import BaseModel, Field, field_validator
|
||||
|
||||
|
||||
class APIMod(MessagePackArrayModel):
|
||||
acronym: str
|
||||
settings: dict[str, Any] = Field(default_factory=dict)
|
||||
settings: dict[str, Any] | list = Field(
|
||||
default_factory=dict
|
||||
) # FIXME: with settings
|
||||
|
||||
|
||||
class SpectatedUserState(IntEnum):
|
||||
@@ -32,7 +36,7 @@ class SpectatorState(MessagePackArrayModel):
|
||||
ruleset_id: int | None = None # 0,1,2,3
|
||||
mods: list[APIMod] = Field(default_factory=list)
|
||||
state: SpectatedUserState
|
||||
maximum_statistics: dict[HitResult, int] = Field(default_factory=dict)
|
||||
maximum_statistics: ScoreStatisticsInt = Field(default_factory=dict)
|
||||
|
||||
def __eq__(self, other: object) -> bool:
|
||||
if not isinstance(other, SpectatorState):
|
||||
@@ -58,7 +62,7 @@ class FrameHeader(MessagePackArrayModel):
|
||||
acc: float
|
||||
combo: int
|
||||
max_combo: int
|
||||
statistics: dict[HitResult, int] = Field(default_factory=dict)
|
||||
statistics: ScoreStatisticsInt = Field(default_factory=dict)
|
||||
score_processor_statistics: ScoreProcessorStatistics
|
||||
received_time: datetime.datetime
|
||||
mods: list[APIMod] = Field(default_factory=list)
|
||||
@@ -79,22 +83,56 @@ class FrameHeader(MessagePackArrayModel):
|
||||
raise ValueError(f"Cannot convert {type(v)} to datetime")
|
||||
|
||||
|
||||
class ReplayButtonState(IntEnum):
|
||||
NONE = 0
|
||||
LEFT1 = 1
|
||||
RIGHT1 = 2
|
||||
LEFT2 = 4
|
||||
RIGHT2 = 8
|
||||
SMOKE = 16
|
||||
# class ReplayButtonState(IntEnum):
|
||||
# NONE = 0
|
||||
# LEFT1 = 1
|
||||
# RIGHT1 = 2
|
||||
# LEFT2 = 4
|
||||
# RIGHT2 = 8
|
||||
# SMOKE = 16
|
||||
|
||||
|
||||
class LegacyReplayFrame(MessagePackArrayModel):
|
||||
time: int # from ReplayFrame,the parent of LegacyReplayFrame
|
||||
time: float  # from ReplayFrame, the parent of LegacyReplayFrame
|
||||
x: float | None = None
|
||||
y: float | None = None
|
||||
button_state: ReplayButtonState
|
||||
button_state: int
|
||||
|
||||
|
||||
class FrameDataBundle(MessagePackArrayModel):
|
||||
header: FrameHeader
|
||||
frames: list[LegacyReplayFrame]
|
||||
|
||||
|
||||
# Use for server
|
||||
class APIUser(BaseModel):
|
||||
id: int
|
||||
name: str
|
||||
|
||||
|
||||
class ScoreInfo(BaseModel):
|
||||
mods: list[APIMod]
|
||||
user: APIUser
|
||||
ruleset: int
|
||||
maximum_statistics: ScoreStatisticsInt
|
||||
id: int | None = None
|
||||
total_score: int | None = None
|
||||
acc: float | None = None
|
||||
max_combo: int | None = None
|
||||
combo: int | None = None
|
||||
statistics: ScoreStatisticsInt = Field(default_factory=dict)
|
||||
|
||||
|
||||
class StoreScore(BaseModel):
|
||||
score_info: ScoreInfo
|
||||
replay_frames: list[LegacyReplayFrame] = Field(default_factory=list)
|
||||
|
||||
|
||||
class StoreClientState(UserState):
|
||||
state: SpectatorState | None = None
|
||||
beatmap_status: BeatmapRankStatus | None = None
|
||||
checksum: str | None = None
|
||||
ruleset_id: int | None = None
|
||||
score_token: int | None = None
|
||||
watched_user: set[int] = Field(default_factory=set)
|
||||
score: StoreScore | None = None
|
||||
|
||||
8
app/path.py
Normal file
@@ -0,0 +1,8 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from pathlib import Path
|
||||
|
||||
STATIC_DIR = Path(__file__).parent.parent / "static"
|
||||
|
||||
REPLAY_DIR = Path(__file__).parent.parent / "replays"
|
||||
REPLAY_DIR.mkdir(exist_ok=True)
|
||||
@@ -1,15 +1,17 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from app.signalr import signalr_router as signalr_router
|
||||
|
||||
from . import ( # pyright: ignore[reportUnusedImport] # noqa: F401
|
||||
beatmap,
|
||||
beatmapset,
|
||||
me,
|
||||
relationship,
|
||||
score,
|
||||
user,
|
||||
)
|
||||
from .api_router import router as api_router
|
||||
from .auth import router as auth_router
|
||||
from .fetcher import fetcher_router as fetcher_router
|
||||
from .signalr import signalr_router as signalr_router
|
||||
|
||||
__all__ = ["api_router", "auth_router", "fetcher_router", "signalr_router"]
|
||||
|
||||
@@ -1,39 +1,264 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import timedelta
|
||||
import re
|
||||
|
||||
from app.auth import (
|
||||
authenticate_user,
|
||||
create_access_token,
|
||||
generate_refresh_token,
|
||||
get_password_hash,
|
||||
get_token_by_refresh_token,
|
||||
store_token,
|
||||
)
|
||||
from app.config import settings
|
||||
from app.database import User as DBUser
|
||||
from app.dependencies import get_db
|
||||
from app.models.oauth import TokenResponse, OAuthErrorResponse
|
||||
from app.models.oauth import (
|
||||
OAuthErrorResponse,
|
||||
RegistrationRequestErrors,
|
||||
TokenResponse,
|
||||
UserRegistrationErrors,
|
||||
)
|
||||
|
||||
from fastapi import APIRouter, Depends, Form
|
||||
from fastapi.responses import JSONResponse
|
||||
from sqlmodel import select
|
||||
from sqlmodel.ext.asyncio.session import AsyncSession
|
||||
|
||||
|
||||
def create_oauth_error_response(error: str, description: str, hint: str, status_code: int = 400):
|
||||
def create_oauth_error_response(
|
||||
error: str, description: str, hint: str, status_code: int = 400
|
||||
):
|
||||
"""创建标准的 OAuth 错误响应"""
|
||||
error_data = OAuthErrorResponse(
|
||||
error=error,
|
||||
error_description=description,
|
||||
hint=hint,
|
||||
message=description
|
||||
)
|
||||
return JSONResponse(
|
||||
status_code=status_code,
|
||||
content=error_data.model_dump()
|
||||
error=error, error_description=description, hint=hint, message=description
|
||||
)
|
||||
return JSONResponse(status_code=status_code, content=error_data.model_dump())
|
||||
|
||||
|
||||
def validate_username(username: str) -> list[str]:
|
||||
"""验证用户名"""
|
||||
errors = []
|
||||
|
||||
if not username:
|
||||
errors.append("Username is required")
|
||||
return errors
|
||||
|
||||
if len(username) < 3:
|
||||
errors.append("Username must be at least 3 characters long")
|
||||
|
||||
if len(username) > 15:
|
||||
errors.append("Username must be at most 15 characters long")
|
||||
|
||||
# Check the username format (only letters, digits, underscores, and hyphens are allowed)
|
||||
if not re.match(r"^[a-zA-Z0-9_-]+$", username):
|
||||
errors.append(
|
||||
"Username can only contain letters, numbers, underscores, and hyphens"
|
||||
)
|
||||
|
||||
# Check whether it starts with a digit
|
||||
if username[0].isdigit():
|
||||
errors.append("Username cannot start with a number")
|
||||
|
||||
return errors
|
||||
|
||||
|
||||
def validate_email(email: str) -> list[str]:
|
||||
"""验证邮箱"""
|
||||
errors = []
|
||||
|
||||
if not email:
|
||||
errors.append("Email is required")
|
||||
return errors
|
||||
|
||||
# Basic email format validation
|
||||
email_pattern = r"^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,}$"
|
||||
if not re.match(email_pattern, email):
|
||||
errors.append("Please enter a valid email address")
|
||||
|
||||
return errors
|
||||
|
||||
|
||||
def validate_password(password: str) -> list[str]:
|
||||
"""验证密码"""
|
||||
errors = []
|
||||
|
||||
if not password:
|
||||
errors.append("Password is required")
|
||||
return errors
|
||||
|
||||
if len(password) < 8:
|
||||
errors.append("Password must be at least 8 characters long")
|
||||
|
||||
return errors
|
||||
|
||||
|
||||
router = APIRouter(tags=["osu! OAuth authentication"])
|
||||
|
||||
|
||||
@router.post("/users")
|
||||
async def register_user(
|
||||
user_username: str = Form(..., alias="user[username]"),
|
||||
user_email: str = Form(..., alias="user[user_email]"),
|
||||
user_password: str = Form(..., alias="user[password]"),
|
||||
db: AsyncSession = Depends(get_db),
|
||||
):
|
||||
"""用户注册接口 - 匹配 osu! 客户端的注册请求"""
|
||||
|
||||
username_errors = validate_username(user_username)
|
||||
email_errors = validate_email(user_email)
|
||||
password_errors = validate_password(user_password)
|
||||
|
||||
result = await db.exec(select(DBUser).where(DBUser.name == user_username))
|
||||
existing_user = result.first()
|
||||
if existing_user:
|
||||
username_errors.append("Username is already taken")
|
||||
|
||||
result = await db.exec(select(DBUser).where(DBUser.email == user_email))
|
||||
existing_email = result.first()
|
||||
if existing_email:
|
||||
email_errors.append("Email is already taken")
|
||||
|
||||
if username_errors or email_errors or password_errors:
|
||||
errors = RegistrationRequestErrors(
|
||||
user=UserRegistrationErrors(
|
||||
username=username_errors,
|
||||
user_email=email_errors,
|
||||
password=password_errors,
|
||||
)
|
||||
)
|
||||
|
||||
return JSONResponse(
|
||||
status_code=422, content={"form_error": errors.model_dump()}
|
||||
)
|
||||
|
||||
try:
|
||||
# Create a new user
|
||||
from datetime import datetime
|
||||
import time
|
||||
|
||||
new_user = DBUser(
|
||||
name=user_username,
|
||||
safe_name=user_username.lower(),  # safe (lowercase) username
|
||||
email=user_email,
|
||||
pw_bcrypt=get_password_hash(user_password),
|
||||
priv=1,  # regular user privileges
|
||||
country="CN", # 默认国家
|
||||
creation_time=int(time.time()),
|
||||
latest_activity=int(time.time()),
|
||||
preferred_mode=0,  # default game mode
|
||||
play_style=0,  # default play style
|
||||
)
|
||||
|
||||
db.add(new_user)
|
||||
await db.commit()
|
||||
await db.refresh(new_user)
|
||||
|
||||
# Save the user ID, since the session may be closed later
|
||||
user_id = new_user.id
|
||||
|
||||
if user_id <= 2:
|
||||
await db.rollback()
|
||||
try:
|
||||
from sqlalchemy import text
|
||||
|
||||
# Ensure AUTO_INCREMENT starts at 3 (ID=1 is BanchoBot, ID=2 is reserved for ppy)
|
||||
await db.execute(text("ALTER TABLE users AUTO_INCREMENT = 3"))
|
||||
await db.commit()
|
||||
|
||||
# Re-create the user
|
||||
new_user = DBUser(
|
||||
name=user_username,
|
||||
safe_name=user_username.lower(),
|
||||
email=user_email,
|
||||
pw_bcrypt=get_password_hash(user_password),
|
||||
priv=1,
|
||||
country="CN",
|
||||
creation_time=int(time.time()),
|
||||
latest_activity=int(time.time()),
|
||||
preferred_mode=0,
|
||||
play_style=0,
|
||||
)
|
||||
|
||||
db.add(new_user)
|
||||
await db.commit()
|
||||
await db.refresh(new_user)
|
||||
user_id = new_user.id
|
||||
|
||||
# Final check that the ID is valid
|
||||
if user_id <= 2:
|
||||
await db.rollback()
|
||||
errors = RegistrationRequestErrors(
|
||||
message=(
|
||||
"Failed to create account with valid ID. "
|
||||
"Please contact support."
|
||||
)
|
||||
)
|
||||
return JSONResponse(
|
||||
status_code=500, content={"form_error": errors.model_dump()}
|
||||
)
|
||||
|
||||
except Exception as fix_error:
|
||||
await db.rollback()
|
||||
print(f"Failed to fix AUTO_INCREMENT: {fix_error}")
|
||||
errors = RegistrationRequestErrors(
|
||||
message="Failed to create account with valid ID. Please try again."
|
||||
)
|
||||
return JSONResponse(
|
||||
status_code=500, content={"form_error": errors.model_dump()}
|
||||
)
|
||||
|
||||
# Create the default lazer_profile
|
||||
from app.database.user import LazerUserProfile
|
||||
|
||||
lazer_profile = LazerUserProfile(
|
||||
user_id=user_id,
|
||||
is_active=True,
|
||||
is_bot=False,
|
||||
is_deleted=False,
|
||||
is_online=True,
|
||||
is_supporter=False,
|
||||
is_restricted=False,
|
||||
session_verified=False,
|
||||
has_supported=False,
|
||||
pm_friends_only=False,
|
||||
default_group="default",
|
||||
join_date=datetime.utcnow(),
|
||||
playmode="osu",
|
||||
support_level=0,
|
||||
max_blocks=50,
|
||||
max_friends=250,
|
||||
post_count=0,
|
||||
)
|
||||
|
||||
db.add(lazer_profile)
|
||||
await db.commit()
|
||||
|
||||
# Return a success response
|
||||
return JSONResponse(
|
||||
status_code=201,
|
||||
content={"message": "Account created successfully", "user_id": user_id},
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
await db.rollback()
|
||||
# Print detailed error information for debugging
|
||||
print(f"Registration error: {e}")
|
||||
import traceback
|
||||
|
||||
traceback.print_exc()
|
||||
|
||||
# Return a generic error
|
||||
errors = RegistrationRequestErrors(
|
||||
message="An error occurred while creating your account. Please try again."
|
||||
)
|
||||
|
||||
return JSONResponse(
|
||||
status_code=500, content={"form_error": errors.model_dump()}
|
||||
)
|
||||
|
||||
|
||||
@router.post("/oauth/token", response_model=TokenResponse)
|
||||
async def oauth_token(
|
||||
grant_type: str = Form(...),
|
||||
@@ -53,9 +278,13 @@ async def oauth_token(
|
||||
):
|
||||
return create_oauth_error_response(
|
||||
error="invalid_client",
|
||||
description="Client authentication failed (e.g., unknown client, no client authentication included, or unsupported authentication method).",
|
||||
description=(
|
||||
"Client authentication failed (e.g., unknown client, "
|
||||
"no client authentication included, "
|
||||
"or unsupported authentication method)."
|
||||
),
|
||||
hint="Invalid client credentials",
|
||||
status_code=401
|
||||
status_code=401,
|
||||
)
|
||||
|
||||
if grant_type == "password":
|
||||
@@ -63,8 +292,12 @@ async def oauth_token(
|
||||
if not username or not password:
|
||||
return create_oauth_error_response(
|
||||
error="invalid_request",
|
||||
description="The request is missing a required parameter, includes an invalid parameter value, includes a parameter more than once, or is otherwise malformed.",
|
||||
hint="Username and password required"
|
||||
description=(
|
||||
"The request is missing a required parameter, includes an "
|
||||
"invalid parameter value, "
|
||||
"includes a parameter more than once, or is otherwise malformed."
|
||||
),
|
||||
hint="Username and password required",
|
||||
)
|
||||
|
||||
# Authenticate the user
|
||||
@@ -72,8 +305,14 @@ async def oauth_token(
|
||||
if not user:
|
||||
return create_oauth_error_response(
|
||||
error="invalid_grant",
|
||||
description="The provided authorization grant (e.g., authorization code, resource owner credentials) or refresh token is invalid, expired, revoked, does not match the redirection URI used in the authorization request, or was issued to another client.",
|
||||
hint="Incorrect sign in"
|
||||
description=(
|
||||
"The provided authorization grant (e.g., authorization code, "
|
||||
"resource owner credentials) "
|
||||
"or refresh token is invalid, expired, revoked, "
|
||||
"does not match the redirection URI used in "
|
||||
"the authorization request, or was issued to another client."
|
||||
),
|
||||
hint="Incorrect sign in",
|
||||
)
|
||||
|
||||
# Generate tokens
|
||||
@@ -105,8 +344,12 @@ async def oauth_token(
|
||||
if not refresh_token:
|
||||
return create_oauth_error_response(
|
||||
error="invalid_request",
|
||||
description="The request is missing a required parameter, includes an invalid parameter value, includes a parameter more than once, or is otherwise malformed.",
|
||||
hint="Refresh token required"
|
||||
description=(
|
||||
"The request is missing a required parameter, "
|
||||
"includes an invalid parameter value, "
|
||||
"includes a parameter more than once, or is otherwise malformed."
|
||||
),
|
||||
hint="Refresh token required",
|
||||
)
|
||||
|
||||
# Validate the refresh token
|
||||
@@ -114,8 +357,14 @@ async def oauth_token(
|
||||
if not token_record:
|
||||
return create_oauth_error_response(
|
||||
error="invalid_grant",
|
||||
description="The provided authorization grant (e.g., authorization code, resource owner credentials) or refresh token is invalid, expired, revoked, does not match the redirection URI used in the authorization request, or was issued to another client.",
|
||||
hint="Invalid refresh token"
|
||||
description=(
|
||||
"The provided authorization grant (e.g., authorization code, "
|
||||
"resource owner credentials) or refresh token is "
|
||||
"invalid, expired, revoked, "
|
||||
"does not match the redirection URI used "
|
||||
"in the authorization request, or was issued to another client."
|
||||
),
|
||||
hint="Invalid refresh token",
|
||||
)
|
||||
|
||||
# Generate a new access token
|
||||
@@ -145,6 +394,9 @@ async def oauth_token(
|
||||
else:
|
||||
return create_oauth_error_response(
|
||||
error="unsupported_grant_type",
|
||||
description="The authorization grant type is not supported by the authorization server.",
|
||||
hint="Unsupported grant type"
|
||||
description=(
|
||||
"The authorization grant type is not supported "
|
||||
"by the authorization server."
|
||||
),
|
||||
hint="Unsupported grant type",
|
||||
)
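Client-side this is a standard form-encoded OAuth token request. A hedged sketch of the password grant; the host, client credentials, and exact response fields are assumptions rather than values taken from this diff:

import httpx

resp = httpx.post(
    "http://localhost:8000/oauth/token",  # assumed host and mount point
    data={
        "grant_type": "password",
        "client_id": "5",                  # placeholder client credentials
        "client_secret": "<client-secret>",
        "username": "player",
        "password": "hunter2hunter2",
    },
)
tokens = resp.json()  # typically access_token / refresh_token / expires_in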
|
||||
|
||||
@@ -16,7 +16,10 @@ from app.dependencies.user import get_current_user
|
||||
from app.fetcher import Fetcher
|
||||
from app.models.beatmap import BeatmapAttributes
|
||||
from app.models.mods import APIMod, int_to_mods
|
||||
from app.models.score import INT_TO_MODE, GameMode
|
||||
from app.models.score import (
|
||||
INT_TO_MODE,
|
||||
GameMode,
|
||||
)
|
||||
from app.utils import calculate_beatmap_attribute
|
||||
|
||||
from .api_router import router
|
||||
@@ -31,6 +34,31 @@ from sqlmodel import col, select
|
||||
from sqlmodel.ext.asyncio.session import AsyncSession
|
||||
|
||||
|
||||
@router.get("/beatmaps/lookup", tags=["beatmap"], response_model=BeatmapResp)
|
||||
async def lookup_beatmap(
|
||||
id: int | None = Query(default=None, alias="id"),
|
||||
md5: str | None = Query(default=None, alias="checksum"),
|
||||
filename: str | None = Query(default=None, alias="filename"),
|
||||
current_user: DBUser = Depends(get_current_user),
|
||||
db: AsyncSession = Depends(get_db),
|
||||
fetcher: Fetcher = Depends(get_fetcher),
|
||||
):
|
||||
if id is None and md5 is None and filename is None:
|
||||
raise HTTPException(
|
||||
status_code=400,
|
||||
detail="At least one of 'id', 'checksum', or 'filename' must be provided.",
|
||||
)
|
||||
try:
|
||||
beatmap = await Beatmap.get_or_fetch(db, fetcher, bid=id, md5=md5)
|
||||
except HTTPError:
|
||||
raise HTTPException(status_code=404, detail="Beatmap not found")
|
||||
|
||||
if beatmap is None:
|
||||
raise HTTPException(status_code=404, detail="Beatmap not found")
|
||||
|
||||
return BeatmapResp.from_db(beatmap)
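A hedged request sketch for the new lookup route; it assumes the API router is mounted under /api/v2 and that the placeholder token and checksum are replaced with real values:

import httpx

resp = httpx.get(
    "http://localhost:8000/api/v2/beatmaps/lookup",
    params={"checksum": "<beatmap-md5>"},
    headers={"Authorization": "Bearer <access-token>"},
)
resp.raise_for_status()  # 404 if the map is unknown, 400 if no filter was given
beatmap = resp.json()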
|
||||
|
||||
|
||||
@router.get("/beatmaps/{bid}", tags=["beatmap"], response_model=BeatmapResp)
|
||||
async def get_beatmap(
|
||||
bid: int,
|
||||
@@ -39,7 +67,7 @@ async def get_beatmap(
|
||||
fetcher: Fetcher = Depends(get_fetcher),
|
||||
):
|
||||
try:
|
||||
beatmap = await Beatmap.get_or_fetch(db, bid, fetcher)
|
||||
beatmap = await Beatmap.get_or_fetch(db, fetcher, bid)
|
||||
return BeatmapResp.from_db(beatmap)
|
||||
except HTTPError:
|
||||
raise HTTPException(status_code=404, detail="Beatmap not found")
|
||||
@@ -119,7 +147,7 @@ async def get_beatmap_attributes(
|
||||
if ruleset_id is not None and ruleset is None:
|
||||
ruleset = INT_TO_MODE[ruleset_id]
|
||||
if ruleset is None:
|
||||
beatmap_db = await Beatmap.get_or_fetch(db, beatmap, fetcher)
|
||||
beatmap_db = await Beatmap.get_or_fetch(db, fetcher, beatmap)
|
||||
ruleset = beatmap_db.mode
|
||||
key = (
|
||||
f"beatmap:{beatmap}:{ruleset}:"
|
||||
|
||||
@@ -1,7 +1,5 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Literal
|
||||
|
||||
from app.database import User as DBUser
|
||||
from app.database.relationship import Relationship, RelationshipResp, RelationshipType
|
||||
from app.dependencies.database import get_db
|
||||
@@ -9,21 +7,23 @@ from app.dependencies.user import get_current_user
|
||||
|
||||
from .api_router import router
|
||||
|
||||
from fastapi import Depends, HTTPException, Query
|
||||
from fastapi import Depends, HTTPException, Query, Request
|
||||
from sqlmodel import select
|
||||
from sqlmodel.ext.asyncio.session import AsyncSession
|
||||
|
||||
|
||||
@router.get("/{type}", tags=["relationship"], response_model=list[RelationshipResp])
|
||||
@router.get("/friends", tags=["relationship"], response_model=list[RelationshipResp])
|
||||
@router.get("/blocks", tags=["relationship"], response_model=list[RelationshipResp])
|
||||
async def get_relationship(
|
||||
type: Literal["friends", "blocks"],
|
||||
request: Request,
|
||||
current_user: DBUser = Depends(get_current_user),
|
||||
db: AsyncSession = Depends(get_db),
|
||||
):
|
||||
if type == "friends":
|
||||
relationship_type = RelationshipType.FOLLOW
|
||||
else:
|
||||
relationship_type = RelationshipType.BLOCK
|
||||
relationship_type = (
|
||||
RelationshipType.FOLLOW
|
||||
if request.url.path.endswith("/friends")
|
||||
else RelationshipType.BLOCK
|
||||
)
|
||||
relationships = await db.exec(
|
||||
select(Relationship).where(
|
||||
Relationship.user_id == current_user.id,
|
||||
@@ -33,17 +33,19 @@ async def get_relationship(
|
||||
return [await RelationshipResp.from_db(db, rel) for rel in relationships]
|
||||
|
||||
|
||||
@router.post("/{type}", tags=["relationship"], response_model=RelationshipResp)
|
||||
@router.post("/friends", tags=["relationship"], response_model=RelationshipResp)
|
||||
@router.post("/blocks", tags=["relationship"])
|
||||
async def add_relationship(
|
||||
type: Literal["friends", "blocks"],
|
||||
request: Request,
|
||||
target: int = Query(),
|
||||
current_user: DBUser = Depends(get_current_user),
|
||||
db: AsyncSession = Depends(get_db),
|
||||
):
|
||||
if type == "blocks":
|
||||
relationship_type = RelationshipType.BLOCK
|
||||
else:
|
||||
relationship_type = RelationshipType.FOLLOW
|
||||
relationship_type = (
|
||||
RelationshipType.FOLLOW
|
||||
if request.url.path.endswith("/friends")
|
||||
else RelationshipType.BLOCK
|
||||
)
|
||||
if target == current_user.id:
|
||||
raise HTTPException(422, "Cannot add relationship to yourself")
|
||||
relationship = (
|
||||
@@ -78,18 +80,22 @@ async def add_relationship(
|
||||
await db.delete(target_relationship)
|
||||
await db.commit()
|
||||
await db.refresh(relationship)
|
||||
return await RelationshipResp.from_db(db, relationship)
|
||||
if relationship.type == RelationshipType.FOLLOW:
|
||||
return await RelationshipResp.from_db(db, relationship)
|
||||
|
||||
|
||||
@router.delete("/{type}/{target}", tags=["relationship"])
|
||||
@router.delete("/friends/{target}", tags=["relationship"])
|
||||
@router.delete("/blocks/{target}", tags=["relationship"])
|
||||
async def delete_relationship(
|
||||
type: Literal["friends", "blocks"],
|
||||
request: Request,
|
||||
target: int,
|
||||
current_user: DBUser = Depends(get_current_user),
|
||||
db: AsyncSession = Depends(get_db),
|
||||
):
|
||||
relationship_type = (
|
||||
RelationshipType.BLOCK if type == "blocks" else RelationshipType.FOLLOW
|
||||
RelationshipType.BLOCK
|
||||
if "/blocks/" in request.url.path
|
||||
else RelationshipType.FOLLOW
|
||||
)
|
||||
relationship = (
|
||||
await db.exec(
|
||||
|
||||
@@ -1,20 +1,28 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import datetime
|
||||
|
||||
from app.database import (
|
||||
Beatmap,
|
||||
User as DBUser,
|
||||
)
|
||||
from app.database.beatmapset import Beatmapset
|
||||
from app.database.score import Score, ScoreResp
|
||||
from app.database.score_token import ScoreToken, ScoreTokenResp
|
||||
from app.dependencies.database import get_db
|
||||
from app.dependencies.user import get_current_user
|
||||
from app.models.score import (
|
||||
INT_TO_MODE,
|
||||
GameMode,
|
||||
HitResult,
|
||||
Rank,
|
||||
SoloScoreSubmissionInfo,
|
||||
)
|
||||
|
||||
from .api_router import router
|
||||
|
||||
from fastapi import Depends, HTTPException, Query
|
||||
from fastapi import Depends, Form, HTTPException, Query
|
||||
from pydantic import BaseModel
|
||||
from sqlalchemy.orm import joinedload
|
||||
from sqlmodel import col, select
|
||||
from sqlmodel import col, select, true
|
||||
from sqlmodel.ext.asyncio.session import AsyncSession
|
||||
|
||||
|
||||
@@ -29,7 +37,7 @@ class BeatmapScores(BaseModel):
|
||||
async def get_beatmap_scores(
|
||||
beatmap: int,
|
||||
legacy_only: bool = Query(None),  # TODO: support filtering on this parameter
|
||||
mode: str = Query(None),
|
||||
mode: GameMode | None = Query(None),
|
||||
# mods: List[APIMod] = Query(None),  # TODO: support filtering by specific mods
|
||||
type: str = Query(None),
|
||||
current_user: DBUser = Depends(get_current_user),
|
||||
@@ -42,29 +50,28 @@ async def get_beatmap_scores(
|
||||
|
||||
all_scores = (
|
||||
await db.exec(
|
||||
select(Score).where(Score.beatmap_id == beatmap)
|
||||
# .where(Score.mods == mods if mods else True)
|
||||
Score.select_clause_unique(
|
||||
Score.beatmap_id == beatmap,
|
||||
col(Score.passed).is_(True),
|
||||
Score.gamemode == mode if mode is not None else true(),
|
||||
)
|
||||
)
|
||||
).all()
|
||||
|
||||
user_score = (
|
||||
await db.exec(
|
||||
select(Score)
|
||||
.options(
|
||||
joinedload(Score.beatmap) # pyright: ignore[reportArgumentType]
|
||||
.joinedload(Beatmap.beatmapset) # pyright: ignore[reportArgumentType]
|
||||
.selectinload(
|
||||
Beatmapset.beatmaps # pyright: ignore[reportArgumentType]
|
||||
)
|
||||
Score.select_clause_unique(
|
||||
Score.beatmap_id == beatmap,
|
||||
Score.user_id == current_user.id,
|
||||
col(Score.passed).is_(True),
|
||||
Score.gamemode == mode if mode is not None else true(),
|
||||
)
|
||||
.where(Score.beatmap_id == beatmap)
|
||||
.where(Score.user_id == current_user.id)
|
||||
)
|
||||
).first()
|
||||
|
||||
return BeatmapScores(
|
||||
scores=[ScoreResp.from_db(score) for score in all_scores],
|
||||
userScore=ScoreResp.from_db(user_score) if user_score else None,
|
||||
scores=[await ScoreResp.from_db(db, score) for score in all_scores],
|
||||
userScore=await ScoreResp.from_db(db, user_score) if user_score else None,
|
||||
)
|
||||
|
||||
|
||||
@@ -93,18 +100,13 @@ async def get_user_beatmap_score(
|
||||
)
|
||||
user_score = (
|
||||
await db.exec(
|
||||
select(Score)
|
||||
.options(
|
||||
joinedload(Score.beatmap) # pyright: ignore[reportArgumentType]
|
||||
.joinedload(Beatmap.beatmapset) # pyright: ignore[reportArgumentType]
|
||||
.selectinload(
|
||||
Beatmapset.beatmaps # pyright: ignore[reportArgumentType]
|
||||
)
|
||||
Score.select_clause()
|
||||
.where(
|
||||
Score.gamemode == mode if mode is not None else True,
|
||||
Score.beatmap_id == beatmap,
|
||||
Score.user_id == user,
|
||||
)
|
||||
.where(Score.gamemode == mode if mode is not None else True)
|
||||
.where(Score.beatmap_id == beatmap)
|
||||
.where(Score.user_id == user)
|
||||
.order_by(col(Score.classic_total_score).desc())
|
||||
.order_by(col(Score.total_score).desc())
|
||||
)
|
||||
).first()
|
||||
|
||||
@@ -115,7 +117,7 @@ async def get_user_beatmap_score(
|
||||
else:
|
||||
return BeatmapUserScore(
|
||||
position=user_score.position if user_score.position is not None else 0,
|
||||
score=ScoreResp.from_db(user_score),
|
||||
score=await ScoreResp.from_db(db, user_score),
|
||||
)
|
||||
|
||||
|
||||
@@ -138,19 +140,114 @@ async def get_user_all_beatmap_scores(
|
||||
)
|
||||
all_user_scores = (
|
||||
await db.exec(
|
||||
select(Score)
|
||||
.options(
|
||||
joinedload(Score.beatmap) # pyright: ignore[reportArgumentType]
|
||||
.joinedload(Beatmap.beatmapset) # pyright: ignore[reportArgumentType]
|
||||
.selectinload(
|
||||
Beatmapset.beatmaps # pyright: ignore[reportArgumentType]
|
||||
)
|
||||
Score.select_clause()
|
||||
.where(
|
||||
Score.gamemode == ruleset if ruleset is not None else True,
|
||||
Score.beatmap_id == beatmap,
|
||||
Score.user_id == user,
|
||||
)
|
||||
.where(Score.gamemode == ruleset if ruleset is not None else True)
|
||||
.where(Score.beatmap_id == beatmap)
|
||||
.where(Score.user_id == user)
|
||||
.order_by(col(Score.classic_total_score).desc())
|
||||
)
|
||||
).all()
|
||||
|
||||
return [ScoreResp.from_db(score) for score in all_user_scores]
|
||||
return [await ScoreResp.from_db(db, score) for score in all_user_scores]
|
||||
|
||||
|
||||
@router.post(
|
||||
"/beatmaps/{beatmap}/solo/scores", tags=["beatmap"], response_model=ScoreTokenResp
|
||||
)
|
||||
async def create_solo_score(
|
||||
beatmap: int,
|
||||
version_hash: str = Form(""),
|
||||
beatmap_hash: str = Form(),
|
||||
ruleset_id: int = Form(..., ge=0, le=3),
|
||||
current_user: DBUser = Depends(get_current_user),
|
||||
db: AsyncSession = Depends(get_db),
|
||||
):
|
||||
async with db:
|
||||
score_token = ScoreToken(
|
||||
user_id=current_user.id,
|
||||
beatmap_id=beatmap,
|
||||
ruleset_id=INT_TO_MODE[ruleset_id],
|
||||
)
|
||||
db.add(score_token)
|
||||
await db.commit()
|
||||
await db.refresh(score_token)
|
||||
return ScoreTokenResp.from_db(score_token)
|
||||
|
||||
|
||||
@router.put(
|
||||
"/beatmaps/{beatmap}/solo/scores/{token}",
|
||||
tags=["beatmap"],
|
||||
response_model=ScoreResp,
|
||||
)
|
||||
async def submit_solo_score(
|
||||
beatmap: int,
|
||||
token: int,
|
||||
info: SoloScoreSubmissionInfo,
|
||||
current_user: DBUser = Depends(get_current_user),
|
||||
db: AsyncSession = Depends(get_db),
|
||||
):
|
||||
if not info.passed:
|
||||
info.rank = Rank.F
|
||||
async with db:
|
||||
score_token = (
|
||||
await db.exec(
|
||||
select(ScoreToken)
|
||||
.options(joinedload(ScoreToken.beatmap)) # pyright: ignore[reportArgumentType]
|
||||
.where(ScoreToken.id == token, ScoreToken.user_id == current_user.id)
|
||||
)
|
||||
).first()
|
||||
if not score_token or score_token.user_id != current_user.id:
|
||||
raise HTTPException(status_code=404, detail="Score token not found")
|
||||
if score_token.score_id:
|
||||
score = (
|
||||
await db.exec(
|
||||
select(Score)
|
||||
.options(joinedload(Score.beatmap)) # pyright: ignore[reportArgumentType]
|
||||
.where(
|
||||
Score.id == score_token.score_id,
|
||||
Score.user_id == current_user.id,
|
||||
)
|
||||
)
|
||||
).first()
|
||||
if not score:
|
||||
raise HTTPException(status_code=404, detail="Score not found")
|
||||
else:
|
||||
score = Score(
|
||||
accuracy=info.accuracy,
|
||||
max_combo=info.max_combo,
|
||||
# maximum_statistics=info.maximum_statistics,
|
||||
mods=info.mods,
|
||||
passed=info.passed,
|
||||
rank=info.rank,
|
||||
total_score=info.total_score,
|
||||
total_score_without_mods=info.total_score_without_mods,
|
||||
beatmap_id=beatmap,
|
||||
ended_at=datetime.datetime.now(datetime.UTC),
|
||||
gamemode=INT_TO_MODE[info.ruleset_id],
|
||||
started_at=score_token.created_at,
|
||||
user_id=current_user.id,
|
||||
preserve=info.passed,
|
||||
map_md5=score_token.beatmap.checksum,
|
||||
has_replay=False,
|
||||
pp=info.pp,
|
||||
type="solo",
|
||||
n300=info.statistics.get(HitResult.GREAT, 0),
|
||||
n100=info.statistics.get(HitResult.OK, 0),
|
||||
n50=info.statistics.get(HitResult.MEH, 0),
|
||||
nmiss=info.statistics.get(HitResult.MISS, 0),
|
||||
ngeki=info.statistics.get(HitResult.PERFECT, 0),
|
||||
nkatu=info.statistics.get(HitResult.GOOD, 0),
|
||||
)
|
||||
db.add(score)
|
||||
await db.commit()
|
||||
await db.refresh(score)
|
||||
score_id = score.id
|
||||
score_token.score_id = score_id
|
||||
await db.commit()
|
||||
score = (
|
||||
await db.exec(Score.select_clause().where(Score.id == score_id))
|
||||
).first()
|
||||
assert score is not None
|
||||
return await ScoreResp.from_db(db, score)
|
||||
|
||||
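Taken together, the two endpoints above form the lazer solo-score flow: the client first POSTs to create a score token, then PUTs the finished score against that token. A minimal client-side sketch, assuming an httpx session that already carries authentication (the router prefix is omitted and the `id` field of the token response is an assumption):

import httpx

async def play_and_submit(api: httpx.AsyncClient, beatmap_id: int, score_payload: dict) -> dict:
    # Step 1: POST creates a ScoreToken tied to the current user, beatmap and ruleset.
    token_resp = await api.post(
        f"/beatmaps/{beatmap_id}/solo/scores",
        data={"beatmap_hash": "<beatmap md5>", "ruleset_id": 0},
    )
    token = token_resp.json()["id"]
    # Step 2: PUT submits the finished score (a SoloScoreSubmissionInfo payload);
    # the server creates the Score row and links it back to the token.
    submit_resp = await api.put(
        f"/beatmaps/{beatmap_id}/solo/scores/{token}", json=score_payload
    )
    return submit_resp.json()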
@@ -1,211 +0,0 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import time
|
||||
from typing import Any
|
||||
|
||||
from app.config import settings
|
||||
from app.router.signalr.exception import InvokeException
|
||||
from app.router.signalr.packet import (
|
||||
PacketType,
|
||||
ResultKind,
|
||||
encode_varint,
|
||||
parse_packet,
|
||||
)
|
||||
from app.router.signalr.store import ResultStore
|
||||
from app.router.signalr.utils import get_signature
|
||||
|
||||
from fastapi import WebSocket
|
||||
import msgpack
|
||||
from pydantic import BaseModel
|
||||
from starlette.websockets import WebSocketDisconnect
|
||||
|
||||
|
||||
class Client:
|
||||
def __init__(
|
||||
self, connection_id: str, connection_token: str, connection: WebSocket
|
||||
) -> None:
|
||||
self.connection_id = connection_id
|
||||
self.connection_token = connection_token
|
||||
self.connection = connection
|
||||
self._listen_task: asyncio.Task | None = None
|
||||
self._ping_task: asyncio.Task | None = None
|
||||
self._store = ResultStore()
|
||||
|
||||
async def send_packet(self, type: PacketType, packet: list[Any]):
|
||||
packet.insert(0, type.value)
|
||||
payload = msgpack.packb(packet)
|
||||
length = encode_varint(len(payload))
|
||||
await self.connection.send_bytes(length + payload)
|
||||
|
||||
async def _ping(self):
|
||||
while True:
|
||||
try:
|
||||
await self.send_packet(PacketType.PING, [])
|
||||
await asyncio.sleep(settings.SIGNALR_PING_INTERVAL)
|
||||
except WebSocketDisconnect:
|
||||
break
|
||||
except Exception as e:
|
||||
print(f"Error in ping task for {self.connection_id}: {e}")
|
||||
break
|
||||
|
||||
|
||||
class Hub:
|
||||
def __init__(self) -> None:
|
||||
self.clients: dict[str, Client] = {}
|
||||
self.waited_clients: dict[str, int] = {}
|
||||
self.tasks: set[asyncio.Task] = set()
|
||||
|
||||
def add_waited_client(self, connection_token: str, timestamp: int) -> None:
|
||||
self.waited_clients[connection_token] = timestamp
|
||||
|
||||
def add_client(
|
||||
self, connection_id: str, connection_token: str, connection: WebSocket
|
||||
) -> Client:
|
||||
if connection_token in self.clients:
|
||||
raise ValueError(
|
||||
f"Client with connection token {connection_token} already exists."
|
||||
)
|
||||
if connection_token in self.waited_clients:
|
||||
if (
|
||||
self.waited_clients[connection_token]
|
||||
< time.time() - settings.SIGNALR_NEGOTIATE_TIMEOUT
|
||||
):
|
||||
raise TimeoutError(f"Connection {connection_id} has waited too long.")
|
||||
del self.waited_clients[connection_token]
|
||||
client = Client(connection_id, connection_token, connection)
|
||||
self.clients[connection_token] = client
|
||||
task = asyncio.create_task(client._ping())
|
||||
self.tasks.add(task)
|
||||
client._ping_task = task
|
||||
return client
|
||||
|
||||
async def remove_client(self, connection_id: str) -> None:
|
||||
if client := self.clients.get(connection_id):
|
||||
del self.clients[connection_id]
|
||||
if client._listen_task:
|
||||
client._listen_task.cancel()
|
||||
if client._ping_task:
|
||||
client._ping_task.cancel()
|
||||
await client.connection.close()
|
||||
|
||||
async def send_packet(self, client: Client, type: PacketType, packet: list[Any]):
|
||||
await client.send_packet(type, packet)
|
||||
|
||||
async def _listen_client(self, client: Client) -> None:
|
||||
jump = False
|
||||
while not jump:
|
||||
try:
|
||||
message = await client.connection.receive_bytes()
|
||||
packet_type, packet_data = parse_packet(message)
|
||||
task = asyncio.create_task(
|
||||
self._handle_packet(client, packet_type, packet_data)
|
||||
)
|
||||
self.tasks.add(task)
|
||||
task.add_done_callback(self.tasks.discard)
|
||||
except WebSocketDisconnect as e:
|
||||
if e.code == 1005:
|
||||
continue
|
||||
print(
|
||||
f"Client {client.connection_id} disconnected: {e.code}, {e.reason}"
|
||||
)
|
||||
jump = True
|
||||
except Exception as e:
|
||||
print(f"Error in client {client.connection_id}: {e}")
|
||||
jump = True
|
||||
await self.remove_client(client.connection_id)
|
||||
|
||||
async def _handle_packet(
|
||||
self, client: Client, type: PacketType, packet: list[Any]
|
||||
) -> None:
|
||||
match type:
|
||||
case PacketType.PING:
|
||||
...
|
||||
case PacketType.INVOCATION:
|
||||
invocation_id: str | None = packet[1] # pyright: ignore[reportRedeclaration]
|
||||
target: str = packet[2]
|
||||
args: list[Any] | None = packet[3]
|
||||
if args is None:
|
||||
args = []
|
||||
# streams: list[str] | None = packet[4] # TODO: stream support
|
||||
code = ResultKind.VOID
|
||||
result = None
|
||||
try:
|
||||
result = await self.invoke_method(client, target, args)
|
||||
if result is not None:
|
||||
code = ResultKind.HAS_VALUE
|
||||
except InvokeException as e:
|
||||
code = ResultKind.ERROR
|
||||
result = e.message
|
||||
|
||||
except Exception as e:
|
||||
code = ResultKind.ERROR
|
||||
result = str(e)
|
||||
|
||||
packet = [
|
||||
{}, # header
|
||||
invocation_id,
|
||||
code.value,
|
||||
]
|
||||
if result is not None:
|
||||
packet.append(result)
|
||||
if invocation_id is not None:
|
||||
await client.send_packet(
|
||||
PacketType.COMPLETION,
|
||||
packet,
|
||||
)
|
||||
case PacketType.COMPLETION:
|
||||
invocation_id: str = packet[1]
|
||||
code: ResultKind = ResultKind(packet[2])
|
||||
result: Any = packet[3] if len(packet) > 3 else None
|
||||
client._store.add_result(invocation_id, code, result)
|
||||
|
||||
async def invoke_method(self, client: Client, method: str, args: list[Any]) -> Any:
|
||||
method_ = getattr(self, method, None)
|
||||
call_params = []
|
||||
if not method_:
|
||||
raise InvokeException(f"Method '{method}' not found in hub.")
|
||||
signature = get_signature(method_)
|
||||
for name, param in signature.parameters.items():
|
||||
if name == "self" or param.annotation is Client:
|
||||
continue
|
||||
if issubclass(param.annotation, BaseModel):
|
||||
call_params.append(param.annotation.model_validate(args.pop(0)))
|
||||
else:
|
||||
call_params.append(args.pop(0))
|
||||
return await method_(client, *call_params)
|
||||
|
||||
async def call(self, client: Client, method: str, *args: Any) -> Any:
|
||||
invocation_id = client._store.get_invocation_id()
|
||||
await client.send_packet(
|
||||
PacketType.INVOCATION,
|
||||
[
|
||||
{}, # header
|
||||
invocation_id,
|
||||
method,
|
||||
list(args),
|
||||
None, # streams
|
||||
],
|
||||
)
|
||||
r = await client._store.fetch(invocation_id, None)
|
||||
if r[0] == ResultKind.HAS_VALUE:
|
||||
return r[1]
|
||||
if r[0] == ResultKind.ERROR:
|
||||
raise InvokeException(r[1])
|
||||
return None
|
||||
|
||||
async def call_noblock(self, client: Client, method: str, *args: Any) -> None:
|
||||
await client.send_packet(
|
||||
PacketType.INVOCATION,
|
||||
[
|
||||
{}, # header
|
||||
None, # invocation_id
|
||||
method,
|
||||
list(args),
|
||||
None, # streams
|
||||
],
|
||||
)
|
||||
return None
|
||||
|
||||
def __contains__(self, item: str) -> bool:
|
||||
return item in self.clients or item in self.waited_clients
|
||||
@@ -1,6 +0,0 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from .hub import Hub
|
||||
|
||||
|
||||
class MetadataHub(Hub): ...
|
||||
@@ -1,15 +0,0 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from app.models.spectator_hub import FrameDataBundle, SpectatorState
|
||||
|
||||
from .hub import Client, Hub
|
||||
|
||||
|
||||
class SpectatorHub(Hub):
|
||||
async def BeginPlaySession(
|
||||
self, client: Client, score_token: int, state: SpectatorState
|
||||
) -> None: ...
|
||||
|
||||
async def SendFrameData(
|
||||
self, client: Client, frame_data: FrameDataBundle
|
||||
) -> None: ...
|
||||
@@ -1,56 +0,0 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from enum import IntEnum
|
||||
from typing import Any
|
||||
|
||||
import msgpack
|
||||
|
||||
SEP = b"\x1e"
|
||||
|
||||
|
||||
class PacketType(IntEnum):
|
||||
INVOCATION = 1
|
||||
STREAM_ITEM = 2
|
||||
COMPLETION = 3
|
||||
STREAM_INVOCATION = 4
|
||||
CANCEL_INVOCATION = 5
|
||||
PING = 6
|
||||
CLOSE = 7
|
||||
|
||||
|
||||
class ResultKind(IntEnum):
|
||||
ERROR = 1
|
||||
VOID = 2
|
||||
HAS_VALUE = 3
|
||||
|
||||
|
||||
def parse_packet(data: bytes) -> tuple[PacketType, list[Any]]:
|
||||
length, offset = decode_varint(data)
|
||||
message_data = data[offset : offset + length]
|
||||
unpacked = msgpack.unpackb(message_data, raw=False)
|
||||
return PacketType(unpacked[0]), unpacked[1:]
|
||||
|
||||
|
||||
def encode_varint(value: int) -> bytes:
|
||||
result = []
|
||||
while value >= 0x80:
|
||||
result.append((value & 0x7F) | 0x80)
|
||||
value >>= 7
|
||||
result.append(value & 0x7F)
|
||||
return bytes(result)
|
||||
|
||||
|
||||
def decode_varint(data: bytes, offset: int = 0) -> tuple[int, int]:
|
||||
result = 0
|
||||
shift = 0
|
||||
pos = offset
|
||||
|
||||
while pos < len(data):
|
||||
byte = data[pos]
|
||||
result |= (byte & 0x7F) << shift
|
||||
pos += 1
|
||||
if (byte & 0x80) == 0:
|
||||
break
|
||||
shift += 7
|
||||
|
||||
return result, pos
|
||||
75
app/router/user.py
Normal file
75
app/router/user.py
Normal file
@@ -0,0 +1,75 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Literal
|
||||
|
||||
from app.database import (
|
||||
User as DBUser,
|
||||
)
|
||||
from app.dependencies import get_current_user
|
||||
from app.dependencies.database import get_db
|
||||
from app.models.score import INT_TO_MODE
|
||||
from app.models.user import (
|
||||
User as ApiUser,
|
||||
)
|
||||
from app.utils import convert_db_user_to_api_user
|
||||
|
||||
from .api_router import router
|
||||
|
||||
from fastapi import Depends, HTTPException, Query
|
||||
from pydantic import BaseModel
|
||||
from sqlmodel.ext.asyncio.session import AsyncSession
|
||||
from sqlmodel.sql.expression import col
|
||||
|
||||
|
||||
@router.get("/users/{user}/{ruleset}", response_model=ApiUser)
|
||||
@router.get("/users/{user}", response_model=ApiUser)
|
||||
async def get_user_info_default(
|
||||
user: str,
|
||||
ruleset: Literal["osu", "taiko", "fruits", "mania"] = "osu",
|
||||
current_user: DBUser = Depends(get_current_user),
|
||||
session: AsyncSession = Depends(get_db),
|
||||
):
|
||||
searched_user = (
|
||||
await session.exec(
|
||||
DBUser.all_select_clause().where(
|
||||
DBUser.id == int(user)
|
||||
if user.isdigit()
|
||||
else DBUser.name == user.removeprefix("@")
|
||||
)
|
||||
)
|
||||
).first()
|
||||
if not searched_user:
|
||||
raise HTTPException(404, detail="User not found")
|
||||
return await convert_db_user_to_api_user(searched_user, ruleset=ruleset)
|
||||
|
||||
|
||||
class BatchUserResponse(BaseModel):
|
||||
users: list[ApiUser]
|
||||
|
||||
|
||||
@router.get("/users", response_model=BatchUserResponse)
|
||||
@router.get("/users/lookup", response_model=BatchUserResponse)
|
||||
async def get_users(
|
||||
user_ids: list[int] = Query(default_factory=list, alias="ids[]"),
|
||||
include_variant_statistics: bool = Query(default=False), # TODO
|
||||
current_user: DBUser = Depends(get_current_user),
|
||||
session: AsyncSession = Depends(get_db),
|
||||
):
|
||||
if user_ids:
|
||||
searched_users = (
|
||||
await session.exec(
|
||||
DBUser.all_select_clause().limit(50).where(col(DBUser.id).in_(user_ids))
|
||||
)
|
||||
).all()
|
||||
else:
|
||||
searched_users = (
|
||||
await session.exec(DBUser.all_select_clause().limit(50))
|
||||
).all()
|
||||
return BatchUserResponse(
|
||||
users=[
|
||||
await convert_db_user_to_api_user(
|
||||
searched_user, ruleset=INT_TO_MODE[current_user.preferred_mode].value
|
||||
)
|
||||
for searched_user in searched_users
|
||||
]
|
||||
)
|
||||
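For reference, the batch endpoint above reads repeated `ids[]` query parameters and caps the result at 50 rows. A hedged example call (router prefix and auth omitted; the ids are illustrative):

import httpx

async def lookup_users(api: httpx.AsyncClient) -> list[dict]:
    # GET /users?ids[]=101&ids[]=102  ->  {"users": [...]}
    resp = await api.get("/users", params=[("ids[]", 101), ("ids[]", 102)])
    return resp.json()["users"]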
301
app/signalr/hub/hub.py
Normal file
301
app/signalr/hub/hub.py
Normal file
@@ -0,0 +1,301 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from abc import abstractmethod
|
||||
import asyncio
|
||||
import time
|
||||
from typing import Any
|
||||
|
||||
from app.config import settings
|
||||
from app.log import logger
|
||||
from app.models.signalr import UserState
|
||||
from app.signalr.exception import InvokeException
|
||||
from app.signalr.packet import (
|
||||
ClosePacket,
|
||||
CompletionPacket,
|
||||
InvocationPacket,
|
||||
Packet,
|
||||
PingPacket,
|
||||
Protocol,
|
||||
)
|
||||
from app.signalr.store import ResultStore
|
||||
from app.signalr.utils import get_signature
|
||||
|
||||
from fastapi import WebSocket
|
||||
from pydantic import BaseModel
|
||||
from starlette.websockets import WebSocketDisconnect
|
||||
|
||||
|
||||
class CloseConnection(Exception):
|
||||
def __init__(
|
||||
self,
|
||||
message: str = "Connection closed",
|
||||
allow_reconnect: bool = False,
|
||||
from_client: bool = False,
|
||||
) -> None:
|
||||
super().__init__(message)
|
||||
self.message = message
|
||||
self.allow_reconnect = allow_reconnect
|
||||
self.from_client = from_client
|
||||
|
||||
|
||||
class Client:
|
||||
def __init__(
|
||||
self,
|
||||
connection_id: str,
|
||||
connection_token: str,
|
||||
connection: WebSocket,
|
||||
protocol: Protocol,
|
||||
) -> None:
|
||||
self.connection_id = connection_id
|
||||
self.connection_token = connection_token
|
||||
self.connection = connection
|
||||
self.protocol = protocol
|
||||
self._listen_task: asyncio.Task | None = None
|
||||
self._ping_task: asyncio.Task | None = None
|
||||
self._store = ResultStore()
|
||||
|
||||
def __hash__(self) -> int:
|
||||
return hash(self.connection_token)
|
||||
|
||||
@property
|
||||
def user_id(self) -> int:
|
||||
return int(self.connection_id)
|
||||
|
||||
async def send_packet(self, packet: Packet):
|
||||
await self.connection.send_bytes(self.protocol.encode(packet))
|
||||
|
||||
async def receive_packets(self) -> list[Packet]:
|
||||
message = await self.connection.receive()
|
||||
d = message.get("bytes") or message.get("text", "").encode()
|
||||
if not d:
|
||||
return []
|
||||
return self.protocol.decode(d)
|
||||
|
||||
async def _ping(self):
|
||||
while True:
|
||||
try:
|
||||
await self.send_packet(PingPacket())
|
||||
await asyncio.sleep(settings.SIGNALR_PING_INTERVAL)
|
||||
except WebSocketDisconnect:
|
||||
break
|
||||
except Exception as e:
|
||||
logger.error(f"Error in ping task for {self.connection_id}: {e}")
|
||||
break
|
||||
|
||||
|
||||
class Hub[TState: UserState]:
|
||||
def __init__(self) -> None:
|
||||
self.clients: dict[str, Client] = {}
|
||||
self.waited_clients: dict[str, int] = {}
|
||||
self.tasks: set[asyncio.Task] = set()
|
||||
self.groups: dict[str, set[Client]] = {}
|
||||
self.state: dict[int, TState] = {}
|
||||
|
||||
def add_waited_client(self, connection_token: str, timestamp: int) -> None:
|
||||
self.waited_clients[connection_token] = timestamp
|
||||
|
||||
def get_client_by_id(self, id: str, default: Any = None) -> Client:
|
||||
for client in self.clients.values():
|
||||
if client.connection_id == id:
|
||||
return client
|
||||
return default
|
||||
|
||||
@abstractmethod
|
||||
def create_state(self, client: Client) -> TState:
|
||||
raise NotImplementedError
|
||||
|
||||
def get_or_create_state(self, client: Client) -> TState:
|
||||
if (state := self.state.get(client.user_id)) is not None:
|
||||
return state
|
||||
state = self.create_state(client)
|
||||
self.state[client.user_id] = state
|
||||
return state
|
||||
|
||||
def add_to_group(self, client: Client, group_id: str) -> None:
|
||||
self.groups.setdefault(group_id, set()).add(client)
|
||||
|
||||
def remove_from_group(self, client: Client, group_id: str) -> None:
|
||||
if group_id in self.groups:
|
||||
self.groups[group_id].discard(client)
|
||||
|
||||
async def add_client(
|
||||
self,
|
||||
connection_id: str,
|
||||
connection_token: str,
|
||||
protocol: Protocol,
|
||||
connection: WebSocket,
|
||||
) -> Client:
|
||||
if connection_token in self.clients:
|
||||
raise ValueError(
|
||||
f"Client with connection token {connection_token} already exists."
|
||||
)
|
||||
if connection_token in self.waited_clients:
|
||||
if (
|
||||
self.waited_clients[connection_token]
|
||||
< time.time() - settings.SIGNALR_NEGOTIATE_TIMEOUT
|
||||
):
|
||||
raise TimeoutError(f"Connection {connection_id} has waited too long.")
|
||||
del self.waited_clients[connection_token]
|
||||
client = Client(connection_id, connection_token, connection, protocol)
|
||||
self.clients[connection_token] = client
|
||||
task = asyncio.create_task(client._ping())
|
||||
self.tasks.add(task)
|
||||
client._ping_task = task
|
||||
return client
|
||||
|
||||
async def remove_client(self, client: Client) -> None:
|
||||
del self.clients[client.connection_token]
|
||||
if client._listen_task:
|
||||
client._listen_task.cancel()
|
||||
if client._ping_task:
|
||||
client._ping_task.cancel()
|
||||
for group in self.groups.values():
|
||||
group.discard(client)
|
||||
await self.clean_state(client, False)
|
||||
|
||||
@abstractmethod
|
||||
async def _clean_state(self, state: TState) -> None:
|
||||
return
|
||||
|
||||
async def clean_state(self, client: Client, disconnected: bool) -> None:
|
||||
if (state := self.state.get(client.user_id)) is None:
|
||||
return
|
||||
if disconnected and client.connection_token != state.connection_token:
|
||||
return
|
||||
try:
|
||||
await self._clean_state(state)
|
||||
except Exception:
|
||||
...
|
||||
|
||||
async def on_connect(self, client: Client) -> None:
|
||||
if method := getattr(self, "on_client_connect", None):
|
||||
await method(client)
|
||||
|
||||
async def send_packet(self, client: Client, packet: Packet) -> None:
|
||||
await client.send_packet(packet)
|
||||
|
||||
async def broadcast_call(self, method: str, *args: Any) -> None:
|
||||
tasks = []
|
||||
for client in self.clients.values():
|
||||
tasks.append(self.call_noblock(client, method, *args))
|
||||
await asyncio.gather(*tasks)
|
||||
|
||||
async def broadcast_group_call(
|
||||
self, group_id: str, method: str, *args: Any
|
||||
) -> None:
|
||||
tasks = []
|
||||
for client in self.groups.get(group_id, []):
|
||||
tasks.append(self.call_noblock(client, method, *args))
|
||||
await asyncio.gather(*tasks)
|
||||
|
||||
async def _listen_client(self, client: Client) -> None:
|
||||
try:
|
||||
while True:
|
||||
packets = await client.receive_packets()
|
||||
for packet in packets:
|
||||
if isinstance(packet, PingPacket):
|
||||
continue
|
||||
elif isinstance(packet, ClosePacket):
|
||||
raise CloseConnection(
|
||||
packet.error or "Connection closed by client",
|
||||
packet.allow_reconnect,
|
||||
True,
|
||||
)
|
||||
task = asyncio.create_task(self._handle_packet(client, packet))
|
||||
self.tasks.add(task)
|
||||
task.add_done_callback(self.tasks.discard)
|
||||
except WebSocketDisconnect as e:
|
||||
logger.info(
|
||||
f"Client {client.connection_id} disconnected: {e.code}, {e.reason}"
|
||||
)
|
||||
except RuntimeError as e:
|
||||
if "disconnect message" in str(e):
|
||||
logger.info(f"Client {client.connection_id} closed the connection.")
|
||||
else:
|
||||
logger.exception(f"RuntimeError in client {client.connection_id}: {e}")
|
||||
except CloseConnection as e:
|
||||
if not e.from_client:
|
||||
await client.send_packet(
|
||||
ClosePacket(error=e.message, allow_reconnect=e.allow_reconnect)
|
||||
)
|
||||
logger.info(
|
||||
f"Client {client.connection_id} closed the connection: {e.message}"
|
||||
)
|
||||
except Exception:
|
||||
logger.exception(f"Error in client {client.connection_id}")
|
||||
|
||||
await self.remove_client(client)
|
||||
|
||||
async def _handle_packet(self, client: Client, packet: Packet) -> None:
|
||||
if isinstance(packet, PingPacket):
|
||||
return
|
||||
elif isinstance(packet, InvocationPacket):
|
||||
args = packet.arguments or []
|
||||
error = None
|
||||
result = None
|
||||
try:
|
||||
result = await self.invoke_method(client, packet.target, args)
|
||||
except InvokeException as e:
|
||||
error = e.message
|
||||
except Exception as e:
|
||||
logger.exception(
|
||||
f"Error invoking method {packet.target} for "
|
||||
f"client {client.connection_id}"
|
||||
)
|
||||
error = str(e)
|
||||
if packet.invocation_id is not None:
|
||||
await client.send_packet(
|
||||
CompletionPacket(
|
||||
invocation_id=packet.invocation_id,
|
||||
error=error,
|
||||
result=result,
|
||||
)
|
||||
)
|
||||
elif isinstance(packet, CompletionPacket):
|
||||
client._store.add_result(packet.invocation_id, packet.result, packet.error)
|
||||
|
||||
async def invoke_method(self, client: Client, method: str, args: list[Any]) -> Any:
|
||||
method_ = getattr(self, method, None)
|
||||
call_params = []
|
||||
if not method_:
|
||||
raise InvokeException(f"Method '{method}' not found in hub.")
|
||||
signature = get_signature(method_)
|
||||
for name, param in signature.parameters.items():
|
||||
if name == "self" or param.annotation is Client:
|
||||
continue
|
||||
if issubclass(param.annotation, BaseModel):
|
||||
call_params.append(param.annotation.model_validate(args.pop(0)))
|
||||
else:
|
||||
call_params.append(args.pop(0))
|
||||
return await method_(client, *call_params)
|
||||
|
||||
async def call(self, client: Client, method: str, *args: Any) -> Any:
|
||||
invocation_id = client._store.get_invocation_id()
|
||||
await client.send_packet(
|
||||
InvocationPacket(
|
||||
header={},
|
||||
invocation_id=invocation_id,
|
||||
target=method,
|
||||
arguments=list(args),
|
||||
stream_ids=None,
|
||||
)
|
||||
)
|
||||
r = await client._store.fetch(invocation_id, None)
|
||||
if r[1]:
|
||||
raise InvokeException(r[1])
|
||||
return r[0]
|
||||
|
||||
async def call_noblock(self, client: Client, method: str, *args: Any) -> None:
|
||||
await client.send_packet(
|
||||
InvocationPacket(
|
||||
header={},
|
||||
invocation_id=None,
|
||||
target=method,
|
||||
arguments=list(args),
|
||||
stream_ids=None,
|
||||
)
|
||||
)
|
||||
return None
|
||||
|
||||
def __contains__(self, item: str) -> bool:
|
||||
return item in self.clients or item in self.waited_clients
|
||||
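A minimal sketch of how a concrete hub builds on the generic Hub above: subclasses implement create_state/_clean_state and expose methods whose names match the client-side invocation targets, and invoke_method binds positional arguments to the signature, validating any pydantic-model parameters. EchoHub and EchoState below are illustrative names, not part of the codebase:

class EchoState(UserState):
    pass

class EchoHub(Hub[EchoState]):
    def create_state(self, client: Client) -> EchoState:
        # Assumes UserState carries the connection identifiers, as the
        # concrete hubs in this commit do.
        return EchoState(
            connection_id=client.connection_id,
            connection_token=client.connection_token,
        )

    async def _clean_state(self, state: EchoState) -> None:
        return

    # Invoked by clients as "Echo"; the str argument is bound by invoke_method.
    async def Echo(self, client: Client, message: str) -> str:
        return message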
151
app/signalr/hub/metadata.py
Normal file
151
app/signalr/hub/metadata.py
Normal file
@@ -0,0 +1,151 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
from collections.abc import Coroutine
|
||||
from typing import override
|
||||
|
||||
from app.database.relationship import Relationship, RelationshipType
|
||||
from app.dependencies.database import engine
|
||||
from app.models.metadata_hub import MetadataClientState, OnlineStatus, UserActivity
|
||||
|
||||
from .hub import Client, Hub
|
||||
|
||||
from pydantic import TypeAdapter
|
||||
from sqlmodel import select
|
||||
from sqlmodel.ext.asyncio.session import AsyncSession
|
||||
|
||||
ONLINE_PRESENCE_WATCHERS_GROUP = "metadata:online-presence-watchers"
|
||||
|
||||
|
||||
class MetadataHub(Hub[MetadataClientState]):
|
||||
def __init__(self) -> None:
|
||||
super().__init__()
|
||||
|
||||
@staticmethod
|
||||
def online_presence_watchers_group() -> str:
|
||||
return ONLINE_PRESENCE_WATCHERS_GROUP
|
||||
|
||||
def broadcast_tasks(
|
||||
self, user_id: int, store: MetadataClientState | None
|
||||
) -> set[Coroutine]:
|
||||
if store is not None and not store.pushable:
|
||||
return set()
|
||||
data = store.to_dict() if store else None
|
||||
return {
|
||||
self.broadcast_group_call(
|
||||
self.online_presence_watchers_group(),
|
||||
"UserPresenceUpdated",
|
||||
user_id,
|
||||
data,
|
||||
),
|
||||
self.broadcast_group_call(
|
||||
self.friend_presence_watchers_group(user_id),
|
||||
"FriendPresenceUpdated",
|
||||
user_id,
|
||||
data,
|
||||
),
|
||||
}
|
||||
|
||||
@staticmethod
|
||||
def friend_presence_watchers_group(user_id: int):
|
||||
return f"metadata:friend-presence-watchers:{user_id}"
|
||||
|
||||
@override
|
||||
async def _clean_state(self, state: MetadataClientState) -> None:
|
||||
if state.pushable:
|
||||
await asyncio.gather(*self.broadcast_tasks(int(state.connection_id), None))
|
||||
|
||||
@override
|
||||
def create_state(self, client: Client) -> MetadataClientState:
|
||||
return MetadataClientState(
|
||||
connection_id=client.connection_id,
|
||||
connection_token=client.connection_token,
|
||||
)
|
||||
|
||||
async def on_client_connect(self, client: Client) -> None:
|
||||
user_id = int(client.connection_id)
|
||||
self.get_or_create_state(client)
|
||||
|
||||
async with AsyncSession(engine) as session:
|
||||
async with session.begin():
|
||||
friends = (
|
||||
await session.exec(
|
||||
select(Relationship.target_id).where(
|
||||
Relationship.user_id == user_id,
|
||||
Relationship.type == RelationshipType.FOLLOW,
|
||||
)
|
||||
)
|
||||
).all()
|
||||
tasks = []
|
||||
for friend_id in friends:
|
||||
self.groups.setdefault(
|
||||
self.friend_presence_watchers_group(friend_id), set()
|
||||
).add(client)
|
||||
if (
|
||||
friend_state := self.state.get(friend_id)
|
||||
) and friend_state.pushable:
|
||||
tasks.append(
|
||||
self.broadcast_group_call(
|
||||
self.friend_presence_watchers_group(friend_id),
|
||||
"FriendPresenceUpdated",
|
||||
friend_id,
|
||||
friend_state.to_dict(),
|
||||
)
|
||||
)
|
||||
await asyncio.gather(*tasks)
|
||||
|
||||
async def UpdateStatus(self, client: Client, status: int) -> None:
|
||||
status_ = OnlineStatus(status)
|
||||
user_id = int(client.connection_id)
|
||||
store = self.get_or_create_state(client)
|
||||
if store.status is not None and store.status == status_:
|
||||
return
|
||||
store.status = OnlineStatus(status_)
|
||||
tasks = self.broadcast_tasks(user_id, store)
|
||||
tasks.add(
|
||||
self.call_noblock(
|
||||
client,
|
||||
"UserPresenceUpdated",
|
||||
user_id,
|
||||
store.to_dict(),
|
||||
)
|
||||
)
|
||||
await asyncio.gather(*tasks)
|
||||
|
||||
async def UpdateActivity(self, client: Client, activity_dict: dict | None) -> None:
|
||||
user_id = int(client.connection_id)
|
||||
activity = (
|
||||
TypeAdapter(UserActivity).validate_python(activity_dict)
|
||||
if activity_dict
|
||||
else None
|
||||
)
|
||||
store = self.get_or_create_state(client)
|
||||
store.user_activity = activity
|
||||
tasks = self.broadcast_tasks(user_id, store)
|
||||
tasks.add(
|
||||
self.call_noblock(
|
||||
client,
|
||||
"UserPresenceUpdated",
|
||||
user_id,
|
||||
store.to_dict(),
|
||||
)
|
||||
)
|
||||
await asyncio.gather(*tasks)
|
||||
|
||||
async def BeginWatchingUserPresence(self, client: Client) -> None:
|
||||
await asyncio.gather(
|
||||
*[
|
||||
self.call_noblock(
|
||||
client,
|
||||
"UserPresenceUpdated",
|
||||
user_id,
|
||||
store.to_dict(),
|
||||
)
|
||||
for user_id, store in self.state.items()
|
||||
if store.pushable
|
||||
]
|
||||
)
|
||||
self.add_to_group(client, self.online_presence_watchers_group())
|
||||
|
||||
async def EndWatchingUserPresence(self, client: Client) -> None:
|
||||
self.remove_from_group(client, self.online_presence_watchers_group())
|
||||
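To make the group layout above concrete, every presence change fans out to the global watcher group and to a per-user friend group; the group names produced by the two helpers look like this (ids are illustrative):

MetadataHub.online_presence_watchers_group()    # "metadata:online-presence-watchers"
MetadataHub.friend_presence_watchers_group(42)  # "metadata:friend-presence-watchers:42"

UpdateStatus and UpdateActivity then broadcast "UserPresenceUpdated" / "FriendPresenceUpdated" to those groups and echo the new state back to the caller via call_noblock.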
355
app/signalr/hub/spectator.py
Normal file
355
app/signalr/hub/spectator.py
Normal file
@@ -0,0 +1,355 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import json
|
||||
import lzma
|
||||
import struct
|
||||
import time
|
||||
from typing import override
|
||||
|
||||
from app.database import Beatmap
|
||||
from app.database.score import Score
|
||||
from app.database.score_token import ScoreToken
|
||||
from app.database.user import User
|
||||
from app.dependencies.database import engine
|
||||
from app.models.beatmap import BeatmapRankStatus
|
||||
from app.models.mods import mods_to_int
|
||||
from app.models.score import LegacyReplaySoloScoreInfo, ScoreStatisticsInt
|
||||
from app.models.signalr import serialize_to_list
|
||||
from app.models.spectator_hub import (
|
||||
APIUser,
|
||||
FrameDataBundle,
|
||||
LegacyReplayFrame,
|
||||
ScoreInfo,
|
||||
SpectatedUserState,
|
||||
SpectatorState,
|
||||
StoreClientState,
|
||||
StoreScore,
|
||||
)
|
||||
from app.path import REPLAY_DIR
|
||||
from app.utils import unix_timestamp_to_windows
|
||||
|
||||
from .hub import Client, Hub
|
||||
|
||||
from sqlalchemy.orm import joinedload
|
||||
from sqlmodel import select
|
||||
from sqlmodel.ext.asyncio.session import AsyncSession
|
||||
|
||||
READ_SCORE_TIMEOUT = 30
|
||||
REPLAY_LATEST_VER = 30000016
|
||||
|
||||
|
||||
def encode_uleb128(num: int) -> bytes | bytearray:
|
||||
if num == 0:
|
||||
return b"\x00"
|
||||
|
||||
ret = bytearray()
|
||||
|
||||
while num != 0:
|
||||
ret.append(num & 0x7F)
|
||||
num >>= 7
|
||||
if num != 0:
|
||||
ret[-1] |= 0x80
|
||||
|
||||
return ret
|
||||
|
||||
|
||||
def encode_string(s: str) -> bytes:
|
||||
"""Write `s` into bytes (ULEB128 & string)."""
|
||||
if s:
|
||||
encoded = s.encode()
|
||||
ret = b"\x0b" + encode_uleb128(len(encoded)) + encoded
|
||||
else:
|
||||
ret = b"\x00"
|
||||
|
||||
return ret
|
||||
|
||||
|
||||
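# A worked example of the two helpers above (a sketch; the byte values follow
# directly from the code):
#   encode_uleb128(300)  == b"\xac\x02"     # 300 -> 0x2C | 0x80, then 0x02
#   encode_string("abc") == b"\x0b\x03abc"  # 0x0b marker, ULEB128 length, raw bytes
#   encode_string("")    == b"\x00"         # empty strings collapse to a single 0x00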
def save_replay(
|
||||
ruleset_id: int,
|
||||
md5: str,
|
||||
username: str,
|
||||
score: Score,
|
||||
statistics: ScoreStatisticsInt,
|
||||
maximum_statistics: ScoreStatisticsInt,
|
||||
frames: list[LegacyReplayFrame],
|
||||
) -> None:
|
||||
data = bytearray()
|
||||
data.extend(struct.pack("<bi", ruleset_id, REPLAY_LATEST_VER))
|
||||
data.extend(encode_string(md5))
|
||||
data.extend(encode_string(username))
|
||||
data.extend(encode_string(f"lazer-{username}-{score.started_at.isoformat()}"))
|
||||
data.extend(
|
||||
struct.pack(
|
||||
"<hhhhhhihbi",
|
||||
score.n300,
|
||||
score.n100,
|
||||
score.n50,
|
||||
score.ngeki,
|
||||
score.nkatu,
|
||||
score.nmiss,
|
||||
score.total_score,
|
||||
score.max_combo,
|
||||
score.is_perfect_combo,
|
||||
mods_to_int(score.mods),
|
||||
)
|
||||
)
|
||||
data.extend(encode_string("")) # hp graph
|
||||
data.extend(
|
||||
struct.pack(
|
||||
"<q",
|
||||
unix_timestamp_to_windows(round(score.started_at.timestamp())),
|
||||
)
|
||||
)
|
||||
|
||||
# write frames
|
||||
# FIXME: cannot play in stable
|
||||
frame_strs = []
|
||||
last_time = 0
|
||||
for frame in frames:
|
||||
frame_strs.append(
|
||||
f"{frame.time - last_time}|{frame.x or 0.0}"
|
||||
f"|{frame.y or 0.0}|{frame.button_state}"
|
||||
)
|
||||
last_time = frame.time
|
||||
frame_strs.append("-12345|0|0|0")
|
||||
|
||||
compressed = lzma.compress(
|
||||
",".join(frame_strs).encode("ascii"), format=lzma.FORMAT_ALONE
|
||||
)
|
||||
data.extend(struct.pack("<i", len(compressed)))
|
||||
data.extend(compressed)
|
||||
data.extend(struct.pack("<q", score.id))
|
||||
assert score.id
|
||||
score_info = LegacyReplaySoloScoreInfo(
|
||||
online_id=score.id,
|
||||
mods=score.mods,
|
||||
statistics=statistics,
|
||||
maximum_statistics=maximum_statistics,
|
||||
client_version="",
|
||||
rank=score.rank,
|
||||
user_id=score.user_id,
|
||||
total_score_without_mods=score.total_score_without_mods,
|
||||
)
|
||||
compressed = lzma.compress(
|
||||
json.dumps(score_info).encode(), format=lzma.FORMAT_ALONE
|
||||
)
|
||||
data.extend(struct.pack("<i", len(compressed)))
|
||||
data.extend(compressed)
|
||||
|
||||
replay_path = REPLAY_DIR / f"lazer-{score.type}-{username}-{score.id}.osr"
|
||||
replay_path.write_bytes(data)
|
||||
|
||||
|
||||
class SpectatorHub(Hub[StoreClientState]):
|
||||
@staticmethod
|
||||
def group_id(user_id: int) -> str:
|
||||
return f"watch:{user_id}"
|
||||
|
||||
@override
|
||||
def create_state(self, client: Client) -> StoreClientState:
|
||||
return StoreClientState(
|
||||
connection_id=client.connection_id,
|
||||
connection_token=client.connection_token,
|
||||
)
|
||||
|
||||
@override
|
||||
async def _clean_state(self, state: StoreClientState) -> None:
|
||||
if state.state:
|
||||
await self._end_session(int(state.connection_id), state.state)
|
||||
for target in self.waited_clients:
|
||||
target_client = self.get_client_by_id(target)
|
||||
if target_client:
|
||||
await self.call_noblock(
|
||||
target_client, "UserEndedWatching", int(state.connection_id)
|
||||
)
|
||||
|
||||
async def on_client_connect(self, client: Client) -> None:
|
||||
tasks = [
|
||||
self.call_noblock(
|
||||
client, "UserBeganPlaying", user_id, serialize_to_list(store.state)
|
||||
)
|
||||
for user_id, store in self.state.items()
|
||||
if store.state is not None
|
||||
]
|
||||
await asyncio.gather(*tasks)
|
||||
|
||||
async def BeginPlaySession(
|
||||
self, client: Client, score_token: int, state: SpectatorState
|
||||
) -> None:
|
||||
user_id = int(client.connection_id)
|
||||
store = self.get_or_create_state(client)
|
||||
if store.state is not None:
|
||||
return
|
||||
if state.beatmap_id is None or state.ruleset_id is None:
|
||||
return
|
||||
async with AsyncSession(engine) as session:
|
||||
async with session.begin():
|
||||
beatmap = (
|
||||
await session.exec(
|
||||
select(Beatmap).where(Beatmap.id == state.beatmap_id)
|
||||
)
|
||||
).first()
|
||||
if not beatmap:
|
||||
return
|
||||
user = (
|
||||
await session.exec(select(User).where(User.id == user_id))
|
||||
).first()
|
||||
if not user:
|
||||
return
|
||||
name = user.name
|
||||
store.state = state
|
||||
store.beatmap_status = beatmap.beatmap_status
|
||||
store.checksum = beatmap.checksum
|
||||
store.ruleset_id = state.ruleset_id
|
||||
store.score_token = score_token
|
||||
store.score = StoreScore(
|
||||
score_info=ScoreInfo(
|
||||
mods=state.mods,
|
||||
user=APIUser(id=user_id, name=name),
|
||||
ruleset=state.ruleset_id,
|
||||
maximum_statistics=state.maximum_statistics,
|
||||
)
|
||||
)
|
||||
await self.broadcast_group_call(
|
||||
self.group_id(user_id),
|
||||
"UserBeganPlaying",
|
||||
user_id,
|
||||
serialize_to_list(state),
|
||||
)
|
||||
|
||||
async def SendFrameData(self, client: Client, frame_data: FrameDataBundle) -> None:
|
||||
user_id = int(client.connection_id)
|
||||
state = self.get_or_create_state(client)
|
||||
if not state.score:
|
||||
return
|
||||
state.score.score_info.acc = frame_data.header.acc
|
||||
state.score.score_info.combo = frame_data.header.combo
|
||||
state.score.score_info.max_combo = frame_data.header.max_combo
|
||||
state.score.score_info.statistics = frame_data.header.statistics
|
||||
state.score.score_info.total_score = frame_data.header.total_score
|
||||
state.score.score_info.mods = frame_data.header.mods
|
||||
state.score.replay_frames.extend(frame_data.frames)
|
||||
await self.broadcast_group_call(
|
||||
self.group_id(user_id),
|
||||
"UserSentFrames",
|
||||
user_id,
|
||||
frame_data.model_dump(),
|
||||
)
|
||||
|
||||
async def EndPlaySession(self, client: Client, state: SpectatorState) -> None:
|
||||
user_id = int(client.connection_id)
|
||||
store = self.get_or_create_state(client)
|
||||
score = store.score
|
||||
if not score or not store.score_token:
|
||||
return
|
||||
|
||||
assert store.beatmap_status is not None
|
||||
|
||||
async def _save_replay():
|
||||
assert store.checksum is not None
|
||||
assert store.ruleset_id is not None
|
||||
assert store.state is not None
|
||||
assert store.score is not None
|
||||
async with AsyncSession(engine) as session:
|
||||
async with session:
|
||||
start_time = time.time()
|
||||
score_record = None
|
||||
while time.time() - start_time < READ_SCORE_TIMEOUT:
|
||||
sub_query = select(ScoreToken.score_id).where(
|
||||
ScoreToken.id == store.score_token,
|
||||
)
|
||||
result = await session.exec(
|
||||
select(Score)
|
||||
.options(joinedload(Score.beatmap)) # pyright: ignore[reportArgumentType]
|
||||
.where(
|
||||
Score.id == sub_query,
|
||||
Score.user_id == user_id,
|
||||
)
|
||||
)
|
||||
score_record = result.first()
|
||||
if score_record:
|
||||
break
|
||||
if not score_record:
|
||||
return
|
||||
if not score_record.passed:
|
||||
return
|
||||
score_record.has_replay = True
|
||||
await session.commit()
|
||||
await session.refresh(score_record)
|
||||
save_replay(
|
||||
ruleset_id=store.ruleset_id,
|
||||
md5=store.checksum,
|
||||
username=store.score.score_info.user.name,
|
||||
score=score_record,
|
||||
statistics=score.score_info.statistics,
|
||||
maximum_statistics=score.score_info.maximum_statistics,
|
||||
frames=score.replay_frames,
|
||||
)
|
||||
|
||||
if (
|
||||
(
|
||||
BeatmapRankStatus.PENDING
|
||||
< store.beatmap_status
|
||||
<= BeatmapRankStatus.LOVED
|
||||
)
|
||||
and any(
|
||||
k.is_hit() and v > 0 for k, v in score.score_info.statistics.items()
|
||||
)
|
||||
and state.state != SpectatedUserState.Failed
|
||||
):
|
||||
# save replay
|
||||
await _save_replay()
|
||||
store.state = None
|
||||
store.beatmap_status = None
|
||||
store.checksum = None
|
||||
store.ruleset_id = None
|
||||
store.score_token = None
|
||||
store.score = None
|
||||
await self._end_session(user_id, state)
|
||||
|
||||
async def _end_session(self, user_id: int, state: SpectatorState) -> None:
|
||||
if state.state == SpectatedUserState.Playing:
|
||||
state.state = SpectatedUserState.Quit
|
||||
await self.broadcast_group_call(
|
||||
self.group_id(user_id),
|
||||
"UserFinishedPlaying",
|
||||
user_id,
|
||||
serialize_to_list(state) if state else None,
|
||||
)
|
||||
|
||||
async def StartWatchingUser(self, client: Client, target_id: int) -> None:
|
||||
user_id = int(client.connection_id)
|
||||
target_store = self.get_or_create_state(client)
|
||||
if target_store.state:
|
||||
await self.call_noblock(
|
||||
client,
|
||||
"UserBeganPlaying",
|
||||
target_id,
|
||||
serialize_to_list(target_store.state),
|
||||
)
|
||||
store = self.get_or_create_state(client)
|
||||
store.watched_user.add(target_id)
|
||||
|
||||
self.add_to_group(client, self.group_id(target_id))
|
||||
|
||||
async with AsyncSession(engine) as session:
|
||||
async with session.begin():
|
||||
username = (
|
||||
await session.exec(select(User.name).where(User.id == user_id))
|
||||
).first()
|
||||
if not username:
|
||||
return
|
||||
if (target_client := self.get_client_by_id(str(target_id))) is not None:
|
||||
await self.call_noblock(
|
||||
target_client, "UserStartedWatching", [[user_id, username]]
|
||||
)
|
||||
|
||||
async def EndWatchingUser(self, client: Client, target_id: int) -> None:
|
||||
user_id = int(client.connection_id)
|
||||
self.remove_from_group(client, self.group_id(target_id))
|
||||
store = self.state.get(user_id)
|
||||
if store:
|
||||
store.watched_user.discard(target_id)
|
||||
if (target_client := self.get_client_by_id(str(target_id))) is not None:
|
||||
await self.call_noblock(target_client, "UserEndedWatching", user_id)
|
||||
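Putting the SpectatorHub methods together, the spectating round-trip looks roughly like this. A sketch only; the client objects and payloads are stand-ins for real connections, and in practice these methods are driven by incoming invocations rather than called directly:

async def spectate_round_trip(
    hub: SpectatorHub,
    player_client: Client,
    watcher_client: Client,
    player_id: int,
    score_token: int,
    state: SpectatorState,
    bundle: FrameDataBundle,
) -> None:
    # Player side (connection_id is the player's user id as a string).
    await hub.BeginPlaySession(player_client, score_token, state)
    await hub.SendFrameData(player_client, bundle)  # repeated throughout play
    await hub.EndPlaySession(player_client, state)  # saves the replay when eligible
    # Watcher side: joins the "watch:{player_id}" group and from then on receives
    # UserBeganPlaying / UserSentFrames / UserFinishedPlaying callbacks.
    await hub.StartWatchingUser(watcher_client, player_id)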
277
app/signalr/packet.py
Normal file
277
app/signalr/packet.py
Normal file
@@ -0,0 +1,277 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
from enum import IntEnum
|
||||
import json
|
||||
from typing import (
|
||||
Any,
|
||||
Protocol as TypingProtocol,
|
||||
)
|
||||
|
||||
import msgpack
|
||||
|
||||
SEP = b"\x1e"
|
||||
|
||||
|
||||
class PacketType(IntEnum):
|
||||
INVOCATION = 1
|
||||
STREAM_ITEM = 2
|
||||
COMPLETION = 3
|
||||
STREAM_INVOCATION = 4
|
||||
CANCEL_INVOCATION = 5
|
||||
PING = 6
|
||||
CLOSE = 7
|
||||
|
||||
|
||||
@dataclass(kw_only=True)
|
||||
class Packet:
|
||||
type: PacketType
|
||||
header: dict[str, Any] | None = None
|
||||
|
||||
|
||||
@dataclass(kw_only=True)
|
||||
class InvocationPacket(Packet):
|
||||
type: PacketType = PacketType.INVOCATION
|
||||
invocation_id: str | None
|
||||
target: str
|
||||
arguments: list[Any] | None = None
|
||||
stream_ids: list[str] | None = None
|
||||
|
||||
|
||||
@dataclass(kw_only=True)
|
||||
class CompletionPacket(Packet):
|
||||
type: PacketType = PacketType.COMPLETION
|
||||
invocation_id: str
|
||||
result: Any
|
||||
error: str | None = None
|
||||
|
||||
|
||||
@dataclass(kw_only=True)
|
||||
class PingPacket(Packet):
|
||||
type: PacketType = PacketType.PING
|
||||
|
||||
|
||||
@dataclass(kw_only=True)
|
||||
class ClosePacket(Packet):
|
||||
type: PacketType = PacketType.CLOSE
|
||||
error: str | None = None
|
||||
allow_reconnect: bool = False
|
||||
|
||||
|
||||
PACKETS = {
|
||||
PacketType.INVOCATION: InvocationPacket,
|
||||
PacketType.COMPLETION: CompletionPacket,
|
||||
PacketType.PING: PingPacket,
|
||||
PacketType.CLOSE: ClosePacket,
|
||||
}
|
||||
|
||||
|
||||
class Protocol(TypingProtocol):
|
||||
@staticmethod
|
||||
def decode(input: bytes) -> list[Packet]: ...
|
||||
|
||||
@staticmethod
|
||||
def encode(packet: Packet) -> bytes: ...
|
||||
|
||||
|
||||
class MsgpackProtocol:
|
||||
@staticmethod
|
||||
def _encode_varint(value: int) -> bytes:
|
||||
result = []
|
||||
while value >= 0x80:
|
||||
result.append((value & 0x7F) | 0x80)
|
||||
value >>= 7
|
||||
result.append(value & 0x7F)
|
||||
return bytes(result)
|
||||
|
||||
@staticmethod
|
||||
def _decode_varint(data: bytes, offset: int = 0) -> tuple[int, int]:
|
||||
result = 0
|
||||
shift = 0
|
||||
pos = offset
|
||||
|
||||
while pos < len(data):
|
||||
byte = data[pos]
|
||||
result |= (byte & 0x7F) << shift
|
||||
pos += 1
|
||||
if (byte & 0x80) == 0:
|
||||
break
|
||||
shift += 7
|
||||
|
||||
return result, pos
|
||||
|
||||
@staticmethod
|
||||
def decode(input: bytes) -> list[Packet]:
|
||||
length, offset = MsgpackProtocol._decode_varint(input)
|
||||
message_data = input[offset : offset + length]
|
||||
# FIXME: custom deserializer for APIMod
|
||||
# https://github.com/ppy/osu/blob/master/osu.Game/Online/API/ModSettingsDictionaryFormatter.cs
|
||||
unpacked = msgpack.unpackb(
|
||||
message_data, raw=False, strict_map_key=False, use_list=True
|
||||
)
|
||||
packet_type = PacketType(unpacked[0])
|
||||
if packet_type not in PACKETS:
|
||||
raise ValueError(f"Unknown packet type: {packet_type}")
|
||||
match packet_type:
|
||||
case PacketType.INVOCATION:
|
||||
return [
|
||||
InvocationPacket(
|
||||
header=unpacked[1],
|
||||
invocation_id=unpacked[2],
|
||||
target=unpacked[3],
|
||||
arguments=unpacked[4] if len(unpacked) > 4 else None,
|
||||
stream_ids=unpacked[5] if len(unpacked) > 5 else None,
|
||||
)
|
||||
]
|
||||
case PacketType.COMPLETION:
|
||||
result_kind = unpacked[3]
|
||||
return [
|
||||
CompletionPacket(
|
||||
header=unpacked[1],
|
||||
invocation_id=unpacked[2],
|
||||
error=unpacked[4] if result_kind == 1 else None,
|
||||
result=unpacked[4] if result_kind == 3 else None,
|
||||
)
|
||||
]
|
||||
case PacketType.PING:
|
||||
return [PingPacket()]
|
||||
case PacketType.CLOSE:
|
||||
return [
|
||||
ClosePacket(
|
||||
error=unpacked[1],
|
||||
allow_reconnect=unpacked[2] if len(unpacked) > 2 else False,
|
||||
)
|
||||
]
|
||||
raise ValueError(f"Unsupported packet type: {packet_type}")
|
||||
|
||||
@staticmethod
|
||||
def encode(packet: Packet) -> bytes:
|
||||
payload = [packet.type.value, packet.header or {}]
|
||||
if isinstance(packet, InvocationPacket):
|
||||
payload.extend(
|
||||
[
|
||||
packet.invocation_id,
|
||||
packet.target,
|
||||
]
|
||||
)
|
||||
if packet.arguments is not None:
|
||||
payload.append(packet.arguments)
|
||||
if packet.stream_ids is not None:
|
||||
payload.append(packet.stream_ids)
|
||||
elif isinstance(packet, CompletionPacket):
|
||||
result_kind = 2
|
||||
if packet.error:
|
||||
result_kind = 1
|
||||
elif packet.result is not None:
|
||||
result_kind = 3
|
||||
payload.extend(
|
||||
[
|
||||
packet.invocation_id,
|
||||
result_kind,
|
||||
packet.error or packet.result or None,
|
||||
]
|
||||
)
|
||||
elif isinstance(packet, ClosePacket):
|
||||
payload.extend(
|
||||
[
|
||||
packet.error or "",
|
||||
packet.allow_reconnect,
|
||||
]
|
||||
)
|
||||
elif isinstance(packet, PingPacket):
|
||||
payload.pop(-1)
|
||||
data = msgpack.packb(payload, use_bin_type=True, datetime=True)
|
||||
return MsgpackProtocol._encode_varint(len(data)) + data
|
||||
|
||||
|
||||
class JSONProtocol:
|
||||
@staticmethod
|
||||
def decode(input: bytes) -> list[Packet]:
|
||||
packets_raw = input.removesuffix(SEP).split(SEP)
|
||||
packets = []
|
||||
if len(packets_raw) > 1:
|
||||
for packet_raw in packets_raw:
|
||||
packets.extend(JSONProtocol.decode(packet_raw))
|
||||
return packets
|
||||
else:
|
||||
data = json.loads(packets_raw[0])
|
||||
packet_type = PacketType(data["type"])
|
||||
if packet_type not in PACKETS:
|
||||
raise ValueError(f"Unknown packet type: {packet_type}")
|
||||
match packet_type:
|
||||
case PacketType.INVOCATION:
|
||||
return [
|
||||
InvocationPacket(
|
||||
header=data.get("header"),
|
||||
invocation_id=data.get("invocationId"),
|
||||
target=data["target"],
|
||||
arguments=data.get("arguments"),
|
||||
stream_ids=data.get("streamIds"),
|
||||
)
|
||||
]
|
||||
case PacketType.COMPLETION:
|
||||
return [
|
||||
CompletionPacket(
|
||||
header=data.get("header"),
|
||||
invocation_id=data["invocationId"],
|
||||
error=data.get("error"),
|
||||
result=data.get("result"),
|
||||
)
|
||||
]
|
||||
case PacketType.PING:
|
||||
return [PingPacket()]
|
||||
case PacketType.CLOSE:
|
||||
return [
|
||||
ClosePacket(
|
||||
error=data.get("error"),
|
||||
allow_reconnect=data.get("allowReconnect", False),
|
||||
)
|
||||
]
|
||||
raise ValueError(f"Unsupported packet type: {packet_type}")
|
||||
|
||||
@staticmethod
|
||||
def encode(packet: Packet) -> bytes:
|
||||
payload: dict[str, Any] = {
|
||||
"type": packet.type.value,
|
||||
}
|
||||
if packet.header:
|
||||
payload["header"] = packet.header
|
||||
if isinstance(packet, InvocationPacket):
|
||||
payload.update(
|
||||
{
|
||||
"target": packet.target,
|
||||
}
|
||||
)
|
||||
if packet.invocation_id is not None:
|
||||
payload["invocationId"] = packet.invocation_id
|
||||
if packet.arguments is not None:
|
||||
payload["arguments"] = packet.arguments
|
||||
if packet.stream_ids is not None:
|
||||
payload["streamIds"] = packet.stream_ids
|
||||
elif isinstance(packet, CompletionPacket):
|
||||
payload.update(
|
||||
{
|
||||
"invocationId": packet.invocation_id,
|
||||
}
|
||||
)
|
||||
if packet.error is not None:
|
||||
payload["error"] = packet.error
|
||||
if packet.result is not None:
|
||||
payload["result"] = packet.result
|
||||
elif isinstance(packet, PingPacket):
|
||||
pass
|
||||
elif isinstance(packet, ClosePacket):
|
||||
payload.update(
|
||||
{
|
||||
"allowReconnect": packet.allow_reconnect,
|
||||
}
|
||||
)
|
||||
if packet.error is not None:
|
||||
payload["error"] = packet.error
|
||||
return json.dumps(payload).encode("utf-8") + SEP
|
||||
|
||||
|
||||
PROTOCOLS: dict[str, Protocol] = {
|
||||
"json": JSONProtocol,
|
||||
"messagepack": MsgpackProtocol,
|
||||
}
|
||||
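The MessagePack transport above length-prefixes every message with a varint. A small sketch of what that produces; the byte values follow directly from the code:

from app.signalr.packet import MsgpackProtocol, PingPacket

# Varint framing round-trip:
assert MsgpackProtocol._encode_varint(300) == b"\xac\x02"
assert MsgpackProtocol._decode_varint(b"\xac\x02") == (300, 2)

# A ping is the one-element msgpack array [6], prefixed with its encoded length:
assert MsgpackProtocol.encode(PingPacket()) == b"\x02\x91\x06"
assert MsgpackProtocol.decode(b"\x02\x91\x06") == [PingPacket()]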
@@ -1,5 +1,6 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import json
|
||||
import time
|
||||
from typing import Literal
|
||||
@@ -10,9 +11,9 @@ from app.dependencies import get_current_user
|
||||
from app.dependencies.database import get_db
|
||||
from app.dependencies.user import get_current_user_by_token
|
||||
from app.models.signalr import NegotiateResponse, Transport
|
||||
from app.router.signalr.packet import SEP
|
||||
|
||||
from .hub import Hubs
|
||||
from .packet import PROTOCOLS, SEP
|
||||
|
||||
from fastapi import APIRouter, Depends, Header, Query, WebSocket
|
||||
from sqlmodel.ext.asyncio.session import AsyncSession
|
||||
@@ -62,30 +63,41 @@ async def connect(
|
||||
await websocket.accept()
|
||||
|
||||
# handshake
|
||||
handshake = await websocket.receive_bytes()
|
||||
handshake_payload = json.loads(handshake[:-1])
|
||||
handshake = await websocket.receive()
|
||||
message = handshake.get("bytes") or handshake.get("text")
|
||||
if not message:
|
||||
await websocket.close(code=1008)
|
||||
return
|
||||
handshake_payload = json.loads(message[:-1])
|
||||
error = ""
|
||||
if (protocol := handshake_payload.get("protocol")) != "messagepack" or (
|
||||
handshake_payload.get("version")
|
||||
) != 1:
|
||||
error = f"Requested protocol '{protocol}' is not available."
|
||||
protocol = handshake_payload.get("protocol", "json")
|
||||
|
||||
client = None
|
||||
try:
|
||||
client = hub_.add_client(
|
||||
client = await hub_.add_client(
|
||||
connection_id=user_id,
|
||||
connection_token=id,
|
||||
connection=websocket,
|
||||
protocol=PROTOCOLS[protocol],
|
||||
)
|
||||
except KeyError:
|
||||
error = f"Protocol '{protocol}' is not supported."
|
||||
except TimeoutError:
|
||||
error = f"Connection {id} has waited too long."
|
||||
except ValueError as e:
|
||||
error = str(e)
|
||||
payload = {"error": error} if error else {}
|
||||
|
||||
# finish handshake
|
||||
await websocket.send_bytes(json.dumps(payload).encode() + SEP)
|
||||
if error or not client:
|
||||
await websocket.close(code=1008)
|
||||
return
|
||||
await hub_.clean_state(client, False)
|
||||
task = asyncio.create_task(hub_.on_connect(client))
|
||||
hub_.tasks.add(task)
|
||||
task.add_done_callback(hub_.tasks.discard)
|
||||
await hub_._listen_client(client)
|
||||
try:
|
||||
await websocket.close()
|
||||
except Exception:
|
||||
...
|
||||
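The reworked handshake above follows the standard SignalR text handshake: the first websocket message is a JSON object terminated by the 0x1e separator, and the server replies with an empty object (or an error) terminated the same way. A sketch of the raw frames (the error text is illustrative):

# client -> server, first message after the websocket is accepted
b'{"protocol":"messagepack","version":1}\x1e'

# server -> client on success (what send_bytes(json.dumps(payload).encode() + SEP) produces)
b'{}\x1e'

# server -> client when the protocol is unsupported or the negotiate token expired
b'{"error": "Connection ... has waited too long."}\x1e'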
@@ -2,9 +2,7 @@ from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import sys
|
||||
from typing import Any, Literal
|
||||
|
||||
from app.router.signalr.packet import ResultKind
|
||||
from typing import Any
|
||||
|
||||
|
||||
class ResultStore:
|
||||
@@ -22,21 +20,17 @@ class ResultStore:
|
||||
return str(s)
|
||||
|
||||
def add_result(
|
||||
self, invocation_id: str, type: ResultKind, result: dict[str, Any] | None
|
||||
self, invocation_id: str, result: Any, error: str | None = None
|
||||
) -> None:
|
||||
if isinstance(invocation_id, str) and invocation_id.isdecimal():
|
||||
if future := self._futures.get(invocation_id):
|
||||
future.set_result((type, result))
|
||||
future.set_result((result, error))
|
||||
|
||||
async def fetch(
|
||||
self,
|
||||
invocation_id: str,
|
||||
timeout: float | None, # noqa: ASYNC109
|
||||
) -> (
|
||||
tuple[Literal[ResultKind.ERROR], str]
|
||||
| tuple[Literal[ResultKind.VOID], None]
|
||||
| tuple[Literal[ResultKind.HAS_VALUE], Any]
|
||||
):
|
||||
) -> tuple[Any, str | None]:
|
||||
future = asyncio.get_event_loop().create_future()
|
||||
self._futures[invocation_id] = future
|
||||
try:
|
||||
@@ -2,24 +2,20 @@ from __future__ import annotations
|
||||
|
||||
from collections.abc import Callable
|
||||
import inspect
|
||||
import sys
|
||||
from typing import Any, ForwardRef, cast
|
||||
|
||||
# https://github.com/pydantic/pydantic/blob/main/pydantic/v1/typing.py#L61-L75
|
||||
if sys.version_info < (3, 12, 4):
|
||||
|
||||
# https://github.com/pydantic/pydantic/blob/main/pydantic/v1/typing.py#L56-L66
|
||||
def evaluate_forwardref(
|
||||
type_: ForwardRef,
|
||||
globalns: Any,
|
||||
localns: Any,
|
||||
) -> Any:
|
||||
# Even though it is the right signature for python 3.9,
|
||||
# mypy complains with
|
||||
# `error: Too many arguments for "_evaluate" of
|
||||
# "ForwardRef"` hence the cast...
|
||||
return cast(Any, type_)._evaluate(
|
||||
globalns,
|
||||
localns,
|
||||
set(),
|
||||
)
|
||||
def evaluate_forwardref(type_: ForwardRef, globalns: Any, localns: Any) -> Any:
|
||||
return cast(Any, type_)._evaluate(globalns, localns, recursive_guard=set())
|
||||
else:
|
||||
|
||||
def evaluate_forwardref(type_: ForwardRef, globalns: Any, localns: Any) -> Any:
|
||||
return cast(Any, type_)._evaluate(
|
||||
globalns, localns, type_params=(), recursive_guard=set()
|
||||
)
|
||||
|
||||
|
||||
def get_annotation(param: inspect.Parameter, globalns: dict[str, Any]) -> Any:
|
||||
@@ -28,6 +28,11 @@ from app.models.user import (
|
||||
import rosu_pp_py as rosu
|
||||
|
||||
|
||||
def unix_timestamp_to_windows(timestamp: int) -> int:
|
||||
"""Convert a Unix timestamp to a Windows timestamp."""
|
||||
return (timestamp + 62135596800) * 10_000_000
|
||||
|
||||
|
||||
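# Sanity check for the helper above: the Unix epoch (1970-01-01) sits 62_135_596_800
# seconds after .NET's 0001-01-01 epoch, so unix_timestamp_to_windows(0) returns
# 621_355_968_000_000_000, the tick count .NET reports for 1970-01-01T00:00:00Z.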
async def convert_db_user_to_api_user(db_user: DBUser, ruleset: str = "osu") -> User:
|
||||
"""将数据库用户模型转换为API用户模型(使用 Lazer 表)"""
|
||||
|
||||
@@ -205,7 +210,7 @@ async def convert_db_user_to_api_user(db_user: DBUser, ruleset: str = "osu") ->
|
||||
# Convert team information
|
||||
team = None
|
||||
if db_user.team_membership:
|
||||
team_member = db_user.team_membership[0]  # assume the user belongs to only one team
|
||||
team_member = db_user.team_membership  # assume the user belongs to only one team
|
||||
team = team_member.team
|
||||
|
||||
# Create the user object
|
||||
|
||||
12
main.py
12
main.py
@@ -10,9 +10,6 @@ from app.router import api_router, auth_router, fetcher_router, signalr_router
|
||||
|
||||
from fastapi import FastAPI
|
||||
|
||||
# Note: table schemas are now managed through migrations and are no longer created automatically
|
||||
# To create the tables, run: python quick_sync.py
|
||||
|
||||
|
||||
@asynccontextmanager
|
||||
async def lifespan(app: FastAPI):
|
||||
@@ -142,8 +139,15 @@ async def health_check():
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
from app.log import logger # noqa: F401
|
||||
|
||||
import uvicorn
|
||||
|
||||
uvicorn.run(
|
||||
"main:app", host=settings.HOST, port=settings.PORT, reload=settings.DEBUG
|
||||
"main:app",
|
||||
host=settings.HOST,
|
||||
port=settings.PORT,
|
||||
reload=settings.DEBUG,
|
||||
log_config=None,  # disable uvicorn's default logging config
|
||||
access_log=True,  # enable access logging
|
||||
)
|
||||
|
||||
99
migrations/env.py
Normal file
99
migrations/env.py
Normal file
@@ -0,0 +1,99 @@
|
||||
from __future__ import annotations

import asyncio
from logging.config import fileConfig
import os

from app.database import *  # noqa: F403

from alembic import context
from sqlalchemy import pool
from sqlalchemy.engine import Connection
from sqlalchemy.ext.asyncio import async_engine_from_config
from sqlmodel import SQLModel

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Interpret the config file for Python logging.
# This line sets up loggers basically.
if config.config_file_name is not None:
    fileConfig(config.config_file_name)

# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
target_metadata = SQLModel.metadata

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.


def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well. By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.

    """
    url = os.environ.get("DATABASE_URL", config.get_main_option("sqlalchemy.url"))
    context.configure(
        url=url,
        target_metadata=target_metadata,
        literal_binds=True,
        compare_type=True,
        dialect_opts={"paramstyle": "named"},
    )

    with context.begin_transaction():
        context.run_migrations()


def do_run_migrations(connection: Connection) -> None:
    context.configure(
        connection=connection, target_metadata=target_metadata, compare_type=True
    )

    with context.begin_transaction():
        context.run_migrations()


async def run_async_migrations() -> None:
    """In this scenario we need to create an Engine
    and associate a connection with the context.

    """
    sa_config = config.get_section(config.config_ini_section, {})
    if db_url := os.environ.get("DATABASE_URL"):
        sa_config["sqlalchemy.url"] = db_url
    connectable = async_engine_from_config(
        sa_config,
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )

    async with connectable.connect() as connection:
        await connection.run_sync(do_run_migrations)

    await connectable.dispose()


def run_migrations_online() -> None:
    """Run migrations in 'online' mode."""

    asyncio.run(run_async_migrations())


if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
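With migrations/env.py in place, pending revisions are normally applied through Alembic's own entry points rather than by creating tables at startup. A minimal sketch, assuming an alembic.ini (not shown in this diff) whose script_location points at this migrations/ directory:

import os

from alembic import command
from alembic.config import Config

cfg = Config("alembic.ini")  # assumed filename; the ini is not part of this commit
# Mirror env.py: let DATABASE_URL override sqlalchemy.url when it is set.
if db_url := os.environ.get("DATABASE_URL"):
    cfg.set_main_option("sqlalchemy.url", db_url)
command.upgrade(cfg, "head")  # apply every pending revision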
29
migrations/script.py.mako
Normal file
@@ -0,0 +1,29 @@
"""${message}
|
||||
|
||||
Revision ID: ${up_revision}
|
||||
Revises: ${down_revision | comma,n}
|
||||
Create Date: ${create_date}
|
||||
|
||||
"""
|
||||
from typing import Sequence, Union
|
||||
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
import sqlmodel
|
||||
${imports if imports else ""}
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision: str = ${repr(up_revision)}
|
||||
down_revision: Union[str, Sequence[str], None] = ${repr(down_revision)}
|
||||
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
|
||||
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
|
||||
|
||||
|
||||
def upgrade() -> None:
|
||||
"""Upgrade schema."""
|
||||
${upgrades if upgrades else "pass"}
|
||||
|
||||
|
||||
def downgrade() -> None:
|
||||
"""Downgrade schema."""
|
||||
${downgrades if downgrades else "pass"}
|
||||
0
migrations/versions/.gitkeep
Normal file
@@ -11,6 +11,7 @@ dependencies = [
    "cryptography>=41.0.7",
    "fastapi>=0.104.1",
    "httpx>=0.28.1",
    "loguru>=0.7.3",
    "msgpack>=1.1.1",
    "passlib[bcrypt]>=1.7.4",
    "pydantic[email]>=2.5.0",
@@ -77,7 +78,7 @@ mark-parentheses = false
keep-runtime-typing = true

[tool.pyright]
pythonVersion = "3.11"
pythonVersion = "3.12"
pythonPlatform = "All"

typeCheckingMode = "standard"
@@ -94,7 +94,7 @@ def main():

    # Get the script path
    script_dir = os.path.dirname(__file__)
    migrations_dir = os.path.join(script_dir, "migrations")
    migrations_dir = os.path.join(script_dir, "migrations_old")

    # Step 1: create the table schema
    print("\nStep 1: creating the lazer-specific table schema...")
5
static/README.md
Normal file
@@ -0,0 +1,5 @@
# Static files

- `mods.json`: Detailed information about every mod available in the game.
  - Origin: https://github.com/ppy/osu-web/blob/master/database/mods.json
  - Version: 2025/6/10 `b68c920b1db3d443b9302fdc3f86010c875fe380`
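For orientation only (not part of the diff): a minimal sketch of reading the bundled file, assuming it lives at static/mods.json relative to the project root and keeps osu-web's per-ruleset layout:

import json
from pathlib import Path

with Path("static/mods.json").open(encoding="utf-8") as f:
    mods = json.load(f)

# One top-level entry per ruleset; the exact keys follow osu-web's
# database/mods.json and may change between upstream versions.
for ruleset_entry in mods:
    print(ruleset_entry.get("Name"), len(ruleset_entry.get("Mods", [])))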
3656
static/mods.json
Normal file
File diff suppressed because it is too large
@@ -183,7 +183,7 @@ def main():
        return

    # Run table schema creation
    migrations_dir = os.path.join(os.path.dirname(__file__), "migrations")
    migrations_dir = os.path.join(os.path.dirname(__file__), "migrations_old")

    print("\nStep 1: creating the table schema...")
    add_fields_sql = os.path.join(migrations_dir, "add_missing_fields.sql")
180
uv.lock
generated
@@ -1,6 +1,6 @@
version = 1
revision = 2
requires-python = ">=3.11"
requires-python = ">=3.12"

[[package]]
name = "aiomysql"
@@ -51,15 +51,6 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/a1/ee/48ca1a7c89ffec8b6a0c5d02b89c305671d5ffd8d3c94acf8b8c408575bb/anyio-4.9.0-py3-none-any.whl", hash = "sha256:9f76d541cad6e36af7beb62e978876f3b41e3e04f2c1fbf0884604c0a9c4d93c", size = 100916, upload-time = "2025-03-17T00:02:52.713Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "async-timeout"
|
||||
version = "5.0.1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/a5/ae/136395dfbfe00dfc94da3f3e136d0b13f394cba8f4841120e34226265780/async_timeout-5.0.1.tar.gz", hash = "sha256:d9321a7a3d5a6a5e187e824d2fa0793ce379a202935782d555d6e9d2735677d3", size = 9274, upload-time = "2024-11-06T16:41:39.6Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/fe/ba/e2081de779ca30d473f21f5b30e0e737c438205440784c7dfc81efc2b029/async_timeout-5.0.1-py3-none-any.whl", hash = "sha256:39e3809566ff85354557ec2398b55e096c8364bacac9405a7a1fa429e77fe76c", size = 6233, upload-time = "2024-11-06T16:41:37.9Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "bcrypt"
|
||||
version = "4.3.0"
|
||||
@@ -108,10 +99,6 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/40/f2/71b4ed65ce38982ecdda0ff20c3ad1b15e71949c78b2c053df53629ce940/bcrypt-4.3.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:79e70b8342a33b52b55d93b3a59223a844962bef479f6a0ea318ebbcadf71505", size = 363128, upload-time = "2025-02-28T01:23:50.399Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/11/99/12f6a58eca6dea4be992d6c681b7ec9410a1d9f5cf368c61437e31daa879/bcrypt-4.3.0-cp39-abi3-win32.whl", hash = "sha256:b4d4e57f0a63fd0b358eb765063ff661328f69a04494427265950c71b992a39a", size = 160598, upload-time = "2025-02-28T01:23:51.775Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a9/cf/45fb5261ece3e6b9817d3d82b2f343a505fd58674a92577923bc500bd1aa/bcrypt-4.3.0-cp39-abi3-win_amd64.whl", hash = "sha256:e53e074b120f2877a35cc6c736b8eb161377caae8925c17688bd46ba56daaa5b", size = 152799, upload-time = "2025-02-28T01:23:53.139Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/4c/b1/1289e21d710496b88340369137cc4c5f6ee036401190ea116a7b4ae6d32a/bcrypt-4.3.0-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:a839320bf27d474e52ef8cb16449bb2ce0ba03ca9f44daba6d93fa1d8828e48a", size = 275103, upload-time = "2025-02-28T01:24:00.764Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/94/41/19be9fe17e4ffc5d10b7b67f10e459fc4eee6ffe9056a88de511920cfd8d/bcrypt-4.3.0-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:bdc6a24e754a555d7316fa4774e64c6c3997d27ed2d1964d55920c7c227bc4ce", size = 280513, upload-time = "2025-02-28T01:24:02.243Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/aa/73/05687a9ef89edebdd8ad7474c16d8af685eb4591c3c38300bb6aad4f0076/bcrypt-4.3.0-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:55a935b8e9a1d2def0626c4269db3fcd26728cbff1e84f0341465c31c4ee56d8", size = 274685, upload-time = "2025-02-28T01:24:04.512Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/63/13/47bba97924ebe86a62ef83dc75b7c8a881d53c535f83e2c54c4bd701e05c/bcrypt-4.3.0-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:57967b7a28d855313a963aaea51bf6df89f833db4320da458e5b3c5ab6d4c938", size = 280110, upload-time = "2025-02-28T01:24:05.896Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -132,18 +119,6 @@ dependencies = [
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/fc/97/c783634659c2920c3fc70419e3af40972dbaf758daa229a7d6ea6135c90d/cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824", size = 516621, upload-time = "2024-09-04T20:45:21.852Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/6b/f4/927e3a8899e52a27fa57a48607ff7dc91a9ebe97399b357b85a0c7892e00/cffi-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401", size = 182264, upload-time = "2024-09-04T20:43:51.124Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/6c/f5/6c3a8efe5f503175aaddcbea6ad0d2c96dad6f5abb205750d1b3df44ef29/cffi-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf", size = 178651, upload-time = "2024-09-04T20:43:52.872Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/94/dd/a3f0118e688d1b1a57553da23b16bdade96d2f9bcda4d32e7d2838047ff7/cffi-1.17.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4", size = 445259, upload-time = "2024-09-04T20:43:56.123Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/2e/ea/70ce63780f096e16ce8588efe039d3c4f91deb1dc01e9c73a287939c79a6/cffi-1.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41", size = 469200, upload-time = "2024-09-04T20:43:57.891Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/1c/a0/a4fa9f4f781bda074c3ddd57a572b060fa0df7655d2a4247bbe277200146/cffi-1.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1", size = 477235, upload-time = "2024-09-04T20:44:00.18Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/62/12/ce8710b5b8affbcdd5c6e367217c242524ad17a02fe5beec3ee339f69f85/cffi-1.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6", size = 459721, upload-time = "2024-09-04T20:44:01.585Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ff/6b/d45873c5e0242196f042d555526f92aa9e0c32355a1be1ff8c27f077fd37/cffi-1.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d", size = 467242, upload-time = "2024-09-04T20:44:03.467Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/1a/52/d9a0e523a572fbccf2955f5abe883cfa8bcc570d7faeee06336fbd50c9fc/cffi-1.17.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6", size = 477999, upload-time = "2024-09-04T20:44:05.023Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/44/74/f2a2460684a1a2d00ca799ad880d54652841a780c4c97b87754f660c7603/cffi-1.17.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f", size = 454242, upload-time = "2024-09-04T20:44:06.444Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f8/4a/34599cac7dfcd888ff54e801afe06a19c17787dfd94495ab0c8d35fe99fb/cffi-1.17.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b", size = 478604, upload-time = "2024-09-04T20:44:08.206Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/34/33/e1b8a1ba29025adbdcda5fb3a36f94c03d771c1b7b12f726ff7fef2ebe36/cffi-1.17.1-cp311-cp311-win32.whl", hash = "sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655", size = 171727, upload-time = "2024-09-04T20:44:09.481Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/3d/97/50228be003bb2802627d28ec0627837ac0bf35c90cf769812056f235b2d1/cffi-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0", size = 181400, upload-time = "2024-09-04T20:44:10.873Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/5a/84/e94227139ee5fb4d600a7a4927f322e1d4aea6fdc50bd3fca8493caba23f/cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4", size = 183178, upload-time = "2024-09-04T20:44:12.232Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/da/ee/fb72c2b48656111c4ef27f0f91da355e130a923473bf5ee75c5643d00cca/cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c", size = 178840, upload-time = "2024-09-04T20:44:13.739Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/cc/b6/db007700f67d151abadf508cbfd6a1884f57eab90b1bb985c4c8c02b0f28/cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36", size = 454803, upload-time = "2024-09-04T20:44:15.231Z" },
|
||||
@@ -231,12 +206,6 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/f2/2e/e2dbd629481b499b14516eed933f3276eb3239f7cee2dcfa4ee6b44d4711/cryptography-45.0.5-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:7259038202a47fdecee7e62e0fd0b0738b6daa335354396c6ddebdbe1206af2a", size = 4554189, upload-time = "2025-07-02T13:05:46.045Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f8/ea/a78a0c38f4c8736287b71c2ea3799d173d5ce778c7d6e3c163a95a05ad2a/cryptography-45.0.5-cp37-abi3-win32.whl", hash = "sha256:1e1da5accc0c750056c556a93c3e9cb828970206c68867712ca5805e46dc806f", size = 2911769, upload-time = "2025-07-02T13:05:48.329Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/79/b3/28ac139109d9005ad3f6b6f8976ffede6706a6478e21c889ce36c840918e/cryptography-45.0.5-cp37-abi3-win_amd64.whl", hash = "sha256:90cb0a7bb35959f37e23303b7eed0a32280510030daba3f7fdfbb65defde6a97", size = 3390016, upload-time = "2025-07-02T13:05:50.811Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c0/71/9bdbcfd58d6ff5084687fe722c58ac718ebedbc98b9f8f93781354e6d286/cryptography-45.0.5-pp311-pypy311_pp73-macosx_10_9_x86_64.whl", hash = "sha256:8c4a6ff8a30e9e3d38ac0539e9a9e02540ab3f827a3394f8852432f6b0ea152e", size = 3587878, upload-time = "2025-07-02T13:06:06.339Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f0/63/83516cfb87f4a8756eaa4203f93b283fda23d210fc14e1e594bd5f20edb6/cryptography-45.0.5-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:bd4c45986472694e5121084c6ebbd112aa919a25e783b87eb95953c9573906d6", size = 4152447, upload-time = "2025-07-02T13:06:08.345Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/22/11/d2823d2a5a0bd5802b3565437add16f5c8ce1f0778bf3822f89ad2740a38/cryptography-45.0.5-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:982518cd64c54fcada9d7e5cf28eabd3ee76bd03ab18e08a48cad7e8b6f31b18", size = 4386778, upload-time = "2025-07-02T13:06:10.263Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/5f/38/6bf177ca6bce4fe14704ab3e93627c5b0ca05242261a2e43ef3168472540/cryptography-45.0.5-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:12e55281d993a793b0e883066f590c1ae1e802e3acb67f8b442e721e475e6463", size = 4151627, upload-time = "2025-07-02T13:06:13.097Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/38/6a/69fc67e5266bff68a91bcb81dff8fb0aba4d79a78521a08812048913e16f/cryptography-45.0.5-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:5aa1e32983d4443e310f726ee4b071ab7569f58eedfdd65e9675484a4eb67bd1", size = 4385593, upload-time = "2025-07-02T13:06:15.689Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f6/34/31a1604c9a9ade0fdab61eb48570e09a796f4d9836121266447b0eaf7feb/cryptography-45.0.5-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:e357286c1b76403dd384d938f93c46b2b058ed4dfcdce64a770f0537ed3feb6f", size = 3331106, upload-time = "2025-07-02T13:06:18.058Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -311,15 +280,6 @@ version = "3.2.3"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/c9/92/bb85bd6e80148a4d2e0c59f7c0c2891029f8fd510183afc7d8d2feeed9b6/greenlet-3.2.3.tar.gz", hash = "sha256:8b0dd8ae4c0d6f5e54ee55ba935eeb3d735a9b58a8a1e5b5cbab64e01a39f365", size = 185752, upload-time = "2025-06-05T16:16:09.955Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/fc/2e/d4fcb2978f826358b673f779f78fa8a32ee37df11920dc2bb5589cbeecef/greenlet-3.2.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:784ae58bba89fa1fa5733d170d42486580cab9decda3484779f4759345b29822", size = 270219, upload-time = "2025-06-05T16:10:10.414Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/16/24/929f853e0202130e4fe163bc1d05a671ce8dcd604f790e14896adac43a52/greenlet-3.2.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0921ac4ea42a5315d3446120ad48f90c3a6b9bb93dd9b3cf4e4d84a66e42de83", size = 630383, upload-time = "2025-06-05T16:38:51.785Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d1/b2/0320715eb61ae70c25ceca2f1d5ae620477d246692d9cc284c13242ec31c/greenlet-3.2.3-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:d2971d93bb99e05f8c2c0c2f4aa9484a18d98c4c3bd3c62b65b7e6ae33dfcfaf", size = 642422, upload-time = "2025-06-05T16:41:35.259Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/bd/49/445fd1a210f4747fedf77615d941444349c6a3a4a1135bba9701337cd966/greenlet-3.2.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:c667c0bf9d406b77a15c924ef3285e1e05250948001220368e039b6aa5b5034b", size = 638375, upload-time = "2025-06-05T16:48:18.235Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/7e/c8/ca19760cf6eae75fa8dc32b487e963d863b3ee04a7637da77b616703bc37/greenlet-3.2.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:592c12fb1165be74592f5de0d70f82bc5ba552ac44800d632214b76089945147", size = 637627, upload-time = "2025-06-05T16:13:02.858Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/65/89/77acf9e3da38e9bcfca881e43b02ed467c1dedc387021fc4d9bd9928afb8/greenlet-3.2.3-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:29e184536ba333003540790ba29829ac14bb645514fbd7e32af331e8202a62a5", size = 585502, upload-time = "2025-06-05T16:12:49.642Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/97/c6/ae244d7c95b23b7130136e07a9cc5aadd60d59b5951180dc7dc7e8edaba7/greenlet-3.2.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:93c0bb79844a367782ec4f429d07589417052e621aa39a5ac1fb99c5aa308edc", size = 1114498, upload-time = "2025-06-05T16:36:46.598Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/89/5f/b16dec0cbfd3070658e0d744487919740c6d45eb90946f6787689a7efbce/greenlet-3.2.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:751261fc5ad7b6705f5f76726567375bb2104a059454e0226e1eef6c756748ba", size = 1139977, upload-time = "2025-06-05T16:12:38.262Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/66/77/d48fb441b5a71125bcac042fc5b1494c806ccb9a1432ecaa421e72157f77/greenlet-3.2.3-cp311-cp311-win_amd64.whl", hash = "sha256:83a8761c75312361aa2b5b903b79da97f13f556164a7dd2d5448655425bd4c34", size = 297017, upload-time = "2025-06-05T16:25:05.225Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f3/94/ad0d435f7c48debe960c53b8f60fb41c2026b1d0fa4a99a1cb17c3461e09/greenlet-3.2.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:25ad29caed5783d4bd7a85c9251c651696164622494c00802a139c00d639242d", size = 271992, upload-time = "2025-06-05T16:11:23.467Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/93/5d/7c27cf4d003d6e77749d299c7c8f5fd50b4f251647b5c2e97e1f20da0ab5/greenlet-3.2.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:88cd97bf37fe24a6710ec6a3a7799f3f81d9cd33317dcf565ff9950c83f55e0b", size = 638820, upload-time = "2025-06-05T16:38:52.882Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c6/7e/807e1e9be07a125bb4c169144937910bf59b9d2f6d931578e57f0bce0ae2/greenlet-3.2.3-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:baeedccca94880d2f5666b4fa16fc20ef50ba1ee353ee2d7092b383a243b0b0d", size = 653046, upload-time = "2025-06-05T16:41:36.343Z" },
|
||||
@@ -375,13 +335,6 @@ version = "0.6.4"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/a7/9a/ce5e1f7e131522e6d3426e8e7a490b3a01f39a6696602e1c4f33f9e94277/httptools-0.6.4.tar.gz", hash = "sha256:4e93eee4add6493b59a5c514da98c939b244fce4a0d8879cd3f466562f4b7d5c", size = 240639, upload-time = "2024-10-16T19:45:08.902Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/7b/26/bb526d4d14c2774fe07113ca1db7255737ffbb119315839af2065abfdac3/httptools-0.6.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f47f8ed67cc0ff862b84a1189831d1d33c963fb3ce1ee0c65d3b0cbe7b711069", size = 199029, upload-time = "2024-10-16T19:44:18.427Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a6/17/3e0d3e9b901c732987a45f4f94d4e2c62b89a041d93db89eafb262afd8d5/httptools-0.6.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0614154d5454c21b6410fdf5262b4a3ddb0f53f1e1721cfd59d55f32138c578a", size = 103492, upload-time = "2024-10-16T19:44:19.515Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b7/24/0fe235d7b69c42423c7698d086d4db96475f9b50b6ad26a718ef27a0bce6/httptools-0.6.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f8787367fbdfccae38e35abf7641dafc5310310a5987b689f4c32cc8cc3ee975", size = 462891, upload-time = "2024-10-16T19:44:21.067Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b1/2f/205d1f2a190b72da6ffb5f41a3736c26d6fa7871101212b15e9b5cd8f61d/httptools-0.6.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40b0f7fe4fd38e6a507bdb751db0379df1e99120c65fbdc8ee6c1d044897a636", size = 459788, upload-time = "2024-10-16T19:44:22.958Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/6e/4c/d09ce0eff09057a206a74575ae8f1e1e2f0364d20e2442224f9e6612c8b9/httptools-0.6.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:40a5ec98d3f49904b9fe36827dcf1aadfef3b89e2bd05b0e35e94f97c2b14721", size = 433214, upload-time = "2024-10-16T19:44:24.513Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/3e/d2/84c9e23edbccc4a4c6f96a1b8d99dfd2350289e94f00e9ccc7aadde26fb5/httptools-0.6.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:dacdd3d10ea1b4ca9df97a0a303cbacafc04b5cd375fa98732678151643d4988", size = 434120, upload-time = "2024-10-16T19:44:26.295Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d0/46/4d8e7ba9581416de1c425b8264e2cadd201eb709ec1584c381f3e98f51c1/httptools-0.6.4-cp311-cp311-win_amd64.whl", hash = "sha256:288cd628406cc53f9a541cfaf06041b4c71d751856bab45e3702191f931ccd17", size = 88565, upload-time = "2024-10-16T19:44:29.188Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/bb/0e/d0b71465c66b9185f90a091ab36389a7352985fe857e352801c39d6127c8/httptools-0.6.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:df017d6c780287d5c80601dafa31f17bddb170232d85c066604d8558683711a2", size = 200683, upload-time = "2024-10-16T19:44:30.175Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e2/b8/412a9bb28d0a8988de3296e01efa0bd62068b33856cdda47fe1b5e890954/httptools-0.6.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:85071a1e8c2d051b507161f6c3e26155b5c790e4e28d7f236422dbacc2a9cc44", size = 104337, upload-time = "2024-10-16T19:44:31.786Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/9b/01/6fb20be3196ffdc8eeec4e653bc2a275eca7f36634c86302242c4fbb2760/httptools-0.6.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69422b7f458c5af875922cdb5bd586cc1f1033295aa9ff63ee196a87519ac8e1", size = 508796, upload-time = "2024-10-16T19:44:32.825Z" },
|
||||
@@ -431,6 +384,19 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442, upload-time = "2024-09-15T18:07:37.964Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "loguru"
|
||||
version = "0.7.3"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "colorama", marker = "sys_platform == 'win32'" },
|
||||
{ name = "win32-setctime", marker = "sys_platform == 'win32'" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/3a/05/a1dae3dffd1116099471c643b8924f5aa6524411dc6c63fdae648c4f1aca/loguru-0.7.3.tar.gz", hash = "sha256:19480589e77d47b8d85b2c827ad95d49bf31b0dcde16593892eb51dd18706eb6", size = 63559, upload-time = "2024-12-06T11:20:56.608Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/0c/29/0348de65b8cc732daa3e33e67806420b2ae89bdce2b04af740289c5c6c8c/loguru-0.7.3-py3-none-any.whl", hash = "sha256:31a33c10c8e1e10422bfd431aeb5d351c7cf7fa671e3c4df004162264b28220c", size = 61595, upload-time = "2024-12-06T11:20:54.538Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "mako"
|
||||
version = "1.3.10"
|
||||
@@ -449,16 +415,6 @@ version = "3.0.2"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/b2/97/5d42485e71dfc078108a86d6de8fa46db44a1a9295e89c5d6d4a06e23a62/markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0", size = 20537, upload-time = "2024-10-18T15:21:54.129Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/6b/28/bbf83e3f76936960b850435576dd5e67034e200469571be53f69174a2dfd/MarkupSafe-3.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9025b4018f3a1314059769c7bf15441064b2207cb3f065e6ea1e7359cb46db9d", size = 14353, upload-time = "2024-10-18T15:21:02.187Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/6c/30/316d194b093cde57d448a4c3209f22e3046c5bb2fb0820b118292b334be7/MarkupSafe-3.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:93335ca3812df2f366e80509ae119189886b0f3c2b81325d39efdb84a1e2ae93", size = 12392, upload-time = "2024-10-18T15:21:02.941Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f2/96/9cdafba8445d3a53cae530aaf83c38ec64c4d5427d975c974084af5bc5d2/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cb8438c3cbb25e220c2ab33bb226559e7afb3baec11c4f218ffa7308603c832", size = 23984, upload-time = "2024-10-18T15:21:03.953Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f1/a4/aefb044a2cd8d7334c8a47d3fb2c9f328ac48cb349468cc31c20b539305f/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a123e330ef0853c6e822384873bef7507557d8e4a082961e1defa947aa59ba84", size = 23120, upload-time = "2024-10-18T15:21:06.495Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/8d/21/5e4851379f88f3fad1de30361db501300d4f07bcad047d3cb0449fc51f8c/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e084f686b92e5b83186b07e8a17fc09e38fff551f3602b249881fec658d3eca", size = 23032, upload-time = "2024-10-18T15:21:07.295Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/00/7b/e92c64e079b2d0d7ddf69899c98842f3f9a60a1ae72657c89ce2655c999d/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8213e09c917a951de9d09ecee036d5c7d36cb6cb7dbaece4c71a60d79fb9798", size = 24057, upload-time = "2024-10-18T15:21:08.073Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f9/ac/46f960ca323037caa0a10662ef97d0a4728e890334fc156b9f9e52bcc4ca/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5b02fb34468b6aaa40dfc198d813a641e3a63b98c2b05a16b9f80b7ec314185e", size = 23359, upload-time = "2024-10-18T15:21:09.318Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/69/84/83439e16197337b8b14b6a5b9c2105fff81d42c2a7c5b58ac7b62ee2c3b1/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4", size = 23306, upload-time = "2024-10-18T15:21:10.185Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/9a/34/a15aa69f01e2181ed8d2b685c0d2f6655d5cca2c4db0ddea775e631918cd/MarkupSafe-3.0.2-cp311-cp311-win32.whl", hash = "sha256:6c89876f41da747c8d3677a2b540fb32ef5715f97b66eeb0c6b66f5e3ef6f59d", size = 15094, upload-time = "2024-10-18T15:21:11.005Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/da/b8/3a3bd761922d416f3dc5d00bfbed11f66b1ab89a0c2b6e887240a30b0f6b/MarkupSafe-3.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:70a87b411535ccad5ef2f1df5136506a10775d267e197e4cf531ced10537bd6b", size = 15521, upload-time = "2024-10-18T15:21:12.911Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/22/09/d1f21434c97fc42f09d290cbb6350d44eb12f09cc62c9476effdb33a18aa/MarkupSafe-3.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf", size = 14274, upload-time = "2024-10-18T15:21:13.777Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/6b/b0/18f76bba336fa5aecf79d45dcd6c806c280ec44538b3c13671d49099fdd0/MarkupSafe-3.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225", size = 12348, upload-time = "2024-10-18T15:21:14.822Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e0/25/dd5c0f6ac1311e9b40f4af06c78efde0f3b5cbf02502f8ef9501294c425b/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028", size = 24149, upload-time = "2024-10-18T15:21:15.642Z" },
|
||||
@@ -497,16 +453,6 @@ version = "1.1.1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/45/b1/ea4f68038a18c77c9467400d166d74c4ffa536f34761f7983a104357e614/msgpack-1.1.1.tar.gz", hash = "sha256:77b79ce34a2bdab2594f490c8e80dd62a02d650b91a75159a63ec413b8d104cd", size = 173555, upload-time = "2025-06-13T06:52:51.324Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/7f/83/97f24bf9848af23fe2ba04380388216defc49a8af6da0c28cc636d722502/msgpack-1.1.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:71ef05c1726884e44f8b1d1773604ab5d4d17729d8491403a705e649116c9558", size = 82728, upload-time = "2025-06-13T06:51:50.68Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/aa/7f/2eaa388267a78401f6e182662b08a588ef4f3de6f0eab1ec09736a7aaa2b/msgpack-1.1.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:36043272c6aede309d29d56851f8841ba907a1a3d04435e43e8a19928e243c1d", size = 79279, upload-time = "2025-06-13T06:51:51.72Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f8/46/31eb60f4452c96161e4dfd26dbca562b4ec68c72e4ad07d9566d7ea35e8a/msgpack-1.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a32747b1b39c3ac27d0670122b57e6e57f28eefb725e0b625618d1b59bf9d1e0", size = 423859, upload-time = "2025-06-13T06:51:52.749Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/45/16/a20fa8c32825cc7ae8457fab45670c7a8996d7746ce80ce41cc51e3b2bd7/msgpack-1.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a8b10fdb84a43e50d38057b06901ec9da52baac6983d3f709d8507f3889d43f", size = 429975, upload-time = "2025-06-13T06:51:53.97Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/86/ea/6c958e07692367feeb1a1594d35e22b62f7f476f3c568b002a5ea09d443d/msgpack-1.1.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ba0c325c3f485dc54ec298d8b024e134acf07c10d494ffa24373bea729acf704", size = 413528, upload-time = "2025-06-13T06:51:55.507Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/75/05/ac84063c5dae79722bda9f68b878dc31fc3059adb8633c79f1e82c2cd946/msgpack-1.1.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:88daaf7d146e48ec71212ce21109b66e06a98e5e44dca47d853cbfe171d6c8d2", size = 413338, upload-time = "2025-06-13T06:51:57.023Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/69/e8/fe86b082c781d3e1c09ca0f4dacd457ede60a13119b6ce939efe2ea77b76/msgpack-1.1.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:d8b55ea20dc59b181d3f47103f113e6f28a5e1c89fd5b67b9140edb442ab67f2", size = 422658, upload-time = "2025-06-13T06:51:58.419Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/3b/2b/bafc9924df52d8f3bb7c00d24e57be477f4d0f967c0a31ef5e2225e035c7/msgpack-1.1.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4a28e8072ae9779f20427af07f53bbb8b4aa81151054e882aee333b158da8752", size = 427124, upload-time = "2025-06-13T06:51:59.969Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a2/3b/1f717e17e53e0ed0b68fa59e9188f3f610c79d7151f0e52ff3cd8eb6b2dc/msgpack-1.1.1-cp311-cp311-win32.whl", hash = "sha256:7da8831f9a0fdb526621ba09a281fadc58ea12701bc709e7b8cbc362feabc295", size = 65016, upload-time = "2025-06-13T06:52:01.294Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/48/45/9d1780768d3b249accecc5a38c725eb1e203d44a191f7b7ff1941f7df60c/msgpack-1.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:5fd1b58e1431008a57247d6e7cc4faa41c3607e8e7d4aaf81f7c29ea013cb458", size = 72267, upload-time = "2025-06-13T06:52:02.568Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e3/26/389b9c593eda2b8551b2e7126ad3a06af6f9b44274eb3a4f054d48ff7e47/msgpack-1.1.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:ae497b11f4c21558d95de9f64fff7053544f4d1a17731c866143ed6bb4591238", size = 82359, upload-time = "2025-06-13T06:52:03.909Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ab/65/7d1de38c8a22cf8b1551469159d4b6cf49be2126adc2482de50976084d78/msgpack-1.1.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:33be9ab121df9b6b461ff91baac6f2731f83d9b27ed948c5b9d1978ae28bf157", size = 79172, upload-time = "2025-06-13T06:52:05.246Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/0f/bd/cacf208b64d9577a62c74b677e1ada005caa9b69a05a599889d6fc2ab20a/msgpack-1.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6f64ae8fe7ffba251fecb8408540c34ee9df1c26674c50c4544d72dbf792e5ce", size = 425013, upload-time = "2025-06-13T06:52:06.341Z" },
|
||||
@@ -562,6 +508,7 @@ dependencies = [
    { name = "cryptography" },
    { name = "fastapi" },
    { name = "httpx" },
    { name = "loguru" },
    { name = "msgpack" },
    { name = "passlib", extra = ["bcrypt"] },
    { name = "pydantic", extra = ["email"] },
@@ -590,6 +537,7 @@ requires-dist = [
    { name = "cryptography", specifier = ">=41.0.7" },
    { name = "fastapi", specifier = ">=0.104.1" },
    { name = "httpx", specifier = ">=0.28.1" },
    { name = "loguru", specifier = ">=0.7.3" },
    { name = "msgpack", specifier = ">=1.1.1" },
    { name = "passlib", extras = ["bcrypt"], specifier = ">=1.7.4" },
    { name = "pydantic", extras = ["email"], specifier = ">=2.5.0" },
@@ -609,6 +557,7 @@ dev = [
    { name = "pre-commit", specifier = ">=4.2.0" },
    { name = "ruff", specifier = ">=0.12.4" },
]
migration = []

[[package]]
name = "passlib"
@@ -696,20 +645,6 @@ dependencies = [
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/ad/88/5f2260bdfae97aabf98f1778d43f69574390ad787afb646292a638c923d4/pydantic_core-2.33.2.tar.gz", hash = "sha256:7cb8bc3605c29176e1b105350d2e6474142d7c1bd1d9327c4a9bdb46bf827acc", size = 435195, upload-time = "2025-04-23T18:33:52.104Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/3f/8d/71db63483d518cbbf290261a1fc2839d17ff89fce7089e08cad07ccfce67/pydantic_core-2.33.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:4c5b0a576fb381edd6d27f0a85915c6daf2f8138dc5c267a57c08a62900758c7", size = 2028584, upload-time = "2025-04-23T18:31:03.106Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/24/2f/3cfa7244ae292dd850989f328722d2aef313f74ffc471184dc509e1e4e5a/pydantic_core-2.33.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e799c050df38a639db758c617ec771fd8fb7a5f8eaaa4b27b101f266b216a246", size = 1855071, upload-time = "2025-04-23T18:31:04.621Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b3/d3/4ae42d33f5e3f50dd467761304be2fa0a9417fbf09735bc2cce003480f2a/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dc46a01bf8d62f227d5ecee74178ffc448ff4e5197c756331f71efcc66dc980f", size = 1897823, upload-time = "2025-04-23T18:31:06.377Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f4/f3/aa5976e8352b7695ff808599794b1fba2a9ae2ee954a3426855935799488/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a144d4f717285c6d9234a66778059f33a89096dfb9b39117663fd8413d582dcc", size = 1983792, upload-time = "2025-04-23T18:31:07.93Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d5/7a/cda9b5a23c552037717f2b2a5257e9b2bfe45e687386df9591eff7b46d28/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:73cf6373c21bc80b2e0dc88444f41ae60b2f070ed02095754eb5a01df12256de", size = 2136338, upload-time = "2025-04-23T18:31:09.283Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/2b/9f/b8f9ec8dd1417eb9da784e91e1667d58a2a4a7b7b34cf4af765ef663a7e5/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3dc625f4aa79713512d1976fe9f0bc99f706a9dee21dfd1810b4bbbf228d0e8a", size = 2730998, upload-time = "2025-04-23T18:31:11.7Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/47/bc/cd720e078576bdb8255d5032c5d63ee5c0bf4b7173dd955185a1d658c456/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b21b5549499972441da4758d662aeea93f1923f953e9cbaff14b8b9565aef", size = 2003200, upload-time = "2025-04-23T18:31:13.536Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ca/22/3602b895ee2cd29d11a2b349372446ae9727c32e78a94b3d588a40fdf187/pydantic_core-2.33.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bdc25f3681f7b78572699569514036afe3c243bc3059d3942624e936ec93450e", size = 2113890, upload-time = "2025-04-23T18:31:15.011Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ff/e6/e3c5908c03cf00d629eb38393a98fccc38ee0ce8ecce32f69fc7d7b558a7/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:fe5b32187cbc0c862ee201ad66c30cf218e5ed468ec8dc1cf49dec66e160cc4d", size = 2073359, upload-time = "2025-04-23T18:31:16.393Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/12/e7/6a36a07c59ebefc8777d1ffdaf5ae71b06b21952582e4b07eba88a421c79/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:bc7aee6f634a6f4a95676fcb5d6559a2c2a390330098dba5e5a5f28a2e4ada30", size = 2245883, upload-time = "2025-04-23T18:31:17.892Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/16/3f/59b3187aaa6cc0c1e6616e8045b284de2b6a87b027cce2ffcea073adf1d2/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:235f45e5dbcccf6bd99f9f472858849f73d11120d76ea8707115415f8e5ebebf", size = 2241074, upload-time = "2025-04-23T18:31:19.205Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e0/ed/55532bb88f674d5d8f67ab121a2a13c385df382de2a1677f30ad385f7438/pydantic_core-2.33.2-cp311-cp311-win32.whl", hash = "sha256:6368900c2d3ef09b69cb0b913f9f8263b03786e5b2a387706c5afb66800efd51", size = 1910538, upload-time = "2025-04-23T18:31:20.541Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/fe/1b/25b7cccd4519c0b23c2dd636ad39d381abf113085ce4f7bec2b0dc755eb1/pydantic_core-2.33.2-cp311-cp311-win_amd64.whl", hash = "sha256:1e063337ef9e9820c77acc768546325ebe04ee38b08703244c1309cccc4f1bab", size = 1952909, upload-time = "2025-04-23T18:31:22.371Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/49/a9/d809358e49126438055884c4366a1f6227f0f84f635a9014e2deb9b9de54/pydantic_core-2.33.2-cp311-cp311-win_arm64.whl", hash = "sha256:6b99022f1d19bc32a4c2a0d544fc9a76e3be90f0b3f4af413f87d38749300e65", size = 1897786, upload-time = "2025-04-23T18:31:24.161Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/18/8a/2b41c97f554ec8c71f2a8a5f85cb56a8b0956addfe8b0efb5b3d77e8bdc3/pydantic_core-2.33.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a7ec89dc587667f22b6a0b6579c249fca9026ce7c333fc142ba42411fa243cdc", size = 2009000, upload-time = "2025-04-23T18:31:25.863Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a1/02/6224312aacb3c8ecbaa959897af57181fb6cf3a3d7917fd44d0f2917e6f2/pydantic_core-2.33.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3c6db6e52c6d70aa0d00d45cdb9b40f0433b96380071ea80b09277dba021ddf7", size = 1847996, upload-time = "2025-04-23T18:31:27.341Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d6/46/6dcdf084a523dbe0a0be59d054734b86a981726f221f4562aed313dbcb49/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e61206137cbc65e6d5256e1166f88331d3b6238e082d9f74613b9b765fb9025", size = 1880957, upload-time = "2025-04-23T18:31:28.956Z" },
|
||||
@@ -741,15 +676,6 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/a4/7d/e09391c2eebeab681df2b74bfe6c43422fffede8dc74187b2b0bf6fd7571/pydantic_core-2.33.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:61c18fba8e5e9db3ab908620af374db0ac1baa69f0f32df4f61ae23f15e586ac", size = 1806162, upload-time = "2025-04-23T18:32:20.188Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f1/3d/847b6b1fed9f8ed3bb95a9ad04fbd0b212e832d4f0f50ff4d9ee5a9f15cf/pydantic_core-2.33.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95237e53bb015f67b63c91af7518a62a8660376a6a0db19b89acc77a4d6199f5", size = 1981560, upload-time = "2025-04-23T18:32:22.354Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/6f/9a/e73262f6c6656262b5fdd723ad90f518f579b7bc8622e43a942eec53c938/pydantic_core-2.33.2-cp313-cp313t-win_amd64.whl", hash = "sha256:c2fc0a768ef76c15ab9238afa6da7f69895bb5d1ee83aeea2e3509af4472d0b9", size = 1935777, upload-time = "2025-04-23T18:32:25.088Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/7b/27/d4ae6487d73948d6f20dddcd94be4ea43e74349b56eba82e9bdee2d7494c/pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:dd14041875d09cc0f9308e37a6f8b65f5585cf2598a53aa0123df8b129d481f8", size = 2025200, upload-time = "2025-04-23T18:33:14.199Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f1/b8/b3cb95375f05d33801024079b9392a5ab45267a63400bf1866e7ce0f0de4/pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:d87c561733f66531dced0da6e864f44ebf89a8fba55f31407b00c2f7f9449593", size = 1859123, upload-time = "2025-04-23T18:33:16.555Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/05/bc/0d0b5adeda59a261cd30a1235a445bf55c7e46ae44aea28f7bd6ed46e091/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f82865531efd18d6e07a04a17331af02cb7a651583c418df8266f17a63c6612", size = 1892852, upload-time = "2025-04-23T18:33:18.513Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/3e/11/d37bdebbda2e449cb3f519f6ce950927b56d62f0b84fd9cb9e372a26a3d5/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bfb5112df54209d820d7bf9317c7a6c9025ea52e49f46b6a2060104bba37de7", size = 2067484, upload-time = "2025-04-23T18:33:20.475Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/8c/55/1f95f0a05ce72ecb02a8a8a1c3be0579bbc29b1d5ab68f1378b7bebc5057/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:64632ff9d614e5eecfb495796ad51b0ed98c453e447a76bcbeeb69615079fc7e", size = 2108896, upload-time = "2025-04-23T18:33:22.501Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/53/89/2b2de6c81fa131f423246a9109d7b2a375e83968ad0800d6e57d0574629b/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:f889f7a40498cc077332c7ab6b4608d296d852182211787d4f3ee377aaae66e8", size = 2069475, upload-time = "2025-04-23T18:33:24.528Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b8/e9/1f7efbe20d0b2b10f6718944b5d8ece9152390904f29a78e68d4e7961159/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:de4b83bb311557e439b9e186f733f6c645b9417c84e2eb8203f3f820a4b988bf", size = 2239013, upload-time = "2025-04-23T18:33:26.621Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/3c/b2/5309c905a93811524a49b4e031e9851a6b00ff0fb668794472ea7746b448/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:82f68293f055f51b51ea42fafc74b6aad03e70e191799430b90c13d643059ebb", size = 2238715, upload-time = "2025-04-23T18:33:28.656Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/32/56/8a7ca5d2cd2cda1d245d34b1c9a942920a718082ae8e54e5f3e5a58b7add/pydantic_core-2.33.2-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:329467cecfb529c925cf2bbd4d60d2c509bc2fb52a20c1045bf09bb70971a9c1", size = 2066757, upload-time = "2025-04-23T18:33:30.645Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -804,15 +730,6 @@ version = "6.0.2"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/54/ed/79a089b6be93607fa5cdaedf301d7dfb23af5f25c398d5ead2525b063e17/pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e", size = 130631, upload-time = "2024-08-06T20:33:50.674Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/f8/aa/7af4e81f7acba21a4c6be026da38fd2b872ca46226673c89a758ebdc4fd2/PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774", size = 184612, upload-time = "2024-08-06T20:32:03.408Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/8b/62/b9faa998fd185f65c1371643678e4d58254add437edb764a08c5a98fb986/PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee", size = 172040, upload-time = "2024-08-06T20:32:04.926Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ad/0c/c804f5f922a9a6563bab712d8dcc70251e8af811fce4524d57c2c0fd49a4/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c", size = 736829, upload-time = "2024-08-06T20:32:06.459Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/51/16/6af8d6a6b210c8e54f1406a6b9481febf9c64a3109c541567e35a49aa2e7/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317", size = 764167, upload-time = "2024-08-06T20:32:08.338Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/75/e4/2c27590dfc9992f73aabbeb9241ae20220bd9452df27483b6e56d3975cc5/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85", size = 762952, upload-time = "2024-08-06T20:32:14.124Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/9b/97/ecc1abf4a823f5ac61941a9c00fe501b02ac3ab0e373c3857f7d4b83e2b6/PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4", size = 735301, upload-time = "2024-08-06T20:32:16.17Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/45/73/0f49dacd6e82c9430e46f4a027baa4ca205e8b0a9dce1397f44edc23559d/PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e", size = 756638, upload-time = "2024-08-06T20:32:18.555Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/22/5f/956f0f9fc65223a58fbc14459bf34b4cc48dec52e00535c79b8db361aabd/PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5", size = 143850, upload-time = "2024-08-06T20:32:19.889Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ed/23/8da0bbe2ab9dcdd11f4f4557ccaf95c10b9811b13ecced089d43ce59c3c8/PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44", size = 161980, upload-time = "2024-08-06T20:32:21.273Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/86/0c/c581167fc46d6d6d7ddcfb8c843a4de25bdd27e4466938109ca68492292c/PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab", size = 183873, upload-time = "2024-08-06T20:32:25.131Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a8/0c/38374f5bb272c051e2a69281d71cba6fdb983413e6758b84482905e29a5d/PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725", size = 173302, upload-time = "2024-08-06T20:32:26.511Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c3/93/9916574aa8c00aa06bbac729972eb1071d002b8e158bd0e83a3b9a20a1f7/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5", size = 739154, upload-time = "2024-08-06T20:32:28.363Z" },
|
||||
@@ -837,9 +754,6 @@ wheels = [
|
||||
name = "redis"
|
||||
version = "6.2.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "async-timeout", marker = "python_full_version < '3.11.3'" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/ea/9a/0551e01ba52b944f97480721656578c8a7c46b51b99d66814f85fe3a4f3e/redis-6.2.0.tar.gz", hash = "sha256:e821f129b75dde6cb99dd35e5c76e8c49512a5a0d8dfdc560b2fbd44b85ca977", size = 4639129, upload-time = "2025-05-28T05:01:18.91Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/13/67/e60968d3b0e077495a8fee89cf3f2373db98e528288a48f1ee44967f6e8c/redis-6.2.0-py3-none-any.whl", hash = "sha256:c8ddf316ee0aab65f04a11229e94a64b2618451dab7a67cb2f77eb799d872d5e", size = 278659, upload-time = "2025-05-28T05:01:16.955Z" },
|
||||
@@ -851,15 +765,6 @@ version = "3.1.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/6c/19/b44c30066c6e85cd6a4fd8a8983be91d2336a4e7f0ef04e576bc9b1d7c63/rosu_pp_py-3.1.0.tar.gz", hash = "sha256:4aa64eb5e68b8957357f9b304047db285423b207ad913e28829ccfcd5348d41a", size = 31144, upload-time = "2025-06-03T17:14:27.461Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/ea/e8/a4a899997304049801c27e1affa4ce7ea60d2ba16caa7c6739a6387f1790/rosu_pp_py-3.1.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:d584ffabb96958d2c90a696a2634fa7336966b429ee0f0d03397763fc73d3237", size = 556133, upload-time = "2025-06-03T17:13:21.925Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/12/be/fc90d17277335a0225b88dd06790f6056bc0e4385e610df4aed471f692d8/rosu_pp_py-3.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ccf125864d0483281ada86e913b8133b53cb62455842bd418a5a4966abb47a67", size = 513148, upload-time = "2025-06-03T17:13:23.071Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f3/9d/26893b6182bd83694974ae6931647801c060b55844696089c463645290d3/rosu_pp_py-3.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:587a16e928c02f1b9439d8140d53ed8ff7ccab8f663b813c44cab9c3a89a1d46", size = 526976, upload-time = "2025-06-03T17:13:24.127Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/50/12/8fd68740f722ffbb792f37577a515c727de52ef14fcf46f22d5c2cdde03e/rosu_pp_py-3.1.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:721b4f9e0c1f17402d23915f8cb8695e476c8841319c71097c5f71dab6c91f1c", size = 550737, upload-time = "2025-06-03T17:13:25.264Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/6e/86/ce7b0587800ce1da69b672e7b11ea5ec8469c17bedc3d51efcd400261830/rosu_pp_py-3.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f5fbb9ae415d1f71ca8e3a153b47e584415ae081816d0b60b70a1410c7ed562", size = 566922, upload-time = "2025-06-03T17:13:26.356Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/bc/63/8b752c2116777fa03f46d3793fc6e87e262a21a71460a49d503d59690cec/rosu_pp_py-3.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:06977b5211da327c27a921e284f5cb678e4a89f00ce76520fee2c33f09b28ab8", size = 705614, upload-time = "2025-06-03T17:13:27.882Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d8/71/88d4051beaad89a29813038c4e391952f017ffe2199efee4469955257167/rosu_pp_py-3.1.0-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:9dbd319039d5803a85e7263a22c808f93970b8bc0ed9e846d66050995d19fdb5", size = 814233, upload-time = "2025-06-03T17:13:29.544Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/8d/13/a5b55a928edd2b70fb6d3268f7f344356cc781fd2194076a75af86faedb5/rosu_pp_py-3.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:32d039b60c80bc4c6d4d6ee50918a44ebd95ab36d154da0dcc24af38858d0807", size = 738492, upload-time = "2025-06-03T17:13:31.157Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/fc/18/67fa30cab0ff4179533fd2c89e4d8141d01968278ea095a42a06e1350b39/rosu_pp_py-3.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:f21edd037b6e30c019a721d374dc1e72e62c10f1a9a5b22773f1b5e321cf2a1a", size = 460036, upload-time = "2025-06-03T17:13:32.141Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/9a/04/d752d7cfb71afcbecd0513ffcc716abcf5c3b2b4b9a4e44a3c7e7fc43fba/rosu_pp_py-3.1.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:61275ddfedd7f67bcb5c42a136fb30a66aeb7e07323c59a67db590de687bd78d", size = 552307, upload-time = "2025-06-03T17:13:33.203Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/27/76/e7d3415cdd384b8ea0a2f461c87d9b451108cbded46e2e88676611a99875/rosu_pp_py-3.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:04aacaa6faba9d0892ba5584884cfaf42eb1a7678dc0dff453fc6988e8be8809", size = 508787, upload-time = "2025-06-03T17:13:34.507Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/7d/a0/c59168f75b32b6cf3e41d5d44dc478b113eebe38166e6b87af193ebb8d4f/rosu_pp_py-3.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9eecd7a78aeb82abf39ac7db670350a42b6eb8a54eb4a8a13610def02c56d005", size = 525740, upload-time = "2025-06-03T17:13:35.631Z" },
|
||||
@@ -945,14 +850,6 @@ dependencies = [
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/63/66/45b165c595ec89aa7dcc2c1cd222ab269bc753f1fc7a1e68f8481bd957bf/sqlalchemy-2.0.41.tar.gz", hash = "sha256:edba70118c4be3c2b1f90754d308d0b79c6fe2c0fdc52d8ddf603916f83f4db9", size = 9689424, upload-time = "2025-05-14T17:10:32.339Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/37/4e/b00e3ffae32b74b5180e15d2ab4040531ee1bef4c19755fe7926622dc958/sqlalchemy-2.0.41-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6375cd674fe82d7aa9816d1cb96ec592bac1726c11e0cafbf40eeee9a4516b5f", size = 2121232, upload-time = "2025-05-14T17:48:20.444Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ef/30/6547ebb10875302074a37e1970a5dce7985240665778cfdee2323709f749/sqlalchemy-2.0.41-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9f8c9fdd15a55d9465e590a402f42082705d66b05afc3ffd2d2eb3c6ba919560", size = 2110897, upload-time = "2025-05-14T17:48:21.634Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/9e/21/59df2b41b0f6c62da55cd64798232d7349a9378befa7f1bb18cf1dfd510a/sqlalchemy-2.0.41-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:32f9dc8c44acdee06c8fc6440db9eae8b4af8b01e4b1aee7bdd7241c22edff4f", size = 3273313, upload-time = "2025-05-14T17:51:56.205Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/62/e4/b9a7a0e5c6f79d49bcd6efb6e90d7536dc604dab64582a9dec220dab54b6/sqlalchemy-2.0.41-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90c11ceb9a1f482c752a71f203a81858625d8df5746d787a4786bca4ffdf71c6", size = 3273807, upload-time = "2025-05-14T17:55:26.928Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/39/d8/79f2427251b44ddee18676c04eab038d043cff0e764d2d8bb08261d6135d/sqlalchemy-2.0.41-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:911cc493ebd60de5f285bcae0491a60b4f2a9f0f5c270edd1c4dbaef7a38fc04", size = 3209632, upload-time = "2025-05-14T17:51:59.384Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d4/16/730a82dda30765f63e0454918c982fb7193f6b398b31d63c7c3bd3652ae5/sqlalchemy-2.0.41-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:03968a349db483936c249f4d9cd14ff2c296adfa1290b660ba6516f973139582", size = 3233642, upload-time = "2025-05-14T17:55:29.901Z" },
{ url = "https://files.pythonhosted.org/packages/04/61/c0d4607f7799efa8b8ea3c49b4621e861c8f5c41fd4b5b636c534fcb7d73/sqlalchemy-2.0.41-cp311-cp311-win32.whl", hash = "sha256:293cd444d82b18da48c9f71cd7005844dbbd06ca19be1ccf6779154439eec0b8", size = 2086475, upload-time = "2025-05-14T17:56:02.095Z" },
{ url = "https://files.pythonhosted.org/packages/9d/8e/8344f8ae1cb6a479d0741c02cd4f666925b2bf02e2468ddaf5ce44111f30/sqlalchemy-2.0.41-cp311-cp311-win_amd64.whl", hash = "sha256:3d3549fc3e40667ec7199033a4e40a2f669898a00a7b18a931d3efb4c7900504", size = 2110903, upload-time = "2025-05-14T17:56:03.499Z" },
{ url = "https://files.pythonhosted.org/packages/3e/2a/f1f4e068b371154740dd10fb81afb5240d5af4aa0087b88d8b308b5429c2/sqlalchemy-2.0.41-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:81f413674d85cfd0dfcd6512e10e0f33c19c21860342a4890c3a2b59479929f9", size = 2119645, upload-time = "2025-05-14T17:55:24.854Z" },
{ url = "https://files.pythonhosted.org/packages/9b/e8/c664a7e73d36fbfc4730f8cf2bf930444ea87270f2825efbe17bf808b998/sqlalchemy-2.0.41-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:598d9ebc1e796431bbd068e41e4de4dc34312b7aa3292571bb3674a0cb415dd1", size = 2107399, upload-time = "2025-05-14T17:55:28.097Z" },
{ url = "https://files.pythonhosted.org/packages/5c/78/8a9cf6c5e7135540cb682128d091d6afa1b9e48bd049b0d691bf54114f70/sqlalchemy-2.0.41-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a104c5694dfd2d864a6f91b0956eb5d5883234119cb40010115fd45a16da5e70", size = 3293269, upload-time = "2025-05-14T17:50:38.227Z" },
@@ -1049,12 +946,6 @@ version = "0.21.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/af/c0/854216d09d33c543f12a44b393c402e89a920b1a0a7dc634c42de91b9cf6/uvloop-0.21.0.tar.gz", hash = "sha256:3bf12b0fda68447806a7ad847bfa591613177275d35b6724b1ee573faa3704e3", size = 2492741, upload-time = "2024-10-14T23:38:35.489Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/57/a7/4cf0334105c1160dd6819f3297f8700fda7fc30ab4f61fbf3e725acbc7cc/uvloop-0.21.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c0f3fa6200b3108919f8bdabb9a7f87f20e7097ea3c543754cabc7d717d95cf8", size = 1447410, upload-time = "2024-10-14T23:37:33.612Z" },
{ url = "https://files.pythonhosted.org/packages/8c/7c/1517b0bbc2dbe784b563d6ab54f2ef88c890fdad77232c98ed490aa07132/uvloop-0.21.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0878c2640cf341b269b7e128b1a5fed890adc4455513ca710d77d5e93aa6d6a0", size = 805476, upload-time = "2024-10-14T23:37:36.11Z" },
{ url = "https://files.pythonhosted.org/packages/ee/ea/0bfae1aceb82a503f358d8d2fa126ca9dbdb2ba9c7866974faec1cb5875c/uvloop-0.21.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9fb766bb57b7388745d8bcc53a359b116b8a04c83a2288069809d2b3466c37e", size = 3960855, upload-time = "2024-10-14T23:37:37.683Z" },
{ url = "https://files.pythonhosted.org/packages/8a/ca/0864176a649838b838f36d44bf31c451597ab363b60dc9e09c9630619d41/uvloop-0.21.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a375441696e2eda1c43c44ccb66e04d61ceeffcd76e4929e527b7fa401b90fb", size = 3973185, upload-time = "2024-10-14T23:37:40.226Z" },
{ url = "https://files.pythonhosted.org/packages/30/bf/08ad29979a936d63787ba47a540de2132169f140d54aa25bc8c3df3e67f4/uvloop-0.21.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:baa0e6291d91649c6ba4ed4b2f982f9fa165b5bbd50a9e203c416a2797bab3c6", size = 3820256, upload-time = "2024-10-14T23:37:42.839Z" },
{ url = "https://files.pythonhosted.org/packages/da/e2/5cf6ef37e3daf2f06e651aae5ea108ad30df3cb269102678b61ebf1fdf42/uvloop-0.21.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4509360fcc4c3bd2c70d87573ad472de40c13387f5fda8cb58350a1d7475e58d", size = 3937323, upload-time = "2024-10-14T23:37:45.337Z" },
{ url = "https://files.pythonhosted.org/packages/8c/4c/03f93178830dc7ce8b4cdee1d36770d2f5ebb6f3d37d354e061eefc73545/uvloop-0.21.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:359ec2c888397b9e592a889c4d72ba3d6befba8b2bb01743f72fffbde663b59c", size = 1471284, upload-time = "2024-10-14T23:37:47.833Z" },
{ url = "https://files.pythonhosted.org/packages/43/3e/92c03f4d05e50f09251bd8b2b2b584a2a7f8fe600008bcc4523337abe676/uvloop-0.21.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f7089d2dc73179ce5ac255bdf37c236a9f914b264825fdaacaded6990a7fb4c2", size = 821349, upload-time = "2024-10-14T23:37:50.149Z" },
{ url = "https://files.pythonhosted.org/packages/a6/ef/a02ec5da49909dbbfb1fd205a9a1ac4e88ea92dcae885e7c961847cd51e2/uvloop-0.21.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:baa4dcdbd9ae0a372f2167a207cd98c9f9a1ea1188a8a526431eef2f8116cc8d", size = 4580089, upload-time = "2024-10-14T23:37:51.703Z" },
@@ -1092,19 +983,6 @@ dependencies = [
]
sdist = { url = "https://files.pythonhosted.org/packages/2a/9a/d451fcc97d029f5812e898fd30a53fd8c15c7bbd058fd75cfc6beb9bd761/watchfiles-1.1.0.tar.gz", hash = "sha256:693ed7ec72cbfcee399e92c895362b6e66d63dac6b91e2c11ae03d10d503e575", size = 94406, upload-time = "2025-06-15T19:06:59.42Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/8b/78/7401154b78ab484ccaaeef970dc2af0cb88b5ba8a1b415383da444cdd8d3/watchfiles-1.1.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:c9649dfc57cc1f9835551deb17689e8d44666315f2e82d337b9f07bd76ae3aa2", size = 405751, upload-time = "2025-06-15T19:05:07.679Z" },
{ url = "https://files.pythonhosted.org/packages/76/63/e6c3dbc1f78d001589b75e56a288c47723de28c580ad715eb116639152b5/watchfiles-1.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:406520216186b99374cdb58bc48e34bb74535adec160c8459894884c983a149c", size = 397313, upload-time = "2025-06-15T19:05:08.764Z" },
{ url = "https://files.pythonhosted.org/packages/6c/a2/8afa359ff52e99af1632f90cbf359da46184207e893a5f179301b0c8d6df/watchfiles-1.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb45350fd1dc75cd68d3d72c47f5b513cb0578da716df5fba02fff31c69d5f2d", size = 450792, upload-time = "2025-06-15T19:05:09.869Z" },
{ url = "https://files.pythonhosted.org/packages/1d/bf/7446b401667f5c64972a57a0233be1104157fc3abf72c4ef2666c1bd09b2/watchfiles-1.1.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:11ee4444250fcbeb47459a877e5e80ed994ce8e8d20283857fc128be1715dac7", size = 458196, upload-time = "2025-06-15T19:05:11.91Z" },
{ url = "https://files.pythonhosted.org/packages/58/2f/501ddbdfa3fa874ea5597c77eeea3d413579c29af26c1091b08d0c792280/watchfiles-1.1.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bda8136e6a80bdea23e5e74e09df0362744d24ffb8cd59c4a95a6ce3d142f79c", size = 484788, upload-time = "2025-06-15T19:05:13.373Z" },
{ url = "https://files.pythonhosted.org/packages/61/1e/9c18eb2eb5c953c96bc0e5f626f0e53cfef4bd19bd50d71d1a049c63a575/watchfiles-1.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b915daeb2d8c1f5cee4b970f2e2c988ce6514aace3c9296e58dd64dc9aa5d575", size = 597879, upload-time = "2025-06-15T19:05:14.725Z" },
{ url = "https://files.pythonhosted.org/packages/8b/6c/1467402e5185d89388b4486745af1e0325007af0017c3384cc786fff0542/watchfiles-1.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ed8fc66786de8d0376f9f913c09e963c66e90ced9aa11997f93bdb30f7c872a8", size = 477447, upload-time = "2025-06-15T19:05:15.775Z" },
{ url = "https://files.pythonhosted.org/packages/2b/a1/ec0a606bde4853d6c4a578f9391eeb3684a9aea736a8eb217e3e00aa89a1/watchfiles-1.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe4371595edf78c41ef8ac8df20df3943e13defd0efcb732b2e393b5a8a7a71f", size = 453145, upload-time = "2025-06-15T19:05:17.17Z" },
{ url = "https://files.pythonhosted.org/packages/90/b9/ef6f0c247a6a35d689fc970dc7f6734f9257451aefb30def5d100d6246a5/watchfiles-1.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b7c5f6fe273291f4d414d55b2c80d33c457b8a42677ad14b4b47ff025d0893e4", size = 626539, upload-time = "2025-06-15T19:05:18.557Z" },
{ url = "https://files.pythonhosted.org/packages/34/44/6ffda5537085106ff5aaa762b0d130ac6c75a08015dd1621376f708c94de/watchfiles-1.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7738027989881e70e3723c75921f1efa45225084228788fc59ea8c6d732eb30d", size = 624472, upload-time = "2025-06-15T19:05:19.588Z" },
{ url = "https://files.pythonhosted.org/packages/c3/e3/71170985c48028fa3f0a50946916a14055e741db11c2e7bc2f3b61f4d0e3/watchfiles-1.1.0-cp311-cp311-win32.whl", hash = "sha256:622d6b2c06be19f6e89b1d951485a232e3b59618def88dbeda575ed8f0d8dbf2", size = 279348, upload-time = "2025-06-15T19:05:20.856Z" },
{ url = "https://files.pythonhosted.org/packages/89/1b/3e39c68b68a7a171070f81fc2561d23ce8d6859659406842a0e4bebf3bba/watchfiles-1.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:48aa25e5992b61debc908a61ab4d3f216b64f44fdaa71eb082d8b2de846b7d12", size = 292607, upload-time = "2025-06-15T19:05:21.937Z" },
{ url = "https://files.pythonhosted.org/packages/61/9f/2973b7539f2bdb6ea86d2c87f70f615a71a1fc2dba2911795cea25968aea/watchfiles-1.1.0-cp311-cp311-win_arm64.whl", hash = "sha256:00645eb79a3faa70d9cb15c8d4187bb72970b2470e938670240c7998dad9f13a", size = 285056, upload-time = "2025-06-15T19:05:23.12Z" },
{ url = "https://files.pythonhosted.org/packages/f6/b8/858957045a38a4079203a33aaa7d23ea9269ca7761c8a074af3524fbb240/watchfiles-1.1.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:9dc001c3e10de4725c749d4c2f2bdc6ae24de5a88a339c4bce32300a31ede179", size = 402339, upload-time = "2025-06-15T19:05:24.516Z" },
{ url = "https://files.pythonhosted.org/packages/80/28/98b222cca751ba68e88521fabd79a4fab64005fc5976ea49b53fa205d1fa/watchfiles-1.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d9ba68ec283153dead62cbe81872d28e053745f12335d037de9cbd14bd1877f5", size = 394409, upload-time = "2025-06-15T19:05:25.469Z" },
{ url = "https://files.pythonhosted.org/packages/86/50/dee79968566c03190677c26f7f47960aff738d32087087bdf63a5473e7df/watchfiles-1.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:130fc497b8ee68dce163e4254d9b0356411d1490e868bd8790028bc46c5cc297", size = 450939, upload-time = "2025-06-15T19:05:26.494Z" },
@@ -1161,10 +1039,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/69/c4/088825b75489cb5b6a761a4542645718893d395d8c530b38734f19da44d2/watchfiles-1.1.0-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d05686b5487cfa2e2c28ff1aa370ea3e6c5accfe6435944ddea1e10d93872147", size = 452240, upload-time = "2025-06-15T19:06:26.552Z" },
{ url = "https://files.pythonhosted.org/packages/10/8c/22b074814970eeef43b7c44df98c3e9667c1f7bf5b83e0ff0201b0bd43f9/watchfiles-1.1.0-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:d0e10e6f8f6dc5762adee7dece33b722282e1f59aa6a55da5d493a97282fedd8", size = 625607, upload-time = "2025-06-15T19:06:27.606Z" },
{ url = "https://files.pythonhosted.org/packages/32/fa/a4f5c2046385492b2273213ef815bf71a0d4c1943b784fb904e184e30201/watchfiles-1.1.0-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:af06c863f152005c7592df1d6a7009c836a247c9d8adb78fef8575a5a98699db", size = 623315, upload-time = "2025-06-15T19:06:29.076Z" },
{ url = "https://files.pythonhosted.org/packages/8c/6b/686dcf5d3525ad17b384fd94708e95193529b460a1b7bf40851f1328ec6e/watchfiles-1.1.0-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:0ece16b563b17ab26eaa2d52230c9a7ae46cf01759621f4fbbca280e438267b3", size = 406910, upload-time = "2025-06-15T19:06:49.335Z" },
{ url = "https://files.pythonhosted.org/packages/f3/d3/71c2dcf81dc1edcf8af9f4d8d63b1316fb0a2dd90cbfd427e8d9dd584a90/watchfiles-1.1.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:51b81e55d40c4b4aa8658427a3ee7ea847c591ae9e8b81ef94a90b668999353c", size = 398816, upload-time = "2025-06-15T19:06:50.433Z" },
{ url = "https://files.pythonhosted.org/packages/b8/fa/12269467b2fc006f8fce4cd6c3acfa77491dd0777d2a747415f28ccc8c60/watchfiles-1.1.0-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f2bcdc54ea267fe72bfc7d83c041e4eb58d7d8dc6f578dfddb52f037ce62f432", size = 451584, upload-time = "2025-06-15T19:06:51.834Z" },
{ url = "https://files.pythonhosted.org/packages/bd/d3/254cea30f918f489db09d6a8435a7de7047f8cb68584477a515f160541d6/watchfiles-1.1.0-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:923fec6e5461c42bd7e3fd5ec37492c6f3468be0499bc0707b4bbbc16ac21792", size = 454009, upload-time = "2025-06-15T19:06:52.896Z" },
]
[[package]]
@@ -1173,17 +1047,6 @@ version = "15.0.1"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/21/e6/26d09fab466b7ca9c7737474c52be4f76a40301b08362eb2dbc19dcc16c1/websockets-15.0.1.tar.gz", hash = "sha256:82544de02076bafba038ce055ee6412d68da13ab47f0c60cab827346de828dee", size = 177016, upload-time = "2025-03-05T20:03:41.606Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/9f/32/18fcd5919c293a398db67443acd33fde142f283853076049824fc58e6f75/websockets-15.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:823c248b690b2fd9303ba00c4f66cd5e2d8c3ba4aa968b2779be9532a4dad431", size = 175423, upload-time = "2025-03-05T20:01:56.276Z" },
{ url = "https://files.pythonhosted.org/packages/76/70/ba1ad96b07869275ef42e2ce21f07a5b0148936688c2baf7e4a1f60d5058/websockets-15.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678999709e68425ae2593acf2e3ebcbcf2e69885a5ee78f9eb80e6e371f1bf57", size = 173082, upload-time = "2025-03-05T20:01:57.563Z" },
{ url = "https://files.pythonhosted.org/packages/86/f2/10b55821dd40eb696ce4704a87d57774696f9451108cff0d2824c97e0f97/websockets-15.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d50fd1ee42388dcfb2b3676132c78116490976f1300da28eb629272d5d93e905", size = 173330, upload-time = "2025-03-05T20:01:59.063Z" },
{ url = "https://files.pythonhosted.org/packages/a5/90/1c37ae8b8a113d3daf1065222b6af61cc44102da95388ac0018fcb7d93d9/websockets-15.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d99e5546bf73dbad5bf3547174cd6cb8ba7273062a23808ffea025ecb1cf8562", size = 182878, upload-time = "2025-03-05T20:02:00.305Z" },
{ url = "https://files.pythonhosted.org/packages/8e/8d/96e8e288b2a41dffafb78e8904ea7367ee4f891dafc2ab8d87e2124cb3d3/websockets-15.0.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:66dd88c918e3287efc22409d426c8f729688d89a0c587c88971a0faa2c2f3792", size = 181883, upload-time = "2025-03-05T20:02:03.148Z" },
{ url = "https://files.pythonhosted.org/packages/93/1f/5d6dbf551766308f6f50f8baf8e9860be6182911e8106da7a7f73785f4c4/websockets-15.0.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8dd8327c795b3e3f219760fa603dcae1dcc148172290a8ab15158cf85a953413", size = 182252, upload-time = "2025-03-05T20:02:05.29Z" },
{ url = "https://files.pythonhosted.org/packages/d4/78/2d4fed9123e6620cbf1706c0de8a1632e1a28e7774d94346d7de1bba2ca3/websockets-15.0.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8fdc51055e6ff4adeb88d58a11042ec9a5eae317a0a53d12c062c8a8865909e8", size = 182521, upload-time = "2025-03-05T20:02:07.458Z" },
{ url = "https://files.pythonhosted.org/packages/e7/3b/66d4c1b444dd1a9823c4a81f50231b921bab54eee2f69e70319b4e21f1ca/websockets-15.0.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:693f0192126df6c2327cce3baa7c06f2a117575e32ab2308f7f8216c29d9e2e3", size = 181958, upload-time = "2025-03-05T20:02:09.842Z" },
{ url = "https://files.pythonhosted.org/packages/08/ff/e9eed2ee5fed6f76fdd6032ca5cd38c57ca9661430bb3d5fb2872dc8703c/websockets-15.0.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:54479983bd5fb469c38f2f5c7e3a24f9a4e70594cd68cd1fa6b9340dadaff7cf", size = 181918, upload-time = "2025-03-05T20:02:11.968Z" },
{ url = "https://files.pythonhosted.org/packages/d8/75/994634a49b7e12532be6a42103597b71098fd25900f7437d6055ed39930a/websockets-15.0.1-cp311-cp311-win32.whl", hash = "sha256:16b6c1b3e57799b9d38427dda63edcbe4926352c47cf88588c0be4ace18dac85", size = 176388, upload-time = "2025-03-05T20:02:13.32Z" },
{ url = "https://files.pythonhosted.org/packages/98/93/e36c73f78400a65f5e236cd376713c34182e6663f6889cd45a4a04d8f203/websockets-15.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:27ccee0071a0e75d22cb35849b1db43f2ecd3e161041ac1ee9d2352ddf72f065", size = 176828, upload-time = "2025-03-05T20:02:14.585Z" },
{ url = "https://files.pythonhosted.org/packages/51/6b/4545a0d843594f5d0771e86463606a3988b5a09ca5123136f8a76580dd63/websockets-15.0.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:3e90baa811a5d73f3ca0bcbf32064d663ed81318ab225ee4f427ad4e26e5aff3", size = 175437, upload-time = "2025-03-05T20:02:16.706Z" },
{ url = "https://files.pythonhosted.org/packages/f4/71/809a0f5f6a06522af902e0f2ea2757f71ead94610010cf570ab5c98e99ed/websockets-15.0.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:592f1a9fe869c778694f0aa806ba0374e97648ab57936f092fd9d87f8bc03665", size = 173096, upload-time = "2025-03-05T20:02:18.832Z" },
{ url = "https://files.pythonhosted.org/packages/3d/69/1a681dd6f02180916f116894181eab8b2e25b31e484c5d0eae637ec01f7c/websockets-15.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0701bc3cfcb9164d04a14b149fd74be7347a530ad3bbf15ab2c678a2cd3dd9a2", size = 173332, upload-time = "2025-03-05T20:02:20.187Z" },
@@ -1208,3 +1071,12 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/1b/6c/c65773d6cab416a64d191d6ee8a8b1c68a09970ea6909d16965d26bfed1e/websockets-15.0.1-cp313-cp313-win_amd64.whl", hash = "sha256:e09473f095a819042ecb2ab9465aee615bd9c2028e4ef7d933600a8401c79561", size = 176837, upload-time = "2025-03-05T20:02:55.237Z" },
{ url = "https://files.pythonhosted.org/packages/fa/a8/5b41e0da817d64113292ab1f8247140aac61cbf6cfd085d6a0fa77f4984f/websockets-15.0.1-py3-none-any.whl", hash = "sha256:f7a866fbc1e97b5c617ee4116daaa09b722101d4a3c170c787450ba409f9736f", size = 169743, upload-time = "2025-03-05T20:03:39.41Z" },
]
[[package]]
name = "win32-setctime"
version = "1.2.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/b3/8f/705086c9d734d3b663af0e9bb3d4de6578d08f46b1b101c2442fd9aecaa2/win32_setctime-1.2.0.tar.gz", hash = "sha256:ae1fdf948f5640aae05c511ade119313fb6a30d7eabe25fef9764dca5873c4c0", size = 4867, upload-time = "2024-12-07T15:28:28.314Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/e1/07/c6fe3ad3e685340704d314d765b7912993bcb8dc198f0e7a89382d37974b/win32_setctime-1.2.0-py3-none-any.whl", hash = "sha256:95d644c4e708aba81dc3704a116d8cbc974d70b3bdb8be1d150e36be6e9d1390", size = 4083, upload-time = "2024-12-07T15:28:26.465Z" },
]