From de0c86f4a28886ad6565f080d6631b09fb02846e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E5=92=95=E8=B0=B7=E9=85=B1?= Date: Sun, 17 Aug 2025 23:56:46 +0800 Subject: [PATCH 1/4] add geoip --- .env.example | 9 ++ .gitignore | 2 + app/config.py | 24 +++- app/dependencies/geoip.py | 51 +++++++ app/helpers/geoip_helper.py | 153 +++++++++++++++++++++ app/router/auth.py | 25 +++- app/service/geoip_scheduler.py | 52 ++++++++ app/service/init_geoip.py | 25 ++++ main.py | 4 + pyproject.toml | 4 +- uv.lock | 236 ++++++++++++++++++++++----------- 11 files changed, 503 insertions(+), 82 deletions(-) create mode 100644 app/dependencies/geoip.py create mode 100644 app/helpers/geoip_helper.py create mode 100644 app/service/geoip_scheduler.py create mode 100644 app/service/init_geoip.py diff --git a/.env.example b/.env.example index bb4bf98..20ad3cd 100644 --- a/.env.example +++ b/.env.example @@ -48,6 +48,15 @@ LOG_LEVEL="INFO" # Sentry 设置,为空表示不启用 SENTRY_DSN +# GeoIP 配置 - MaxMind License Key(用于 IP 地址地理位置查询) +MAXMIND_LICENSE_KEY="" +# GeoIP 数据库存储目录 +GEOIP_DEST_DIR="./geoip" +# GeoIP 每周更新的星期几(0=周一,6=周日) +GEOIP_UPDATE_DAY=1 +# GeoIP 每周更新时间(小时,0-23) +GEOIP_UPDATE_HOUR=2 + # 游戏设置 ENABLE_RX=false # 启用 RX mod 统计数据 ENABLE_AP=false # 启用 AP mod Z统计数据 diff --git a/.gitignore b/.gitignore index 23b1cc8..b8a8238 100644 --- a/.gitignore +++ b/.gitignore @@ -215,3 +215,5 @@ bancho.py-master/* storage/ replays/ osu-master/* + +geoip/* \ No newline at end of file diff --git a/app/config.py b/app/config.py index e794e09..c38681f 100644 --- a/app/config.py +++ b/app/config.py @@ -3,9 +3,23 @@ from __future__ import annotations from enum import Enum from typing import Annotated, Any -from pydantic import AliasChoices, Field, HttpUrl, ValidationInfo, field_validator +from pydantic import AliasChoices, Field, HttpUrl, ValidationInfo, field_validator, BeforeValidator from pydantic_settings import BaseSettings, NoDecode, SettingsConfigDict +def _parse_list(v): + if v is None or v == "" or str(v).strip() in ("[]", "{}"): + return [] + if isinstance(v, list): + return v + s = str(v).strip() + try: + import json + parsed = json.loads(s) + if isinstance(parsed, list): + return parsed + except Exception: + pass + return [x.strip() for x in s.split(",") if x.strip()] class AWSS3StorageSettings(BaseSettings): s3_access_key_id: str @@ -96,6 +110,12 @@ class Settings(BaseSettings): # Sentry 配置 sentry_dsn: HttpUrl | None = None + # GeoIP 配置 + maxmind_license_key: str = "" + geoip_dest_dir: str = "./geoip" + geoip_update_day: int = 1 # 每周更新的星期几(0=周一,6=周日) + geoip_update_hour: int = 2 # 每周更新的小时数(0-23) + # 游戏设置 enable_rx: bool = Field( default=False, validation_alias=AliasChoices("enable_rx", "enable_osu_rx") @@ -108,7 +128,7 @@ class Settings(BaseSettings): enable_all_beatmap_leaderboard: bool = False enable_all_beatmap_pp: bool = False suspicious_score_check: bool = True - seasonal_backgrounds: list[str] = [] + seasonal_backgrounds: Annotated[list[str], BeforeValidator(_parse_list)] = [] banned_name: list[str] = [ "mrekk", "vaxei", diff --git a/app/dependencies/geoip.py b/app/dependencies/geoip.py new file mode 100644 index 0000000..34e8d3e --- /dev/null +++ b/app/dependencies/geoip.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +""" +GeoIP dependency for FastAPI +""" +from functools import lru_cache +from app.helpers.geoip_helper import GeoIPHelper +from app.config import settings + +@lru_cache() +def get_geoip_helper() -> GeoIPHelper: + """ + 获取 GeoIP 帮助类实例 + 使用 lru_cache 确保单例模式 + """ + return GeoIPHelper( + 
dest_dir=settings.geoip_dest_dir, + license_key=settings.maxmind_license_key, + editions=["City", "ASN"], + max_age_days=8, + timeout=60.0 + ) + + +def get_client_ip(request) -> str: + """ + Get the real client IP address + Supports proxies, load balancers, and Cloudflare headers + """ + headers = request.headers + + # 1. Cloudflare specific headers + cf_ip = headers.get("CF-Connecting-IP") + if cf_ip: + return cf_ip.strip() + + true_client_ip = headers.get("True-Client-IP") + if true_client_ip: + return true_client_ip.strip() + + # 2. Standard proxy headers + forwarded_for = headers.get("X-Forwarded-For") + if forwarded_for: + # X-Forwarded-For may contain multiple IPs, take the first + return forwarded_for.split(",")[0].strip() + + real_ip = headers.get("X-Real-IP") + if real_ip: + return real_ip.strip() + + # 3. Fallback to client host + return request.client.host if request.client else "127.0.0.1" diff --git a/app/helpers/geoip_helper.py b/app/helpers/geoip_helper.py new file mode 100644 index 0000000..e6e8785 --- /dev/null +++ b/app/helpers/geoip_helper.py @@ -0,0 +1,153 @@ +# -*- coding: utf-8 -*- +""" +GeoLite2 Helper Class +""" +import os +import tarfile +import shutil +import tempfile +import time +import httpx +import maxminddb +from pathlib import Path + +class GeoIPHelper: + BASE_URL = "https://download.maxmind.com/app/geoip_download" + EDITIONS = {"City": "GeoLite2-City", "Country": "GeoLite2-Country", "ASN": "GeoLite2-ASN"} + + def __init__(self, dest_dir="./geoip", license_key=None, editions=None, max_age_days=8, timeout=60.0): + self.dest_dir = dest_dir + self.license_key = license_key or os.getenv("MAXMIND_LICENSE_KEY") + self.editions = editions or ["City", "ASN"] + self.max_age_days = max_age_days + self.timeout = timeout + self._readers = {} + + @staticmethod + def _safe_extract(tar: tarfile.TarFile, path: str): + base = Path(path).resolve() + for m in tar.getmembers(): + target = (base / m.name).resolve() + if not str(target).startswith(str(base)): + raise RuntimeError("Unsafe path in tar file") + tar.extractall(path=path, filter='data') + + def _download_and_extract(self, edition_id: str) -> str: + """ + 下载并解压 mmdb 文件到 dest_dir,仅保留 .mmdb + - 跟随 302 重定向 + - 流式下载到临时文件 + - 临时目录退出后自动清理 + """ + if not self.license_key: + raise ValueError("缺少 MaxMind License Key,请传入或设置环境变量 MAXMIND_LICENSE_KEY") + + url = f"{self.BASE_URL}?edition_id={edition_id}&license_key={self.license_key}&suffix=tar.gz" + + with httpx.Client(follow_redirects=True, timeout=self.timeout) as client: + with client.stream("GET", url) as resp: + resp.raise_for_status() + with tempfile.TemporaryDirectory() as tmpd: + tgz_path = os.path.join(tmpd, "db.tgz") + # 流式写入 + with open(tgz_path, "wb") as f: + for chunk in resp.iter_bytes(): + if chunk: + f.write(chunk) + + # 解压并只移动 .mmdb + with tarfile.open(tgz_path, "r:gz") as tar: + # 先安全检查与解压 + self._safe_extract(tar, tmpd) + + # 递归找 .mmdb + mmdb_path = None + for root, _, files in os.walk(tmpd): + for fn in files: + if fn.endswith(".mmdb"): + mmdb_path = os.path.join(root, fn) + break + if mmdb_path: + break + + if not mmdb_path: + raise RuntimeError("未在压缩包中找到 .mmdb 文件") + + os.makedirs(self.dest_dir, exist_ok=True) + dst = os.path.join(self.dest_dir, os.path.basename(mmdb_path)) + shutil.move(mmdb_path, dst) + return dst + + def _latest_file(self, edition_id: str): + if not os.path.isdir(self.dest_dir): + return None + files = [os.path.join(self.dest_dir, f) for f in os.listdir(self.dest_dir) + if f.startswith(edition_id) and f.endswith(".mmdb")] + return 
max(files, key=os.path.getmtime) if files else None
+
+    def update(self, force=False):
+        for ed in self.editions:
+            eid = self.EDITIONS[ed]
+            path = self._latest_file(eid)
+            need = force or not path
+            if path:
+                age_days = (time.time() - os.path.getmtime(path)) / 86400
+                if age_days >= self.max_age_days:
+                    need = True
+            if need:
+                path = self._download_and_extract(eid)
+
+
+            old = self._readers.get(ed)
+            if old:
+                try:
+                    old.close()
+                except Exception:
+                    pass
+            if path is not None:
+                self._readers[ed] = maxminddb.open_database(path)
+
+    def lookup(self, ip: str):
+        res = {"ip": ip}
+        # City
+        city_r = self._readers.get("City")
+        if city_r:
+            data = city_r.get(ip)
+            if data:
+                country = data.get("country") or {}
+                res["country_iso"] = country.get("iso_code") or ""
+                res["country_name"] = (country.get("names") or {}).get("en", "")
+                city = data.get("city") or {}
+                res["city_name"] = (city.get("names") or {}).get("en", "")
+                loc = data.get("location") or {}
+                res["latitude"] = str(loc.get("latitude") or "")
+                res["longitude"] = str(loc.get("longitude") or "")
+                res["time_zone"] = str(loc.get("time_zone") or "")
+                postal = data.get("postal") or {}
+                if "code" in postal:
+                    res["postal_code"] = postal["code"]
+        # ASN
+        asn_r = self._readers.get("ASN")
+        if asn_r:
+            data = asn_r.get(ip)
+            if data:
+                res["asn"] = data.get("autonomous_system_number")
+                res["organization"] = data.get("autonomous_system_organization")
+        return res
+
+    def close(self):
+        for r in self._readers.values():
+            try:
+                r.close()
+            except Exception:
+                pass
+        self._readers = {}
+
+
+
+if __name__ == "__main__":
+    # 示例用法
+    geo = GeoIPHelper(dest_dir="./geoip", license_key="")
+    geo.update()
+    print(geo.lookup("8.8.8.8"))
+    geo.close()
\ No newline at end of file
diff --git a/app/router/auth.py b/app/router/auth.py
index bc7b02e..4c3505d 100644
--- a/app/router/auth.py
+++ b/app/router/auth.py
@@ -20,6 +20,8 @@ from app.database import DailyChallengeStats, OAuthClient, User
 from app.database.statistics import UserStatistics
 from app.dependencies import get_db
 from app.dependencies.database import get_redis
+from app.dependencies.geoip import get_geoip_helper, get_client_ip
+from app.helpers.geoip_helper import GeoIPHelper
 from app.log import logger
 from app.models.oauth import (
     OAuthErrorResponse,
@@ -29,7 +31,7 @@ from app.models.oauth import (
 )
 from app.models.score import GameMode
 
-from fastapi import APIRouter, Depends, Form
+from fastapi import APIRouter, Depends, Form, Request
 from fastapi.responses import JSONResponse
 from redis.asyncio import Redis
 from sqlalchemy import text
@@ -79,18 +81,20 @@ def validate_password(password: str) -> list[str]:
 
 
 router = APIRouter(tags=["osu! 
OAuth 认证"]) - @router.post( "/users", name="注册用户", description="用户注册接口", ) async def register_user( + request: Request, user_username: str = Form(..., alias="user[username]", description="用户名"), user_email: str = Form(..., alias="user[user_email]", description="电子邮箱"), user_password: str = Form(..., alias="user[password]", description="密码"), db: AsyncSession = Depends(get_db), + geoip: GeoIPHelper = Depends(get_geoip_helper) ): + username_errors = validate_username(user_username) email_errors = validate_email(user_email) password_errors = validate_password(user_password) @@ -119,6 +123,21 @@ async def register_user( ) try: + # 获取客户端 IP 并查询地理位置 + client_ip = get_client_ip(request) + country_code = "CN" # 默认国家代码 + + try: + # 查询 IP 地理位置 + geo_info = geoip.lookup(client_ip) + if geo_info and geo_info.get("country_iso"): + country_code = geo_info["country_iso"] + logger.info(f"User {user_username} registering from {client_ip}, country: {country_code}") + else: + logger.warning(f"Could not determine country for IP {client_ip}") + except Exception as e: + logger.warning(f"GeoIP lookup failed for {client_ip}: {e}") + # 创建新用户 # 确保 AUTO_INCREMENT 值从3开始(ID=1是BanchoBot,ID=2预留给ppy) result = await db.execute( # pyright: ignore[reportDeprecated] @@ -137,7 +156,7 @@ async def register_user( email=user_email, pw_bcrypt=get_password_hash(user_password), priv=1, # 普通用户权限 - country_code="CN", # 默认国家 + country_code=country_code, # 根据 IP 地理位置设置国家 join_date=datetime.now(UTC), last_visit=datetime.now(UTC), is_supporter=settings.enable_supporter_for_all_users, diff --git a/app/service/geoip_scheduler.py b/app/service/geoip_scheduler.py new file mode 100644 index 0000000..c5b50b0 --- /dev/null +++ b/app/service/geoip_scheduler.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +""" +[GeoIP] Scheduled Update Service +Periodically update the MaxMind GeoIP database +""" +import asyncio +from datetime import datetime +from app.config import settings +from app.dependencies.geoip import get_geoip_helper +from app.dependencies.scheduler import get_scheduler +from app.log import logger + + +async def update_geoip_database(): + """ + Asynchronous task to update the GeoIP database + """ + try: + logger.info("[GeoIP] Starting scheduled GeoIP database update...") + geoip = get_geoip_helper() + + # Run the synchronous update method in a background thread + loop = asyncio.get_event_loop() + await loop.run_in_executor(None, lambda: geoip.update(force=False)) + + logger.info("[GeoIP] Scheduled GeoIP database update completed successfully") + except Exception as e: + logger.error(f"[GeoIP] Scheduled GeoIP database update failed: {e}") + + +def schedule_geoip_updates(): + """ + Schedule the GeoIP database update task + """ + scheduler = get_scheduler() + + # Use settings to configure the update time: update once a week + scheduler.add_job( + update_geoip_database, + 'cron', + day_of_week=settings.geoip_update_day, + hour=settings.geoip_update_hour, + minute=0, + id='geoip_weekly_update', + name='Weekly GeoIP database update', + replace_existing=True + ) + + logger.info( + f"[GeoIP] Scheduled update task registered: " + f"every week on day {settings.geoip_update_day} at {settings.geoip_update_hour}:00" + ) diff --git a/app/service/init_geoip.py b/app/service/init_geoip.py new file mode 100644 index 0000000..f6c3512 --- /dev/null +++ b/app/service/init_geoip.py @@ -0,0 +1,25 @@ +# -*- coding: utf-8 -*- +""" +[GeoIP] Initialization Service +Initialize the GeoIP database when the application starts +""" +import asyncio +from 
app.dependencies.geoip import get_geoip_helper +from app.log import logger + +async def init_geoip(): + """ + Asynchronously initialize the GeoIP database + """ + try: + geoip = get_geoip_helper() + logger.info("[GeoIP] Initializing GeoIP database...") + + # Run the synchronous update method in a background thread + loop = asyncio.get_event_loop() + await loop.run_in_executor(None, geoip.update) + + logger.info("[GeoIP] GeoIP database initialization completed") + except Exception as e: + logger.error(f"[GeoIP] GeoIP database initialization failed: {e}") + # Do not raise an exception to avoid blocking application startup diff --git a/main.py b/main.py index 85d4111..ac2509c 100644 --- a/main.py +++ b/main.py @@ -24,6 +24,8 @@ from app.router.redirect import redirect_router from app.service.calculate_all_user_rank import calculate_user_rank from app.service.create_banchobot import create_banchobot from app.service.daily_challenge import daily_challenge_job +from app.service.geoip_scheduler import schedule_geoip_updates +from app.service.init_geoip import init_geoip from app.service.osu_rx_statistics import create_rx_statistics from app.service.recalculate import recalculate @@ -38,11 +40,13 @@ import sentry_sdk async def lifespan(app: FastAPI): # on startup await get_fetcher() # 初始化 fetcher + await init_geoip() # 初始化 GeoIP 数据库 if os.environ.get("RECALCULATE", "false").lower() == "true": await recalculate() await create_rx_statistics() await calculate_user_rank(True) init_scheduler() + schedule_geoip_updates() # 调度 GeoIP 定时更新任务 await daily_challenge_job() await create_banchobot() # on shutdown diff --git a/pyproject.toml b/pyproject.toml index 6731d3b..5224ac7 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -15,6 +15,7 @@ dependencies = [ "fastapi>=0.104.1", "httpx>=0.28.1", "loguru>=0.7.3", + "maxminddb>=2.8.2", "msgpack-lazer-api", "osupyparser>=1.0.7", "passlib[bcrypt]>=1.7.4", @@ -25,12 +26,13 @@ dependencies = [ "python-jose[cryptography]>=3.3.0", "python-multipart>=0.0.6", "redis>=5.0.1", + "rosu-pp-py>=3.1.0", "sentry-sdk[fastapi,httpx,loguru,sqlalchemy]>=2.34.1", "sqlalchemy>=2.0.23", "sqlmodel>=0.0.24", "uvicorn[standard]>=0.24.0", ] -authors = ["GooGuTeam"] +authors = [{ name = "GooGuTeam" }] [tool.ruff] line-length = 88 diff --git a/uv.lock b/uv.lock index dd32890..f21ae87 100644 --- a/uv.lock +++ b/uv.lock @@ -4,8 +4,8 @@ requires-python = ">=3.12" [manifest] members = [ - "msgpack-lazer-api", "g0v0-server", + "msgpack-lazer-api", ] [[package]] @@ -528,6 +528,85 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/ee/45/b82e3c16be2182bff01179db177fe144d58b5dc787a7d4492c6ed8b9317f/frozenlist-1.7.0-py3-none-any.whl", hash = "sha256:9a5af342e34f7e97caf8c995864c7a396418ae2859cc6fdf1b1073020d516a7e", size = 13106, upload-time = "2025-06-09T23:02:34.204Z" }, ] +[[package]] +name = "g0v0-server" +version = "0.1.0" +source = { virtual = "." 
} +dependencies = [ + { name = "aioboto3" }, + { name = "aiofiles" }, + { name = "aiomysql" }, + { name = "alembic" }, + { name = "apscheduler" }, + { name = "bcrypt" }, + { name = "cryptography" }, + { name = "fastapi" }, + { name = "httpx" }, + { name = "loguru" }, + { name = "maxminddb" }, + { name = "msgpack-lazer-api" }, + { name = "osupyparser" }, + { name = "passlib", extra = ["bcrypt"] }, + { name = "pillow" }, + { name = "pydantic", extra = ["email"] }, + { name = "pydantic-settings" }, + { name = "python-dotenv" }, + { name = "python-jose", extra = ["cryptography"] }, + { name = "python-multipart" }, + { name = "redis" }, + { name = "rosu-pp-py" }, + { name = "sentry-sdk", extra = ["fastapi", "httpx", "loguru", "sqlalchemy"] }, + { name = "sqlalchemy" }, + { name = "sqlmodel" }, + { name = "uvicorn", extra = ["standard"] }, +] + +[package.dev-dependencies] +dev = [ + { name = "maturin" }, + { name = "pre-commit" }, + { name = "ruff" }, + { name = "types-aioboto3", extra = ["aioboto3", "essential"] }, +] + +[package.metadata] +requires-dist = [ + { name = "aioboto3", specifier = ">=15.0.0" }, + { name = "aiofiles", specifier = ">=24.1.0" }, + { name = "aiomysql", specifier = ">=0.2.0" }, + { name = "alembic", specifier = ">=1.12.1" }, + { name = "apscheduler", specifier = ">=3.11.0" }, + { name = "bcrypt", specifier = ">=4.1.2" }, + { name = "cryptography", specifier = ">=41.0.7" }, + { name = "fastapi", specifier = ">=0.104.1" }, + { name = "httpx", specifier = ">=0.28.1" }, + { name = "loguru", specifier = ">=0.7.3" }, + { name = "maxminddb", specifier = ">=2.8.2" }, + { name = "msgpack-lazer-api", editable = "packages/msgpack_lazer_api" }, + { name = "osupyparser", git = "https://github.com/MingxuanGame/osupyparser.git" }, + { name = "passlib", extras = ["bcrypt"], specifier = ">=1.7.4" }, + { name = "pillow", specifier = ">=11.3.0" }, + { name = "pydantic", extras = ["email"], specifier = ">=2.5.0" }, + { name = "pydantic-settings", specifier = ">=2.10.1" }, + { name = "python-dotenv", specifier = ">=1.0.0" }, + { name = "python-jose", extras = ["cryptography"], specifier = ">=3.3.0" }, + { name = "python-multipart", specifier = ">=0.0.6" }, + { name = "redis", specifier = ">=5.0.1" }, + { name = "rosu-pp-py", specifier = ">=3.1.0" }, + { name = "sentry-sdk", extras = ["fastapi", "httpx", "loguru", "sqlalchemy"], specifier = ">=2.34.1" }, + { name = "sqlalchemy", specifier = ">=2.0.23" }, + { name = "sqlmodel", specifier = ">=0.0.24" }, + { name = "uvicorn", extras = ["standard"], specifier = ">=0.24.0" }, +] + +[package.metadata.requires-dev] +dev = [ + { name = "maturin", specifier = ">=1.9.2" }, + { name = "pre-commit", specifier = ">=4.2.0" }, + { name = "ruff", specifier = ">=0.12.4" }, + { name = "types-aioboto3", extras = ["aioboto3", "essential"], specifier = ">=15.0.0" }, +] + [[package]] name = "greenlet" version = "3.2.4" @@ -731,6 +810,60 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/e9/ea/cd66332cfb2c4f5a264c15da7f98333c63daa00180a03d0b188899b1be76/maturin-1.9.3-py3-none-win_arm64.whl", hash = "sha256:fb2ee86861e72495eb9afd83f3672de0e4061740247f14492703c189829e7928", size = 6926056, upload-time = "2025-08-04T11:50:59.654Z" }, ] +[[package]] +name = "maxminddb" +version = "2.8.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/94/9c/5af549744e7a1e986bddd119c0bbca7f7fa7fb72590b554cb860a0c3acb1/maxminddb-2.8.2.tar.gz", hash = 
"sha256:26a8e536228d8cc28c5b8f574a571a2704befce3b368ceca593a76d56b6590f9", size = 194388, upload-time = "2025-07-25T20:32:05.037Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/58/45/ff56248fbaaca9383d18d73aee60a544f0282d71e54af0bf0dea4128fda5/maxminddb-2.8.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:bcfb9bc5e31875dd6c1e2de9d748ce403ca5d5d4bc6167973bb0b1bd294bf8d7", size = 52615, upload-time = "2025-07-25T20:30:23.369Z" }, + { url = "https://files.pythonhosted.org/packages/79/44/2703121c2dbba7d03c37294dd407cca2e31dc4542543b93808dd26fd144b/maxminddb-2.8.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:e12bec7f672af46e2177e7c1cd5d330eb969f0dc42f672e250b3d5d72e61778d", size = 35394, upload-time = "2025-07-25T20:30:24.55Z" }, + { url = "https://files.pythonhosted.org/packages/c2/25/99e999e630b1a44936c5261827cc94def5eec82ae57a667a76d641b93925/maxminddb-2.8.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b23103a754ff1e795d6e107ae23bf9b3360bce9e9bff08c58e388dc2f3fd85ad", size = 35177, upload-time = "2025-07-25T20:30:26.105Z" }, + { url = "https://files.pythonhosted.org/packages/41/21/05c8f50c1b4138516f2bde2810d32c97b84c6d0aefe7e1a1b41635241041/maxminddb-2.8.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1c4a10cb799ed3449d063883df962b76b55fdfe0756dfa82eed9765d95e8fd6e", size = 96062, upload-time = "2025-07-25T20:30:27.33Z" }, + { url = "https://files.pythonhosted.org/packages/66/7a/ba7995d1f6b405c057e6f4bd5751fe667535b0ba84f65ee6eb1493bccb80/maxminddb-2.8.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6315977c0512cb7d982bc2eb869355a168f12ef6d2bd5a4f2c93148bc3c03fdc", size = 94208, upload-time = "2025-07-25T20:30:28.932Z" }, + { url = "https://files.pythonhosted.org/packages/99/6f/11cc4b0f1d7f98965ef3304bd9bf2c587f5e84b99aeac27891f5661565cb/maxminddb-2.8.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9b24594f04d03855687b8166ee2c7b788f1e1836b4c5fef2e55fc19327f507ac", size = 93448, upload-time = "2025-07-25T20:30:30.438Z" }, + { url = "https://files.pythonhosted.org/packages/ae/d5/31664be079b71b30895875d6781ae08f871d67de04e518c64422271a8b25/maxminddb-2.8.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b07b72d9297179c74344aaecad48c88dfdea4422e16721b5955015800d865da2", size = 92240, upload-time = "2025-07-25T20:30:31.658Z" }, + { url = "https://files.pythonhosted.org/packages/a4/19/a5931bb077ccb7e719b8a602fb3ffcd577cdd4954cae3d2b9201272cd462/maxminddb-2.8.2-cp312-cp312-win32.whl", hash = "sha256:51d9717354ee7aa02d52c15115fec2d29bb33f31d6c9f5a8a5aaa2c25dc66e63", size = 34751, upload-time = "2025-07-25T20:30:32.883Z" }, + { url = "https://files.pythonhosted.org/packages/63/50/25720ed19f2d62440b94a1333656cccf6c3c1ce2527ed9abf7b35e2557e1/maxminddb-2.8.2-cp312-cp312-win_amd64.whl", hash = "sha256:18132ccd77ad68863b9022451655cbe1e8fc3c973bafcad66a252eff2732a5c1", size = 36782, upload-time = "2025-07-25T20:30:34.378Z" }, + { url = "https://files.pythonhosted.org/packages/9f/30/1c3121365114678d8df4c02fd416d7520c86b1e37708cc7134ccc3c06e78/maxminddb-2.8.2-cp312-cp312-win_arm64.whl", hash = "sha256:59934eb00274f8b7860927f470a2b9b049842f91e2524a24ade99e16755320f2", size = 33040, upload-time = "2025-07-25T20:30:35.474Z" }, + { url = "https://files.pythonhosted.org/packages/bb/33/06d8d8eb2e422bbff372628c23ce09a2d51f50b9283449c5d8cef0225fe3/maxminddb-2.8.2-cp313-cp313-android_21_arm64_v8a.whl", hash = 
"sha256:b32a8b61e0dae09c80f41dcd6dc4a442a3cc94b7874a18931daecfea274f640c", size = 36642, upload-time = "2025-07-25T20:30:36.627Z" }, + { url = "https://files.pythonhosted.org/packages/41/c1/dca3608b85d3889760bdf98e931ac66e236f9b8da640f47461c8549fe931/maxminddb-2.8.2-cp313-cp313-android_21_x86_64.whl", hash = "sha256:5f12674cee687cd41c9be1c9ab806bd6a777864e762d5f34ec57c0afa9a21411", size = 37052, upload-time = "2025-07-25T20:30:37.912Z" }, + { url = "https://files.pythonhosted.org/packages/c1/e0/3af26974a2c267939c394d6481723021bdb67af570f948cf510f80e6aeb1/maxminddb-2.8.2-cp313-cp313-ios_13_0_arm64_iphoneos.whl", hash = "sha256:995a506a02f70a33ba5ee9f73ce737ef8cdb219bfca3177db79622ebc5624057", size = 34381, upload-time = "2025-07-25T20:30:39.363Z" }, + { url = "https://files.pythonhosted.org/packages/28/ce/26e06d888f057f98b4bc269ee0f8d0ede3dad9684d38e4033acc444b08e5/maxminddb-2.8.2-cp313-cp313-ios_13_0_arm64_iphonesimulator.whl", hash = "sha256:5ef9b7f106a1e9ee08f47cd98f7ae80fa40fc0fd40d97cf0d011266738847b52", size = 34918, upload-time = "2025-07-25T20:30:40.512Z" }, + { url = "https://files.pythonhosted.org/packages/0c/a2/0e23f5c33461d1d43d201f2c741c6318d658907833d22cec4ee475d6fab8/maxminddb-2.8.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:adeceeb591755b36a0dc544b92f6d80fc5c112519f5ed8211c34d2ad796bfac0", size = 52619, upload-time = "2025-07-25T20:30:41.645Z" }, + { url = "https://files.pythonhosted.org/packages/d9/ec/3a69a57a9ba4c7d62105fe235642f744bf4ef7cd057f8019a14b1b8eea6d/maxminddb-2.8.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5c8df08cbdafaa04f7d36a0506e342e4cd679587b56b0fad065b4777e94c8065", size = 35399, upload-time = "2025-07-25T20:30:42.804Z" }, + { url = "https://files.pythonhosted.org/packages/30/b3/b904e778e347ed40e5c82717609e1ecdcdff6c7d7ea2f844a6a20578daef/maxminddb-2.8.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3e982112e239925c2d8739f834c71539947e54747e56e66c6d960ac356432f32", size = 35165, upload-time = "2025-07-25T20:30:45.534Z" }, + { url = "https://files.pythonhosted.org/packages/34/da/685eeae2ad155d970efabad5ca86ed745665a2ff7576d8fa3d9b9bdb7f8a/maxminddb-2.8.2-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5ef30c32af0107e6b0b9d53f9ae949cf74ddb6882025054bd7500a7b1eb02ec0", size = 96127, upload-time = "2025-07-25T20:30:46.716Z" }, + { url = "https://files.pythonhosted.org/packages/fd/24/a7f54b2b6d808cc4dd485adc004fcd66e103d0aacbf448afd419c0c18380/maxminddb-2.8.2-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:685df893f44606dcb1353b31762b18a2a9537015f1b9e7c0bb3ae74c9fbced32", size = 94250, upload-time = "2025-07-25T20:30:48.45Z" }, + { url = "https://files.pythonhosted.org/packages/6e/cb/bbc5c11201497d7dd42d3240141a8ec484ff704afdf6dff7a7a2de5a6291/maxminddb-2.8.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:e3dc27c443cf27b35d4d77ff90fbc6caf1c4e28cffd967775b11cf993af5b9d1", size = 93399, upload-time = "2025-07-25T20:30:50.052Z" }, + { url = "https://files.pythonhosted.org/packages/c8/e6/521c750ea7480fbe362b7bb2821937544313fd3b697f30f4c1975b85c816/maxminddb-2.8.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:742e857b4411ae3d59c555c2aa96856f72437374cf668c3bed18647092584af6", size = 92250, upload-time = "2025-07-25T20:30:51.259Z" }, + { url = "https://files.pythonhosted.org/packages/c2/4b/9a522ba96a48882c7a954636411f05994573af2eed4b93b511ca6ea3d023/maxminddb-2.8.2-cp313-cp313-win32.whl", hash = 
"sha256:1fba9c16f5e492eee16362e8204aaec30241167a3466874ca9b0521dec32d63e", size = 34759, upload-time = "2025-07-25T20:30:52.936Z" }, + { url = "https://files.pythonhosted.org/packages/e8/4a/e0d7451b56821fe0ec794a917cceb67efac8510013783cc5713b733d5ff4/maxminddb-2.8.2-cp313-cp313-win_amd64.whl", hash = "sha256:cfbfee615d2566124cb6232401d89f15609f5297eb4f022f1f6a14205c091df6", size = 36771, upload-time = "2025-07-25T20:30:54.076Z" }, + { url = "https://files.pythonhosted.org/packages/71/27/abffb686514905994ef26191971ca30765c45e391d82ee2ea6b2ecfe1bad/maxminddb-2.8.2-cp313-cp313-win_arm64.whl", hash = "sha256:2ade954d94087039fc45de99eeae0e2f0480d69a767abd417bd0742bf5d177ab", size = 33041, upload-time = "2025-07-25T20:30:55.567Z" }, + { url = "https://files.pythonhosted.org/packages/03/d2/844530632ef917f622742d6d5beae5c3ebed7d424af02bf428b639e42a41/maxminddb-2.8.2-cp314-cp314-ios_13_0_arm64_iphoneos.whl", hash = "sha256:7d5db6d4f8caaf7b753a0f6782765ea5352409ef6d430196b0dc7c61c0a8c72b", size = 34384, upload-time = "2025-07-25T20:30:57.046Z" }, + { url = "https://files.pythonhosted.org/packages/ee/6c/ff9555963983d99a201a5068ab037c92583cd8422046d7064e2cab92c09f/maxminddb-2.8.2-cp314-cp314-ios_13_0_arm64_iphonesimulator.whl", hash = "sha256:bda6015f617b4ec6f1a49ae74b1a36c10d997602d3e9141514ef11983e6ddf8d", size = 34929, upload-time = "2025-07-25T20:30:58.194Z" }, + { url = "https://files.pythonhosted.org/packages/aa/c2/8d093e973edb1ca0ad54a80f124b4e8d1db5508a00c0f98765d0df6bd4d5/maxminddb-2.8.2-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:4e32f5608af05bc0b6cee91edd0698f6a310ae9dd0f3cebfb524a6b444c003a2", size = 52616, upload-time = "2025-07-25T20:30:59.294Z" }, + { url = "https://files.pythonhosted.org/packages/5d/85/8442162353c28ff0679f348d2099f24d9be9b84f9ffa1ed21e8ecafe64dc/maxminddb-2.8.2-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:5abf18c51f3a3e5590ea77d43bff159a9f88cec1f95a7e3fc2a39a21fc8f9e7c", size = 35405, upload-time = "2025-07-25T20:31:00.821Z" }, + { url = "https://files.pythonhosted.org/packages/14/df/f37d5b2605ae0f1d3f87d45ddbab032f36b2cae29f80f02c390001b35677/maxminddb-2.8.2-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:3c8d57063ff2c6d0690e5d907a10b5b6ba64e0ab5e6d8661b6075fbda854e97d", size = 35174, upload-time = "2025-07-25T20:31:02.112Z" }, + { url = "https://files.pythonhosted.org/packages/32/12/5d562de6243b8631f9480b7deac92cb62ec5ae8aecd4e3ccdaecfc177c24/maxminddb-2.8.2-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:73d603c7202e1338bdbb3ead8a3db4f74825e419ecc8733ef8a76c14366800d2", size = 96060, upload-time = "2025-07-25T20:31:03.318Z" }, + { url = "https://files.pythonhosted.org/packages/3a/95/04c8c2526e4c0c0d2894052c7d07f39c9b8d1185bd2da5752de2effc287a/maxminddb-2.8.2-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:acca37ed0372efa01251da32db1a5d81189369449bc4b943d3087ebc9e30e814", size = 94013, upload-time = "2025-07-25T20:31:04.592Z" }, + { url = "https://files.pythonhosted.org/packages/c7/98/7870de3e5cf362c567c0a9cf7a8834d3699fe0a52e601fc352c902d3ebc7/maxminddb-2.8.2-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:1e1e3ef04a686cf7d893a8274ddc0081bd40121ac4923b67e8caa902094ac111", size = 93350, upload-time = "2025-07-25T20:31:05.815Z" }, + { url = "https://files.pythonhosted.org/packages/e3/ef/7eb25529011cf0e18fb529792ad5225b402a3e80728cfbd7604e53c5ada3/maxminddb-2.8.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = 
"sha256:c6657615038d8fe106acccd2bf4fe073d07f72886ee893725c74649687635a1a", size = 92036, upload-time = "2025-07-25T20:31:07.03Z" }, + { url = "https://files.pythonhosted.org/packages/0c/9d/12926eac198a920a2c4f9ce6e57de33d47a6c40ccb1637362abfd268f017/maxminddb-2.8.2-cp314-cp314-win32.whl", hash = "sha256:af058500ab3448b709c43f1aefd3d9f7c5f1773af07611d589502ea78bf2b9dc", size = 35403, upload-time = "2025-07-25T20:31:08.221Z" }, + { url = "https://files.pythonhosted.org/packages/c6/eb/48636b611f604bb072b26be16e6990694bbfdd57553622a784b17c1999c7/maxminddb-2.8.2-cp314-cp314-win_amd64.whl", hash = "sha256:b5982d1b53b50b96a9afcf4f7f49db0a842501f9cf58c4c16c0d62c1b0d22840", size = 37559, upload-time = "2025-07-25T20:31:09.448Z" }, + { url = "https://files.pythonhosted.org/packages/05/4a/27e53d1b9b7b168f259bbfccec1d1383d51c07e112d7bd24e543042e07a1/maxminddb-2.8.2-cp314-cp314-win_arm64.whl", hash = "sha256:48c9f7e182c6e970a412c02e7438c2a66197c0664d0c7da81b951bff86519dd5", size = 33614, upload-time = "2025-07-25T20:31:10.555Z" }, + { url = "https://files.pythonhosted.org/packages/eb/43/e49927eb381fb44c9a06a5ac06da039951fde90bf47f100b495f082d6b37/maxminddb-2.8.2-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:b40ed2ec586a5a479d08bd39838fbfbdff84d7deb57089317f312609f1357384", size = 53708, upload-time = "2025-07-25T20:31:11.642Z" }, + { url = "https://files.pythonhosted.org/packages/8b/d0/ff081ac508358b3a9ca1f0b39d5bf74904aa644b45d2d6d8b9112ad9566e/maxminddb-2.8.2-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:1ba4036f823a8e6418af0d69734fb176e3d1edd0432e218f3be8362564b53ea5", size = 35925, upload-time = "2025-07-25T20:31:12.804Z" }, + { url = "https://files.pythonhosted.org/packages/bc/30/f94d3acca0314f038a4f1cb83ccbdf0a56b9f13454bab9667af0506ecca0/maxminddb-2.8.2-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:96531e18bddff9639061ee543417f941a2fd41efc7b1699e1e18aba4157b0b03", size = 35757, upload-time = "2025-07-25T20:31:14.322Z" }, + { url = "https://files.pythonhosted.org/packages/b0/21/5710a5aa7f83453fcf36cee11ed113c110a53cdc5a4ecf82904be797101b/maxminddb-2.8.2-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:bb77ad5c585d6255001d701eafc4758e2d28953ba47510d9f54cc2a9e469c6b6", size = 104991, upload-time = "2025-07-25T20:31:15.542Z" }, + { url = "https://files.pythonhosted.org/packages/47/0c/8cf559f850c3e43e6f490fad458293fdb0b70debbe3fcbf7d7713558044f/maxminddb-2.8.2-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3bfd950af416ef4133bc04b059f29ac4d4b356927fa4a500048220d65ec4c6ac", size = 101935, upload-time = "2025-07-25T20:31:16.83Z" }, + { url = "https://files.pythonhosted.org/packages/02/47/104ef451772d1cd852dea2334c2dfb02d6de7caf8d31e1358f10b9af6769/maxminddb-2.8.2-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:3bf73612f8fbfa9181ba62fa88fb3d732bdc775017bdb3725e24cdd1a0da92d4", size = 101653, upload-time = "2025-07-25T20:31:18.104Z" }, + { url = "https://files.pythonhosted.org/packages/60/03/139791f82e3857d4d0638494647f74d997a2abded7048ab4ed4622a089ad/maxminddb-2.8.2-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:74361fbddb0566970af38cff0a6256ec3f445cb5031da486d0cee6f19ccb9e2e", size = 99517, upload-time = "2025-07-25T20:31:19.764Z" }, + { url = "https://files.pythonhosted.org/packages/4c/45/c625fc2b84b8dcf2181eb411f130729446164215409c8e0c8fd01a53f388/maxminddb-2.8.2-cp314-cp314t-win32.whl", hash = 
"sha256:6bfb41c3a560a60fc20d0d87cb400003974fbb833b44571250476c2d9cb4d407", size = 36349, upload-time = "2025-07-25T20:31:21.004Z" }, + { url = "https://files.pythonhosted.org/packages/27/8d/46c202be273fd8ec985686e1fdd84ad55c7234dc66d82d6d59e5caf438e4/maxminddb-2.8.2-cp314-cp314t-win_amd64.whl", hash = "sha256:ec6bba1b1f0fd0846aac5b0af1f84804c67702e873aa9d79c9965794a635ada8", size = 38595, upload-time = "2025-07-25T20:31:22.185Z" }, + { url = "https://files.pythonhosted.org/packages/62/33/09601f476fd9d494e967f15c1e05aa1e35bdf5ee54555596e05e5c9ec8c9/maxminddb-2.8.2-cp314-cp314t-win_arm64.whl", hash = "sha256:929a00528db82ffa5aa928a9cd1a972e8f93c36243609c25574dfd920c21533b", size = 33990, upload-time = "2025-07-25T20:31:23.367Z" }, +] + [[package]] name = "msgpack-lazer-api" source = { editable = "packages/msgpack_lazer_api" } @@ -807,81 +940,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/d2/1d/1b658dbd2b9fa9c4c9f32accbfc0205d532c8c6194dc0f2a4c0428e7128a/nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9", size = 22314, upload-time = "2024-06-04T18:44:08.352Z" }, ] -[[package]] -name = "g0v0-server" -version = "0.1.0" -source = { virtual = "." } -dependencies = [ - { name = "aioboto3" }, - { name = "aiofiles" }, - { name = "aiomysql" }, - { name = "alembic" }, - { name = "apscheduler" }, - { name = "bcrypt" }, - { name = "cryptography" }, - { name = "fastapi" }, - { name = "httpx" }, - { name = "loguru" }, - { name = "msgpack-lazer-api" }, - { name = "osupyparser" }, - { name = "passlib", extra = ["bcrypt"] }, - { name = "pillow" }, - { name = "pydantic", extra = ["email"] }, - { name = "pydantic-settings" }, - { name = "python-dotenv" }, - { name = "python-jose", extra = ["cryptography"] }, - { name = "python-multipart" }, - { name = "redis" }, - { name = "sentry-sdk", extra = ["fastapi", "httpx", "loguru", "sqlalchemy"] }, - { name = "sqlalchemy" }, - { name = "sqlmodel" }, - { name = "uvicorn", extra = ["standard"] }, -] - -[package.dev-dependencies] -dev = [ - { name = "maturin" }, - { name = "pre-commit" }, - { name = "ruff" }, - { name = "types-aioboto3", extra = ["aioboto3", "essential"] }, -] - -[package.metadata] -requires-dist = [ - { name = "aioboto3", specifier = ">=15.0.0" }, - { name = "aiofiles", specifier = ">=24.1.0" }, - { name = "aiomysql", specifier = ">=0.2.0" }, - { name = "alembic", specifier = ">=1.12.1" }, - { name = "apscheduler", specifier = ">=3.11.0" }, - { name = "bcrypt", specifier = ">=4.1.2" }, - { name = "cryptography", specifier = ">=41.0.7" }, - { name = "fastapi", specifier = ">=0.104.1" }, - { name = "httpx", specifier = ">=0.28.1" }, - { name = "loguru", specifier = ">=0.7.3" }, - { name = "msgpack-lazer-api", editable = "packages/msgpack_lazer_api" }, - { name = "osupyparser", git = "https://github.com/MingxuanGame/osupyparser.git" }, - { name = "passlib", extras = ["bcrypt"], specifier = ">=1.7.4" }, - { name = "pillow", specifier = ">=11.3.0" }, - { name = "pydantic", extras = ["email"], specifier = ">=2.5.0" }, - { name = "pydantic-settings", specifier = ">=2.10.1" }, - { name = "python-dotenv", specifier = ">=1.0.0" }, - { name = "python-jose", extras = ["cryptography"], specifier = ">=3.3.0" }, - { name = "python-multipart", specifier = ">=0.0.6" }, - { name = "redis", specifier = ">=5.0.1" }, - { name = "sentry-sdk", extras = ["fastapi", "httpx", "loguru", "sqlalchemy"], specifier = ">=2.34.1" }, - { name = "sqlalchemy", specifier = ">=2.0.23" }, - { name = 
"sqlmodel", specifier = ">=0.0.24" }, - { name = "uvicorn", extras = ["standard"], specifier = ">=0.24.0" }, -] - -[package.metadata.requires-dev] -dev = [ - { name = "maturin", specifier = ">=1.9.2" }, - { name = "pre-commit", specifier = ">=4.2.0" }, - { name = "ruff", specifier = ">=0.12.4" }, - { name = "types-aioboto3", extras = ["aioboto3", "essential"], specifier = ">=15.0.0" }, -] - [[package]] name = "osupyparser" version = "1.0.8" @@ -1236,6 +1294,32 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/e8/02/89e2ed7e85db6c93dfa9e8f691c5087df4e3551ab39081a4d7c6d1f90e05/redis-6.4.0-py3-none-any.whl", hash = "sha256:f0544fa9604264e9464cdf4814e7d4830f74b165d52f2a330a760a88dd248b7f", size = 279847, upload-time = "2025-08-07T08:10:09.84Z" }, ] +[[package]] +name = "rosu-pp-py" +version = "3.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/6c/19/b44c30066c6e85cd6a4fd8a8983be91d2336a4e7f0ef04e576bc9b1d7c63/rosu_pp_py-3.1.0.tar.gz", hash = "sha256:4aa64eb5e68b8957357f9b304047db285423b207ad913e28829ccfcd5348d41a", size = 31144, upload-time = "2025-06-03T17:14:27.461Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9a/04/d752d7cfb71afcbecd0513ffcc716abcf5c3b2b4b9a4e44a3c7e7fc43fba/rosu_pp_py-3.1.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:61275ddfedd7f67bcb5c42a136fb30a66aeb7e07323c59a67db590de687bd78d", size = 552307, upload-time = "2025-06-03T17:13:33.203Z" }, + { url = "https://files.pythonhosted.org/packages/27/76/e7d3415cdd384b8ea0a2f461c87d9b451108cbded46e2e88676611a99875/rosu_pp_py-3.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:04aacaa6faba9d0892ba5584884cfaf42eb1a7678dc0dff453fc6988e8be8809", size = 508787, upload-time = "2025-06-03T17:13:34.507Z" }, + { url = "https://files.pythonhosted.org/packages/7d/a0/c59168f75b32b6cf3e41d5d44dc478b113eebe38166e6b87af193ebb8d4f/rosu_pp_py-3.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9eecd7a78aeb82abf39ac7db670350a42b6eb8a54eb4a8a13610def02c56d005", size = 525740, upload-time = "2025-06-03T17:13:35.631Z" }, + { url = "https://files.pythonhosted.org/packages/d6/c0/7b498f8ecd6650d718291994c5e6d3931e5572e408d8d7bc9000f2441575/rosu_pp_py-3.1.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3dd5118614335e9084f076f9fa88fb139e64a9e1750c0d8020c8e8abe9e42dce", size = 550091, upload-time = "2025-06-03T17:13:36.733Z" }, + { url = "https://files.pythonhosted.org/packages/0d/21/85f67440c93bc22135e6e43f6fc1d35d184b9c1523416acfae4b8721d9e5/rosu_pp_py-3.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:edbd67da486af4fbf5d53cd310fddc280a67d06274aea5eb3e322ffc66e82479", size = 566542, upload-time = "2025-06-03T17:13:38.308Z" }, + { url = "https://files.pythonhosted.org/packages/d5/ed/1d3727d327097edf2ecf8a39a267d5f2ba7a82ce2f7c71e1be5b6c278870/rosu_pp_py-3.1.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:af295819cda6df49324179e5c3986eb4215d6c456a055620ec30716ed22ec97c", size = 704380, upload-time = "2025-06-03T17:13:39.839Z" }, + { url = "https://files.pythonhosted.org/packages/a3/4d/db4fb9bcd1cdebbc761728a8684d700559a5b44e5d2baec262e07907917a/rosu_pp_py-3.1.0-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:b0367959b9ef74f51f1cc414d587b6dabab00390496a855a89073b55e08330b0", size = 813664, upload-time = "2025-06-03T17:13:41.052Z" }, + { url = 
"https://files.pythonhosted.org/packages/b8/a9/3ec4502f4f44c0e22b7658308def31c96320e339b89cdf474c2612b40351/rosu_pp_py-3.1.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:adf103385961c01859ae99ded0c289e03f5ab33d86ecabdd4e8f3139c84c6240", size = 738024, upload-time = "2025-06-03T17:13:42.132Z" }, + { url = "https://files.pythonhosted.org/packages/9e/f6/d33cde2f911ff2fdedbbc2be6b249e29f3a65e11acd1b645df77ece0747a/rosu_pp_py-3.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:8dc48f45aff62fc2798e3a4adf4596d9e810079f16650a98c8ed6cf1a37e506b", size = 458391, upload-time = "2025-06-03T17:13:43.706Z" }, + { url = "https://files.pythonhosted.org/packages/ac/53/3f68a24d75c65b789200241f490c2379d86a3760f48dc9e22348f0a619c9/rosu_pp_py-3.1.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:5cda7206c2e8c96fdaccf0b531d0614df5e30ad6cd1bf217ec5556406294ed6c", size = 552011, upload-time = "2025-06-03T17:13:44.889Z" }, + { url = "https://files.pythonhosted.org/packages/b6/95/6251e0d7f615c148d17e5151b89e3da7da89ef5363de921b5957b5407510/rosu_pp_py-3.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:d54606719ac93ccadbcb40acd3dda41f6e319e075303b6bbfdebf784ed451281", size = 508659, upload-time = "2025-06-03T17:13:45.968Z" }, + { url = "https://files.pythonhosted.org/packages/7f/2b/23d449a97fb6d34ced7c421a13669d98a5522ce79fabd8151a873d3d152a/rosu_pp_py-3.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec88b95845851018e95e49f3f8610dc989a2cfc74273a8c40fe7ef94e4f37a6a", size = 525367, upload-time = "2025-06-03T17:13:47.56Z" }, + { url = "https://files.pythonhosted.org/packages/52/9a/c8879dd4f62632d8928cc147bca705eb7e2a21dc0ad43307d6f68e0a3b41/rosu_pp_py-3.1.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f39332ec3c479c68396d0f6ea09ab3ee77ca595ab14f4739581ca8a631dc33d8", size = 549600, upload-time = "2025-06-03T17:13:48.717Z" }, + { url = "https://files.pythonhosted.org/packages/e8/86/a0154a1b3149bd25884ea8009c70b9792a960dbfd4172b65ace0e55394b4/rosu_pp_py-3.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4a290f7920b0015e0a9d829428cce7948ae98043985b237b0d68e2b28c8dba3", size = 566082, upload-time = "2025-06-03T17:13:49.761Z" }, + { url = "https://files.pythonhosted.org/packages/e5/ee/897f5cb48dfe067549dee39cb265581782d1daebc4dd27b1c1bc58551755/rosu_pp_py-3.1.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:11ab7db7903a2752b7c53458e689b2f1f724bee1e99d627d447dee69e7668299", size = 704157, upload-time = "2025-06-03T17:13:51.175Z" }, + { url = "https://files.pythonhosted.org/packages/43/7d/67ec98bed784807d543106bb517879149bed3544d1987bdf59eab6ced79e/rosu_pp_py-3.1.0-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:bc5350a00a37dc273f7e734364a27820f2c274a5a1715fe3b0ef62bd071fae54", size = 813310, upload-time = "2025-06-03T17:13:52.421Z" }, + { url = "https://files.pythonhosted.org/packages/a9/02/fbbb54b21cec66fbe8e2884a73837e0c4e97ca5c625587d90b378c5354f0/rosu_pp_py-3.1.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:28f171e6042d68df379be0536173626b2ae51ddc4a7b1881209ff384c468918a", size = 737638, upload-time = "2025-06-03T17:13:53.709Z" }, + { url = "https://files.pythonhosted.org/packages/18/9e/f951ef3508cbfbaf36dcee3bd828eb8f922a21b2791bc852074adc1835a1/rosu_pp_py-3.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:a327e627bc56e55bc8dd3fcc26abcfe60af1497f310dad7aea3ef798434f2e9b", size = 457855, upload-time = "2025-06-03T17:13:55.317Z" }, +] + [[package]] name = "rsa" version = "4.9.1" From 
6e496a112385f951f1560378b81fb2dd683fafba Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E5=92=95=E8=B0=B7=E9=85=B1?= Date: Mon, 18 Aug 2025 00:23:57 +0800 Subject: [PATCH 2/4] add ip log --- app/auth.py | 11 +- app/database/__init__.py | 2 + app/database/user_login_log.py | 40 +++++ app/dependencies/geoip.py | 70 ++++++-- app/dependencies/user.py | 6 + app/router/auth.py | 47 +++++- app/service/login_log_service.py | 153 ++++++++++++++++++ ...d04d3f4dc_fix_user_login_log_table_name.py | 84 ++++++++++ .../3eef4794ded1_add_user_login_log_table.py | 56 +++++++ 9 files changed, 450 insertions(+), 19 deletions(-) create mode 100644 app/database/user_login_log.py create mode 100644 app/service/login_log_service.py create mode 100644 migrations/versions/2dcd04d3f4dc_fix_user_login_log_table_name.py create mode 100644 migrations/versions/3eef4794ded1_add_user_login_log_table.py diff --git a/app/auth.py b/app/auth.py index 6293c07..3ffac5a 100644 --- a/app/auth.py +++ b/app/auth.py @@ -114,11 +114,14 @@ async def authenticate_user_legacy( pw_md5 = hashlib.md5(password.encode()).hexdigest() # 2. 根据用户名查找用户 - statement = select(User).where(User.username == name) + statement = select(User).where(User.username == name).options() user = (await db.exec(statement)).first() if not user: return None + + await db.refresh(user) + # 3. 验证密码 if user.pw_bcrypt is None or user.pw_bcrypt == "": return None @@ -261,4 +264,8 @@ async def get_user_by_authorization_code( statement = select(User).where(User.id == int(user_id)) user = (await db.exec(statement)).first() - return (user, scopes.split(",")) if user else None + if user: + + await db.refresh(user) + return (user, scopes.split(",")) + return None diff --git a/app/database/__init__.py b/app/database/__init__.py index d8794c4..5c58b47 100644 --- a/app/database/__init__.py +++ b/app/database/__init__.py @@ -60,6 +60,7 @@ from .user_account_history import ( UserAccountHistoryResp, UserAccountHistoryType, ) +from .user_login_log import UserLoginLog __all__ = [ "APIUploadedRoom", @@ -118,6 +119,7 @@ __all__ = [ "UserAchievement", "UserAchievement", "UserAchievementResp", + "UserLoginLog", "UserResp", "UserStatistics", "UserStatisticsResp", diff --git a/app/database/user_login_log.py b/app/database/user_login_log.py new file mode 100644 index 0000000..ace1132 --- /dev/null +++ b/app/database/user_login_log.py @@ -0,0 +1,40 @@ +# -*- coding: utf-8 -*- +""" +User Login Log Database Model +""" +from datetime import datetime +from typing import Optional +from sqlmodel import Field, SQLModel + + +class UserLoginLog(SQLModel, table=True): + """User login log table""" + __tablename__ = "user_login_log" # pyright: ignore[reportAssignmentType] + + id: Optional[int] = Field(default=None, primary_key=True, description="Record ID") + user_id: int = Field(index=True, description="User ID") + ip_address: str = Field(max_length=45, index=True, description="IP address (supports IPv4 and IPv6)") + user_agent: Optional[str] = Field(default=None, max_length=500, description="User agent information") + login_time: datetime = Field(default_factory=datetime.utcnow, description="Login time") + + # GeoIP information + country_code: Optional[str] = Field(default=None, max_length=2, description="Country code") + country_name: Optional[str] = Field(default=None, max_length=100, description="Country name") + city_name: Optional[str] = Field(default=None, max_length=100, description="City name") + latitude: Optional[str] = Field(default=None, max_length=20, description="Latitude") + longitude: 
Optional[str] = Field(default=None, max_length=20, description="Longitude") + time_zone: Optional[str] = Field(default=None, max_length=50, description="Time zone") + + # ASN information + asn: Optional[int] = Field(default=None, description="Autonomous System Number") + organization: Optional[str] = Field(default=None, max_length=200, description="Organization name") + + # Login status + login_success: bool = Field(default=True, description="Whether the login was successful") + login_method: str = Field(max_length=50, description="Login method (password/oauth/etc.)") + + # Additional information + notes: Optional[str] = Field(default=None, max_length=500, description="Additional notes") + + class Config: + from_attributes = True diff --git a/app/dependencies/geoip.py b/app/dependencies/geoip.py index 34e8d3e..8b29679 100644 --- a/app/dependencies/geoip.py +++ b/app/dependencies/geoip.py @@ -2,6 +2,7 @@ """ GeoIP dependency for FastAPI """ +import ipaddress from functools import lru_cache from app.helpers.geoip_helper import GeoIPHelper from app.config import settings @@ -23,29 +24,76 @@ def get_geoip_helper() -> GeoIPHelper: def get_client_ip(request) -> str: """ - Get the real client IP address - Supports proxies, load balancers, and Cloudflare headers + 获取客户端真实 IP 地址 + 支持 IPv4 和 IPv6,考虑代理、负载均衡器等情况 """ headers = request.headers - # 1. Cloudflare specific headers + # 1. Cloudflare 专用头部 cf_ip = headers.get("CF-Connecting-IP") if cf_ip: - return cf_ip.strip() + ip = cf_ip.strip() + if is_valid_ip(ip): + return ip true_client_ip = headers.get("True-Client-IP") if true_client_ip: - return true_client_ip.strip() + ip = true_client_ip.strip() + if is_valid_ip(ip): + return ip - # 2. Standard proxy headers + # 2. 标准代理头部 forwarded_for = headers.get("X-Forwarded-For") if forwarded_for: - # X-Forwarded-For may contain multiple IPs, take the first - return forwarded_for.split(",")[0].strip() + # X-Forwarded-For 可能包含多个 IP,取第一个有效的 + for ip_str in forwarded_for.split(","): + ip = ip_str.strip() + if is_valid_ip(ip) and not is_private_ip(ip): + return ip real_ip = headers.get("X-Real-IP") if real_ip: - return real_ip.strip() + ip = real_ip.strip() + if is_valid_ip(ip): + return ip - # 3. Fallback to client host - return request.client.host if request.client else "127.0.0.1" + # 3. 
回退到客户端 IP + client_ip = request.client.host if request.client else "127.0.0.1" + return client_ip if is_valid_ip(client_ip) else "127.0.0.1" + + +def is_valid_ip(ip_str: str) -> bool: + """ + 验证 IP 地址是否有效(支持 IPv4 和 IPv6) + """ + try: + ipaddress.ip_address(ip_str) + return True + except ValueError: + return False + + +def is_private_ip(ip_str: str) -> bool: + """ + 判断是否为私有 IP 地址 + """ + try: + ip = ipaddress.ip_address(ip_str) + return ip.is_private + except ValueError: + return False + + +def normalize_ip(ip_str: str) -> str: + """ + 标准化 IP 地址格式 + 对于 IPv6,转换为压缩格式 + """ + try: + ip = ipaddress.ip_address(ip_str) + if isinstance(ip, ipaddress.IPv6Address): + return ip.compressed + else: + return str(ip) + except ValueError: + return ip_str diff --git a/app/dependencies/user.py b/app/dependencies/user.py index 084323f..73e679f 100644 --- a/app/dependencies/user.py +++ b/app/dependencies/user.py @@ -89,6 +89,9 @@ async def get_client_user( user = (await db.exec(select(User).where(User.id == token_record.user_id))).first() if not user: raise HTTPException(status_code=401, detail="Invalid or expired token") + + + await db.refresh(user) return user @@ -125,4 +128,7 @@ async def get_current_user( user = (await db.exec(select(User).where(User.id == token_record.user_id))).first() if not user: raise HTTPException(status_code=401, detail="Invalid or expired token") + + + await db.refresh(user) return user diff --git a/app/router/auth.py b/app/router/auth.py index 4c3505d..3eac5f7 100644 --- a/app/router/auth.py +++ b/app/router/auth.py @@ -23,6 +23,7 @@ from app.dependencies.database import get_redis from app.dependencies.geoip import get_geoip_helper, get_client_ip from app.helpers.geoip_helper import GeoIPHelper from app.log import logger +from app.service.login_log_service import LoginLogService from app.models.oauth import ( OAuthErrorResponse, RegistrationRequestErrors, @@ -201,6 +202,7 @@ async def register_user( description="OAuth 令牌端点,支持密码、刷新令牌和授权码三种授权方式。", ) async def oauth_token( + request: Request, grant_type: Literal[ "authorization_code", "refresh_token", "password", "client_credentials" ] = Form(..., description="授权类型:密码/刷新令牌/授权码/客户端凭证"), @@ -268,6 +270,15 @@ async def oauth_token( # 验证用户 user = await authenticate_user(db, username, password) if not user: + # 记录失败的登录尝试 + await LoginLogService.record_failed_login( + db=db, + request=request, + attempted_username=username, + login_method="password", + notes="Invalid credentials" + ) + return create_oauth_error_response( error="invalid_grant", description=( @@ -280,18 +291,34 @@ async def oauth_token( hint="Incorrect sign in", ) + # 确保用户对象与当前会话关联 + await db.refresh(user) + + # 记录成功的登录 + user_id = getattr(user, 'id') + assert user_id is not None, "User ID should not be None after authentication" + await LoginLogService.record_login( + db=db, + user_id=user_id, + request=request, + login_success=True, + login_method="password", + notes=f"OAuth password grant for client {client_id}" + ) + # 生成令牌 access_token_expires = timedelta(minutes=settings.access_token_expire_minutes) + # 获取用户ID,避免触发延迟加载 access_token = create_access_token( - data={"sub": str(user.id)}, expires_delta=access_token_expires + data={"sub": str(user_id)}, expires_delta=access_token_expires ) refresh_token_str = generate_refresh_token() # 存储令牌 - assert user.id + assert user_id await store_token( db, - user.id, + user_id, client_id, scopes, access_token, @@ -397,18 +424,26 @@ async def oauth_token( hint="Invalid authorization code", ) user, scopes = code_result + + # 
确保用户对象与当前会话关联
+        await db.refresh(user)
+
         # 生成令牌
         access_token_expires = timedelta(minutes=settings.access_token_expire_minutes)
+        # 重新查询只获取ID,避免触发延迟加载
+        id_result = await db.exec(select(User.id).where(User.id == user.id))
+        user_id = id_result.first()
+
         access_token = create_access_token(
-            data={"sub": str(user.id)}, expires_delta=access_token_expires
+            data={"sub": str(user_id)}, expires_delta=access_token_expires
         )
         refresh_token_str = generate_refresh_token()
 
         # 存储令牌
-        assert user.id
+        assert user_id
         await store_token(
             db,
-            user.id,
+            user_id,
             client_id,
             scopes,
             access_token,
diff --git a/app/service/login_log_service.py b/app/service/login_log_service.py
new file mode 100644
index 0000000..a9cc484
--- /dev/null
+++ b/app/service/login_log_service.py
@@ -0,0 +1,153 @@
+# -*- coding: utf-8 -*-
+"""
+用户登录记录服务
+"""
+import asyncio
+from datetime import datetime
+from typing import Optional
+from fastapi import Request
+from sqlmodel.ext.asyncio.session import AsyncSession
+
+from app.database.user_login_log import UserLoginLog
+from app.dependencies.geoip import get_geoip_helper, get_client_ip, normalize_ip
+from app.log import logger
+
+
+class LoginLogService:
+    """用户登录记录服务"""
+
+    @staticmethod
+    async def record_login(
+        db: AsyncSession,
+        user_id: int,
+        request: Request,
+        login_success: bool = True,
+        login_method: str = "password",
+        notes: Optional[str] = None
+    ) -> UserLoginLog:
+        """
+        记录用户登录信息
+
+        Args:
+            db: 数据库会话
+            user_id: 用户ID
+            request: HTTP请求对象
+            login_success: 登录是否成功
+            login_method: 登录方式
+            notes: 备注信息
+
+        Returns:
+            UserLoginLog: 登录记录对象
+        """
+        # 获取客户端IP并标准化格式
+        raw_ip = get_client_ip(request)
+        ip_address = normalize_ip(raw_ip)
+
+        # 获取User-Agent
+        user_agent = request.headers.get("User-Agent", "")
+
+        # 创建基本的登录记录
+        login_log = UserLoginLog(
+            user_id=user_id,
+            ip_address=ip_address,
+            user_agent=user_agent,
+            login_time=datetime.utcnow(),
+            login_success=login_success,
+            login_method=login_method,
+            notes=notes
+        )
+
+        # 异步获取GeoIP信息
+        try:
+            geoip = get_geoip_helper()
+
+            # 在后台线程中运行GeoIP查询(避免阻塞)
+            loop = asyncio.get_event_loop()
+            geo_info = await loop.run_in_executor(
+                None,
+                lambda: geoip.lookup(ip_address)
+            )
+
+            if geo_info:
+                login_log.country_code = geo_info.get("country_iso", "")
+                login_log.country_name = geo_info.get("country_name", "")
+                login_log.city_name = geo_info.get("city_name", "")
+                login_log.latitude = geo_info.get("latitude", "")
+                login_log.longitude = geo_info.get("longitude", "")
+                login_log.time_zone = geo_info.get("time_zone", "")
+
+                # 处理 ASN(可能是字符串,需要转换为整数)
+                asn_value = geo_info.get("asn")
+                if asn_value is not None:
+                    try:
+                        login_log.asn = int(asn_value)
+                    except (ValueError, TypeError):
+                        login_log.asn = None
+
+                login_log.organization = geo_info.get("organization", "")
+
+                logger.debug(f"GeoIP lookup for {ip_address}: {geo_info.get('country_name', 'Unknown')}")
+            else:
+                logger.warning(f"GeoIP lookup failed for {ip_address}")
+
+        except Exception as e:
+            logger.warning(f"GeoIP lookup error for {ip_address}: {e}")
+
+        # 保存到数据库
+        db.add(login_log)
+        await db.commit()
+        await db.refresh(login_log)
+
+        logger.info(f"Login recorded for user {user_id} from {ip_address} ({login_method})")
+        return login_log
+
+    @staticmethod
+    async def record_failed_login(
+        db: AsyncSession,
+        request: Request,
+        attempted_username: Optional[str] = None,
+        login_method: str = "password",
+        notes: Optional[str] = None
+    ) -> UserLoginLog:
+        """
+        记录失败的登录尝试
+
+        Args:
+            db: 数据库会话
+            request: HTTP请求对象
+            attempted_username: 尝试登录的用户名
+            login_method: 登录方式
+ notes: 备注信息 + + Returns: + UserLoginLog: 登录记录对象 + """ + # 对于失败的登录,使用user_id=0表示未知用户 + return await LoginLogService.record_login( + db=db, + user_id=0, # 0表示未知/失败的登录 + request=request, + login_success=False, + login_method=login_method, + notes=f"Failed login attempt: {attempted_username}" if attempted_username else "Failed login attempt" + ) + + +def get_request_info(request: Request) -> dict: + """ + 提取请求的详细信息 + + Args: + request: HTTP请求对象 + + Returns: + dict: 包含请求信息的字典 + """ + return { + "ip": get_client_ip(request), + "user_agent": request.headers.get("User-Agent", ""), + "referer": request.headers.get("Referer", ""), + "accept_language": request.headers.get("Accept-Language", ""), + "x_forwarded_for": request.headers.get("X-Forwarded-For", ""), + "x_real_ip": request.headers.get("X-Real-IP", ""), + } diff --git a/migrations/versions/2dcd04d3f4dc_fix_user_login_log_table_name.py b/migrations/versions/2dcd04d3f4dc_fix_user_login_log_table_name.py new file mode 100644 index 0000000..e361e27 --- /dev/null +++ b/migrations/versions/2dcd04d3f4dc_fix_user_login_log_table_name.py @@ -0,0 +1,84 @@ +"""Fix user login log table name + +Revision ID: 2dcd04d3f4dc +Revises: 3eef4794ded1 +Create Date: 2025-08-18 00:07:06.886879 + +""" +from __future__ import annotations + +from collections.abc import Sequence + +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import mysql +import sqlmodel + +# revision identifiers, used by Alembic. +revision: str = "2dcd04d3f4dc" +down_revision: str | Sequence[str] | None = "3eef4794ded1" +branch_labels: str | Sequence[str] | None = None +depends_on: str | Sequence[str] | None = None + + +def upgrade() -> None: + """Upgrade schema.""" + # ### commands auto generated by Alembic - please adjust! ### + op.create_table("user_login_log", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("user_id", sa.Integer(), nullable=False), + sa.Column("ip_address", sqlmodel.sql.sqltypes.AutoString(length=45), nullable=False), + sa.Column("user_agent", sqlmodel.sql.sqltypes.AutoString(length=500), nullable=True), + sa.Column("login_time", sa.DateTime(), nullable=False), + sa.Column("country_code", sqlmodel.sql.sqltypes.AutoString(length=2), nullable=True), + sa.Column("country_name", sqlmodel.sql.sqltypes.AutoString(length=100), nullable=True), + sa.Column("city_name", sqlmodel.sql.sqltypes.AutoString(length=100), nullable=True), + sa.Column("latitude", sqlmodel.sql.sqltypes.AutoString(length=20), nullable=True), + sa.Column("longitude", sqlmodel.sql.sqltypes.AutoString(length=20), nullable=True), + sa.Column("time_zone", sqlmodel.sql.sqltypes.AutoString(length=50), nullable=True), + sa.Column("asn", sa.Integer(), nullable=True), + sa.Column("organization", sqlmodel.sql.sqltypes.AutoString(length=200), nullable=True), + sa.Column("login_success", sa.Boolean(), nullable=False), + sa.Column("login_method", sqlmodel.sql.sqltypes.AutoString(length=50), nullable=False), + sa.Column("notes", sqlmodel.sql.sqltypes.AutoString(length=500), nullable=True), + sa.PrimaryKeyConstraint("id") + ) + op.create_index(op.f("ix_user_login_log_ip_address"), "user_login_log", ["ip_address"], unique=False) + op.create_index(op.f("ix_user_login_log_user_id"), "user_login_log", ["user_id"], unique=False) + op.drop_index(op.f("ix_userloginlog_ip_address"), table_name="userloginlog") + op.drop_index(op.f("ix_userloginlog_user_id"), table_name="userloginlog") + op.drop_table("userloginlog") + # ### end Alembic commands ### + + +def downgrade() -> None: + """Downgrade 
schema.""" + # ### commands auto generated by Alembic - please adjust! ### + op.create_table("userloginlog", + sa.Column("id", mysql.INTEGER(), autoincrement=True, nullable=False), + sa.Column("user_id", mysql.INTEGER(), autoincrement=False, nullable=False), + sa.Column("ip_address", mysql.VARCHAR(length=45), nullable=False), + sa.Column("user_agent", mysql.VARCHAR(length=500), nullable=True), + sa.Column("login_time", mysql.DATETIME(), nullable=False), + sa.Column("country_code", mysql.VARCHAR(length=2), nullable=True), + sa.Column("country_name", mysql.VARCHAR(length=100), nullable=True), + sa.Column("city_name", mysql.VARCHAR(length=100), nullable=True), + sa.Column("latitude", mysql.VARCHAR(length=20), nullable=True), + sa.Column("longitude", mysql.VARCHAR(length=20), nullable=True), + sa.Column("time_zone", mysql.VARCHAR(length=50), nullable=True), + sa.Column("asn", mysql.INTEGER(), autoincrement=False, nullable=True), + sa.Column("organization", mysql.VARCHAR(length=200), nullable=True), + sa.Column("login_success", mysql.TINYINT(display_width=1), autoincrement=False, nullable=False), + sa.Column("login_method", mysql.VARCHAR(length=50), nullable=False), + sa.Column("notes", mysql.VARCHAR(length=500), nullable=True), + sa.PrimaryKeyConstraint("id"), + mysql_collate="utf8mb4_0900_ai_ci", + mysql_default_charset="utf8mb4", + mysql_engine="InnoDB" + ) + op.create_index(op.f("ix_userloginlog_user_id"), "userloginlog", ["user_id"], unique=False) + op.create_index(op.f("ix_userloginlog_ip_address"), "userloginlog", ["ip_address"], unique=False) + op.drop_index(op.f("ix_user_login_log_user_id"), table_name="user_login_log") + op.drop_index(op.f("ix_user_login_log_ip_address"), table_name="user_login_log") + op.drop_table("user_login_log") + # ### end Alembic commands ### diff --git a/migrations/versions/3eef4794ded1_add_user_login_log_table.py b/migrations/versions/3eef4794ded1_add_user_login_log_table.py new file mode 100644 index 0000000..b4df540 --- /dev/null +++ b/migrations/versions/3eef4794ded1_add_user_login_log_table.py @@ -0,0 +1,56 @@ +"""Add user login log table + +Revision ID: 3eef4794ded1 +Revises: df9f725a077c +Create Date: 2025-08-18 00:00:11.369944 + +""" +from __future__ import annotations + +from collections.abc import Sequence + +from alembic import op +import sqlalchemy as sa +import sqlmodel + +# revision identifiers, used by Alembic. +revision: str = "3eef4794ded1" +down_revision: str | Sequence[str] | None = "df9f725a077c" +branch_labels: str | Sequence[str] | None = None +depends_on: str | Sequence[str] | None = None + + +def upgrade() -> None: + """Upgrade schema.""" + # ### commands auto generated by Alembic - please adjust! 
### + op.create_table("userloginlog", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("user_id", sa.Integer(), nullable=False), + sa.Column("ip_address", sqlmodel.sql.sqltypes.AutoString(length=45), nullable=False), + sa.Column("user_agent", sqlmodel.sql.sqltypes.AutoString(length=500), nullable=True), + sa.Column("login_time", sa.DateTime(), nullable=False), + sa.Column("country_code", sqlmodel.sql.sqltypes.AutoString(length=2), nullable=True), + sa.Column("country_name", sqlmodel.sql.sqltypes.AutoString(length=100), nullable=True), + sa.Column("city_name", sqlmodel.sql.sqltypes.AutoString(length=100), nullable=True), + sa.Column("latitude", sqlmodel.sql.sqltypes.AutoString(length=20), nullable=True), + sa.Column("longitude", sqlmodel.sql.sqltypes.AutoString(length=20), nullable=True), + sa.Column("time_zone", sqlmodel.sql.sqltypes.AutoString(length=50), nullable=True), + sa.Column("asn", sa.Integer(), nullable=True), + sa.Column("organization", sqlmodel.sql.sqltypes.AutoString(length=200), nullable=True), + sa.Column("login_success", sa.Boolean(), nullable=False), + sa.Column("login_method", sqlmodel.sql.sqltypes.AutoString(length=50), nullable=False), + sa.Column("notes", sqlmodel.sql.sqltypes.AutoString(length=500), nullable=True), + sa.PrimaryKeyConstraint("id") + ) + op.create_index(op.f("ix_userloginlog_ip_address"), "userloginlog", ["ip_address"], unique=False) + op.create_index(op.f("ix_userloginlog_user_id"), "userloginlog", ["user_id"], unique=False) + # ### end Alembic commands ### + + +def downgrade() -> None: + """Downgrade schema.""" + # ### commands auto generated by Alembic - please adjust! ### + op.drop_index(op.f("ix_userloginlog_user_id"), table_name="userloginlog") + op.drop_index(op.f("ix_userloginlog_ip_address"), table_name="userloginlog") + op.drop_table("userloginlog") + # ### end Alembic commands ### From b487b286e1bc3a8ad08d02f35f860e070c36b15f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E5=92=95=E8=B0=B7=E9=85=B1?= Date: Mon, 18 Aug 2025 00:38:25 +0800 Subject: [PATCH 3/4] add NewRelic --- main.py | 21 ++++ newrelic.ini | 255 +++++++++++++++++++++++++++++++++++++++++++++++++ pyproject.toml | 1 + uv.lock | 18 ++++ 4 files changed, 295 insertions(+) create mode 100644 newrelic.ini diff --git a/main.py b/main.py index ac2509c..a42f96e 100644 --- a/main.py +++ b/main.py @@ -29,6 +29,27 @@ from app.service.init_geoip import init_geoip from app.service.osu_rx_statistics import create_rx_statistics from app.service.recalculate import recalculate +# 检查 New Relic 配置文件是否存在,如果存在则初始化 New Relic +newrelic_config_path = os.path.join(os.path.dirname(__file__), "newrelic.ini") +if os.path.exists(newrelic_config_path): + try: + import newrelic.agent + + environment = os.environ.get( + "NEW_RELIC_ENVIRONMENT", + "production" if not settings.debug else "development" + ) + + newrelic.agent.initialize(newrelic_config_path, environment) + logger.info(f"[NewRelic] Enabled, environment: {environment}") + except ImportError: + logger.warning("[NewRelic] Config file found but 'newrelic' package is not installed") + except Exception as e: + logger.error(f"[NewRelic] Initialization failed: {e}") +else: + logger.info("[NewRelic] No newrelic.ini config file found, skipping initialization") + + from fastapi import FastAPI, HTTPException, Request from fastapi.exceptions import RequestValidationError from fastapi.middleware.cors import CORSMiddleware diff --git a/newrelic.ini b/newrelic.ini new file mode 100644 index 0000000..4a3c202 --- /dev/null +++ b/newrelic.ini @@ -0,0 +1,255 @@ 
+# --------------------------------------------------------------------------- + +# +# This file configures the New Relic Python Agent. +# +# The path to the configuration file should be supplied to the function +# newrelic.agent.initialize() when the agent is being initialized. +# +# The configuration file follows a structure similar to what you would +# find for Microsoft Windows INI files. For further information on the +# configuration file format see the Python ConfigParser documentation at: +# +# http://docs.python.org/library/configparser.html +# +# For further discussion on the behaviour of the Python agent that can +# be configured via this configuration file see: +# +# https://docs.newrelic.com/docs/apm/agents/python-agent/configuration/python-agent-configuration/ +# + +# --------------------------------------------------------------------------- + +# Here are the settings that are common to all environments. + +[newrelic] + +# You must specify the license key associated with your New +# Relic account. This may also be set using the NEW_RELIC_LICENSE_KEY +# environment variable. This key binds the Python Agent's data to +# your account in the New Relic service. For more information on +# storing and generating license keys, see +# https://docs.newrelic.com/docs/apis/intro-apis/new-relic-api-keys/#ingest-license-key +license_key = 142680e050caa393c0aed741f717f1baFFFFNRAL + +# The application name. Set this to be the name of your +# application as you would like it to show up in New Relic UI. +# You may also set this using the NEW_RELIC_APP_NAME environment variable. +# The UI will then auto-map instances of your application into a +# entry on your home dashboard page. You can also specify multiple +# app names to group your aggregated data. For further details, +# please see: +# https://docs.newrelic.com/docs/apm/agents/manage-apm-agents/app-naming/use-multiple-names-app/ +app_name = g0v0-server + +# When "true", the agent collects performance data about your +# application and reports this data to the New Relic UI at +# newrelic.com. This global switch is normally overridden for +# each environment below. It may also be set using the +# NEW_RELIC_MONITOR_MODE environment variable. +monitor_mode = true + +# Sets the name of a file to log agent messages to. Whatever you +# set this to, you must ensure that the permissions for the +# containing directory and the file itself are correct, and +# that the user that your web application runs as can write out +# to the file. If not able to out a log file, it is also +# possible to say "stderr" and output to standard error output. +# This would normally result in output appearing in your web +# server log. It can also be set using the NEW_RELIC_LOG +# environment variable. +log_file = stdout + +# Sets the level of detail of messages sent to the log file, if +# a log file location has been provided. Possible values, in +# increasing order of detail, are: "critical", "error", "warning", +# "info" and "debug". When reporting any agent issues to New +# Relic technical support, the most useful setting for the +# support engineers is "debug". However, this can generate a lot +# of information very quickly, so it is best not to keep the +# agent at this level for longer than it takes to reproduce the +# problem you are experiencing. This may also be set using the +# NEW_RELIC_LOG_LEVEL environment variable. 
+log_level = info + +# High Security Mode enforces certain security settings, and prevents +# them from being overridden, so that no sensitive data is sent to New +# Relic. Enabling High Security Mode means that request parameters are +# not collected and SQL can not be sent to New Relic in its raw form. +# To activate High Security Mode, it must be set to 'true' in this +# local .ini configuration file AND be set to 'true' in the +# server-side configuration in the New Relic user interface. It can +# also be set using the NEW_RELIC_HIGH_SECURITY environment variable. +# For details, see +# https://docs.newrelic.com/docs/subscriptions/high-security +high_security = false + +# The Python Agent will attempt to connect directly to the New +# Relic service. If there is an intermediate firewall between +# your host and the New Relic service that requires you to use a +# HTTP proxy, then you should set both the "proxy_host" and +# "proxy_port" settings to the required values for the HTTP +# proxy. The "proxy_user" and "proxy_pass" settings should +# additionally be set if proxy authentication is implemented by +# the HTTP proxy. The "proxy_scheme" setting dictates what +# protocol scheme is used in talking to the HTTP proxy. This +# would normally always be set as "http" which will result in the +# agent then using a SSL tunnel through the HTTP proxy for end to +# end encryption. +# See https://docs.newrelic.com/docs/apm/agents/python-agent/configuration/python-agent-configuration/#proxy +# for information on proxy configuration via environment variables. +# proxy_scheme = http +# proxy_host = hostname +# proxy_port = 8080 +# proxy_user = +# proxy_pass = + +# Capturing request parameters is off by default. To enable the +# capturing of request parameters, first ensure that the setting +# "attributes.enabled" is set to "true" (the default value), and +# then add "request.parameters.*" to the "attributes.include" +# setting. For details about attributes configuration, please +# consult the documentation. +# attributes.include = request.parameters.* + +# The transaction tracer captures deep information about slow +# transactions and sends this to the UI on a periodic basis. The +# transaction tracer is enabled by default. Set this to "false" +# to turn it off. +transaction_tracer.enabled = true + +# Threshold in seconds for when to collect a transaction trace. +# When the response time of a controller action exceeds this +# threshold, a transaction trace will be recorded and sent to +# the UI. Valid values are any positive float value, or (default) +# "apdex_f", which will use the threshold for a dissatisfying +# Apdex controller action - four times the Apdex T value. +transaction_tracer.transaction_threshold = apdex_f + +# When the transaction tracer is on, SQL statements can +# optionally be recorded. The recorder has three modes, "off" +# which sends no SQL, "raw" which sends the SQL statement in its +# original form, and "obfuscated", which strips out numeric and +# string literals. +transaction_tracer.record_sql = obfuscated + +# Threshold in seconds for when to collect stack trace for a SQL +# call. In other words, when SQL statements exceed this +# threshold, then capture and send to the UI the current stack +# trace. This is helpful for pinpointing where long SQL calls +# originate from in an application. +transaction_tracer.stack_trace_threshold = 0.5 + +# Determines whether the agent will capture query plans for slow +# SQL queries. Only supported in MySQL and PostgreSQL. 
Set this +# to "false" to turn it off. +transaction_tracer.explain_enabled = true + +# Threshold for query execution time below which query plans +# will not not be captured. Relevant only when "explain_enabled" +# is true. +transaction_tracer.explain_threshold = 0.5 + +# Space separated list of function or method names in form +# 'module:function' or 'module:class.function' for which +# additional function timing instrumentation will be added. +transaction_tracer.function_trace = + +# The error collector captures information about uncaught +# exceptions or logged exceptions and sends them to UI for +# viewing. The error collector is enabled by default. Set this +# to "false" to turn it off. For more details on errors, see +# https://docs.newrelic.com/docs/apm/agents/manage-apm-agents/agent-data/manage-errors-apm-collect-ignore-or-mark-expected/ +error_collector.enabled = true + +# To stop specific errors from reporting to the UI, set this to +# a space separated list of the Python exception type names to +# ignore. The exception name should be of the form 'module:class'. +error_collector.ignore_classes = + +# Expected errors are reported to the UI but will not affect the +# Apdex or error rate. To mark specific errors as expected, set this +# to a space separated list of the Python exception type names to +# expected. The exception name should be of the form 'module:class'. +error_collector.expected_classes = + +# Browser monitoring is the Real User Monitoring feature of the UI. +# For those Python web frameworks that are supported, this +# setting enables the auto-insertion of the browser monitoring +# JavaScript fragments. +browser_monitoring.auto_instrument = true + +# A thread profiling session can be scheduled via the UI when +# this option is enabled. The thread profiler will periodically +# capture a snapshot of the call stack for each active thread in +# the application to construct a statistically representative +# call tree. For more details on the thread profiler tool, see +# https://docs.newrelic.com/docs/apm/apm-ui-pages/events/thread-profiler-tool/ +thread_profiler.enabled = true + +# Your application deployments can be recorded through the +# New Relic REST API. To use this feature provide your API key +# below then use the `newrelic-admin record-deploy` command. +# This can also be set using the NEW_RELIC_API_KEY +# environment variable. +# api_key = + +# Distributed tracing lets you see the path that a request takes +# through your distributed system. For more information, please +# consult our distributed tracing planning guide. +# https://docs.newrelic.com/docs/transition-guide-distributed-tracing +distributed_tracing.enabled = true + +# This setting enables log decoration, the forwarding of log events, +# and the collection of logging metrics if these sub-feature +# configurations are also enabled. If this setting is false, no +# logging instrumentation features are enabled. This can also be +# set using the NEW_RELIC_APPLICATION_LOGGING_ENABLED environment +# variable. +# application_logging.enabled = true + +# If true, the agent captures log records emitted by your application +# and forwards them to New Relic. `application_logging.enabled` must +# also be true for this setting to take effect. You can also set +# this using the NEW_RELIC_APPLICATION_LOGGING_FORWARDING_ENABLED +# environment variable. +# application_logging.forwarding.enabled = true + +# If true, the agent decorates logs with metadata to link to entities, +# hosts, traces, and spans. 
`application_logging.enabled` must also +# be true for this setting to take effect. This can also be set +# using the NEW_RELIC_APPLICATION_LOGGING_LOCAL_DECORATING_ENABLED +# environment variable. +# application_logging.local_decorating.enabled = true + +# If true, the agent captures metrics related to the log lines +# being sent up by your application. This can also be set +# using the NEW_RELIC_APPLICATION_LOGGING_METRICS_ENABLED +# environment variable. +# application_logging.metrics.enabled = true + + +# --------------------------------------------------------------------------- + +# +# The application environments. These are specific settings which +# override the common environment settings. The settings related to a +# specific environment will be used when the environment argument to the +# newrelic.agent.initialize() function has been defined to be either +# "development", "test", "staging" or "production". +# + +[newrelic:development] +monitor_mode = false + +[newrelic:test] +monitor_mode = false + +[newrelic:staging] +app_name = g0v0-server (Staging) +monitor_mode = true + +[newrelic:production] +monitor_mode = true + +# --------------------------------------------------------------------------- diff --git a/pyproject.toml b/pyproject.toml index 5224ac7..7ef93e5 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -17,6 +17,7 @@ dependencies = [ "loguru>=0.7.3", "maxminddb>=2.8.2", "msgpack-lazer-api", + "newrelic>=10.1.0", "osupyparser>=1.0.7", "passlib[bcrypt]>=1.7.4", "pillow>=11.3.0", diff --git a/uv.lock b/uv.lock index f21ae87..ac18584 100644 --- a/uv.lock +++ b/uv.lock @@ -545,6 +545,7 @@ dependencies = [ { name = "loguru" }, { name = "maxminddb" }, { name = "msgpack-lazer-api" }, + { name = "newrelic" }, { name = "osupyparser" }, { name = "passlib", extra = ["bcrypt"] }, { name = "pillow" }, @@ -583,6 +584,7 @@ requires-dist = [ { name = "loguru", specifier = ">=0.7.3" }, { name = "maxminddb", specifier = ">=2.8.2" }, { name = "msgpack-lazer-api", editable = "packages/msgpack_lazer_api" }, + { name = "newrelic", specifier = ">=10.1.0" }, { name = "osupyparser", git = "https://github.com/MingxuanGame/osupyparser.git" }, { name = "passlib", extras = ["bcrypt"], specifier = ">=1.7.4" }, { name = "pillow", specifier = ">=11.3.0" }, @@ -931,6 +933,22 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/fd/69/b547032297c7e63ba2af494edba695d781af8a0c6e89e4d06cf848b21d80/multidict-6.6.4-py3-none-any.whl", hash = "sha256:27d8f8e125c07cb954e54d75d04905a9bba8a439c1d84aca94949d4d03d8601c", size = 12313, upload-time = "2025-08-11T12:08:46.891Z" }, ] +[[package]] +name = "newrelic" +version = "10.16.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d4/71/07c41fd5e8c94e95216b000d00c3cf735f769cb406c0f33c6ff83b7b7418/newrelic-10.16.0.tar.gz", hash = "sha256:d20eb934380a88d787f93e037d2ccfd5a7c80e657db5bb2e645216eaafe32e26", size = 1267210, upload-time = "2025-08-14T22:23:47.619Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/be/4b/ba14854270412686fdc331b1503b90554968e58d504297572bfc9cdaf3ad/newrelic-10.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f74301ed32ce625fb84c8bd0c079498dca603ff111d5baa2f59b6480ef7355b", size = 858772, upload-time = "2025-08-14T22:23:19.058Z" }, + { url = 
"https://files.pythonhosted.org/packages/97/41/de8f0be9e285371cee22a8399840141ffb34ffa5462d31c80c5d7a951ed5/newrelic-10.16.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3efef3b10f02a086d9009a80b020db8f0b1db5072075779c9facd5d2fe3bb916", size = 858430, upload-time = "2025-08-14T22:23:20.658Z" }, + { url = "https://files.pythonhosted.org/packages/52/74/2fc1e2d96029b57397eb3e2fae9f5e5840af3b98e896834b7a584f614b98/newrelic-10.16.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8328f621f3683b28156cfa8e1648a56b894480bf4c4953cde7e659b1480ae1e9", size = 856401, upload-time = "2025-08-14T22:23:21.981Z" }, + { url = "https://files.pythonhosted.org/packages/2a/42/fb73714fbf105b0c96faea68d17149dfecaa0209a6354b6a6d67c77ef65f/newrelic-10.16.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:07f65faed0a9aa40695e6e6ce4c943ca9c9643fc848c2344bb26665a65893a80", size = 856270, upload-time = "2025-08-14T22:23:23.407Z" }, + { url = "https://files.pythonhosted.org/packages/76/6d/47389d4a9390af2834f552d3bb7ed9b7c9ded30054409621e941ddbe1083/newrelic-10.16.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:053e73518cf0b2af412490723d3fa8338dab6ed48b748c0b7a0288160d1c33e2", size = 858759, upload-time = "2025-08-14T22:23:24.957Z" }, + { url = "https://files.pythonhosted.org/packages/b4/3e/9e44b713450c89d9d9b0a2626059014ad6e6699992fd687aec4cacb54b35/newrelic-10.16.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8263ae16eb4745def12bace2e3e38e013e76d4e765333a77342b270702e3eaa9", size = 858435, upload-time = "2025-08-14T22:23:26.536Z" }, + { url = "https://files.pythonhosted.org/packages/61/a6/660dfd715011e2fef4d8e5f91c8c040f4ac125bc3bdc644ae0e514cbe87c/newrelic-10.16.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6e83e1341a72ac92118d1af40c2c3ba56069c589acceb1e89d236beab8d7891a", size = 856574, upload-time = "2025-08-14T22:23:28.041Z" }, + { url = "https://files.pythonhosted.org/packages/59/9a/25935a1b999cef132a1597ca3686c028f5472e58d26bfe33876e8af38f4f/newrelic-10.16.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:71950c5e1f26bf1ed563d3641da97f6bb209f80697675888ee12808d8ac69452", size = 856448, upload-time = "2025-08-14T22:23:29.483Z" }, +] + [[package]] name = "nodeenv" version = "1.9.1" From 3b65f0728b7195300dbc0d66ee7de8afca48e9a0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E5=92=95=E8=B0=B7=E9=85=B1?= Date: Mon, 18 Aug 2025 00:43:52 +0800 Subject: [PATCH 4/4] del newrelic.ini --- .gitignore | 3 +- newrelic.ini | 255 --------------------------------------------------- 2 files changed, 2 insertions(+), 256 deletions(-) delete mode 100644 newrelic.ini diff --git a/.gitignore b/.gitignore index b8a8238..cf9e3b8 100644 --- a/.gitignore +++ b/.gitignore @@ -216,4 +216,5 @@ storage/ replays/ osu-master/* -geoip/* \ No newline at end of file +geoip/* +newrelic.ini \ No newline at end of file diff --git a/newrelic.ini b/newrelic.ini deleted file mode 100644 index 4a3c202..0000000 --- a/newrelic.ini +++ /dev/null @@ -1,255 +0,0 @@ -# --------------------------------------------------------------------------- - -# -# This file configures the New Relic Python Agent. -# -# The path to the configuration file should be supplied to the function -# newrelic.agent.initialize() when the agent is being initialized. -# -# The configuration file follows a structure similar to what you would -# find for Microsoft Windows INI files. 
For further information on the -# configuration file format see the Python ConfigParser documentation at: -# -# http://docs.python.org/library/configparser.html -# -# For further discussion on the behaviour of the Python agent that can -# be configured via this configuration file see: -# -# https://docs.newrelic.com/docs/apm/agents/python-agent/configuration/python-agent-configuration/ -# - -# --------------------------------------------------------------------------- - -# Here are the settings that are common to all environments. - -[newrelic] - -# You must specify the license key associated with your New -# Relic account. This may also be set using the NEW_RELIC_LICENSE_KEY -# environment variable. This key binds the Python Agent's data to -# your account in the New Relic service. For more information on -# storing and generating license keys, see -# https://docs.newrelic.com/docs/apis/intro-apis/new-relic-api-keys/#ingest-license-key -license_key = 142680e050caa393c0aed741f717f1baFFFFNRAL - -# The application name. Set this to be the name of your -# application as you would like it to show up in New Relic UI. -# You may also set this using the NEW_RELIC_APP_NAME environment variable. -# The UI will then auto-map instances of your application into a -# entry on your home dashboard page. You can also specify multiple -# app names to group your aggregated data. For further details, -# please see: -# https://docs.newrelic.com/docs/apm/agents/manage-apm-agents/app-naming/use-multiple-names-app/ -app_name = g0v0-server - -# When "true", the agent collects performance data about your -# application and reports this data to the New Relic UI at -# newrelic.com. This global switch is normally overridden for -# each environment below. It may also be set using the -# NEW_RELIC_MONITOR_MODE environment variable. -monitor_mode = true - -# Sets the name of a file to log agent messages to. Whatever you -# set this to, you must ensure that the permissions for the -# containing directory and the file itself are correct, and -# that the user that your web application runs as can write out -# to the file. If not able to out a log file, it is also -# possible to say "stderr" and output to standard error output. -# This would normally result in output appearing in your web -# server log. It can also be set using the NEW_RELIC_LOG -# environment variable. -log_file = stdout - -# Sets the level of detail of messages sent to the log file, if -# a log file location has been provided. Possible values, in -# increasing order of detail, are: "critical", "error", "warning", -# "info" and "debug". When reporting any agent issues to New -# Relic technical support, the most useful setting for the -# support engineers is "debug". However, this can generate a lot -# of information very quickly, so it is best not to keep the -# agent at this level for longer than it takes to reproduce the -# problem you are experiencing. This may also be set using the -# NEW_RELIC_LOG_LEVEL environment variable. -log_level = info - -# High Security Mode enforces certain security settings, and prevents -# them from being overridden, so that no sensitive data is sent to New -# Relic. Enabling High Security Mode means that request parameters are -# not collected and SQL can not be sent to New Relic in its raw form. -# To activate High Security Mode, it must be set to 'true' in this -# local .ini configuration file AND be set to 'true' in the -# server-side configuration in the New Relic user interface. 
It can -# also be set using the NEW_RELIC_HIGH_SECURITY environment variable. -# For details, see -# https://docs.newrelic.com/docs/subscriptions/high-security -high_security = false - -# The Python Agent will attempt to connect directly to the New -# Relic service. If there is an intermediate firewall between -# your host and the New Relic service that requires you to use a -# HTTP proxy, then you should set both the "proxy_host" and -# "proxy_port" settings to the required values for the HTTP -# proxy. The "proxy_user" and "proxy_pass" settings should -# additionally be set if proxy authentication is implemented by -# the HTTP proxy. The "proxy_scheme" setting dictates what -# protocol scheme is used in talking to the HTTP proxy. This -# would normally always be set as "http" which will result in the -# agent then using a SSL tunnel through the HTTP proxy for end to -# end encryption. -# See https://docs.newrelic.com/docs/apm/agents/python-agent/configuration/python-agent-configuration/#proxy -# for information on proxy configuration via environment variables. -# proxy_scheme = http -# proxy_host = hostname -# proxy_port = 8080 -# proxy_user = -# proxy_pass = - -# Capturing request parameters is off by default. To enable the -# capturing of request parameters, first ensure that the setting -# "attributes.enabled" is set to "true" (the default value), and -# then add "request.parameters.*" to the "attributes.include" -# setting. For details about attributes configuration, please -# consult the documentation. -# attributes.include = request.parameters.* - -# The transaction tracer captures deep information about slow -# transactions and sends this to the UI on a periodic basis. The -# transaction tracer is enabled by default. Set this to "false" -# to turn it off. -transaction_tracer.enabled = true - -# Threshold in seconds for when to collect a transaction trace. -# When the response time of a controller action exceeds this -# threshold, a transaction trace will be recorded and sent to -# the UI. Valid values are any positive float value, or (default) -# "apdex_f", which will use the threshold for a dissatisfying -# Apdex controller action - four times the Apdex T value. -transaction_tracer.transaction_threshold = apdex_f - -# When the transaction tracer is on, SQL statements can -# optionally be recorded. The recorder has three modes, "off" -# which sends no SQL, "raw" which sends the SQL statement in its -# original form, and "obfuscated", which strips out numeric and -# string literals. -transaction_tracer.record_sql = obfuscated - -# Threshold in seconds for when to collect stack trace for a SQL -# call. In other words, when SQL statements exceed this -# threshold, then capture and send to the UI the current stack -# trace. This is helpful for pinpointing where long SQL calls -# originate from in an application. -transaction_tracer.stack_trace_threshold = 0.5 - -# Determines whether the agent will capture query plans for slow -# SQL queries. Only supported in MySQL and PostgreSQL. Set this -# to "false" to turn it off. -transaction_tracer.explain_enabled = true - -# Threshold for query execution time below which query plans -# will not not be captured. Relevant only when "explain_enabled" -# is true. -transaction_tracer.explain_threshold = 0.5 - -# Space separated list of function or method names in form -# 'module:function' or 'module:class.function' for which -# additional function timing instrumentation will be added. 
-transaction_tracer.function_trace = - -# The error collector captures information about uncaught -# exceptions or logged exceptions and sends them to UI for -# viewing. The error collector is enabled by default. Set this -# to "false" to turn it off. For more details on errors, see -# https://docs.newrelic.com/docs/apm/agents/manage-apm-agents/agent-data/manage-errors-apm-collect-ignore-or-mark-expected/ -error_collector.enabled = true - -# To stop specific errors from reporting to the UI, set this to -# a space separated list of the Python exception type names to -# ignore. The exception name should be of the form 'module:class'. -error_collector.ignore_classes = - -# Expected errors are reported to the UI but will not affect the -# Apdex or error rate. To mark specific errors as expected, set this -# to a space separated list of the Python exception type names to -# expected. The exception name should be of the form 'module:class'. -error_collector.expected_classes = - -# Browser monitoring is the Real User Monitoring feature of the UI. -# For those Python web frameworks that are supported, this -# setting enables the auto-insertion of the browser monitoring -# JavaScript fragments. -browser_monitoring.auto_instrument = true - -# A thread profiling session can be scheduled via the UI when -# this option is enabled. The thread profiler will periodically -# capture a snapshot of the call stack for each active thread in -# the application to construct a statistically representative -# call tree. For more details on the thread profiler tool, see -# https://docs.newrelic.com/docs/apm/apm-ui-pages/events/thread-profiler-tool/ -thread_profiler.enabled = true - -# Your application deployments can be recorded through the -# New Relic REST API. To use this feature provide your API key -# below then use the `newrelic-admin record-deploy` command. -# This can also be set using the NEW_RELIC_API_KEY -# environment variable. -# api_key = - -# Distributed tracing lets you see the path that a request takes -# through your distributed system. For more information, please -# consult our distributed tracing planning guide. -# https://docs.newrelic.com/docs/transition-guide-distributed-tracing -distributed_tracing.enabled = true - -# This setting enables log decoration, the forwarding of log events, -# and the collection of logging metrics if these sub-feature -# configurations are also enabled. If this setting is false, no -# logging instrumentation features are enabled. This can also be -# set using the NEW_RELIC_APPLICATION_LOGGING_ENABLED environment -# variable. -# application_logging.enabled = true - -# If true, the agent captures log records emitted by your application -# and forwards them to New Relic. `application_logging.enabled` must -# also be true for this setting to take effect. You can also set -# this using the NEW_RELIC_APPLICATION_LOGGING_FORWARDING_ENABLED -# environment variable. -# application_logging.forwarding.enabled = true - -# If true, the agent decorates logs with metadata to link to entities, -# hosts, traces, and spans. `application_logging.enabled` must also -# be true for this setting to take effect. This can also be set -# using the NEW_RELIC_APPLICATION_LOGGING_LOCAL_DECORATING_ENABLED -# environment variable. -# application_logging.local_decorating.enabled = true - -# If true, the agent captures metrics related to the log lines -# being sent up by your application. This can also be set -# using the NEW_RELIC_APPLICATION_LOGGING_METRICS_ENABLED -# environment variable. 
-# application_logging.metrics.enabled = true - - -# --------------------------------------------------------------------------- - -# -# The application environments. These are specific settings which -# override the common environment settings. The settings related to a -# specific environment will be used when the environment argument to the -# newrelic.agent.initialize() function has been defined to be either -# "development", "test", "staging" or "production". -# - -[newrelic:development] -monitor_mode = false - -[newrelic:test] -monitor_mode = false - -[newrelic:staging] -app_name = g0v0-server (Staging) -monitor_mode = true - -[newrelic:production] -monitor_mode = true - -# ---------------------------------------------------------------------------
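
Because the final patch deletes newrelic.ini from the repository and adds it to .gitignore, each deployment now has to provide its own copy alongside main.py: the agent is initialised only when that file exists, and the environment section is chosen from NEW_RELIC_ENVIRONMENT (falling back to production/development based on the debug setting). A minimal local file might look like the sketch below; the license key is a placeholder (it can also be supplied via the NEW_RELIC_LICENSE_KEY environment variable instead of being written into the file).

    [newrelic]
    ; placeholder - never commit a real ingest license key
    license_key = YOUR_INGEST_LICENSE_KEY
    app_name = g0v0-server
    monitor_mode = true
    log_file = stdout
    log_level = info
    distributed_tracing.enabled = true

    [newrelic:development]
    monitor_mode = false

    [newrelic:production]
    monitor_mode = true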
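
The login log service added earlier in the series is self-contained, but its call site in the password flow is not part of these patches. The sketch below shows one way it could be invoked from a login handler; authenticate_user is a hypothetical helper used only for illustration and is assumed to return a user object with an .id attribute, or None on bad credentials.

    # Sketch only: this wiring is not part of the patch series.
    # "authenticate_user" is a hypothetical credential check, not an existing helper.
    from fastapi import Request
    from sqlmodel.ext.asyncio.session import AsyncSession

    from app.service.login_log_service import LoginLogService


    async def handle_password_login(
        db: AsyncSession, request: Request, username: str, password: str
    ):
        user = await authenticate_user(db, username, password)  # hypothetical helper
        if user is None:
            # Failed attempts are stored with user_id=0 and the attempted
            # username in the notes field.
            await LoginLogService.record_failed_login(
                db, request, attempted_username=username, login_method="password"
            )
            return None

        # On success the service resolves the client IP, User-Agent and GeoIP data
        # itself and commits the UserLoginLog row before returning it.
        await LoginLogService.record_login(
            db, user_id=user.id, request=request, login_method="password"
        )
        return user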