merge upstream
.gitignore
@@ -145,6 +145,7 @@ dmypy.json
cython_debug/

.vscode/*
.vs/*

# Local History for Visual Studio Code
.history/

@@ -1,6 +1,10 @@
# Changelog
Documenting updates to ARTEMiS, to be updated every time the master branch is pushed to.

## 20250327
+ O.N.G.E.K.I. bright MEMORY Act.3 support added
+ CardMaker support updated

## 20240811
### System
+ Change backend from Twisted to Starlette

@@ -1,7 +1,5 @@
from core.config import CoreConfig
from core.allnet import AllnetServlet, BillingServlet
from core.aimedb import AimedbServlette
from core.title import TitleServlet
from core.utils import Utils
from core.mucha import MuchaServlet
from core.frontend import FrontendServlet

@@ -120,7 +120,7 @@ class ADBHeader:
if self.store_id == 0:
raise ADBHeaderException(f"Store ID cannot be 0!")

if re.fullmatch(r"^A[0-9]{2}[E|X][0-9]{2}[A-HJ-NP-Z][0-9]{4}$", self.keychip_id) is None:
if re.fullmatch(r"^A[0-9]{2}[A-Z][0-9]{2}[A-HJ-NP-Z][0-9]{4}$", self.keychip_id) is None:
raise ADBHeaderException(f"Keychip ID {self.keychip_id} is invalid!")

return True

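For illustration only (not part of the upstream diff): the loosened pattern accepts any letter in the fourth position of the keychip ID instead of only E or X. A minimal check with made-up serials:

```python
import re

OLD = r"^A[0-9]{2}[E|X][0-9]{2}[A-HJ-NP-Z][0-9]{4}$"
NEW = r"^A[0-9]{2}[A-Z][0-9]{2}[A-HJ-NP-Z][0-9]{4}$"

for keychip in ("A72E01A8888", "A72C01A8888"):  # hypothetical serials
    print(keychip, bool(re.fullmatch(OLD, keychip)), bool(re.fullmatch(NEW, keychip)))
# A72E01A8888 matches both; A72C01A8888 only matches the new pattern.
```
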
@@ -39,10 +39,10 @@ class ADBFelicaLookupExRequest(ADBBaseRequest):
def __init__(self, data: bytes) -> None:
super().__init__(data)
self.random = struct.unpack_from("<16s", data, 0x20)[0]
idm, pmm = struct.unpack_from(">QQ", data, 0x30)
idm, dfc, self.arbitrary = struct.unpack_from(">QH6s", data, 0x30)
self.card_key_ver, self.write_ct, self.maca, company, fw_ver, self.dfc = struct.unpack_from("<16s16sQccH", data, 0x40)
self.idm = hex(idm)[2:].upper()
self.pmm = hex(pmm)[2:].upper()
self.dfc = hex(dfc)[2:].upper()
self.company = CompanyCodes(int.from_bytes(company, 'little'))
self.fw_ver = ReaderFwVer.from_byte(fw_ver)

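For reference (my own sketch, not from the patch): the new `>QH6s` read takes an 8-byte IDm, a 2-byte DFC, and 6 arbitrary bytes at offset 0x30, replacing the old IDm/PMm pair. A runnable illustration with made-up bytes:

```python
import struct

# 0x30 bytes of header padding, then IDm (8), DFC (2), arbitrary (6) -- all invented values
buf = bytes(0x30) + bytes.fromhex("0102030405060708") + bytes.fromhex("00AA") + bytes.fromhex("DEADBEEFCAFE")
idm, dfc, arbitrary = struct.unpack_from(">QH6s", buf, 0x30)
print(hex(idm)[2:].upper(), hex(dfc)[2:].upper(), arbitrary.hex())
# 102030405060708 AA deadbeefcafe
```
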
@@ -137,7 +137,7 @@ class AimedbServlette():
resp_bytes = resp

elif resp is None: # Nothing to send, probably a goodbye
self.logger.warn(f"None return by handler for {name}")
self.logger.warning(f"None return by handler for {name}")
return

else:

@@ -177,7 +177,7 @@ class AimedbServlette():
async def handle_lookup(self, data: bytes, resp_code: int) -> ADBBaseResponse:
req = ADBLookupRequest(data)
if req.access_code == "00000000000000000000":
self.logger.warn(f"All-zero access code from {req.head.keychip_id}")
self.logger.warning(f"All-zero access code from {req.head.keychip_id}")
ret = ADBLookupResponse.from_req(req.head, -1)
ret.head.status = ADBStatus.BAN_SYS
return ret

@@ -208,7 +208,7 @@ class AimedbServlette():
async def handle_lookup_ex(self, data: bytes, resp_code: int) -> ADBBaseResponse:
req = ADBLookupRequest(data)
if req.access_code == "00000000000000000000":
self.logger.warn(f"All-zero access code from {req.head.keychip_id}")
self.logger.warning(f"All-zero access code from {req.head.keychip_id}")
ret = ADBLookupExResponse.from_req(req.head, -1)
ret.head.status = ADBStatus.BAN_SYS
return ret

@@ -254,7 +254,7 @@ class AimedbServlette():
req = ADBFelicaLookupRequest(data)
idm = req.idm.zfill(16)
if idm == "0000000000000000":
self.logger.warn(f"All-zero IDm from {req.head.keychip_id}")
self.logger.warning(f"All-zero IDm from {req.head.keychip_id}")
ret = ADBFelicaLookupResponse.from_req(req.head, "00000000000000000000")
ret.head.status = ADBStatus.BAN_SYS
return ret

@@ -270,7 +270,7 @@ class AimedbServlette():
ac = card['access_code']

self.logger.info(
f"idm {idm} ipm {req.pmm.zfill(16)} -> access_code {ac}"
f"idm {idm} pmm {req.pmm.zfill(16)} -> access_code {ac}"
)
return ADBFelicaLookupResponse.from_req(req.head, ac)

@@ -283,7 +283,7 @@ class AimedbServlette():
idm = req.idm.zfill(16)

if idm == "0000000000000000":
self.logger.warn(f"All-zero IDm from {req.head.keychip_id}")
self.logger.warning(f"All-zero IDm from {req.head.keychip_id}")
ret = ADBFelicaLookupResponse.from_req(req.head, "00000000000000000000")
ret.head.status = ADBStatus.BAN_SYS
return ret

@@ -323,7 +323,7 @@ class AimedbServlette():
idm = req.idm.zfill(16)

if idm == "0000000000000000":
self.logger.warn(f"All-zero IDm from {req.head.keychip_id}")
self.logger.warning(f"All-zero IDm from {req.head.keychip_id}")
ret = ADBFelicaLookupExResponse.from_req(req.head, -1, "00000000000000000000")
ret.head.status = ADBStatus.BAN_SYS
return ret

@@ -344,7 +344,7 @@ class AimedbServlette():
user_id = -1

self.logger.info(
f"idm {idm} ipm {req.pmm} -> access_code {access_code} user_id {user_id}"
f"idm {idm} dfc {req.dfc} -> access_code {access_code} user_id {user_id}"
)

resp = ADBFelicaLookupExResponse.from_req(req.head, user_id, access_code)

@@ -382,7 +382,7 @@ class AimedbServlette():
user_id = -1

if req.access_code == "00000000000000000000":
self.logger.warn(f"All-zero access code from {req.head.keychip_id}")
self.logger.warning(f"All-zero access code from {req.head.keychip_id}")
ret = ADBLookupResponse.from_req(req.head, -1)
ret.head.status = ADBStatus.BAN_SYS
return ret

core/allnet.py
@@ -7,6 +7,7 @@ import logging
import coloredlogs
import urllib.parse
import math
import random
from typing import Dict, List, Any, Optional, Union, Final
from logging.handlers import TimedRotatingFileHandler
from starlette.requests import Request
@@ -17,7 +18,10 @@ from datetime import datetime
from enum import Enum
from Crypto.PublicKey import RSA
from Crypto.Hash import SHA
from Crypto.Cipher import AES
from Crypto.Util.Padding import pad
from Crypto.Signature import PKCS1_v1_5
import os
from os import path, environ, mkdir, access, W_OK

from .config import CoreConfig

@@ -132,12 +136,29 @@ class AllnetServlet:
async def handle_poweron(self, request: Request):
request_ip = Utils.get_ip_addr(request)
pragma_header = request.headers.get('Pragma', "")
useragent_header = request.headers.get('User-Agent', "")
is_dfi = pragma_header == "DFI"
is_lite = useragent_header[5:] == "Windows/Lite"
lite_id = useragent_header[:4]
data = await request.body()

if not self.config.allnet.allnet_lite_keys and is_lite:
self.logger.error("!!!LITE KEYS NOT SET!!!")
raise AllnetRequestException()
elif is_lite:
for gameids, key in self.config.allnet.allnet_lite_keys.items():
if gameids == lite_id:
litekey = key

if is_lite and "litekey" not in locals():
self.logger.error("!!!UNIQUE LITE KEY NOT FOUND!!!")
raise AllnetRequestException()

try:
if is_dfi:
req_urlencode = self.from_dfi(data)
elif is_lite:
req_urlencode = self.dec_lite(litekey, data[:16], data)
else:
req_urlencode = data

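For context, an assumed shape for the new `allnet_lite_keys` setting (purely illustrative; the real mapping lives in core.yaml and is not shown in this diff): a 4-character title ID mapped to the 16-byte AES key that `dec_lite`/`enc_lite` receive.

```python
# Hypothetical example -- the title ID and key bytes are invented.
allnet_lite_keys = {
    "SXXX": [0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
             0x08, 0x09, 0x0A, 0x0B, 0x0C, 0x0D, 0x0E, 0x0F],
}
```
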
@@ -145,20 +166,30 @@ class AllnetServlet:
|
||||
if req_dict is None:
|
||||
raise AllnetRequestException()
|
||||
|
||||
req = AllnetPowerOnRequest(req_dict[0])
|
||||
if is_lite:
|
||||
req = AllnetPowerOnRequestLite(req_dict[0])
|
||||
else:
|
||||
req = AllnetPowerOnRequest(req_dict[0])
|
||||
# Validate the request. Currently we only validate the fields we plan on using
|
||||
|
||||
if not req.game_id or not req.ver or not req.serial or not req.ip or not req.firm_ver or not req.boot_ver:
|
||||
if not req.game_id or not req.ver or not req.serial or not req.token and is_lite:
|
||||
raise AllnetRequestException(
|
||||
f"Bad auth request params from {request_ip} - {vars(req)}"
|
||||
)
|
||||
elif not is_lite:
|
||||
if not req.game_id or not req.ver or not req.serial or not req.ip or not req.firm_ver or not req.boot_ver:
|
||||
raise AllnetRequestException(
|
||||
f"Bad auth request params from {request_ip} - {vars(req)}"
|
||||
)
|
||||
|
||||
except AllnetRequestException as e:
|
||||
if e.message != "":
|
||||
self.logger.error(e)
|
||||
return PlainTextResponse()
|
||||
|
||||
if req.format_ver == 3:
|
||||
if is_lite:
|
||||
resp = AllnetPowerOnResponseLite(req.token)
|
||||
elif req.format_ver == 3:
|
||||
resp = AllnetPowerOnResponse3(req.token)
|
||||
elif req.format_ver == 2:
|
||||
resp = AllnetPowerOnResponse2()
|
||||
@@ -175,11 +206,14 @@ class AllnetServlet:
|
||||
)
|
||||
self.logger.warning(msg)
|
||||
|
||||
resp.stat = ALLNET_STAT.bad_machine.value
|
||||
if is_lite:
|
||||
resp.result = ALLNET_STAT.bad_machine.value
|
||||
else:
|
||||
resp.stat = ALLNET_STAT.bad_machine.value
|
||||
resp_dict = {k: v for k, v in vars(resp).items() if v is not None}
|
||||
return PlainTextResponse(urllib.parse.unquote(urllib.parse.urlencode(resp_dict)) + "\n")
|
||||
|
||||
if machine is not None:
|
||||
if machine is not None and not is_lite:
|
||||
arcade = await self.data.arcade.get_arcade(machine["arcade"])
|
||||
if self.config.server.check_arcade_ip:
|
||||
if arcade["ip"] and arcade["ip"] is not None and arcade["ip"] != req.ip:
|
||||
@@ -257,7 +291,10 @@ class AllnetServlet:
|
||||
)
|
||||
self.logger.warning(msg)
|
||||
|
||||
resp.stat = ALLNET_STAT.bad_game.value
|
||||
if is_lite:
|
||||
resp.result = ALLNET_STAT.bad_game.value
|
||||
else:
|
||||
resp.stat = ALLNET_STAT.bad_game.value
|
||||
resp_dict = {k: v for k, v in vars(resp).items() if v is not None}
|
||||
return PlainTextResponse(urllib.parse.unquote(urllib.parse.urlencode(resp_dict)) + "\n")
|
||||
|
||||
@@ -265,8 +302,12 @@ class AllnetServlet:
|
||||
self.logger.info(
|
||||
f"Allowed unknown game {req.game_id} v{req.ver} to authenticate from {request_ip} due to 'is_develop' being enabled. S/N: {req.serial}"
|
||||
)
|
||||
resp.uri = f"http://{self.config.server.hostname}:{self.config.server.port}/{req.game_id}/{req.ver.replace('.', '')}/"
|
||||
resp.host = f"{self.config.server.hostname}:{self.config.server.port}"
|
||||
if is_lite:
|
||||
resp.uri1 = f"http://{self.config.server.hostname}:{self.config.server.port}/{req.game_id}/{req.ver.replace('.', '')}/"
|
||||
resp.uri2 = f"{self.config.server.hostname}:{self.config.server.port}"
|
||||
else:
|
||||
resp.uri = f"http://{self.config.server.hostname}:{self.config.server.port}/{req.game_id}/{req.ver.replace('.', '')}/"
|
||||
resp.host = f"{self.config.server.hostname}:{self.config.server.port}"
|
||||
|
||||
resp_dict = {k: v for k, v in vars(resp).items() if v is not None}
|
||||
resp_str = urllib.parse.unquote(urllib.parse.urlencode(resp_dict))
|
||||
@@ -277,10 +318,16 @@ class AllnetServlet:
|
||||
|
||||
int_ver = req.ver.replace(".", "")
|
||||
try:
|
||||
resp.uri, resp.host = TitleServlet.title_registry[req.game_id].get_allnet_info(req.game_id, int(int_ver), req.serial)
|
||||
if is_lite:
|
||||
resp.uri1, resp.uri2 = TitleServlet.title_registry[req.game_id].get_allnet_info(req.game_id, int(int_ver), req.serial)
|
||||
else:
|
||||
resp.uri, resp.host = TitleServlet.title_registry[req.game_id].get_allnet_info(req.game_id, int(int_ver), req.serial)
|
||||
except Exception as e:
|
||||
self.logger.error(f"Error running get_allnet_info for {req.game_id} - {e}")
|
||||
resp.stat = ALLNET_STAT.bad_game.value
|
||||
if is_lite:
|
||||
resp.result = ALLNET_STAT.bad_game.value
|
||||
else:
|
||||
resp.stat = ALLNET_STAT.bad_game.value
|
||||
resp_dict = {k: v for k, v in vars(resp).items() if v is not None}
|
||||
return PlainTextResponse(urllib.parse.unquote(urllib.parse.urlencode(resp_dict)) + "\n")
|
||||
|
||||
@@ -308,18 +355,38 @@ class AllnetServlet:
|
||||
"Pragma": "DFI",
|
||||
},
|
||||
)
|
||||
elif is_lite:
|
||||
iv = bytes([random.randint(2, 255) for _ in range(16)])
|
||||
return PlainTextResponse(content=self.enc_lite(litekey, iv, resp_str))
|
||||
|
||||
return PlainTextResponse(resp_str)
|
||||
|
||||
async def handle_dlorder(self, request: Request):
|
||||
request_ip = Utils.get_ip_addr(request)
|
||||
pragma_header = request.headers.get('Pragma', "")
|
||||
useragent_header = request.headers.get('User-Agent', "")
|
||||
is_dfi = pragma_header == "DFI"
|
||||
is_lite = useragent_header[5:] == "Windows/Lite"
|
||||
lite_id = useragent_header[:4]
|
||||
data = await request.body()
|
||||
|
||||
if not self.config.allnet.allnet_lite_keys and is_lite:
|
||||
self.logger.error("!!!LITE KEYS NOT SET!!!")
|
||||
raise AllnetRequestException()
|
||||
elif is_lite:
|
||||
for gameids, key in self.config.allnet.allnet_lite_keys.items():
|
||||
if gameids == lite_id:
|
||||
litekey = key
|
||||
|
||||
if is_lite and "litekey" not in locals():
|
||||
self.logger.error("!!!UNIQUE LITE KEY NOT FOUND!!!")
|
||||
raise AllnetRequestException()
|
||||
|
||||
try:
|
||||
if is_dfi:
|
||||
req_urlencode = self.from_dfi(data)
|
||||
elif is_lite:
|
||||
req_urlencode = self.dec_lite(litekey, data[:16], data)
|
||||
else:
|
||||
req_urlencode = data.decode()
|
||||
|
||||
@@ -327,7 +394,10 @@ class AllnetServlet:
|
||||
if req_dict is None:
|
||||
raise AllnetRequestException()
|
||||
|
||||
req = AllnetDownloadOrderRequest(req_dict[0])
|
||||
if is_lite:
|
||||
req = AllnetDownloadOrderRequestLite(req_dict[0])
|
||||
else:
|
||||
req = AllnetDownloadOrderRequest(req_dict[0])
|
||||
# Validate the request. Currently we only validate the fields we plan on using
|
||||
|
||||
if not req.game_id or not req.ver or not req.serial:
|
||||
@@ -343,18 +413,38 @@ class AllnetServlet:
|
||||
self.logger.info(
|
||||
f"DownloadOrder from {request_ip} -> {req.game_id} v{req.ver} serial {req.serial}"
|
||||
)
|
||||
resp = AllnetDownloadOrderResponse(serial=req.serial)
|
||||
|
||||
if is_lite:
|
||||
resp = AllnetDownloadOrderResponseLite()
|
||||
else:
|
||||
resp = AllnetDownloadOrderResponse(serial=req.serial)
|
||||
|
||||
if (
|
||||
not self.config.allnet.allow_online_updates
|
||||
or not self.config.allnet.update_cfg_folder
|
||||
):
|
||||
return PlainTextResponse(urllib.parse.unquote(urllib.parse.urlencode(vars(resp))) + "\n")
|
||||
resp = urllib.parse.unquote(urllib.parse.urlencode(vars(resp))) + "\n"
|
||||
if is_dfi:
|
||||
return PlainTextResponse(
|
||||
self.to_dfi(resp) + b"\r\n", headers={ "Pragma": "DFI" }
|
||||
)
|
||||
elif is_lite:
|
||||
iv = bytes([random.randint(2, 255) for _ in range(16)])
|
||||
return PlainTextResponse(content=self.enc_lite(litekey, iv, resp))
|
||||
return PlainTextResponse(resp)
|
||||
|
||||
else:
|
||||
machine = await self.data.arcade.get_machine(req.serial)
|
||||
if not machine or not machine['ota_enable'] or not machine['is_cab'] or machine['is_blacklisted']:
|
||||
return PlainTextResponse(urllib.parse.unquote(urllib.parse.urlencode(vars(resp))) + "\n")
|
||||
if not machine or not machine['ota_enable'] or not machine['is_cab']:
|
||||
resp = urllib.parse.unquote(urllib.parse.urlencode(vars(resp))) + "\n"
|
||||
if is_dfi:
|
||||
return PlainTextResponse(
|
||||
self.to_dfi(resp) + b"\r\n", headers={ "Pragma": "DFI" }
|
||||
)
|
||||
elif is_lite:
|
||||
iv = bytes([random.randint(2, 255) for _ in range(16)])
|
||||
return PlainTextResponse(content=self.enc_lite(litekey, iv, resp))
|
||||
return PlainTextResponse(resp)
|
||||
|
||||
if path.exists(
|
||||
f"{self.config.allnet.update_cfg_folder}/{req.game_id}-{req.ver.replace('.', '')}-app.ini"
|
||||
@@ -383,6 +473,9 @@ class AllnetServlet:
|
||||
"Pragma": "DFI",
|
||||
},
|
||||
)
|
||||
elif is_lite:
|
||||
iv = bytes([random.randint(2, 255) for _ in range(16)])
|
||||
return PlainTextResponse(content=self.enc_lite(litekey, iv, res_str))
|
||||
|
||||
return PlainTextResponse(res_str)
|
||||
|
||||
@@ -507,6 +600,17 @@ class AllnetServlet:
zipped = zlib.compress(unzipped)
return base64.b64encode(zipped)

def dec_lite(self, key, iv, data):
cipher = AES.new(bytes(key), AES.MODE_CBC, iv)
decrypted = cipher.decrypt(data)
return decrypted[16:].decode("utf-8")

def enc_lite(self, key, iv, data):
unencrypted = pad(bytes([0] * 16) + data.encode('utf-8'), 16)
cipher = AES.new(bytes(key), AES.MODE_CBC, iv)
encrypted = cipher.encrypt(unencrypted)
return encrypted

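A round-trip sketch of the Lite transport as implemented above (illustrative only; PyCryptodome assumed, key/IV/payload invented). The zero block prepended by `enc_lite` is the only plaintext block that depends on the IV in CBC, which is why `dec_lite` can use the first ciphertext block as its IV and simply discard the first 16 decrypted bytes:

```python
from Crypto.Cipher import AES
from Crypto.Util.Padding import pad, unpad

key = bytes(range(16))   # hypothetical 16-byte lite key
iv = bytes([7] * 16)     # sender-chosen IV, never transmitted separately

payload = "title_id=SXXX&title_ver=1.00"
ct = AES.new(key, AES.MODE_CBC, iv).encrypt(pad(bytes(16) + payload.encode("utf-8"), 16))

# Receiver side: the first ciphertext block doubles as the IV, and the first
# (garbled) plaintext block is thrown away.
pt = AES.new(key, AES.MODE_CBC, ct[:16]).decrypt(ct)[16:]
print(unpad(pt, 16).decode("utf-8"))  # -> title_id=SXXX&title_ver=1.00
```
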
class BillingServlet:
|
||||
def __init__(self, core_cfg: CoreConfig, cfg_folder: str) -> None:
|
||||
self.config = core_cfg
|
||||
@@ -576,39 +680,19 @@ class BillingServlet:
|
||||
rsa = RSA.import_key(open(self.config.billing.signing_key, "rb").read())
|
||||
signer = PKCS1_v1_5.new(rsa)
|
||||
digest = SHA.new()
|
||||
traces: List[TraceData] = []
|
||||
try:
|
||||
req = BillingInfo(req_dict[0])
|
||||
except KeyError as e:
|
||||
self.logger.error(f"Billing request failed to parse: {e}")
|
||||
return PlainTextResponse("result=5&linelimit=&message=field is missing or formatting is incorrect\r\n")
|
||||
|
||||
for x in range(1, len(req_dict)):
|
||||
if not req_dict[x]:
|
||||
continue
|
||||
|
||||
try:
|
||||
tmp = TraceData(req_dict[x])
|
||||
if tmp.trace_type == TraceDataType.CHARGE:
|
||||
tmp = TraceDataCharge(req_dict[x])
|
||||
elif tmp.trace_type == TraceDataType.EVENT:
|
||||
tmp = TraceDataEvent(req_dict[x])
|
||||
elif tmp.trace_type == TraceDataType.CREDIT:
|
||||
tmp = TraceDataCredit(req_dict[x])
|
||||
|
||||
traces.append(tmp)
|
||||
|
||||
except KeyError as e:
|
||||
self.logger.warn(f"Tracelog failed to parse: {e}")
|
||||
|
||||
kc_serial_bytes = req.keychipid.encode()
|
||||
|
||||
|
||||
machine = await self.data.arcade.get_machine(req.keychipid)
|
||||
if machine is None and not self.config.server.allow_unregistered_serials:
|
||||
msg = f"Unrecognised serial {req.keychipid} attempted billing checkin from {request_ip} for {req.gameid} v{req.gamever}."
|
||||
await self.data.base.log_event(
|
||||
"allnet", "BILLING_CHECKIN_NG_SERIAL", logging.WARN, msg, ip=request_ip, game=req.gameid, version=req.gamever
|
||||
"allnet", "BILLING_CHECKIN_NG_SERIAL", logging.WARN, msg, ip=request_ip, game=req.gameid, version=str(req.gamever)
|
||||
)
|
||||
self.logger.warning(msg)
|
||||
|
||||
@@ -619,18 +703,101 @@ class BillingServlet:
|
||||
"billing_type": req.billingtype.name,
|
||||
"nearfull": req.nearfull,
|
||||
"playlimit": req.playlimit,
|
||||
"messages": []
|
||||
}
|
||||
playhist = "000000/0:000000/0:000000/0"
|
||||
|
||||
if machine is not None:
|
||||
await self.data.base.log_event("billing", "BILLING_CHECKIN_OK", logging.INFO, "", log_details, None, machine['arcade'], machine['id'], request_ip, req.gameid, req.gamever)
|
||||
if self.config.allnet.save_billing:
|
||||
lastcredit = await self.data.arcade.billing_get_last_playcount(machine['id'], req.gameid)
|
||||
if lastcredit is not None:
|
||||
last_playct = lastcredit['playct']
|
||||
else:
|
||||
last_playct = 0
|
||||
|
||||
# Technically if a cab resets its playcount and then does more plays than the previous
# playcount before a billing checkin occurs, we will lose plays equal to the current playcount.
|
||||
if req.playcnt < last_playct: await self.data.arcade.billing_add_playcount(machine['id'], req.gameid, req.playcnt)
|
||||
elif req.playcnt == last_playct: pass # No plays since last checkin, skip update
|
||||
else: await self.data.arcade.billing_add_playcount(machine['id'], req.gameid, req.playcnt - last_playct)
|
||||
|
||||
plays = await self.data.arcade.billing_get_playcount_3mo(machine['id'], req.gameid)
|
||||
if plays is not None and len(plays) > 0:
|
||||
playhist = ""
|
||||
|
||||
for x in range(len(plays) - 1, -1, -1): playhist += f"{plays[x]['year']:04d}{plays[x]['month']:02d}/{plays[x]['playct']}:"
|
||||
playhist = playhist[:-1]
|
||||
|
||||
for x in range(1, len(req_dict)):
|
||||
if not req_dict[x]:
|
||||
continue
|
||||
|
||||
try:
|
||||
tmp = TraceData(req_dict[x])
|
||||
if tmp.trace_type == TraceDataType.CHARGE:
|
||||
tmp = TraceDataCharge(req_dict[x])
|
||||
if self.config.allnet.save_billing:
|
||||
await self.data.arcade.billing_add_charge(
|
||||
machine['id'],
|
||||
tmp.game_id,
|
||||
float(tmp.game_version),
|
||||
tmp.play_count,
|
||||
tmp.play_limit,
|
||||
tmp.product_code,
|
||||
tmp.product_count,
|
||||
tmp.func_type,
|
||||
tmp.player_number
|
||||
)
|
||||
|
||||
self.logger.info(
|
||||
f"Charge Trace from {req.keychipid}: {tmp.game_id} v{tmp.game_version} - player {tmp.player_number} got {tmp.product_count} of {tmp.product_code} func {tmp.func_type}"
|
||||
)
|
||||
|
||||
elif tmp.trace_type == TraceDataType.EVENT:
|
||||
tmp = TraceDataEvent(req_dict[x])
|
||||
log_details['messages'].append(tmp.message)
|
||||
self.logger.info(f"Event Trace from {req.keychipid}: {tmp.message}")
|
||||
|
||||
elif tmp.trace_type == TraceDataType.CREDIT:
|
||||
tmp = TraceDataCredit(req_dict[x])
|
||||
if self.config.allnet.save_billing:
|
||||
await self.data.arcade.billing_set_credit(
|
||||
machine['id'],
|
||||
req.gameid,
|
||||
tmp.chute_type.value,
|
||||
tmp.service_type.value,
|
||||
tmp.operation_type.value,
|
||||
tmp.coin_rate0,
|
||||
tmp.coin_rate1,
|
||||
tmp.bonus_addition,
|
||||
tmp.credit_rate,
|
||||
tmp.credit0,
|
||||
tmp.credit1,
|
||||
tmp.credit2,
|
||||
tmp.credit3,
|
||||
tmp.credit4,
|
||||
tmp.credit5,
|
||||
tmp.credit6,
|
||||
tmp.credit7
|
||||
)
|
||||
|
||||
self.logger.info(
|
||||
f"Credit Trace from {req.keychipid}: {tmp.operation_type} mode, {tmp.credit_rate} coins per credit, breakdown: {tmp.credit0} | {tmp.credit1} | {tmp.credit2} | {tmp.credit3} | {tmp.credit4} | {tmp.credit5} | {tmp.credit6} | {tmp.credit7} | "
|
||||
)
|
||||
|
||||
except KeyError as e:
|
||||
self.logger.warning(f"Tracelog failed to parse: {e}")
|
||||
|
||||
await self.data.base.log_event("billing", "BILLING_CHECKIN_OK", logging.INFO, "", log_details, None, machine['arcade'], machine['id'], request_ip, req.gameid, str(req.gamever))
|
||||
|
||||
self.logger.info(
|
||||
f"Unregistered Billing checkin from {request_ip}: game {req.gameid} ver {req.gamever} keychip {req.keychipid} playcount "
|
||||
f"Billing checkin from {request_ip}: game {req.gameid} ver {req.gamever} keychip {req.keychipid} playcount "
|
||||
f"{req.playcnt} billing_type {req.billingtype.name} nearfull {req.nearfull} playlimit {req.playlimit}"
|
||||
)
|
||||
|
||||
else:
|
||||
log_details['serial'] = req.keychipid
|
||||
await self.data.base.log_event("billing", "BILLING_CHECKIN_OK_UNREG", logging.INFO, "", log_details, None, None, None, request_ip, req.gameid, req.gamever)
|
||||
await self.data.base.log_event("billing", "BILLING_CHECKIN_OK_UNREG", logging.INFO, "", log_details, None, None, None, request_ip, req.gameid, str(req.gamever))
|
||||
|
||||
self.logger.info(
|
||||
f"Unregistered Billing checkin from {request_ip}: game {req.gameid} ver {req.gamever} keychip {req.keychipid} playcount "
|
||||
@@ -638,16 +805,13 @@ class BillingServlet:
|
||||
)
|
||||
|
||||
if req.traceleft > 0:
|
||||
self.logger.warn(f"{req.traceleft} unsent tracelogs")
|
||||
kc_playlimit = req.playlimit
|
||||
kc_nearfull = req.nearfull
|
||||
self.logger.warning(f"{req.traceleft} unsent tracelogs")
|
||||
|
||||
while req.playcnt > req.playlimit:
|
||||
kc_playlimit += 1024
|
||||
kc_nearfull += 1024
|
||||
playlimit = req.playlimit
|
||||
while req.playcnt > playlimit:
|
||||
playlimit += 1024
|
||||
|
||||
playlimit = kc_playlimit
|
||||
nearfull = kc_nearfull + (req.billingtype.value * 0x00010000)
|
||||
nearfull = req.nearfull + (req.billingtype.value * 0x00010000)
|
||||
|
||||
digest.update(playlimit.to_bytes(4, "little") + kc_serial_bytes)
|
||||
playlimit_sig = signer.sign(digest).hex()
|
||||
@@ -656,14 +820,12 @@ class BillingServlet:
|
||||
digest.update(nearfull.to_bytes(4, "little") + kc_serial_bytes)
|
||||
nearfull_sig = signer.sign(digest).hex()
|
||||
|
||||
# TODO: playhistory
|
||||
|
||||
resp = BillingResponse(playlimit, playlimit_sig, nearfull, nearfull_sig, req.requestno, req.protocolver)
|
||||
resp = BillingResponse(playlimit, playlimit_sig, nearfull, nearfull_sig, req.requestno, req.protocolver, playhist)
|
||||
|
||||
resp_str = urllib.parse.unquote(urllib.parse.urlencode(vars(resp))) + "\r\n"
|
||||
|
||||
self.logger.debug(f"response {vars(resp)}")
|
||||
if req.traceleft > 0:
|
||||
if req.traceleft > 0: # TODO: should probably move this up so we don't do a ton of work that doesn't get used
|
||||
self.logger.info(f"Requesting 20 more of {req.traceleft} unsent tracelogs")
|
||||
return PlainTextResponse("result=6&waittime=0&linelimit=20\r\n")
|
||||
|
||||
@@ -705,6 +867,15 @@ class AllnetPowerOnResponse:
|
||||
self.minute = datetime.now().minute
|
||||
self.second = datetime.now().second
|
||||
|
||||
class AllnetPowerOnRequestLite:
|
||||
def __init__(self, req: Dict) -> None:
|
||||
if req is None:
|
||||
raise AllnetRequestException("Request processing failed")
|
||||
self.game_id: str = req.get("title_id", None)
|
||||
self.ver: str = req.get("title_ver", None)
|
||||
self.serial: str = req.get("client_id", None)
|
||||
self.token: str = req.get("token", None)
|
||||
|
||||
class AllnetPowerOnResponse3(AllnetPowerOnResponse):
|
||||
def __init__(self, token) -> None:
|
||||
super().__init__()
|
||||
@@ -736,6 +907,30 @@ class AllnetPowerOnResponse2(AllnetPowerOnResponse):
|
||||
self.timezone = "+09:00"
|
||||
self.res_class = "PowerOnResponseV2"
|
||||
|
||||
class AllnetPowerOnResponseLite:
|
||||
def __init__(self, token) -> None:
|
||||
# Custom Allnet Lite response
|
||||
self.result = 1
|
||||
self.place_id = "0123"
|
||||
self.uri1 = ""
|
||||
self.uri2 = ""
|
||||
self.name = "ARTEMiS"
|
||||
self.nickname = "ARTEMiS"
|
||||
self.setting = "1"
|
||||
self.region0 = "1"
|
||||
self.region_name0 = "W"
|
||||
self.region_name1 = ""
|
||||
self.region_name2 = ""
|
||||
self.region_name3 = ""
|
||||
self.country = "CHN"
|
||||
self.location_type = "1"
|
||||
self.utc_time = datetime.now(tz=pytz.timezone("UTC")).strftime(
|
||||
"%Y-%m-%dT%H:%M:%SZ"
|
||||
)
|
||||
self.client_timezone = "+0800"
|
||||
self.res_ver = "3"
|
||||
self.token = token
|
||||
|
||||
class AllnetDownloadOrderRequest:
|
||||
def __init__(self, req: Dict) -> None:
|
||||
self.game_id = req.get("game_id", "")
|
||||
@@ -743,12 +938,23 @@ class AllnetDownloadOrderRequest:
|
||||
self.serial = req.get("serial", "")
|
||||
self.encode = req.get("encode", "")
|
||||
|
||||
class AllnetDownloadOrderRequestLite:
|
||||
def __init__(self, req: Dict) -> None:
|
||||
self.game_id = req.get("title_id", "")
|
||||
self.ver = req.get("title_ver", "")
|
||||
self.serial = req.get("client_id", "")
|
||||
|
||||
class AllnetDownloadOrderResponse:
|
||||
def __init__(self, stat: int = 1, serial: str = "", uri: str = "") -> None:
|
||||
def __init__(self, stat: int = 1, serial: str = "", uri: str = "null") -> None:
|
||||
self.stat = stat
|
||||
self.serial = serial
|
||||
self.uri = uri
|
||||
|
||||
class AllnetDownloadOrderResponseLite:
|
||||
def __init__(self, result: int = 1, uri: str = "null") -> None:
|
||||
self.result = result
|
||||
self.uri = uri
|
||||
|
||||
class TraceDataType(Enum):
|
||||
CHARGE = 0
|
||||
EVENT = 1
|
||||
@@ -758,14 +964,27 @@ class BillingType(Enum):
A = 1
B = 0

class TraceDataCreditChuteType(Enum):
COMMON = 0
INDIVIDUAL = 1

class TraceDataCreditOperationType(Enum):
COIN = 0
FREEPLAY = 1

class float5:
def __init__(self, n: str = "0") -> None:
def __init__(self, n: str = "0"):
nf = float(n)
if nf > 999.9 or nf < 0:
raise ValueError('float5 must be between 0.000 and 999.9 inclusive')

return nf
self.val = nf

def __float__(self) -> float:
return self.val

def __str__(self) -> str:
return f"%.{2 - int(math.log10(self.val))+1}f" % self.val

@classmethod
def to_str(cls, f: float):
return f"%.{2 - int(math.log10(f))+1}f" % f

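As a sanity check of the formatting rule (a standalone restatement, not code from the patch), `float5` renders version-style values as a five-character string:

```python
import math

def float5_str(val: float) -> str:
    # same expression as float5.__str__ / float5.to_str above
    return f"%.{2 - int(math.log10(val)) + 1}f" % val

for v in (1.0, 12.5, 999.9):
    print(float5_str(v))  # 1.000, 12.50, 999.9
```
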
@@ -776,13 +995,13 @@ class BillingInfo:
|
||||
self.keychipid = str(data.get("keychipid", None))
|
||||
self.functype = int(data.get("functype", None))
|
||||
self.gameid = str(data.get("gameid", None))
|
||||
self.gamever = float(data.get("gamever", None))
|
||||
self.gamever = float5(data.get("gamever", None))
|
||||
self.boardid = str(data.get("boardid", None))
|
||||
self.tenpoip = str(data.get("tenpoip", None))
|
||||
self.libalibver = float(data.get("libalibver", None))
|
||||
self.libalibver = float5(data.get("libalibver", None))
|
||||
self.datamax = int(data.get("datamax", None))
|
||||
self.billingtype = BillingType(int(data.get("billingtype", None)))
|
||||
self.protocolver = float(data.get("protocolver", None))
|
||||
self.protocolver = float5(data.get("protocolver", None))
|
||||
self.operatingfix = bool(data.get("operatingfix", None))
|
||||
self.traceleft = int(data.get("traceleft", None))
|
||||
self.requestno = int(data.get("requestno", None))
|
||||
@@ -815,7 +1034,7 @@ class TraceData:
|
||||
self.date = datetime.strptime(data.get("dt", None), BILLING_DT_FORMAT)
|
||||
|
||||
self.keychip = str(data.get("kn", None))
|
||||
self.lib_ver = float(data.get("alib", 0))
|
||||
self.lib_ver = float5(data.get("alib", 0))
|
||||
except Exception as e:
|
||||
raise KeyError(e)
|
||||
|
||||
@@ -824,7 +1043,7 @@ class TraceDataCharge(TraceData):
|
||||
super().__init__(data)
|
||||
try:
|
||||
self.game_id = str(data.get("gi", None)) # these seem optional...?
|
||||
self.game_version = float(data.get("gv", 0))
|
||||
self.game_version = float5(data.get("gv", 0))
|
||||
self.board_serial = str(data.get("bn", None))
|
||||
self.shop_ip = str(data.get("ti", None))
|
||||
self.play_count = int(data.get("pc", None))
|
||||
@@ -848,9 +1067,9 @@ class TraceDataCredit(TraceData):
|
||||
def __init__(self, data: Dict) -> None:
|
||||
super().__init__(data)
|
||||
try:
|
||||
self.chute_type = int(data.get("cct", None))
|
||||
self.service_type = int(data.get("cst", None))
|
||||
self.operation_type = int(data.get("cop", None))
|
||||
self.chute_type = TraceDataCreditChuteType(int(data.get("cct", None)))
|
||||
self.service_type = TraceDataCreditChuteType(int(data.get("cst", None)))
|
||||
self.operation_type = TraceDataCreditOperationType(int(data.get("cop", None)))
|
||||
self.coin_rate0 = int(data.get("cr0", None))
|
||||
self.coin_rate1 = int(data.get("cr1", None))
|
||||
self.bonus_addition = int(data.get("cba", None))
|
||||
@@ -874,7 +1093,7 @@ class BillingResponse:
|
||||
nearfull: str = "",
|
||||
nearfull_sig: str = "",
|
||||
request_num: int = 1,
|
||||
protocol_ver: float = 1.000,
|
||||
protocol_ver: float5 = float5("1.000"),
|
||||
playhistory: str = "000000/0:000000/0:000000/0",
|
||||
) -> None:
|
||||
self.result = 0
|
||||
@@ -888,7 +1107,7 @@ class BillingResponse:
|
||||
self.nearfull = nearfull
|
||||
self.nearfullsig = nearfull_sig
|
||||
self.linelimit = 100
|
||||
self.protocolver = float5.to_str(protocol_ver)
|
||||
self.protocolver = str(protocol_ver)
# playhistory -> YYYYMM/C:...
# YYYY -> 4 digit year, MM -> 2 digit month, C -> Playcount during that period

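An illustrative (made-up) value in that format, three `YYYYMM/playcount` entries joined by colons, as the billing handler's three-month history loop builds it:

```python
playhist = "202502/120:202503/98:202504/34"  # hypothetical three-month history
```
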
@@ -987,7 +1206,9 @@ app_billing = Starlette(
|
||||
allnet = AllnetServlet(cfg, cfg_dir)
|
||||
route_lst = [
|
||||
Route("/sys/servlet/PowerOn", allnet.handle_poweron, methods=["GET", "POST"]),
|
||||
Route("/net/initialize", allnet.handle_poweron, methods=["GET", "POST"]),
|
||||
Route("/sys/servlet/DownloadOrder", allnet.handle_dlorder, methods=["GET", "POST"]),
|
||||
Route("/net/delivery/instruction", allnet.handle_dlorder, methods=["GET", "POST"]),
|
||||
Route("/sys/servlet/LoaderStateRecorder", allnet.handle_loaderstaterecorder, methods=["GET", "POST"]),
|
||||
Route("/sys/servlet/Alive", allnet.handle_alive, methods=["GET", "POST"]),
|
||||
Route("/naomitest.html", allnet.handle_naomitest),
|
||||
|
||||
core/app.py
@@ -9,7 +9,9 @@ from starlette.responses import PlainTextResponse
|
||||
from os import environ, path, mkdir, W_OK, access
|
||||
from typing import List
|
||||
|
||||
from core import CoreConfig, TitleServlet, MuchaServlet, AllnetServlet, BillingServlet, AimedbServlette
|
||||
from core import CoreConfig, TitleServlet, MuchaServlet
|
||||
from core.allnet import AllnetServlet, BillingServlet
|
||||
from core.chimedb import ChimeServlet
|
||||
from core.frontend import FrontendServlet
|
||||
|
||||
async def dummy_rt(request: Request):
|
||||
@@ -74,7 +76,9 @@ if not cfg.allnet.standalone:
|
||||
allnet = AllnetServlet(cfg, cfg_dir)
|
||||
route_lst += [
|
||||
Route("/sys/servlet/PowerOn", allnet.handle_poweron, methods=["GET", "POST"]),
|
||||
Route("/net/initialize", allnet.handle_poweron, methods=["GET", "POST"]),
|
||||
Route("/sys/servlet/DownloadOrder", allnet.handle_dlorder, methods=["GET", "POST"]),
|
||||
Route("/net/delivery/instruction", allnet.handle_dlorder, methods=["GET", "POST"]),
|
||||
Route("/sys/servlet/LoaderStateRecorder", allnet.handle_loaderstaterecorder, methods=["GET", "POST"]),
|
||||
Route("/sys/servlet/Alive", allnet.handle_alive, methods=["GET", "POST"]),
|
||||
Route("/naomitest.html", allnet.handle_naomitest),
|
||||
@@ -86,6 +90,14 @@ if not cfg.allnet.standalone:
|
||||
Route("/dl/ini/{file:str}", allnet.handle_dlorder_ini),
|
||||
]
|
||||
|
||||
if cfg.chimedb.enable:
|
||||
chimedb = ChimeServlet(cfg, cfg_dir)
|
||||
route_lst += [
|
||||
Route("/wc_aime/api/alive_check", chimedb.handle_qr_alive, methods=["POST"]),
|
||||
Route("/qrcode/api/alive_check", chimedb.handle_qr_alive, methods=["POST"]),
|
||||
Route("/wc_aime/api/get_data", chimedb.handle_qr_lookup, methods=["POST"])
|
||||
]
|
||||
|
||||
for code, game in title.title_registry.items():
|
||||
route_lst += game.get_routes()
|
||||
|
||||
|
||||
core/chimedb.py (new file)
@@ -0,0 +1,139 @@
|
||||
import hashlib
|
||||
import json
|
||||
import logging
|
||||
from enum import Enum
|
||||
from logging.handlers import TimedRotatingFileHandler
|
||||
|
||||
import coloredlogs
|
||||
from starlette.responses import PlainTextResponse
|
||||
from starlette.requests import Request
|
||||
|
||||
from core.config import CoreConfig
|
||||
from core.data import Data
|
||||
|
||||
class ChimeDBStatus(Enum):
|
||||
NONE = 0
|
||||
READER_SETUP_FAIL = 1
|
||||
READER_ACCESS_FAIL = 2
|
||||
READER_INCOMPATIBLE = 3
|
||||
DB_RESOLVE_FAIL = 4
|
||||
DB_ACCESS_TIMEOUT = 5
|
||||
DB_ACCESS_FAIL = 6
|
||||
AIME_ID_INVALID = 7
|
||||
NO_BOARD_INFO = 8
|
||||
LOCK_BAN_SYSTEM_USER = 9
|
||||
LOCK_BAN_SYSTEM = 10
|
||||
LOCK_BAN_USER = 11
|
||||
LOCK_BAN = 12
|
||||
LOCK_SYSTEM_USER = 13
|
||||
LOCK_SYSTEM = 14
|
||||
LOCK_USER = 15
|
||||
|
||||
class ChimeServlet:
|
||||
def __init__(self, core_cfg: CoreConfig, cfg_folder: str) -> None:
|
||||
self.config = core_cfg
|
||||
self.config_folder = cfg_folder
|
||||
|
||||
self.data = Data(core_cfg)
|
||||
|
||||
self.logger = logging.getLogger("chimedb")
|
||||
if not hasattr(self.logger, "initted"):
|
||||
log_fmt_str = "[%(asctime)s] Chimedb | %(levelname)s | %(message)s"
|
||||
log_fmt = logging.Formatter(log_fmt_str)
|
||||
|
||||
fileHandler = TimedRotatingFileHandler(
|
||||
"{0}/{1}.log".format(self.config.server.log_dir, "chimedb"),
|
||||
when="d",
|
||||
backupCount=10,
|
||||
)
|
||||
fileHandler.setFormatter(log_fmt)
|
||||
|
||||
consoleHandler = logging.StreamHandler()
|
||||
consoleHandler.setFormatter(log_fmt)
|
||||
|
||||
self.logger.addHandler(fileHandler)
|
||||
self.logger.addHandler(consoleHandler)
|
||||
|
||||
self.logger.setLevel(self.config.aimedb.loglevel)
|
||||
coloredlogs.install(
|
||||
level=core_cfg.aimedb.loglevel, logger=self.logger, fmt=log_fmt_str
|
||||
)
|
||||
self.logger.initted = True
|
||||
|
||||
if not core_cfg.chimedb.key:
|
||||
self.logger.error("!!!KEY NOT SET!!!")
|
||||
exit(1)
|
||||
|
||||
self.logger.info("Serving")
|
||||
|
||||
async def handle_qr_alive(self, request: Request):
|
||||
return PlainTextResponse("alive")
|
||||
|
||||
async def handle_qr_lookup(self, request: Request) -> bytes:
|
||||
req = json.loads(await request.body())
|
||||
access_code = req["qrCode"][-20:]
|
||||
timestamp = req["timestamp"]
|
||||
|
||||
try:
|
||||
userId = await self._lookup(access_code)
|
||||
data = json.dumps({
|
||||
"userID": userId,
|
||||
"errorID": 0,
|
||||
"timestamp": timestamp,
|
||||
"key": self._hash_key(userId, timestamp)
|
||||
})
|
||||
except Exception as e:
|
||||
|
||||
self.logger.error(e.with_traceback(None))
|
||||
|
||||
data = json.dumps({
|
||||
"userID": -1,
|
||||
"errorID": ChimeDBStatus.DB_ACCESS_FAIL,
|
||||
"timestamp": timestamp,
|
||||
"key": self._hash_key(-1, timestamp)
|
||||
})
|
||||
|
||||
return PlainTextResponse(data)
|
||||
|
||||
def _hash_key(self, chip_id, timestamp):
input_string = f"{chip_id}{timestamp}{self.config.chimedb.key}"
hash_object = hashlib.sha256(input_string.encode('utf-8'))
hex_dig = hash_object.hexdigest()

formatted_hex = format(int(hex_dig, 16), '064x').upper()

return formatted_hex

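A note on the digest step (my observation, not upstream code): `hashlib.sha256(...).hexdigest()` is already a 64-character hex string, so the `format(int(..., 16), '064x')` round trip only normalizes case. An equivalent standalone sketch:

```python
import hashlib

def hash_key(chip_id, timestamp, secret):
    # same result as _hash_key above: sha256 hex digest, uppercased
    return hashlib.sha256(f"{chip_id}{timestamp}{secret}".encode("utf-8")).hexdigest().upper()

print(hash_key(1234, 1700000000, "example-key"))  # all inputs invented
```
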
async def _lookup(self, access_code):
|
||||
user_id = await self.data.card.get_user_id_from_card(access_code)
|
||||
|
||||
self.logger.info(f"access_code {access_code} -> user_id {user_id}")
|
||||
|
||||
if not user_id or user_id <= 0:
|
||||
user_id = await self._register(access_code)
|
||||
|
||||
return user_id
|
||||
|
||||
async def _register(self, access_code):
|
||||
user_id = -1
|
||||
|
||||
if self.config.server.allow_user_registration:
|
||||
user_id = await self.data.user.create_user()
|
||||
|
||||
if user_id is None:
|
||||
self.logger.error("Failed to register user!")
|
||||
user_id = -1
|
||||
else:
|
||||
card_id = await self.data.card.create_card(user_id, access_code)
|
||||
|
||||
if card_id is None:
|
||||
self.logger.error("Failed to register card!")
|
||||
user_id = -1
|
||||
|
||||
self.logger.info(
|
||||
f"Register access code {access_code} -> user_id {user_id}"
|
||||
)
|
||||
else:
|
||||
self.logger.info(f"Registration blocked!: access code {access_code}")
|
||||
|
||||
return user_id
|
||||
core/config.py
@@ -1,5 +1,9 @@
|
||||
import logging, os
|
||||
from typing import Any
|
||||
import logging
|
||||
import os
|
||||
import ssl
|
||||
from typing import Any, Union, Dict
|
||||
|
||||
from typing_extensions import Optional
|
||||
|
||||
class ServerConfig:
|
||||
def __init__(self, parent_config: "CoreConfig") -> None:
|
||||
@@ -176,6 +180,60 @@ class DatabaseConfig:
|
||||
self.__config, "core", "database", "protocol", default="mysql"
|
||||
)
|
||||
|
||||
@property
|
||||
def ssl_enabled(self) -> bool:
|
||||
return CoreConfig.get_config_field(
|
||||
self.__config, "core", "database", "ssl_enabled", default=False
|
||||
)
|
||||
|
||||
@property
|
||||
def ssl_cafile(self) -> Optional[str]:
|
||||
return CoreConfig.get_config_field(
|
||||
self.__config, "core", "database", "ssl_cafile", default=None
|
||||
)
|
||||
|
||||
@property
|
||||
def ssl_capath(self) -> Optional[str]:
|
||||
return CoreConfig.get_config_field(
|
||||
self.__config, "core", "database", "ssl_capath", default=None
|
||||
)
|
||||
|
||||
@property
|
||||
def ssl_cert(self) -> Optional[str]:
|
||||
return CoreConfig.get_config_field(
|
||||
self.__config, "core", "database", "ssl_cert", default=None
|
||||
)
|
||||
|
||||
@property
|
||||
def ssl_key(self) -> Optional[str]:
|
||||
return CoreConfig.get_config_field(
|
||||
self.__config, "core", "database", "ssl_key", default=None
|
||||
)
|
||||
|
||||
@property
|
||||
def ssl_key_password(self) -> Optional[str]:
|
||||
return CoreConfig.get_config_field(
|
||||
self.__config, "core", "database", "ssl_key_password", default=None
|
||||
)
|
||||
|
||||
@property
|
||||
def ssl_verify_identity(self) -> bool:
|
||||
return CoreConfig.get_config_field(
|
||||
self.__config, "core", "database", "ssl_verify_identity", default=True
|
||||
)
|
||||
|
||||
@property
|
||||
def ssl_verify_cert(self) -> Optional[Union[str, bool]]:
|
||||
return CoreConfig.get_config_field(
|
||||
self.__config, "core", "database", "ssl_verify_cert", default=None
|
||||
)
|
||||
|
||||
@property
|
||||
def ssl_ciphers(self) -> Optional[str]:
|
||||
return CoreConfig.get_config_field(
|
||||
self.__config, "core", "database", "ssl_ciphers", default=None
|
||||
)
|
||||
|
||||
@property
|
||||
def sha2_password(self) -> bool:
|
||||
return CoreConfig.get_config_field(
|
||||
@@ -202,6 +260,53 @@ class DatabaseConfig:
|
||||
self.__config, "core", "database", "memcached_host", default="localhost"
|
||||
)
|
||||
|
||||
def create_ssl_context_if_enabled(self):
|
||||
if not self.ssl_enabled:
|
||||
return
|
||||
|
||||
no_ca = (
|
||||
self.ssl_cafile is None
|
||||
and self.ssl_capath is None
|
||||
)
|
||||
|
||||
ctx = ssl.create_default_context(
|
||||
cafile=self.ssl_cafile,
|
||||
capath=self.ssl_capath,
|
||||
)
|
||||
ctx.check_hostname = not no_ca and self.ssl_verify_identity
|
||||
|
||||
if self.ssl_verify_cert is None:
|
||||
ctx.verify_mode = ssl.CERT_NONE if no_ca else ssl.CERT_REQUIRED
|
||||
elif isinstance(self.ssl_verify_cert, bool):
|
||||
ctx.verify_mode = (
|
||||
ssl.CERT_REQUIRED
|
||||
if self.ssl_verify_cert
|
||||
else ssl.CERT_NONE
|
||||
)
|
||||
elif isinstance(self.ssl_verify_cert, str):
|
||||
value = self.ssl_verify_cert.lower()
|
||||
|
||||
if value in ("none", "0", "false", "no"):
|
||||
ctx.verify_mode = ssl.CERT_NONE
|
||||
elif value == "optional":
|
||||
ctx.verify_mode = ssl.CERT_OPTIONAL
|
||||
elif value in ("required", "1", "true", "yes"):
|
||||
ctx.verify_mode = ssl.CERT_REQUIRED
|
||||
else:
|
||||
ctx.verify_mode = ssl.CERT_NONE if no_ca else ssl.CERT_REQUIRED
|
||||
|
||||
if self.ssl_cert:
|
||||
ctx.load_cert_chain(
|
||||
self.ssl_cert,
|
||||
self.ssl_key,
|
||||
self.ssl_key_password,
|
||||
)
|
||||
|
||||
if self.ssl_ciphers:
|
||||
ctx.set_ciphers(self.ssl_ciphers)
|
||||
|
||||
return ctx
|
||||
|
||||
class FrontendConfig:
|
||||
def __init__(self, parent_config: "CoreConfig") -> None:
|
||||
self.__config = parent_config
|
||||
@@ -257,7 +362,7 @@ class AllnetConfig:
|
||||
)
|
||||
|
||||
@property
|
||||
def allow_online_updates(self) -> int:
|
||||
def allow_online_updates(self) -> bool:
|
||||
return CoreConfig.get_config_field(
|
||||
self.__config, "core", "allnet", "allow_online_updates", default=False
|
||||
)
|
||||
@@ -268,6 +373,17 @@ class AllnetConfig:
|
||||
self.__config, "core", "allnet", "update_cfg_folder", default=""
|
||||
)
|
||||
|
||||
@property
|
||||
def save_billing(self) -> bool:
|
||||
return CoreConfig.get_config_field(
|
||||
self.__config, "core", "allnet", "save_billing", default=False
|
||||
)
|
||||
@property
|
||||
def allnet_lite_keys(self) -> Dict:
|
||||
return CoreConfig.get_config_field(
|
||||
self.__config, "core", "allnet", "allnet_lite_keys", default={}
|
||||
)
|
||||
|
||||
class BillingConfig:
|
||||
def __init__(self, parent_config: "CoreConfig") -> None:
|
||||
self.__config = parent_config
|
||||
@@ -358,6 +474,28 @@ class AimedbConfig:
|
||||
self.__config, "core", "aimedb", "id_lifetime_seconds", default=86400
|
||||
)
|
||||
|
||||
class ChimedbConfig:
|
||||
def __init__(self, parent_config: "CoreConfig") -> None:
|
||||
self.__config = parent_config
|
||||
|
||||
@property
|
||||
def enable(self) -> bool:
|
||||
return CoreConfig.get_config_field(
|
||||
self.__config, "core", "chimedb", "enable", default=True
|
||||
)
|
||||
@property
|
||||
def loglevel(self) -> int:
|
||||
return CoreConfig.str_to_loglevel(
|
||||
CoreConfig.get_config_field(
|
||||
self.__config, "core", "chimedb", "loglevel", default="info"
|
||||
)
|
||||
)
|
||||
@property
|
||||
def key(self) -> str:
|
||||
return CoreConfig.get_config_field(
|
||||
self.__config, "core", "chimedb", "key", default=""
|
||||
)
|
||||
|
||||
class MuchaConfig:
|
||||
def __init__(self, parent_config: "CoreConfig") -> None:
|
||||
self.__config = parent_config
|
||||
@@ -379,6 +517,7 @@ class CoreConfig(dict):
|
||||
self.allnet = AllnetConfig(self)
|
||||
self.billing = BillingConfig(self)
|
||||
self.aimedb = AimedbConfig(self)
|
||||
self.chimedb = ChimedbConfig(self)
|
||||
self.mucha = MuchaConfig(self)
|
||||
|
||||
@classmethod
|
||||
|
||||
@@ -45,6 +45,14 @@ class AllnetCountryCode(Enum):
|
||||
SOUTH_KOREA = "KOR"
|
||||
TAIWAN = "TWN"
|
||||
CHINA = "CHN"
|
||||
AUSTRALIA = "AUS"
|
||||
INDONESIA = "IDN"
|
||||
MYANMAR = "MMR"
|
||||
MALAYSIA = "MYS"
|
||||
NEW_ZEALAND = "NZL"
|
||||
PHILIPPINES = "PHL"
|
||||
THAILAND = "THA"
|
||||
VIETNAM = "VNM"
|
||||
|
||||
|
||||
class AllnetJapanRegionId(Enum):
|
||||
|
||||
@@ -1,8 +1,18 @@
|
||||
from __future__ import with_statement
|
||||
from alembic import context
|
||||
from sqlalchemy import engine_from_config, pool
|
||||
|
||||
import asyncio
|
||||
import os
|
||||
from pathlib import Path
|
||||
import threading
|
||||
from logging.config import fileConfig
|
||||
|
||||
import yaml
|
||||
from alembic import context
|
||||
from sqlalchemy import pool
|
||||
from sqlalchemy.engine import Connection
|
||||
from sqlalchemy.ext.asyncio import async_engine_from_config
|
||||
|
||||
from core.config import CoreConfig
|
||||
from core.data.schema.base import metadata
|
||||
|
||||
# this is the Alembic Config object, which provides
|
||||
@@ -37,20 +47,29 @@ def run_migrations_offline():
|
||||
script output.
|
||||
|
||||
"""
|
||||
raise Exception('Not implemented or configured!')
|
||||
raise Exception("Not implemented or configured!")
|
||||
|
||||
url = config.get_main_option("sqlalchemy.url")
|
||||
context.configure(
|
||||
url=url, target_metadata=target_metadata, literal_binds=True)
|
||||
context.configure(url=url, target_metadata=target_metadata, literal_binds=True)
|
||||
|
||||
with context.begin_transaction():
|
||||
context.run_migrations()
|
||||
|
||||
|
||||
def run_migrations_online():
|
||||
"""Run migrations in 'online' mode.
|
||||
def do_run_migrations(connection: Connection) -> None:
|
||||
context.configure(
|
||||
connection=connection,
|
||||
target_metadata=target_metadata,
|
||||
compare_type=True,
|
||||
compare_server_default=True,
|
||||
)
|
||||
|
||||
In this scenario we need to create an Engine
|
||||
with context.begin_transaction():
|
||||
context.run_migrations()
|
||||
|
||||
|
||||
async def run_async_migrations() -> None:
|
||||
"""In this scenario we need to create an Engine
|
||||
and associate a connection with the context.
|
||||
|
||||
"""
|
||||
@@ -59,21 +78,42 @@ def run_migrations_online():
|
||||
for override in overrides:
|
||||
ini_section[override] = overrides[override]
|
||||
|
||||
connectable = engine_from_config(
|
||||
core_config = CoreConfig()
|
||||
|
||||
with (Path("../../..") / os.environ["ARTEMIS_CFG_DIR"] / "core.yaml").open(encoding="utf-8") as f:
|
||||
core_config.update(yaml.safe_load(f))
|
||||
|
||||
connectable = async_engine_from_config(
|
||||
ini_section,
|
||||
prefix='sqlalchemy.',
|
||||
poolclass=pool.NullPool)
|
||||
poolclass=pool.NullPool,
|
||||
connect_args={
|
||||
"charset": "utf8mb4",
|
||||
"ssl": core_config.database.create_ssl_context_if_enabled(),
|
||||
}
|
||||
)
|
||||
|
||||
with connectable.connect() as connection:
|
||||
context.configure(
|
||||
connection=connection,
|
||||
target_metadata=target_metadata,
|
||||
compare_type=True,
|
||||
compare_server_default=True,
|
||||
)
|
||||
async with connectable.connect() as connection:
|
||||
await connection.run_sync(do_run_migrations)
|
||||
|
||||
await connectable.dispose()
|
||||
|
||||
|
||||
def run_migrations_online():
|
||||
try:
|
||||
loop = asyncio.get_running_loop()
|
||||
except RuntimeError:
|
||||
# there's no event loop
|
||||
asyncio.run(run_async_migrations())
|
||||
else:
|
||||
# there's currently an event loop and trying to wait for a coroutine
|
||||
# to finish without using `await` is pretty wormy. nested event loops
|
||||
# are explicitly forbidden by asyncio.
|
||||
#
|
||||
# take the easy way out, spawn it in another thread.
|
||||
thread = threading.Thread(target=asyncio.run, args=(run_async_migrations(),))
|
||||
thread.start()
|
||||
thread.join()
|
||||
|
||||
with context.begin_transaction():
|
||||
context.run_migrations()
|
||||
|
||||
if context.is_offline_mode():
|
||||
run_migrations_offline()
|
||||
|
||||
@@ -0,0 +1,30 @@
|
||||
"""ONGEKI update ongeki_static_tech_music_uk
|
||||
|
||||
Revision ID: 1d0014d35220
|
||||
Revises: 91c682918b67
|
||||
Create Date: 2025-03-26 20:44:55.590992
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects import mysql
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '1d0014d35220'
|
||||
down_revision = '91c682918b67'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.drop_constraint('ongeki_static_tech_music_uk', 'ongeki_static_tech_music', type_='unique')
|
||||
op.create_unique_constraint('ongeki_static_tech_music_uk', 'ongeki_static_tech_music', ['version', 'eventId', 'musicId', 'level'])
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.drop_constraint('ongeki_static_tech_music_uk', 'ongeki_static_tech_music', type_='unique')
|
||||
op.create_unique_constraint('ongeki_static_tech_music_uk', 'ongeki_static_tech_music', ['version', 'musicId'])
|
||||
# ### end Alembic commands ###
|
||||
core/data/alembic/versions/263884e774cc_acc_opt_tables.py (new file)
@@ -0,0 +1,164 @@
|
||||
"""acc_opt_tables
|
||||
|
||||
Revision ID: 263884e774cc
|
||||
Revises: 1d0014d35220
|
||||
Create Date: 2025-04-07 18:05:53.349320
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects import mysql
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '263884e774cc'
|
||||
down_revision = '1d0014d35220'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.create_table('chuni_static_opt',
|
||||
sa.Column('id', sa.BIGINT(), nullable=False),
|
||||
sa.Column('version', sa.INTEGER(), nullable=False),
|
||||
sa.Column('name', sa.VARCHAR(length=4), nullable=False),
|
||||
sa.Column('sequence', sa.INTEGER(), nullable=False),
|
||||
sa.Column('whenRead', sa.TIMESTAMP(), server_default=sa.text('now()'), nullable=False),
|
||||
sa.Column('isEnable', sa.BOOLEAN(), server_default='1', nullable=False),
|
||||
sa.PrimaryKeyConstraint('id'),
|
||||
sa.UniqueConstraint('version', 'name', name='chuni_static_opt_uk'),
|
||||
mysql_charset='utf8mb4'
|
||||
)
|
||||
op.create_table('cm_static_opts',
|
||||
sa.Column('id', sa.BIGINT(), nullable=False),
|
||||
sa.Column('version', sa.INTEGER(), nullable=False),
|
||||
sa.Column('name', sa.VARCHAR(length=4), nullable=False),
|
||||
sa.Column('sequence', sa.INTEGER(), nullable=True),
|
||||
sa.Column('gekiVersion', sa.INTEGER(), nullable=True),
|
||||
sa.Column('gekiReleaseVer', sa.INTEGER(), nullable=True),
|
||||
sa.Column('maiVersion', sa.INTEGER(), nullable=True),
|
||||
sa.Column('maiReleaseVer', sa.INTEGER(), nullable=True),
|
||||
sa.Column('whenRead', sa.TIMESTAMP(), server_default=sa.text('now()'), nullable=False),
|
||||
sa.Column('isEnable', sa.BOOLEAN(), server_default='1', nullable=False),
|
||||
sa.PrimaryKeyConstraint('id'),
|
||||
sa.UniqueConstraint('version', 'name', name='cm_static_opts_uk'),
|
||||
mysql_charset='utf8mb4'
|
||||
)
|
||||
op.create_table('mai2_static_opt',
|
||||
sa.Column('id', sa.BIGINT(), nullable=False),
|
||||
sa.Column('version', sa.INTEGER(), nullable=False),
|
||||
sa.Column('name', sa.VARCHAR(length=4), nullable=False),
|
||||
sa.Column('sequence', sa.INTEGER(), nullable=False),
|
||||
sa.Column('cmReleaseVer', sa.INTEGER(), nullable=False),
|
||||
sa.Column('whenRead', sa.TIMESTAMP(), server_default=sa.text('now()'), nullable=False),
|
||||
sa.Column('isEnable', sa.BOOLEAN(), server_default='1', nullable=False),
|
||||
sa.PrimaryKeyConstraint('id'),
|
||||
sa.UniqueConstraint('version', 'name', name='mai2_static_opt_uk'),
|
||||
mysql_charset='utf8mb4'
|
||||
)
|
||||
op.create_table('ongeki_static_opt',
|
||||
sa.Column('id', sa.BIGINT(), nullable=False),
|
||||
sa.Column('version', sa.INTEGER(), nullable=False),
|
||||
sa.Column('name', sa.VARCHAR(length=4), nullable=False),
|
||||
sa.Column('sequence', sa.INTEGER(), nullable=False),
|
||||
sa.Column('cmReleaseVer', sa.INTEGER(), nullable=False),
|
||||
sa.Column('whenRead', sa.TIMESTAMP(), server_default=sa.text('now()'), nullable=False),
|
||||
sa.Column('isEnable', sa.BOOLEAN(), server_default='1', nullable=False),
|
||||
sa.PrimaryKeyConstraint('id'),
|
||||
sa.UniqueConstraint('version', 'name', name='ongeki_static_opt_uk'),
|
||||
mysql_charset='utf8mb4'
|
||||
)
|
||||
op.add_column('chuni_static_avatar', sa.Column('opt', sa.BIGINT(), nullable=True))
|
||||
op.create_foreign_key(None, 'chuni_static_avatar', 'chuni_static_opt', ['opt'], ['id'], onupdate='cascade', ondelete='SET NULL')
|
||||
op.add_column('chuni_static_cards', sa.Column('opt', sa.BIGINT(), nullable=True))
|
||||
op.create_foreign_key(None, 'chuni_static_cards', 'cm_static_opts', ['opt'], ['id'], onupdate='cascade', ondelete='SET NULL')
|
||||
op.add_column('chuni_static_character', sa.Column('opt', sa.BIGINT(), nullable=True))
|
||||
op.create_foreign_key(None, 'chuni_static_character', 'chuni_static_opt', ['opt'], ['id'], onupdate='cascade', ondelete='SET NULL')
|
||||
op.add_column('chuni_static_charge', sa.Column('opt', sa.BIGINT(), nullable=True))
|
||||
op.create_foreign_key(None, 'chuni_static_charge', 'chuni_static_opt', ['opt'], ['id'], onupdate='cascade', ondelete='SET NULL')
|
||||
op.add_column('chuni_static_events', sa.Column('opt', sa.BIGINT(), nullable=True))
|
||||
op.create_foreign_key(None, 'chuni_static_events', 'chuni_static_opt', ['opt'], ['id'], onupdate='cascade', ondelete='SET NULL')
|
||||
op.add_column('chuni_static_gachas', sa.Column('opt', sa.BIGINT(), nullable=True))
|
||||
op.create_foreign_key(None, 'chuni_static_gachas', 'cm_static_opts', ['opt'], ['id'], onupdate='cascade', ondelete='SET NULL')
|
||||
op.add_column('chuni_static_login_bonus', sa.Column('opt', sa.BIGINT(), nullable=True))
|
||||
op.create_foreign_key(None, 'chuni_static_login_bonus', 'chuni_static_opt', ['opt'], ['id'], onupdate='cascade', ondelete='SET NULL')
|
||||
op.add_column('chuni_static_login_bonus_preset', sa.Column('opt', sa.BIGINT(), nullable=True))
|
||||
op.create_foreign_key(None, 'chuni_static_login_bonus_preset', 'chuni_static_opt', ['opt'], ['id'], onupdate='cascade', ondelete='SET NULL')
|
||||
op.add_column('chuni_static_map_icon', sa.Column('opt', sa.BIGINT(), nullable=True))
|
||||
op.create_foreign_key(None, 'chuni_static_map_icon', 'chuni_static_opt', ['opt'], ['id'], onupdate='cascade', ondelete='SET NULL')
|
||||
op.add_column('chuni_static_music', sa.Column('opt', sa.BIGINT(), nullable=True))
|
||||
op.create_foreign_key(None, 'chuni_static_music', 'chuni_static_opt', ['opt'], ['id'], onupdate='cascade', ondelete='SET NULL')
|
||||
op.add_column('chuni_static_system_voice', sa.Column('opt', sa.BIGINT(), nullable=True))
|
||||
op.create_foreign_key(None, 'chuni_static_system_voice', 'chuni_static_opt', ['opt'], ['id'], onupdate='cascade', ondelete='SET NULL')
|
||||
op.add_column('chuni_static_trophy', sa.Column('opt', sa.BIGINT(), nullable=True))
|
||||
op.create_foreign_key(None, 'chuni_static_trophy', 'chuni_static_opt', ['opt'], ['id'], onupdate='cascade', ondelete='SET NULL')
|
||||
op.add_column('mai2_static_cards', sa.Column('opt', sa.BIGINT(), nullable=True))
|
||||
op.create_foreign_key(None, 'mai2_static_cards', 'cm_static_opts', ['opt'], ['id'], onupdate='cascade', ondelete='SET NULL')
|
||||
op.add_column('mai2_static_event', sa.Column('opt', sa.BIGINT(), nullable=True))
|
||||
op.create_foreign_key(None, 'mai2_static_event', 'mai2_static_opt', ['opt'], ['id'], onupdate='cascade', ondelete='SET NULL')
|
||||
op.add_column('mai2_static_music', sa.Column('opt', sa.BIGINT(), nullable=True))
|
||||
op.create_foreign_key(None, 'mai2_static_music', 'mai2_static_opt', ['opt'], ['id'], onupdate='cascade', ondelete='SET NULL')
|
||||
op.add_column('mai2_static_ticket', sa.Column('opt', sa.BIGINT(), nullable=True))
|
||||
op.create_foreign_key(None, 'mai2_static_ticket', 'mai2_static_opt', ['opt'], ['id'], onupdate='cascade', ondelete='SET NULL')
|
||||
op.add_column('ongeki_static_cards', sa.Column('opt', sa.BIGINT(), nullable=True))
|
||||
op.create_foreign_key(None, 'ongeki_static_cards', 'ongeki_static_opt', ['opt'], ['id'], onupdate='cascade', ondelete='SET NULL')
|
||||
op.add_column('ongeki_static_events', sa.Column('opt', sa.BIGINT(), nullable=True))
|
||||
op.create_foreign_key(None, 'ongeki_static_events', 'ongeki_static_opt', ['opt'], ['id'], onupdate='cascade', ondelete='SET NULL')
|
||||
op.add_column('ongeki_static_gachas', sa.Column('opt', sa.BIGINT(), nullable=True))
|
||||
op.create_foreign_key(None, 'ongeki_static_gachas', 'cm_static_opts', ['opt'], ['id'], onupdate='cascade', ondelete='SET NULL')
|
||||
op.add_column('ongeki_static_music', sa.Column('opt', sa.BIGINT(), nullable=True))
|
||||
op.create_foreign_key(None, 'ongeki_static_music', 'ongeki_static_opt', ['opt'], ['id'], onupdate='cascade', ondelete='SET NULL')
|
||||
op.add_column('ongeki_static_rewards', sa.Column('opt', sa.BIGINT(), nullable=True))
|
||||
op.create_foreign_key(None, 'ongeki_static_rewards', 'ongeki_static_opt', ['opt'], ['id'], onupdate='cascade', ondelete='SET NULL')
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.drop_constraint("ongeki_static_rewards_ibfk_1", 'ongeki_static_rewards', type_='foreignkey')
|
||||
op.drop_column('ongeki_static_rewards', 'opt')
|
||||
op.drop_constraint("ongeki_static_music_ibfk_1", 'ongeki_static_music', type_='foreignkey')
|
||||
op.drop_column('ongeki_static_music', 'opt')
|
||||
op.drop_constraint("ongeki_static_gachas_ibfk_1", 'ongeki_static_gachas', type_='foreignkey')
|
||||
op.drop_column('ongeki_static_gachas', 'opt')
|
||||
op.drop_constraint("ongeki_static_events_ibfk_1", "ongeki_static_events", type_='foreignkey')
|
||||
op.drop_column('ongeki_static_events', 'opt')
|
||||
op.drop_constraint("ongeki_static_cards_ibfk_1", "ongeki_static_cards", type_='foreignkey')
|
||||
op.drop_column('ongeki_static_cards', 'opt')
|
||||
op.drop_constraint("mai2_static_ticket_ibfk_1", "mai2_static_ticket", type_='foreignkey')
|
||||
op.drop_column('mai2_static_ticket', 'opt')
|
||||
op.drop_constraint("mai2_static_music_ibfk_1", "mai2_static_music", type_='foreignkey')
|
||||
op.drop_column('mai2_static_music', 'opt')
|
||||
op.drop_constraint("mai2_static_event_ibfk_1", "mai2_static_event", type_='foreignkey')
|
||||
op.drop_column('mai2_static_event', 'opt')
|
||||
op.drop_constraint("mai2_static_cards_ibfk_1", "mai2_static_cards", type_='foreignkey')
|
||||
op.drop_column('mai2_static_cards', 'opt')
|
||||
op.drop_constraint("chuni_static_trophy_ibfk_1", "chuni_static_trophy", type_='foreignkey')
|
||||
op.drop_column('chuni_static_trophy', 'opt')
|
||||
op.drop_constraint("chuni_static_system_voice_ibfk_1", "chuni_static_system_voice", type_='foreignkey')
|
||||
op.drop_column('chuni_static_system_voice', 'opt')
|
||||
op.drop_constraint("chuni_static_music_ibfk_1", "chuni_static_music", type_='foreignkey')
|
||||
op.drop_column('chuni_static_music', 'opt')
|
||||
op.drop_constraint("chuni_static_map_icon_ibfk_1", "chuni_static_map_icon", type_='foreignkey')
|
||||
op.drop_column('chuni_static_map_icon', 'opt')
|
||||
op.drop_constraint("chuni_static_login_bonus_preset_ibfk_1", "chuni_static_login_bonus_preset", type_='foreignkey')
|
||||
op.drop_column('chuni_static_login_bonus_preset', 'opt')
|
||||
op.drop_constraint("chuni_static_login_bonus_ibfk_2", "chuni_static_login_bonus", type_='foreignkey')
|
||||
op.drop_column('chuni_static_login_bonus', 'opt')
|
||||
op.drop_constraint("chuni_static_gachas_ibfk_1", "chuni_static_gachas", type_='foreignkey')
|
||||
op.drop_column('chuni_static_gachas', 'opt')
|
||||
op.drop_constraint("chuni_static_events_ibfk_1", "chuni_static_events", type_='foreignkey')
|
||||
op.drop_column('chuni_static_events', 'opt')
|
||||
op.drop_constraint("chuni_static_charge_ibfk_1", "chuni_static_charge", type_='foreignkey')
|
||||
op.drop_column('chuni_static_charge', 'opt')
|
||||
op.drop_constraint("chuni_static_character_ibfk_1", "chuni_static_character", type_='foreignkey')
|
||||
op.drop_column('chuni_static_character', 'opt')
|
||||
op.drop_constraint("chuni_static_cards_ibfk_1", "chuni_static_cards", type_='foreignkey')
|
||||
op.drop_column('chuni_static_cards', 'opt')
|
||||
op.drop_constraint("chuni_static_avatar_ibfk_1", "chuni_static_avatar", type_='foreignkey')
|
||||
op.drop_column('chuni_static_avatar', 'opt')
|
||||
op.drop_table('ongeki_static_opt')
|
||||
op.drop_table('mai2_static_opt')
|
||||
op.drop_table('cm_static_opts')
|
||||
op.drop_table('chuni_static_opt')
|
||||
# ### end Alembic commands ###
|
||||
@@ -0,0 +1,66 @@
|
||||
"""add_billing_tables
|
||||
|
||||
Revision ID: 27e3434740df
|
||||
Revises: ae364c078429
|
||||
Create Date: 2025-04-17 18:32:06.008601
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects import mysql
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '27e3434740df'
|
||||
down_revision = 'ae364c078429'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.create_table('machine_billing_charge',
|
||||
sa.Column('id', sa.BIGINT(), nullable=False),
|
||||
sa.Column('machine', sa.Integer(), nullable=False),
|
||||
sa.Column('game_id', sa.CHAR(length=5), nullable=False),
|
||||
sa.Column('game_ver', sa.FLOAT(), nullable=False),
|
||||
sa.Column('play_count', sa.INTEGER(), nullable=False),
|
||||
sa.Column('play_limit', sa.INTEGER(), nullable=False),
|
||||
sa.Column('product_code', sa.INTEGER(), nullable=False),
|
||||
sa.Column('product_count', sa.INTEGER(), nullable=False),
|
||||
sa.Column('func_type', sa.INTEGER(), nullable=False),
|
||||
sa.Column('player_number', sa.INTEGER(), nullable=False),
|
||||
sa.ForeignKeyConstraint(['machine'], ['machine.id'], onupdate='cascade', ondelete='cascade'),
|
||||
sa.PrimaryKeyConstraint('id'),
|
||||
mysql_charset='utf8mb4'
|
||||
)
|
||||
op.create_table('machine_billing_credit',
|
||||
sa.Column('id', sa.BIGINT(), nullable=False),
|
||||
sa.Column('machine', sa.Integer(), nullable=False),
|
||||
sa.Column('chute_type', sa.INTEGER(), nullable=False),
|
||||
sa.Column('service_type', sa.INTEGER(), nullable=False),
|
||||
sa.Column('operation_type', sa.INTEGER(), nullable=False),
|
||||
sa.Column('coin_rate0', sa.INTEGER(), nullable=False),
|
||||
sa.Column('coin_rate1', sa.INTEGER(), nullable=False),
|
||||
sa.Column('coin_bonus', sa.INTEGER(), nullable=False),
|
||||
sa.Column('credit_rate', sa.INTEGER(), nullable=False),
|
||||
sa.Column('coin_count_slot0', sa.INTEGER(), nullable=False),
|
||||
sa.Column('coin_count_slot1', sa.INTEGER(), nullable=False),
|
||||
sa.Column('coin_count_slot2', sa.INTEGER(), nullable=False),
|
||||
sa.Column('coin_count_slot3', sa.INTEGER(), nullable=False),
|
||||
sa.Column('coin_count_slot4', sa.INTEGER(), nullable=False),
|
||||
sa.Column('coin_count_slot5', sa.INTEGER(), nullable=False),
|
||||
sa.Column('coin_count_slot6', sa.INTEGER(), nullable=False),
|
||||
sa.Column('coin_count_slot7', sa.INTEGER(), nullable=False),
|
||||
sa.ForeignKeyConstraint(['machine'], ['machine.id'], onupdate='cascade', ondelete='cascade'),
|
||||
sa.PrimaryKeyConstraint('id'),
|
||||
sa.UniqueConstraint('machine'),
|
||||
mysql_charset='utf8mb4'
|
||||
)
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.drop_table('machine_billing_credit')
|
||||
op.drop_table('machine_billing_charge')
|
||||
# ### end Alembic commands ###
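This revision can be applied the same way the server drives Alembic elsewhere, via alembic.config.main. A minimal sketch, assuming it is run from whichever directory actually holds ARTEMiS's alembic.ini (the path below is a guess):

import os
import alembic.config

# Illustrative only: walk the schema forward to this revision programmatically,
# mirroring the alembic.config.main(argv=...) pattern used by the Data class.
os.chdir("core/data")  # assumed location of alembic.ini; adjust as needed
alembic.config.main(argv=["upgrade", "27e3434740df"])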
28
core/data/alembic/versions/28443e2da5b8_mai2_buddies_plus.py
Normal file
@@ -0,0 +1,28 @@
|
||||
"""mai2_buddies_plus
|
||||
|
||||
Revision ID: 28443e2da5b8
|
||||
Revises: 5ea73f89d982
|
||||
Create Date: 2024-09-15 20:44:02.351819
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '28443e2da5b8'
|
||||
down_revision = '5ea73f89d982'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
|
||||
op.add_column('mai2_profile_detail', sa.Column('point', sa.Integer()))
|
||||
op.add_column('mai2_profile_detail', sa.Column('totalPoint', sa.Integer()))
|
||||
op.add_column('mai2_profile_detail', sa.Column('friendRegistSkip', sa.SmallInteger()))
|
||||
|
||||
|
||||
def downgrade():
|
||||
op.drop_column('mai2_profile_detail', 'point')
|
||||
op.drop_column('mai2_profile_detail', 'totalPoint')
|
||||
op.drop_column('mai2_profile_detail', 'friendRegistSkip')
|
||||
122
core/data/alembic/versions/41f77ef50588_chuni_ui_overhaul.py
Normal file
@@ -0,0 +1,122 @@
|
||||
"""chuni_ui_overhaul
|
||||
|
||||
Revision ID: 41f77ef50588
|
||||
Revises: d8cd1fa04c2a
|
||||
Create Date: 2024-11-02 13:27:45.839787
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects import mysql
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '41f77ef50588'
|
||||
down_revision = 'd8cd1fa04c2a'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.add_column('chuni_static_avatar', sa.Column('sortName', mysql.VARCHAR(length=255), nullable=True))
|
||||
op.add_column('chuni_static_avatar', sa.Column('isEnabled', mysql.TINYINT(display_width=1), server_default=sa.text('1'), autoincrement=False, nullable=True))
|
||||
op.add_column('chuni_static_avatar', sa.Column('defaultHave', mysql.TINYINT(display_width=1), server_default=sa.text('0'), autoincrement=False, nullable=True))
|
||||
|
||||
op.create_table('chuni_static_character',
|
||||
sa.Column('id', mysql.INTEGER(display_width=11), autoincrement=True, nullable=False),
|
||||
sa.Column('version', mysql.INTEGER(display_width=11), autoincrement=False, nullable=False),
|
||||
sa.Column('characterId', mysql.INTEGER(display_width=11), autoincrement=False, nullable=True),
|
||||
sa.Column('name', mysql.VARCHAR(length=255), nullable=True),
|
||||
sa.Column('sortName', mysql.VARCHAR(length=255), nullable=True),
|
||||
sa.Column('worksName', mysql.VARCHAR(length=255), nullable=True),
|
||||
sa.Column('rareType', mysql.INTEGER(display_width=11), server_default=sa.text('0'), autoincrement=False, nullable=True),
|
||||
sa.Column('imagePath1', mysql.VARCHAR(length=255), nullable=True),
|
||||
sa.Column('imagePath2', mysql.VARCHAR(length=255), nullable=True),
|
||||
sa.Column('imagePath3', mysql.VARCHAR(length=255), nullable=True),
|
||||
sa.Column('isEnabled', mysql.TINYINT(display_width=1), server_default=sa.text('1'), autoincrement=False, nullable=True),
|
||||
sa.Column('defaultHave', mysql.TINYINT(display_width=1), server_default=sa.text('0'), autoincrement=False, nullable=True),
|
||||
sa.PrimaryKeyConstraint('id'),
|
||||
mysql_collate='utf8mb4_general_ci',
|
||||
mysql_default_charset='utf8mb4',
|
||||
mysql_engine='InnoDB'
|
||||
)
|
||||
op.create_index('chuni_static_character_uk', 'chuni_static_character', ['version', 'characterId'], unique=True)
|
||||
op.create_table('chuni_static_map_icon',
|
||||
sa.Column('id', mysql.INTEGER(display_width=11), autoincrement=True, nullable=False),
|
||||
sa.Column('version', mysql.INTEGER(display_width=11), autoincrement=False, nullable=False),
|
||||
sa.Column('mapIconId', mysql.INTEGER(display_width=11), autoincrement=False, nullable=True),
|
||||
sa.Column('name', mysql.VARCHAR(length=255), nullable=True),
|
||||
sa.Column('sortName', mysql.VARCHAR(length=255), nullable=True),
|
||||
sa.Column('iconPath', mysql.VARCHAR(length=255), nullable=True),
|
||||
sa.Column('isEnabled', mysql.TINYINT(display_width=1), server_default=sa.text('1'), autoincrement=False, nullable=True),
|
||||
sa.Column('defaultHave', mysql.TINYINT(display_width=1), server_default=sa.text('0'), autoincrement=False, nullable=True),
|
||||
sa.PrimaryKeyConstraint('id'),
|
||||
mysql_collate='utf8mb4_general_ci',
|
||||
mysql_default_charset='utf8mb4',
|
||||
mysql_engine='InnoDB'
|
||||
)
|
||||
op.create_index('chuni_static_mapicon_uk', 'chuni_static_map_icon', ['version', 'mapIconId'], unique=True)
|
||||
op.create_table('chuni_static_nameplate',
|
||||
sa.Column('id', mysql.INTEGER(display_width=11), autoincrement=True, nullable=False),
|
||||
sa.Column('version', mysql.INTEGER(display_width=11), autoincrement=False, nullable=False),
|
||||
sa.Column('nameplateId', mysql.INTEGER(display_width=11), autoincrement=False, nullable=True),
|
||||
sa.Column('name', mysql.VARCHAR(length=255), nullable=True),
|
||||
sa.Column('texturePath', mysql.VARCHAR(length=255), nullable=True),
|
||||
sa.Column('isEnabled', mysql.TINYINT(display_width=1), server_default=sa.text('1'), autoincrement=False, nullable=True),
|
||||
sa.Column('defaultHave', mysql.TINYINT(display_width=1), server_default=sa.text('0'), autoincrement=False, nullable=True),
|
||||
sa.Column('sortName', mysql.VARCHAR(length=255), nullable=True),
|
||||
sa.PrimaryKeyConstraint('id'),
|
||||
mysql_collate='utf8mb4_general_ci',
|
||||
mysql_default_charset='utf8mb4',
|
||||
mysql_engine='InnoDB'
|
||||
)
|
||||
op.create_index('chuni_static_nameplate_uk', 'chuni_static_nameplate', ['version', 'nameplateId'], unique=True)
|
||||
op.create_table('chuni_static_trophy',
|
||||
sa.Column('id', mysql.INTEGER(display_width=11), autoincrement=True, nullable=False),
|
||||
sa.Column('version', mysql.INTEGER(display_width=11), autoincrement=False, nullable=False),
|
||||
sa.Column('trophyId', mysql.INTEGER(display_width=11), autoincrement=False, nullable=True),
|
||||
sa.Column('name', mysql.VARCHAR(length=255), nullable=True),
|
||||
sa.Column('rareType', mysql.TINYINT(display_width=11), server_default=sa.text('0'), autoincrement=False, nullable=True),
|
||||
sa.Column('isEnabled', mysql.TINYINT(display_width=1), server_default=sa.text('1'), autoincrement=False, nullable=True),
|
||||
sa.Column('defaultHave', mysql.TINYINT(display_width=1), server_default=sa.text('0'), autoincrement=False, nullable=True),
|
||||
sa.PrimaryKeyConstraint('id'),
|
||||
mysql_collate='utf8mb4_general_ci',
|
||||
mysql_default_charset='utf8mb4',
|
||||
mysql_engine='InnoDB'
|
||||
)
|
||||
op.create_index('chuni_static_trophy_uk', 'chuni_static_trophy', ['version', 'trophyId'], unique=True)
|
||||
op.create_table('chuni_static_system_voice',
|
||||
sa.Column('id', mysql.INTEGER(display_width=11), autoincrement=True, nullable=False),
|
||||
sa.Column('version', mysql.INTEGER(display_width=11), autoincrement=False, nullable=False),
|
||||
sa.Column('voiceId', mysql.INTEGER(display_width=11), autoincrement=False, nullable=True),
|
||||
sa.Column('name', mysql.VARCHAR(length=255), nullable=True),
|
||||
sa.Column('sortName', mysql.VARCHAR(length=255), nullable=True),
|
||||
sa.Column('imagePath', mysql.VARCHAR(length=255), nullable=True),
|
||||
sa.Column('isEnabled', mysql.TINYINT(display_width=1), server_default=sa.text('1'), autoincrement=False, nullable=True),
|
||||
sa.Column('defaultHave', mysql.TINYINT(display_width=1), server_default=sa.text('0'), autoincrement=False, nullable=True),
|
||||
sa.PrimaryKeyConstraint('id'),
|
||||
mysql_collate='utf8mb4_general_ci',
|
||||
mysql_default_charset='utf8mb4',
|
||||
mysql_engine='InnoDB'
|
||||
)
|
||||
op.create_index('chuni_static_systemvoice_uk', 'chuni_static_system_voice', ['version', 'voiceId'], unique=True)
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.drop_index('chuni_static_systemvoice_uk', table_name='chuni_static_system_voice')
|
||||
op.drop_table('chuni_static_system_voice')
|
||||
op.drop_index('chuni_static_trophy_uk', table_name='chuni_static_trophy')
|
||||
op.drop_table('chuni_static_trophy')
|
||||
op.drop_index('chuni_static_nameplate_uk', table_name='chuni_static_nameplate')
|
||||
op.drop_table('chuni_static_nameplate')
|
||||
op.drop_index('chuni_static_mapicon_uk', table_name='chuni_static_map_icon')
|
||||
op.drop_table('chuni_static_map_icon')
|
||||
op.drop_index('chuni_static_character_uk', table_name='chuni_static_character')
|
||||
op.drop_table('chuni_static_character')
|
||||
|
||||
op.drop_column('chuni_static_avatar', 'defaultHave')
|
||||
op.drop_column('chuni_static_avatar', 'isEnabled')
|
||||
op.drop_column('chuni_static_avatar', 'sortName')
|
||||
# ### end Alembic commands ###
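Each new static table gets a (version, itemId)-style unique index, so re-importing game data can lean on MySQL's upsert instead of select-then-insert. A purely illustrative sketch using the insert(...).on_duplicate_key_update(...) idiom that appears elsewhere in this change set; the reflected table, values, and connection URL are all placeholders:

from sqlalchemy import MetaData, Table, create_engine
from sqlalchemy.dialects.mysql import insert

engine = create_engine("mysql+pymysql://user:pass@localhost/aime")  # placeholder URL
map_icon = Table("chuni_static_map_icon", MetaData(), autoload_with=engine)

# Upsert one row keyed on the chuni_static_mapicon_uk (version, mapIconId) index.
stmt = insert(map_icon).values(
    version=14, mapIconId=1, name="Example Icon", sortName="EXAMPLEICON",
    iconPath="CHU_UI_MapIcon_0001.dds", isEnabled=True, defaultHave=False,
)
stmt = stmt.on_duplicate_key_update(
    name=stmt.inserted.name,
    sortName=stmt.inserted.sortName,
    iconPath=stmt.inserted.iconPath,
)
with engine.begin() as conn:
    conn.execute(stmt)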
43
core/data/alembic/versions/54a84103b84e_mai2_intimacy.py
Normal file
@@ -0,0 +1,43 @@
|
||||
"""mai2_intimacy
|
||||
|
||||
Revision ID: 54a84103b84e
|
||||
Revises: bc91c1206dca
|
||||
Create Date: 2024-09-16 17:47:49.164546
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy import Column, Integer, UniqueConstraint
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '54a84103b84e'
|
||||
down_revision = 'bc91c1206dca'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
|
||||
op.create_table(
|
||||
"mai2_user_intimate",
|
||||
Column("id", Integer, primary_key=True, nullable=False),
|
||||
Column("user", Integer, nullable=False),
|
||||
Column("partnerId", Integer, nullable=False),
|
||||
Column("intimateLevel", Integer, nullable=False),
|
||||
Column("intimateCountRewarded", Integer, nullable=False),
|
||||
UniqueConstraint("user", "partnerId", name="mai2_user_intimate_uk"),
|
||||
mysql_charset="utf8mb4",
|
||||
)
|
||||
|
||||
op.create_foreign_key(
|
||||
None,
|
||||
"mai2_user_intimate",
|
||||
"aime_user",
|
||||
["user"],
|
||||
["id"],
|
||||
ondelete="cascade",
|
||||
onupdate="cascade",
|
||||
)
|
||||
|
||||
|
||||
def downgrade():
|
||||
op.drop_table("mai2_user_intimate")
|
||||
@@ -0,0 +1,52 @@
|
||||
"""Mai2 PRiSM support
|
||||
|
||||
Revision ID: 5cf98cfe52ad
|
||||
Revises: 263884e774cc
|
||||
Create Date: 2025-04-08 08:00:51.243089
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '5cf98cfe52ad'
|
||||
down_revision = '263884e774cc'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.create_table('mai2_score_kaleidxscope',
|
||||
sa.Column('id', sa.Integer(), nullable=False),
|
||||
sa.Column('user', sa.Integer(), nullable=False),
|
||||
sa.Column('gateId', sa.Integer(), nullable=True),
|
||||
sa.Column('isGateFound', sa.Boolean(), nullable=True),
|
||||
sa.Column('isKeyFound', sa.Boolean(), nullable=True),
|
||||
sa.Column('isClear', sa.Boolean(), nullable=True),
|
||||
sa.Column('totalRestLife', sa.Integer(), nullable=True),
|
||||
sa.Column('totalAchievement', sa.Integer(), nullable=True),
|
||||
sa.Column('totalDeluxscore', sa.Integer(), nullable=True),
|
||||
sa.Column('bestAchievement', sa.Integer(), nullable=True),
|
||||
sa.Column('bestDeluxscore', sa.Integer(), nullable=True),
|
||||
sa.Column('bestAchievementDate', sa.String(length=25), nullable=True),
|
||||
sa.Column('bestDeluxscoreDate', sa.String(length=25), nullable=True),
|
||||
sa.Column('playCount', sa.Integer(), nullable=True),
|
||||
sa.Column('clearDate', sa.String(length=25), nullable=True),
|
||||
sa.Column('lastPlayDate', sa.String(length=25), nullable=True),
|
||||
sa.Column('isInfoWatched', sa.Boolean(), nullable=True),
|
||||
sa.ForeignKeyConstraint(['user'], ['aime_user.id'], onupdate='cascade', ondelete='cascade'),
|
||||
sa.PrimaryKeyConstraint('id'),
|
||||
sa.UniqueConstraint('user', 'gateId', name='mai2_score_best_uk'),
|
||||
mysql_charset='utf8mb4'
|
||||
)
|
||||
op.add_column('mai2_playlog', sa.Column('extBool2', sa.Boolean(), nullable=True, server_default=sa.text("NULL")))
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.drop_column('mai2_playlog', 'extBool2')
|
||||
op.drop_table('mai2_score_kaleidxscope')
|
||||
# ### end Alembic commands ###
|
||||
@@ -0,0 +1,92 @@
|
||||
"""chuni_fix_total_scores
|
||||
|
||||
Revision ID: 91c682918b67
|
||||
Revises: 9c42e54a27fe
|
||||
Create Date: 2025-03-29 11:19:46.063173
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects import mysql
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '91c682918b67'
|
||||
down_revision = '9c42e54a27fe'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.alter_column('chuni_profile_data', 'totalMapNum',
|
||||
existing_type=mysql.INTEGER(display_width=11),
|
||||
type_=sa.BigInteger(),
|
||||
existing_nullable=True)
|
||||
op.alter_column('chuni_profile_data', 'totalHiScore',
|
||||
existing_type=mysql.INTEGER(display_width=11),
|
||||
type_=sa.BigInteger(),
|
||||
existing_nullable=True)
|
||||
op.alter_column('chuni_profile_data', 'totalBasicHighScore',
|
||||
existing_type=mysql.INTEGER(display_width=11),
|
||||
type_=sa.BigInteger(),
|
||||
existing_nullable=True)
|
||||
op.alter_column('chuni_profile_data', 'totalExpertHighScore',
|
||||
existing_type=mysql.INTEGER(display_width=11),
|
||||
type_=sa.BigInteger(),
|
||||
existing_nullable=True)
|
||||
op.alter_column('chuni_profile_data', 'totalMasterHighScore',
|
||||
existing_type=mysql.INTEGER(display_width=11),
|
||||
type_=sa.BigInteger(),
|
||||
existing_nullable=True)
|
||||
op.alter_column('chuni_profile_data', 'totalRepertoireCount',
|
||||
existing_type=mysql.INTEGER(display_width=11),
|
||||
type_=sa.BigInteger(),
|
||||
existing_nullable=True)
|
||||
op.alter_column('chuni_profile_data', 'totalAdvancedHighScore',
|
||||
existing_type=mysql.INTEGER(display_width=11),
|
||||
type_=sa.BigInteger(),
|
||||
existing_nullable=True)
|
||||
op.alter_column('chuni_profile_data', 'totalUltimaHighScore',
|
||||
existing_type=mysql.INTEGER(display_width=11),
|
||||
type_=sa.BigInteger(),
|
||||
existing_nullable=True,
|
||||
existing_server_default=sa.text("'0'"))
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.alter_column('chuni_profile_data', 'totalUltimaHighScore',
|
||||
existing_type=sa.BigInteger(),
|
||||
type_=mysql.INTEGER(display_width=11),
|
||||
existing_nullable=True,
|
||||
existing_server_default=sa.text("'0'"))
|
||||
op.alter_column('chuni_profile_data', 'totalAdvancedHighScore',
|
||||
existing_type=sa.BigInteger(),
|
||||
type_=mysql.INTEGER(display_width=11),
|
||||
existing_nullable=True)
|
||||
op.alter_column('chuni_profile_data', 'totalRepertoireCount',
|
||||
existing_type=sa.BigInteger(),
|
||||
type_=mysql.INTEGER(display_width=11),
|
||||
existing_nullable=True)
|
||||
op.alter_column('chuni_profile_data', 'totalMasterHighScore',
|
||||
existing_type=sa.BigInteger(),
|
||||
type_=mysql.INTEGER(display_width=11),
|
||||
existing_nullable=True)
|
||||
op.alter_column('chuni_profile_data', 'totalExpertHighScore',
|
||||
existing_type=sa.BigInteger(),
|
||||
type_=mysql.INTEGER(display_width=11),
|
||||
existing_nullable=True)
|
||||
op.alter_column('chuni_profile_data', 'totalBasicHighScore',
|
||||
existing_type=sa.BigInteger(),
|
||||
type_=mysql.INTEGER(display_width=11),
|
||||
existing_nullable=True)
|
||||
op.alter_column('chuni_profile_data', 'totalHiScore',
|
||||
existing_type=sa.BigInteger(),
|
||||
type_=mysql.INTEGER(display_width=11),
|
||||
existing_nullable=True)
|
||||
op.alter_column('chuni_profile_data', 'totalMapNum',
|
||||
existing_type=sa.BigInteger(),
|
||||
type_=mysql.INTEGER(display_width=11),
|
||||
existing_nullable=True)
|
||||
# ### end Alembic commands ###
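The move from INTEGER to BigInteger for these lifetime aggregates is presumably about overflow: a signed 32-bit column tops out at 2,147,483,647, and totals built from per-credit scores on the order of a million cross that line after only a couple of thousand plays. A quick check of the arithmetic:

INT32_MAX = 2**31 - 1              # 2_147_483_647, the ceiling of the old INTEGER columns
per_credit = 1_000_000             # rough order of magnitude for a single score
plays_to_overflow = INT32_MAX // per_credit + 1
print(plays_to_overflow)                              # 2148
print(plays_to_overflow * per_credit > INT32_MAX)     # True: BIGINT avoids the wraparound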
@@ -0,0 +1,40 @@
|
||||
"""remove ongeki_static_music_ranking_list
|
||||
|
||||
Revision ID: 9c42e54a27fe
|
||||
Revises: 41f77ef50588
|
||||
Create Date: 2025-01-06 18:24:16.306748
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects import mysql
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '9c42e54a27fe'
|
||||
down_revision = '41f77ef50588'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.drop_index('ongeki_static_music_ranking_uk', table_name='ongeki_static_music_ranking_list')
|
||||
op.drop_table('ongeki_static_music_ranking_list')
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.create_table('ongeki_static_music_ranking_list',
|
||||
sa.Column('id', mysql.INTEGER(), autoincrement=True, nullable=False),
|
||||
sa.Column('version', mysql.INTEGER(), autoincrement=False, nullable=False),
|
||||
sa.Column('musicId', mysql.INTEGER(), autoincrement=False, nullable=False),
|
||||
sa.Column('point', mysql.INTEGER(), autoincrement=False, nullable=False),
|
||||
sa.Column('userName', mysql.VARCHAR(length=255), nullable=True),
|
||||
sa.PrimaryKeyConstraint('id'),
|
||||
mysql_collate='utf8mb4_0900_ai_ci',
|
||||
mysql_default_charset='utf8mb4',
|
||||
mysql_engine='InnoDB'
|
||||
)
|
||||
op.create_index('ongeki_static_music_ranking_uk', 'ongeki_static_music_ranking_list', ['version', 'musicId'], unique=True)
|
||||
# ### end Alembic commands ###
|
||||
@@ -0,0 +1,30 @@
|
||||
"""chuni_nameplate_add_opt
|
||||
|
||||
Revision ID: ae364c078429
|
||||
Revises: 5cf98cfe52ad
|
||||
Create Date: 2025-04-08 00:22:22.370660
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects import mysql
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = 'ae364c078429'
|
||||
down_revision = '5cf98cfe52ad'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.add_column('chuni_static_nameplate', sa.Column('opt', sa.BIGINT(), nullable=True))
|
||||
op.create_foreign_key(None, 'chuni_static_nameplate', 'chuni_static_opt', ['opt'], ['id'], onupdate='cascade', ondelete='SET NULL')
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.drop_constraint("chuni_static_nameplate_ibfk_1", 'chuni_static_nameplate', type_='foreignkey')
|
||||
op.drop_column('chuni_static_nameplate', 'opt')
|
||||
# ### end Alembic commands ###
|
||||
@@ -0,0 +1,24 @@
|
||||
"""mai2_favorite_song_ordering
|
||||
|
||||
Revision ID: bc91c1206dca
|
||||
Revises: 28443e2da5b8
|
||||
Create Date: 2024-09-16 14:24:56.714066
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = 'bc91c1206dca'
|
||||
down_revision = '28443e2da5b8'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
|
||||
op.add_column('mai2_item_favorite_music', sa.Column('orderId', sa.Integer(), nullable=True))
|
||||
|
||||
|
||||
def downgrade():
|
||||
op.drop_column('mai2_item_favorite_music', 'orderId')
|
||||
38
core/data/alembic/versions/d8cd1fa04c2a_mai2_add_photos.py
Normal file
@@ -0,0 +1,38 @@
|
||||
"""mai2_add_photos
|
||||
|
||||
Revision ID: d8cd1fa04c2a
|
||||
Revises: 54a84103b84e
|
||||
Create Date: 2024-10-06 03:09:15.959817
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects import mysql
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = 'd8cd1fa04c2a'
|
||||
down_revision = '54a84103b84e'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.create_table('mai2_user_photo',
|
||||
sa.Column('id', sa.VARCHAR(length=36), nullable=False),
|
||||
sa.Column('user', sa.Integer(), nullable=False),
|
||||
sa.Column('playlog_num', sa.INTEGER(), nullable=False),
|
||||
sa.Column('track_num', sa.INTEGER(), nullable=False),
|
||||
sa.Column('when_upload', sa.TIMESTAMP(), server_default=sa.text('now()'), nullable=False),
|
||||
sa.ForeignKeyConstraint(['user'], ['aime_user.id'], onupdate='cascade', ondelete='cascade'),
|
||||
sa.PrimaryKeyConstraint('id'),
|
||||
sa.UniqueConstraint('user', 'playlog_num', 'track_num', name='mai2_user_photo_uk'),
|
||||
mysql_charset='utf8mb4'
|
||||
)
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.drop_table('mai2_user_photo')
|
||||
# ### end Alembic commands ###
|
||||
@@ -0,0 +1,50 @@
|
||||
"""add_billing_playcount
|
||||
|
||||
Revision ID: f6007bbf057d
|
||||
Revises: 27e3434740df
|
||||
Create Date: 2025-04-19 18:20:35.554137
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects import mysql
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = 'f6007bbf057d'
|
||||
down_revision = '27e3434740df'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.create_table('machine_billing_playcount',
|
||||
sa.Column('id', sa.BIGINT(), nullable=False),
|
||||
sa.Column('machine', sa.Integer(), nullable=False),
|
||||
sa.Column('game_id', sa.CHAR(length=5), nullable=False),
|
||||
sa.Column('year', sa.INTEGER(), nullable=False),
|
||||
sa.Column('month', sa.INTEGER(), nullable=False),
|
||||
sa.Column('playct', sa.BIGINT(), server_default='1', nullable=False),
|
||||
sa.ForeignKeyConstraint(['machine'], ['machine.id'], onupdate='cascade', ondelete='cascade'),
|
||||
sa.PrimaryKeyConstraint('id'),
|
||||
sa.UniqueConstraint('machine'),
|
||||
sa.UniqueConstraint('machine', 'game_id', 'year', 'month', name='machine_billing_playcount_uk'),
|
||||
mysql_charset='utf8mb4'
|
||||
)
|
||||
op.add_column('machine_billing_credit', sa.Column('game_id', sa.CHAR(length=5), nullable=False))
|
||||
op.drop_constraint("machine_billing_credit_ibfk_1", "machine_billing_credit", "foreignkey")
|
||||
op.drop_index('machine', table_name='machine_billing_credit')
|
||||
op.create_unique_constraint('machine_billing_credit_uk', 'machine_billing_credit', ['machine', 'game_id'])
|
||||
op.create_foreign_key("machine_billing_credit_ibfk_1", "machine_billing_credit", "machine", ["machine"], ["id"], onupdate='cascade', ondelete='cascade')
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.drop_constraint("machine_billing_credit_ibfk_1", "machine_billing_credit", "foreignkey")
|
||||
op.drop_constraint('machine_billing_credit_uk', 'machine_billing_credit', type_='unique')
|
||||
op.create_index('machine', 'machine_billing_credit', ['machine'], unique=True)
|
||||
op.create_foreign_key("machine_billing_credit_ibfk_1", "machine_billing_credit", "machine", ["machine"], ["id"], onupdate='cascade', ondelete='cascade')
|
||||
op.drop_column('machine_billing_credit', 'game_id')
|
||||
op.drop_table('machine_billing_playcount')
|
||||
# ### end Alembic commands ###
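The upgrade drops machine_billing_credit_ibfk_1 before replacing the old single-column machine index with the composite (machine, game_id) key, presumably because MySQL/MariaDB refuses to drop an index that a foreign key still depends on; the constraint is then recreated against the new index. A quick way to sanity-check the result after running this revision (connection URL is a placeholder):

from sqlalchemy import create_engine, inspect

engine = create_engine("mysql+pymysql://user:pass@localhost/aime")  # placeholder URL
insp = inspect(engine)
# Expect ['machine', 'game_id'] among the unique constraints once the upgrade has run.
print([uc["column_names"] for uc in insp.get_unique_constraints("machine_billing_credit")])
# machine_billing_credit_ibfk_1 should be back in place as a foreign key.
print([fk["name"] for fk in insp.get_foreign_keys("machine_billing_credit")])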
@@ -1,54 +1,70 @@
|
||||
import logging, coloredlogs
|
||||
from typing import Optional
|
||||
from sqlalchemy.orm import scoped_session, sessionmaker
|
||||
from sqlalchemy import create_engine
|
||||
from logging.handlers import TimedRotatingFileHandler
|
||||
import logging
|
||||
import os
|
||||
import secrets, string
|
||||
import bcrypt
|
||||
import secrets
|
||||
import string
|
||||
import warnings
|
||||
from hashlib import sha256
|
||||
from logging.handlers import TimedRotatingFileHandler
|
||||
from typing import ClassVar, Optional
|
||||
|
||||
import alembic.config
|
||||
import glob
|
||||
import bcrypt
|
||||
import coloredlogs
|
||||
import pymysql.err
|
||||
from sqlalchemy.ext.asyncio import (
|
||||
AsyncEngine,
|
||||
AsyncSession,
|
||||
create_async_engine,
|
||||
)
|
||||
from sqlalchemy.orm import sessionmaker
|
||||
|
||||
from core.config import CoreConfig
|
||||
from core.data.schema import *
|
||||
from core.utils import Utils
|
||||
from core.data.schema import ArcadeData, BaseData, CardData, UserData, metadata
|
||||
from core.utils import MISSING, Utils
|
||||
|
||||
|
||||
class Data:
|
||||
engine = None
|
||||
session = None
|
||||
user = None
|
||||
arcade = None
|
||||
card = None
|
||||
base = None
|
||||
engine: ClassVar[AsyncEngine] = MISSING
|
||||
session: ClassVar["sessionmaker[AsyncSession]"] = MISSING
|
||||
user: ClassVar[UserData] = MISSING
|
||||
arcade: ClassVar[ArcadeData] = MISSING
|
||||
card: ClassVar[CardData] = MISSING
|
||||
base: ClassVar[BaseData] = MISSING
|
||||
|
||||
def __init__(self, cfg: CoreConfig) -> None:
|
||||
self.config = cfg
|
||||
|
||||
if self.config.database.sha2_password:
|
||||
passwd = sha256(self.config.database.password.encode()).digest()
|
||||
self.__url = f"{self.config.database.protocol}://{self.config.database.username}:{passwd.hex()}@{self.config.database.host}:{self.config.database.port}/{self.config.database.name}?charset=utf8mb4"
|
||||
self.__url = f"{self.config.database.protocol}+aiomysql://{self.config.database.username}:{passwd.hex()}@{self.config.database.host}:{self.config.database.port}/{self.config.database.name}"
|
||||
else:
|
||||
self.__url = f"{self.config.database.protocol}://{self.config.database.username}:{self.config.database.password}@{self.config.database.host}:{self.config.database.port}/{self.config.database.name}?charset=utf8mb4"
|
||||
self.__url = f"{self.config.database.protocol}+aiomysql://{self.config.database.username}:{self.config.database.password}@{self.config.database.host}:{self.config.database.port}/{self.config.database.name}"
|
||||
|
||||
if Data.engine is None:
|
||||
Data.engine = create_engine(self.__url, pool_recycle=3600)
|
||||
if Data.engine is MISSING:
|
||||
Data.engine = create_async_engine(
|
||||
self.__url,
|
||||
pool_recycle=3600,
|
||||
isolation_level="AUTOCOMMIT",
|
||||
connect_args={
|
||||
"charset": "utf8mb4",
|
||||
"ssl": self.config.database.create_ssl_context_if_enabled(),
|
||||
},
|
||||
)
|
||||
self.__engine = Data.engine
|
||||
|
||||
if Data.session is None:
|
||||
s = sessionmaker(bind=Data.engine, autoflush=True, autocommit=True)
|
||||
Data.session = scoped_session(s)
|
||||
if Data.session is MISSING:
|
||||
Data.session = sessionmaker(Data.engine, expire_on_commit=False, class_=AsyncSession)
|
||||
|
||||
if Data.user is None:
|
||||
if Data.user is MISSING:
|
||||
Data.user = UserData(self.config, self.session)
|
||||
|
||||
if Data.arcade is None:
|
||||
if Data.arcade is MISSING:
|
||||
Data.arcade = ArcadeData(self.config, self.session)
|
||||
|
||||
if Data.card is None:
|
||||
if Data.card is MISSING:
|
||||
Data.card = CardData(self.config, self.session)
|
||||
|
||||
if Data.base is None:
|
||||
if Data.base is MISSING:
|
||||
Data.base = BaseData(self.config, self.session)
|
||||
|
||||
self.logger = logging.getLogger("database")
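With the switch to create_async_engine and an AsyncSession sessionmaker, callers now open sessions asynchronously instead of going through the old scoped, autocommitting session. A minimal usage sketch (the query itself is illustrative only):

from sqlalchemy import text

async def count_users(data: "Data") -> int:
    # Data must already be constructed so Data.session is populated.
    async with data.session() as session:
        result = await session.execute(text("SELECT COUNT(*) FROM aime_user"))
        return result.scalar_one()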
@@ -94,40 +110,73 @@ class Data:
|
||||
alembic.config.main(argv=alembicArgs)
|
||||
os.chdir(old_dir)
|
||||
|
||||
def create_database(self):
|
||||
async def create_database(self):
|
||||
self.logger.info("Creating databases...")
|
||||
metadata.create_all(
|
||||
self.engine,
|
||||
checkfirst=True,
|
||||
)
|
||||
|
||||
for _, mod in Utils.get_all_titles().items():
|
||||
if hasattr(mod, "database"):
|
||||
mod.database(self.config)
|
||||
metadata.create_all(
|
||||
self.engine,
|
||||
checkfirst=True,
|
||||
)
|
||||
with warnings.catch_warnings():
|
||||
# SQLAlchemy will generate a nice primary key constraint name, but in
|
||||
# MySQL/MariaDB the constraint name is always PRIMARY. Every time a
|
||||
# custom primary key name is generated, a warning is emitted from pymysql,
|
||||
# which we don't care about. Other warnings may be helpful though, don't
|
||||
# suppress everything.
|
||||
warnings.filterwarnings(
|
||||
action="ignore",
|
||||
message=r"Name '(.+)' ignored for PRIMARY key\.",
|
||||
category=pymysql.err.Warning,
|
||||
)
|
||||
|
||||
# Stamp the end revision as if alembic had created it, so it can take off after this.
|
||||
self.__alembic_cmd(
|
||||
"stamp",
|
||||
"head",
|
||||
)
|
||||
async with self.engine.begin() as conn:
|
||||
await conn.run_sync(metadata.create_all, checkfirst=True)
|
||||
|
||||
def schema_upgrade(self, ver: str = None):
|
||||
self.__alembic_cmd(
|
||||
"upgrade",
|
||||
"head" if not ver else ver,
|
||||
)
|
||||
for _, mod in Utils.get_all_titles().items():
|
||||
if hasattr(mod, "database"):
|
||||
mod.database(self.config)
|
||||
|
||||
await conn.run_sync(metadata.create_all, checkfirst=True)
|
||||
|
||||
# Stamp the end revision as if alembic had created it, so it can take off after this.
|
||||
self.__alembic_cmd(
|
||||
"stamp",
|
||||
"head",
|
||||
)
|
||||
|
||||
def schema_upgrade(self, ver: Optional[str] = None):
|
||||
with warnings.catch_warnings():
|
||||
# SQLAlchemy will generate a nice primary key constraint name, but in
|
||||
# MySQL/MariaDB the constraint name is always PRIMARY. Every time a
|
||||
# custom primary key name is generated, a warning is emitted from pymysql,
|
||||
# which we don't care about. Other warnings may be helpful though, don't
|
||||
# suppress everything.
|
||||
warnings.filterwarnings(
|
||||
action="ignore",
|
||||
message=r"Name '(.+)' ignored for PRIMARY key\.",
|
||||
category=pymysql.err.Warning,
|
||||
)
|
||||
|
||||
self.__alembic_cmd(
|
||||
"upgrade",
|
||||
"head" if not ver else ver,
|
||||
)
|
||||
|
||||
def schema_downgrade(self, ver: str):
|
||||
self.__alembic_cmd(
|
||||
"downgrade",
|
||||
ver,
|
||||
)
|
||||
with warnings.catch_warnings():
|
||||
# SQLAlchemy will generate a nice primary key constraint name, but in
|
||||
# MySQL/MariaDB the constraint name is always PRIMARY. Every time a
|
||||
# custom primary key name is generated, a warning is emitted from pymysql,
|
||||
# which we don't care about. Other warnings may be helpful though, don't
|
||||
# suppress everything.
|
||||
warnings.filterwarnings(
|
||||
action="ignore",
|
||||
message=r"Name '(.+)' ignored for PRIMARY key\.",
|
||||
category=pymysql.err.Warning,
|
||||
)
|
||||
|
||||
async def create_owner(self, email: Optional[str] = None, code: Optional[str] = "00000000000000000000") -> None:
|
||||
self.__alembic_cmd(
|
||||
"downgrade",
|
||||
ver,
|
||||
)
|
||||
|
||||
async def create_owner(self, email: Optional[str] = None, code: str = "00000000000000000000") -> None:
|
||||
pw = "".join(
|
||||
secrets.choice(string.ascii_letters + string.digits) for i in range(20)
|
||||
)
|
||||
@@ -150,12 +199,12 @@ class Data:
|
||||
async def migrate(self) -> None:
|
||||
exist = await self.base.execute("SELECT * FROM alembic_version")
|
||||
if exist is not None:
|
||||
self.logger.warn("No need to migrate as you have already migrated to alembic. If you are trying to upgrade the schema, use `upgrade` instead!")
|
||||
self.logger.warning("No need to migrate as you have already migrated to alembic. If you are trying to upgrade the schema, use `upgrade` instead!")
|
||||
return
|
||||
|
||||
self.logger.info("Upgrading to latest with legacy system")
|
||||
if not await self.legacy_upgrade():
|
||||
self.logger.warn("No need to migrate as you have already deleted the old schema_versions system. If you are trying to upgrade the schema, use `upgrade` instead!")
|
||||
self.logger.warning("No need to migrate as you have already deleted the old schema_versions system. If you are trying to upgrade the schema, use `upgrade` instead!")
|
||||
return
|
||||
self.logger.info("Done")
|
||||
|
||||
@@ -174,7 +223,7 @@ class Data:
|
||||
async def legacy_upgrade(self) -> bool:
|
||||
vers = await self.base.execute("SELECT * FROM schema_versions")
|
||||
if vers is None:
|
||||
self.logger.warn("Cannot legacy upgrade, schema_versions table unavailable!")
|
||||
self.logger.warning("Cannot legacy upgrade, schema_versions table unavailable!")
|
||||
return False
|
||||
|
||||
db_vers = {}
|
||||
@@ -203,7 +252,7 @@ class Data:
|
||||
game_codes = getattr(mod, "game_codes", [])
|
||||
for game in game_codes:
|
||||
if game not in db_vers:
|
||||
self.logger.warn(f"{game} does not have an antry in schema_versions, skipping")
|
||||
self.logger.warning(f"{game} does not have an antry in schema_versions, skipping")
|
||||
continue
|
||||
|
||||
now_ver = int(db_vers[game]) + 1
|
||||
|
||||
@@ -1,16 +1,17 @@
|
||||
from typing import Optional, Dict, List
|
||||
from sqlalchemy import Table, Column, and_, or_
|
||||
from sqlalchemy.sql.schema import ForeignKey, PrimaryKeyConstraint
|
||||
from sqlalchemy.types import Integer, String, Boolean, JSON
|
||||
from sqlalchemy.sql import func, select
|
||||
import re
|
||||
from typing import List, Optional
|
||||
from datetime import datetime
|
||||
|
||||
from sqlalchemy import Column, Table, and_, or_, UniqueConstraint
|
||||
from sqlalchemy.dialects.mysql import insert
|
||||
from sqlalchemy.engine import Row
|
||||
import re
|
||||
from sqlalchemy.sql import func, select
|
||||
from sqlalchemy.sql.schema import ForeignKey, PrimaryKeyConstraint
|
||||
from sqlalchemy.types import JSON, Boolean, Integer, String, BIGINT, INTEGER, CHAR, FLOAT
|
||||
|
||||
from core.data.schema.base import BaseData, metadata
|
||||
from core.const import *
|
||||
|
||||
arcade = Table(
|
||||
arcade: Table = Table(
|
||||
"arcade",
|
||||
metadata,
|
||||
Column("id", Integer, primary_key=True, nullable=False),
|
||||
@@ -26,7 +27,7 @@ arcade = Table(
|
||||
mysql_charset="utf8mb4",
|
||||
)
|
||||
|
||||
machine = Table(
|
||||
machine: Table = Table(
|
||||
"machine",
|
||||
metadata,
|
||||
Column("id", Integer, primary_key=True, nullable=False),
|
||||
@@ -47,7 +48,7 @@ machine = Table(
|
||||
mysql_charset="utf8mb4",
|
||||
)
|
||||
|
||||
arcade_owner = Table(
|
||||
arcade_owner: Table = Table(
|
||||
"arcade_owner",
|
||||
metadata,
|
||||
Column(
|
||||
@@ -67,9 +68,79 @@ arcade_owner = Table(
|
||||
mysql_charset="utf8mb4",
|
||||
)
|
||||
|
||||
billing_charge: Table = Table(
|
||||
"machine_billing_charge",
|
||||
metadata,
|
||||
Column("id", BIGINT, primary_key=True, nullable=False),
|
||||
Column(
|
||||
"machine",
|
||||
Integer,
|
||||
ForeignKey("machine.id", ondelete="cascade", onupdate="cascade"),
|
||||
nullable=False,
|
||||
),
|
||||
Column("game_id", CHAR(5), nullable=False),
|
||||
Column("game_ver", FLOAT, nullable=False),
|
||||
Column("play_count", INTEGER, nullable=False),
|
||||
Column("play_limit", INTEGER, nullable=False),
|
||||
Column("product_code", INTEGER, nullable=False),
|
||||
Column("product_count", INTEGER, nullable=False),
|
||||
Column("func_type", INTEGER, nullable=False),
|
||||
Column("player_number", INTEGER, nullable=False),
|
||||
mysql_charset="utf8mb4",
|
||||
)
|
||||
|
||||
# These settings are only really of interest
|
||||
# for real cabinets operating as pay-to-play
|
||||
billing_credit: Table = Table(
|
||||
"machine_billing_credit",
|
||||
metadata,
|
||||
Column("id", BIGINT, primary_key=True, nullable=False),
|
||||
Column(
|
||||
"machine",
|
||||
Integer,
|
||||
ForeignKey("machine.id", ondelete="cascade", onupdate="cascade"),
|
||||
nullable=False
|
||||
),
|
||||
Column("game_id", CHAR(5), nullable=False),
|
||||
Column("chute_type", INTEGER, nullable=False),
|
||||
Column("service_type", INTEGER, nullable=False),
|
||||
Column("operation_type", INTEGER, nullable=False),
|
||||
Column("coin_rate0", INTEGER, nullable=False),
|
||||
Column("coin_rate1", INTEGER, nullable=False),
|
||||
Column("coin_bonus", INTEGER, nullable=False),
|
||||
Column("credit_rate", INTEGER, nullable=False),
|
||||
Column("coin_count_slot0", INTEGER, nullable=False),
|
||||
Column("coin_count_slot1", INTEGER, nullable=False),
|
||||
Column("coin_count_slot2", INTEGER, nullable=False),
|
||||
Column("coin_count_slot3", INTEGER, nullable=False),
|
||||
Column("coin_count_slot4", INTEGER, nullable=False),
|
||||
Column("coin_count_slot5", INTEGER, nullable=False),
|
||||
Column("coin_count_slot6", INTEGER, nullable=False),
|
||||
Column("coin_count_slot7", INTEGER, nullable=False),
|
||||
UniqueConstraint("machine", "game_id", name="machine_billing_credit_uk"),
|
||||
mysql_charset="utf8mb4",
|
||||
)
|
||||
|
||||
billing_playct: Table = Table(
|
||||
"machine_billing_playcount",
|
||||
metadata,
|
||||
Column("id", BIGINT, primary_key=True, nullable=False),
|
||||
Column(
|
||||
"machine",
|
||||
Integer,
|
||||
ForeignKey("machine.id", ondelete="cascade", onupdate="cascade"),
|
||||
nullable=False, unique=True
|
||||
),
|
||||
Column("game_id", CHAR(5), nullable=False),
|
||||
Column("year", INTEGER, nullable=False),
|
||||
Column("month", INTEGER, nullable=False),
|
||||
Column("playct", BIGINT, nullable=False, server_default="1"),
|
||||
UniqueConstraint("machine", "game_id", "year", "month", name="machine_billing_playcount_uk"),
|
||||
mysql_charset="utf8mb4",
|
||||
)
|
||||
|
||||
class ArcadeData(BaseData):
|
||||
async def get_machine(self, serial: str = None, id: int = None) -> Optional[Row]:
|
||||
async def get_machine(self, serial: Optional[str] = None, id: Optional[int] = None) -> Optional[Row]:
|
||||
if serial is not None:
|
||||
serial = serial.replace("-", "")
|
||||
if len(serial) == 11:
|
||||
@@ -98,8 +169,8 @@ class ArcadeData(BaseData):
|
||||
self,
|
||||
arcade_id: int,
|
||||
serial: str = "",
|
||||
board: str = None,
|
||||
game: str = None,
|
||||
board: Optional[str] = None,
|
||||
game: Optional[str] = None,
|
||||
is_cab: bool = False,
|
||||
) -> Optional[int]:
|
||||
if not arcade_id:
|
||||
@@ -115,6 +186,15 @@ class ArcadeData(BaseData):
|
||||
return None
|
||||
return result.lastrowid
|
||||
|
||||
async def set_machine_arcade(self, machine_id: int, new_arcade: int) -> bool:
|
||||
sql = machine.update(machine.c.id == machine_id).values(arcade = new_arcade)
|
||||
|
||||
result = await self.execute(sql)
|
||||
if result is None:
|
||||
self.logger.error(f"Failed to update machine {machine_id} arcade to {new_arcade}")
|
||||
return False
|
||||
return True
|
||||
|
||||
async def set_machine_serial(self, machine_id: int, serial: str) -> None:
|
||||
result = await self.execute(
|
||||
machine.update(machine.c.id == machine_id).values(keychip=serial)
|
||||
@@ -134,6 +214,60 @@ class ArcadeData(BaseData):
|
||||
f"Failed to update board id for machine {machine_id} -> {boardid}"
|
||||
)
|
||||
|
||||
async def set_machine_game(self, machine_id: int, new_game: Optional[str]) -> bool:
|
||||
sql = machine.update(machine.c.id == machine_id).values(game = new_game)
|
||||
|
||||
result = await self.execute(sql)
|
||||
if result is None:
|
||||
self.logger.error(f"Failed to update machine {machine_id} game to {new_game}")
|
||||
return False
|
||||
return True
|
||||
|
||||
async def set_machine_country(self, machine_id: int, new_country: Optional[str]) -> bool:
|
||||
sql = machine.update(machine.c.id == machine_id).values(country = new_country)
|
||||
|
||||
result = await self.execute(sql)
|
||||
if result is None:
|
||||
self.logger.error(f"Failed to update machine {machine_id} country to {new_country}")
|
||||
return False
|
||||
return True
|
||||
|
||||
async def set_machine_timezone(self, machine_id: int, new_timezone: Optional[str]) -> bool:
|
||||
sql = machine.update(machine.c.id == machine_id).values(timezone = new_timezone)
|
||||
|
||||
result = await self.execute(sql)
|
||||
if result is None:
|
||||
self.logger.error(f"Failed to update machine {machine_id} timezone to {new_timezone}")
|
||||
return False
|
||||
return True
|
||||
|
||||
async def set_machine_real_cabinet(self, machine_id: int, is_real: bool = False) -> bool:
|
||||
sql = machine.update(machine.c.id == machine_id).values(is_cab = is_real)
|
||||
|
||||
result = await self.execute(sql)
|
||||
if result is None:
|
||||
self.logger.error(f"Failed to update machine {machine_id} is_cab to {is_real}")
|
||||
return False
|
||||
return True
|
||||
|
||||
async def set_machine_can_ota(self, machine_id: int, can_ota: bool = False) -> bool:
|
||||
sql = machine.update(machine.c.id == machine_id).values(ota_enable = can_ota)
|
||||
|
||||
result = await self.execute(sql)
|
||||
if result is None:
|
||||
self.logger.error(f"Failed to update machine {machine_id} ota_enable to {can_ota}")
|
||||
return False
|
||||
return True
|
||||
|
||||
async def set_machine_memo(self, machine_id: int, new_memo: Optional[str]) -> bool:
|
||||
sql = machine.update(machine.c.id == machine_id).values(memo = new_memo)
|
||||
|
||||
result = await self.execute(sql)
|
||||
if result is None:
|
||||
self.logger.error(f"Failed to update machine {machine_id} memo")
|
||||
return False
|
||||
return True
|
||||
|
||||
async def get_arcade(self, id: int) -> Optional[Row]:
|
||||
sql = arcade.select(arcade.c.id == id)
|
||||
result = await self.execute(sql)
|
||||
@@ -150,8 +284,8 @@ class ArcadeData(BaseData):
|
||||
|
||||
async def create_arcade(
|
||||
self,
|
||||
name: str = None,
|
||||
nickname: str = None,
|
||||
name: Optional[str] = None,
|
||||
nickname: Optional[str] = None,
|
||||
country: str = "JPN",
|
||||
country_id: int = 1,
|
||||
state: str = "",
|
||||
@@ -187,8 +321,11 @@ class ArcadeData(BaseData):
|
||||
sql = select(arcade_owner.c.permissions).where(and_(arcade_owner.c.user == user_id, arcade_owner.c.arcade == arcade_id))
|
||||
result = await self.execute(sql)
|
||||
if result is None:
|
||||
return False
|
||||
return result.fetchone()
|
||||
return None
|
||||
row = result.fetchone()
|
||||
if row:
|
||||
return row['permissions']
|
||||
return None
|
||||
|
||||
async def get_arcade_owners(self, arcade_id: int) -> Optional[Row]:
|
||||
sql = select(arcade_owner).where(arcade_owner.c.arcade == arcade_id)
|
||||
@@ -198,14 +335,25 @@ class ArcadeData(BaseData):
|
||||
return None
|
||||
return result.fetchall()
|
||||
|
||||
async def add_arcade_owner(self, arcade_id: int, user_id: int) -> None:
|
||||
sql = insert(arcade_owner).values(arcade=arcade_id, user=user_id)
|
||||
async def add_arcade_owner(self, arcade_id: int, user_id: int, permissions: int = 1) -> Optional[int]:
|
||||
sql = insert(arcade_owner).values(arcade=arcade_id, user=user_id, permissions=permissions)
|
||||
|
||||
result = await self.execute(sql)
|
||||
if result is None:
|
||||
return None
|
||||
return result.lastrowid
|
||||
|
||||
async def set_arcade_owner_permissions(self, arcade_id: int, user_id: int, new_permissions: int = 1) -> bool:
|
||||
sql = arcade_owner.update(
|
||||
and_(arcade_owner.c.arcade == arcade_id, arcade_owner.c.user == user_id)
|
||||
).values(permissions = new_permissions)
|
||||
|
||||
result = await self.execute(sql)
|
||||
if result is None:
|
||||
self.logger.error(f"Failed to update arcade owner permissions to {new_permissions} for user {user_id} arcade {arcade_id}")
|
||||
return False
|
||||
return True
|
||||
|
||||
async def get_arcade_by_name(self, name: str) -> Optional[List[Row]]:
|
||||
sql = arcade.select(or_(arcade.c.name.like(f"%{name}%"), arcade.c.nickname.like(f"%{name}%")))
|
||||
result = await self.execute(sql)
|
||||
@@ -219,20 +367,176 @@ class ArcadeData(BaseData):
|
||||
if result is None:
|
||||
return None
|
||||
return result.fetchall()
|
||||
|
||||
|
||||
async def set_arcade_name_nickname(self, arcade_id: int, new_name: Optional[str], new_nickname: Optional[str]) -> bool:
|
||||
sql = arcade.update(arcade.c.id == arcade_id).values(name = new_name, nickname = new_nickname)
|
||||
|
||||
result = await self.execute(sql)
|
||||
if result is None:
|
||||
self.logger.error(f"Failed to update arcade {arcade_id} name to {new_name}/{new_nickname}")
|
||||
return False
|
||||
return True
|
||||
|
||||
async def set_arcade_region_info(self, arcade_id: int, new_country: Optional[str], new_state: Optional[str], new_city: Optional[str], new_region_id: Optional[int], new_country_id: Optional[int]) -> bool:
|
||||
sql = arcade.update(arcade.c.id == arcade_id).values(
|
||||
country = new_country,
|
||||
state = new_state,
|
||||
city = new_city,
|
||||
region_id = new_region_id,
|
||||
country_id = new_country_id
|
||||
)
|
||||
|
||||
result = await self.execute(sql)
|
||||
if result is None:
|
||||
self.logger.error(f"Failed to update arcade {arcade_id} regional info to {new_country}/{new_state}/{new_city}/{new_region_id}/{new_country_id}")
|
||||
return False
|
||||
return True
|
||||
|
||||
async def set_arcade_timezone(self, arcade_id: int, new_timezone: Optional[str]) -> bool:
|
||||
sql = arcade.update(arcade.c.id == arcade_id).values(timezone = new_timezone)
|
||||
|
||||
result = await self.execute(sql)
|
||||
if result is None:
|
||||
self.logger.error(f"Failed to update arcade {arcade_id} timezone to {new_timezone}")
|
||||
return False
|
||||
return True
|
||||
|
||||
async def set_arcade_vpn_ip(self, arcade_id: int, new_ip: Optional[str]) -> bool:
|
||||
sql = arcade.update(arcade.c.id == arcade_id).values(ip = new_ip)
|
||||
|
||||
result = await self.execute(sql)
|
||||
if result is None:
|
||||
self.logger.error(f"Failed to update arcade {arcade_id} VPN address to {new_ip}")
|
||||
return False
|
||||
return True
|
||||
|
||||
async def get_num_generated_keychips(self) -> Optional[int]:
|
||||
result = await self.execute(select(func.count("serial LIKE 'A69A%'")).select_from(machine))
|
||||
if result:
|
||||
return result.fetchone()['count_1']
|
||||
self.logger.error("Failed to count machine serials that start with A69A!")
|
||||
|
||||
async def billing_add_charge(self, machine_id: int, game_id: str, game_ver: float, playcount: int, playlimit, product_code: int, product_count: int, func_type: int, player_num: int) -> Optional[int]:
|
||||
result = await self.execute(billing_charge.insert().values(
|
||||
machine=machine_id,
|
||||
game_id=game_id,
|
||||
game_ver=game_ver,
|
||||
play_count=playcount,
|
||||
play_limit=playlimit,
|
||||
product_code=product_code,
|
||||
product_count=product_count,
|
||||
func_type=func_type,
|
||||
player_number=player_num
|
||||
))
|
||||
|
||||
if result is None:
|
||||
self.logger.error(f"Failed to add billing charge for machine {machine_id}!")
|
||||
return None
|
||||
return result.lastrowid
|
||||
|
||||
async def billing_get_last_charge(self, machine_id: int, game_id: str) -> Optional[Row]:
|
||||
result = await self.execute(billing_charge.select(
|
||||
and_(billing_charge.c.machine == machine_id, billing_charge.c.game_id == game_id)
|
||||
).order_by(billing_charge.c.id.desc()).limit(3))
|
||||
if result:
|
||||
return result.fetchone()
|
||||
|
||||
async def billing_set_credit(self, machine_id: int, game_id: str, chute_type: int, service_type: int, op_mode: int, coin_rate0: int, coin_rate1: int,
|
||||
bonus_adder: int, coin_to_credit_rate: int, coin_count_slot0: int, coin_count_slot1: int, coin_count_slot2: int, coin_count_slot3: int,
|
||||
coin_count_slot4: int, coin_count_slot5: int, coin_count_slot6: int, coin_count_slot7: int) -> Optional[int]:
|
||||
|
||||
sql = insert(billing_credit).values(
|
||||
machine=machine_id,
|
||||
game_id=game_id,
|
||||
chute_type=chute_type,
|
||||
service_type=service_type,
|
||||
operation_type=op_mode,
|
||||
coin_rate0=coin_rate0,
|
||||
coin_rate1=coin_rate1,
|
||||
coin_bonus=bonus_adder,
|
||||
credit_rate=coin_to_credit_rate,
|
||||
coin_count_slot0=coin_count_slot0,
|
||||
coin_count_slot1=coin_count_slot1,
|
||||
coin_count_slot2=coin_count_slot2,
|
||||
coin_count_slot3=coin_count_slot3,
|
||||
coin_count_slot4=coin_count_slot4,
|
||||
coin_count_slot5=coin_count_slot5,
|
||||
coin_count_slot6=coin_count_slot6,
|
||||
coin_count_slot7=coin_count_slot7,
|
||||
)
|
||||
|
||||
conflict = sql.on_duplicate_key_update(
|
||||
chute_type=chute_type,
|
||||
service_type=service_type,
|
||||
operation_type=op_mode,
|
||||
coin_rate0=coin_rate0,
|
||||
coin_rate1=coin_rate1,
|
||||
coin_bonus=bonus_adder,
|
||||
credit_rate=coin_to_credit_rate,
|
||||
coin_count_slot0=coin_count_slot0,
|
||||
coin_count_slot1=coin_count_slot1,
|
||||
coin_count_slot2=coin_count_slot2,
|
||||
coin_count_slot3=coin_count_slot3,
|
||||
coin_count_slot4=coin_count_slot4,
|
||||
coin_count_slot5=coin_count_slot5,
|
||||
coin_count_slot6=coin_count_slot6,
|
||||
coin_count_slot7=coin_count_slot7,
|
||||
)
|
||||
|
||||
result = await self.execute(conflict)
|
||||
if result is None:
|
||||
self.logger.error(f"Failed to set billing credit settings for machine {machine_id}!")
|
||||
return None
|
||||
return result.lastrowid
|
||||
|
||||
async def billing_get_credit(self, machine_id: int, game_id: str) -> Optional[Row]:
|
||||
result = await self.execute(billing_credit.select(
|
||||
and_(billing_credit.c.machine == machine_id, billing_credit.c.game_id == game_id)
|
||||
))
|
||||
if result:
|
||||
return result.fetchone()
|
||||
|
||||
async def billing_add_playcount(self, machine_id: int, game_id: str, playct: int = 1) -> None:
|
||||
now = datetime.now()
|
||||
sql = insert(billing_playct).values(
|
||||
machine=machine_id,
|
||||
game_id=game_id,
|
||||
year=now.year,
|
||||
month=now.month,
|
||||
playct=playct
|
||||
)
|
||||
|
||||
conflict = sql.on_duplicate_key_update(playct=billing_playct.c.playct + playct)
|
||||
result = await self.execute(conflict)
|
||||
|
||||
if result is None:
|
||||
self.logger.error(f"Failed to add playcount for machine {machine_id} running {game_id}")
|
||||
|
||||
async def billing_get_playcount_3mo(self, machine_id: int, game_id: str) -> Optional[List[Row]]:
|
||||
result = await self.execute(billing_playct.select(and_(
|
||||
billing_playct.c.machine == machine_id,
|
||||
billing_playct.c.game_id == game_id
|
||||
)).order_by(billing_playct.c.year.desc(), billing_playct.c.month.desc()).limit(3))
|
||||
|
||||
if result is not None:
|
||||
return result.fetchall()
|
||||
|
||||
async def billing_get_last_playcount(self, machine_id: int, game_id: str) -> Optional[Row]:
|
||||
result = await self.execute(billing_playct.select(and_(
|
||||
billing_playct.c.machine == machine_id,
|
||||
billing_playct.c.game_id == game_id
|
||||
)).order_by(billing_playct.c.year.desc(), billing_playct.c.month.desc()).limit(1))
|
||||
|
||||
if result is not None:
|
||||
return result.fetchone()
|
||||
|
||||
def format_serial(
|
||||
self, platform_code: str, platform_rev: int, serial_letter: str, serial_num: int, append: int, dash: bool = False
|
||||
) -> str:
|
||||
return f"{platform_code}{'-' if dash else ''}{platform_rev:02d}{serial_letter}{serial_num:04d}{append:04d}"
|
||||
|
||||
def validate_keychip_format(self, serial: str) -> bool:
|
||||
# For the 2nd letter, E and X are the only "real" values that have been observed
|
||||
# For the 2nd letter, E and X are the only "real" values that have been observed (A is used for generated keychips)
|
||||
if re.fullmatch(r"^A[0-9]{2}[A-Z][-]?[0-9]{2}[A-HJ-NP-Z][0-9]{4}([0-9]{4})?$", serial) is None:
|
||||
return False
|
||||
|
||||
@@ -252,7 +556,6 @@ class ArcadeData(BaseData):
|
||||
month = ((month - 1) + 9) % 12 # Offset so April=0
|
||||
return f"{year:02}{month // 6:01}{month % 6 + 1:01}"
|
||||
|
||||
|
||||
def parse_keychip_suffix(self, suffix: str) -> tuple[int, int]:
|
||||
year = int(suffix[0:2])
|
||||
half = int(suffix[2])
|
||||
|
||||
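As a sanity check on the serial helpers above, the arithmetic can be reproduced standalone. This is only a sketch: `keychip_suffix` and the constants are illustrative names, and nothing here is part of ARTEMiS beyond the format string, regex, and offset math copied from the hunks above.

```python
import re

# Same pattern as validate_keychip_format above.
KEYCHIP_RE = re.compile(r"^A[0-9]{2}[A-Z][-]?[0-9]{2}[A-HJ-NP-Z][0-9]{4}([0-9]{4})?$")

def keychip_suffix(year: int, month: int) -> str:
    # Mirrors the suffix logic above: shift months so April maps to 0, then emit
    # <2-digit year><half of the shifted year><1-6 month within that half>.
    month = ((month - 1) + 9) % 12
    return f"{year:02}{month // 6:01}{month % 6 + 1:01}"

def format_serial(platform_code: str, platform_rev: int, serial_letter: str,
                  serial_num: int, append: int, dash: bool = False) -> str:
    # Copy of the f-string used by ArcadeData.format_serial.
    return f"{platform_code}{'-' if dash else ''}{platform_rev:02d}{serial_letter}{serial_num:04d}{append:04d}"

suffix = keychip_suffix(25, 4)                               # April of year 25 -> "2501"
serial = format_serial("A69A", 1, "A", 1, int(suffix), dash=True)
print(serial)                                                # A69A-01A00012501
print(KEYCHIP_RE.fullmatch(serial) is not None)              # True
print(int(suffix[0:2]), int(suffix[2]))                      # 25 0 (what parse_keychip_suffix recovers)
```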
@@ -1,22 +1,24 @@
|
||||
import asyncio
|
||||
import json
|
||||
import logging
|
||||
from random import randrange
|
||||
from typing import Any, Optional, Dict, List
|
||||
from typing import Any, Dict, List, Optional
|
||||
|
||||
from sqlalchemy import Column, MetaData, Table
|
||||
from sqlalchemy.engine import Row
|
||||
from sqlalchemy.engine.cursor import CursorResult
|
||||
from sqlalchemy.engine.base import Connection
|
||||
from sqlalchemy.sql import text, func, select
|
||||
from sqlalchemy.exc import SQLAlchemyError
|
||||
from sqlalchemy import MetaData, Table, Column
|
||||
from sqlalchemy.types import Integer, String, TIMESTAMP, JSON, INTEGER, TEXT
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy.orm import sessionmaker
|
||||
from sqlalchemy.schema import ForeignKey
|
||||
from sqlalchemy.dialects.mysql import insert
|
||||
from sqlalchemy.sql import func, text
|
||||
from sqlalchemy.types import INTEGER, JSON, TEXT, TIMESTAMP, Integer, String
|
||||
|
||||
from core.config import CoreConfig
|
||||
|
||||
metadata = MetaData()
|
||||
|
||||
event_log = Table(
|
||||
event_log: Table = Table(
|
||||
"event_log",
|
||||
metadata,
|
||||
Column("id", Integer, primary_key=True, nullable=False),
|
||||
@@ -37,7 +39,7 @@ event_log = Table(
|
||||
|
||||
|
||||
class BaseData:
|
||||
def __init__(self, cfg: CoreConfig, conn: Connection) -> None:
|
||||
def __init__(self, cfg: CoreConfig, conn: "sessionmaker[AsyncSession]") -> None:
|
||||
self.config = cfg
|
||||
self.conn = conn
|
||||
self.logger = logging.getLogger("database")
|
||||
@@ -45,21 +47,10 @@ class BaseData:
|
||||
async def execute(self, sql: str, opts: Dict[str, Any] = {}) -> Optional[CursorResult]:
|
||||
res = None
|
||||
|
||||
try:
|
||||
self.logger.debug(f"SQL Execute: {''.join(str(sql).splitlines())}")
|
||||
res = self.conn.execute(text(sql), opts)
|
||||
|
||||
except SQLAlchemyError as e:
|
||||
self.logger.error(f"SQLAlchemy error {e}")
|
||||
return None
|
||||
|
||||
except UnicodeEncodeError as e:
|
||||
self.logger.error(f"UnicodeEncodeError error {e}")
|
||||
return None
|
||||
|
||||
except Exception:
|
||||
async with self.conn() as session:
|
||||
try:
|
||||
res = self.conn.execute(sql, opts)
|
||||
self.logger.debug(f"SQL Execute: {''.join(str(sql).splitlines())}")
|
||||
res = await session.execute(text(sql), opts)
|
||||
|
||||
except SQLAlchemyError as e:
|
||||
self.logger.error(f"SQLAlchemy error {e}")
|
||||
@@ -70,8 +61,20 @@ class BaseData:
|
||||
return None
|
||||
|
||||
except Exception:
|
||||
self.logger.error(f"Unknown error")
|
||||
raise
|
||||
try:
|
||||
res = await session.execute(sql, opts)
|
||||
|
||||
except SQLAlchemyError as e:
|
||||
self.logger.error(f"SQLAlchemy error {e}")
|
||||
return None
|
||||
|
||||
except UnicodeEncodeError as e:
|
||||
self.logger.error(f"UnicodeEncodeError error {e}")
|
||||
return None
|
||||
|
||||
except Exception:
|
||||
self.logger.error(f"Unknown error")
|
||||
raise
|
||||
|
||||
return res
|
||||
|
||||
@@ -83,7 +86,7 @@ class BaseData:
|
||||
|
||||
async def log_event(
|
||||
self, system: str, type: str, severity: int, message: str, details: Dict = {}, user: int = None,
|
||||
arcade: int = None, machine: int = None, ip: str = None, game: str = None, version: str = None
|
||||
arcade: int = None, machine: int = None, ip: Optional[str] = None, game: Optional[str] = None, version: Optional[str] = None
|
||||
) -> Optional[int]:
|
||||
sql = event_log.insert().values(
|
||||
system=system,
|
||||
|
||||
@@ -1,13 +1,14 @@
|
||||
from typing import Dict, List, Optional
|
||||
from sqlalchemy import Table, Column, UniqueConstraint
|
||||
from sqlalchemy.types import Integer, String, Boolean, TIMESTAMP, BIGINT, VARCHAR
|
||||
from sqlalchemy.sql.schema import ForeignKey
|
||||
from sqlalchemy.sql import func
|
||||
|
||||
from sqlalchemy import Column, Table, UniqueConstraint
|
||||
from sqlalchemy.engine import Row
|
||||
from sqlalchemy.sql import func
|
||||
from sqlalchemy.sql.schema import ForeignKey
|
||||
from sqlalchemy.types import BIGINT, TIMESTAMP, VARCHAR, Boolean, Integer, String
|
||||
|
||||
from core.data.schema.base import BaseData, metadata
|
||||
|
||||
aime_card = Table(
|
||||
aime_card: Table = Table(
|
||||
"aime_card",
|
||||
metadata,
|
||||
Column("id", Integer, primary_key=True, nullable=False),
|
||||
@@ -122,7 +123,7 @@ class CardData(BaseData):
|
||||
|
||||
result = await self.execute(sql)
|
||||
if result is None:
|
||||
self.logger.warn(f"Failed to update last login time for {access_code}")
|
||||
self.logger.warning(f"Failed to update last login time for {access_code}")
|
||||
|
||||
async def get_card_by_idm(self, idm: str) -> Optional[Row]:
|
||||
result = await self.execute(aime_card.select(aime_card.c.idm == idm))
|
||||
|
||||
@@ -1,15 +1,15 @@
|
||||
from typing import Optional, List
|
||||
from sqlalchemy import Table, Column
|
||||
from sqlalchemy.types import Integer, String, TIMESTAMP
|
||||
from sqlalchemy.sql import func
|
||||
from sqlalchemy.dialects.mysql import insert
|
||||
from sqlalchemy.sql import func, select
|
||||
from sqlalchemy.engine import Row
|
||||
from typing import List, Optional
|
||||
|
||||
import bcrypt
|
||||
from sqlalchemy import Column, Table
|
||||
from sqlalchemy.dialects.mysql import insert
|
||||
from sqlalchemy.engine import Row
|
||||
from sqlalchemy.sql import func, select
|
||||
from sqlalchemy.types import TIMESTAMP, Integer, String
|
||||
|
||||
from core.data.schema.base import BaseData, metadata
|
||||
|
||||
aime_user = Table(
|
||||
aime_user: Table = Table(
|
||||
"aime_user",
|
||||
metadata,
|
||||
Column("id", Integer, nullable=False, primary_key=True, autoincrement=True),
|
||||
@@ -26,10 +26,10 @@ aime_user = Table(
|
||||
class UserData(BaseData):
|
||||
async def create_user(
|
||||
self,
|
||||
id: int = None,
|
||||
username: str = None,
|
||||
email: str = None,
|
||||
password: str = None,
|
||||
id: Optional[int] = None,
|
||||
username: Optional[str] = None,
|
||||
email: Optional[str] = None,
|
||||
password: Optional[str] = None,
|
||||
permission: int = 1,
|
||||
) -> Optional[int]:
|
||||
if id is None:
|
||||
|
||||
258
core/frontend.py
@@ -12,7 +12,6 @@ import jwt
|
||||
import yaml
|
||||
import secrets
|
||||
import string
|
||||
import random
|
||||
from base64 import b64decode
|
||||
from enum import Enum
|
||||
from datetime import datetime, timezone
|
||||
@@ -20,6 +19,11 @@ from os import path, environ, mkdir, W_OK, access
|
||||
|
||||
from core import CoreConfig, Utils
|
||||
from core.data import Data
|
||||
from core.const import AllnetCountryCode
|
||||
|
||||
# A-HJ-NP-Z
|
||||
SERIAL_LETTERS = ['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'J', 'K', 'L', 'M', 'N', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z']
|
||||
ARTEMIS_SERIAL_PREFIX = "A69A"
|
||||
|
||||
class PermissionOffset(Enum):
|
||||
USER = 0 # Regular user
|
||||
@@ -33,8 +37,7 @@ class ShopPermissionOffset(Enum):
|
||||
VIEW = 0 # View info and cabs
|
||||
BOOKKEEP = 1 # View bookkeeping info
|
||||
EDITOR = 2 # Can edit name, settings
|
||||
REGISTRAR = 3 # Can add cabs
|
||||
# 4 - 6 reserved for future use
|
||||
# 3 - 6 reserved for future use
|
||||
OWNER = 7 # Can do anything
|
||||
|
||||
class ShopOwner():
|
||||
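The `permissions` value a shop manager carries is read as a plain bitmask keyed by these offsets (the bit tests appear further down in `FE_Arcade.render_GET`). Below is a minimal sketch of building and testing such a mask; the enum values are copied from above and `has_perm` is only an illustrative helper, not an ARTEMiS function.

```python
from enum import Enum

class ShopPermissionOffset(Enum):   # copied from the hunk above
    VIEW = 0
    BOOKKEEP = 1
    EDITOR = 2
    REGISTRAR = 3
    OWNER = 7

def has_perm(mask: int, perm: ShopPermissionOffset) -> bool:
    # Bit N of the mask is set when the manager holds that ability.
    return bool(mask & (1 << perm.value))

mask = 0
for p in (ShopPermissionOffset.VIEW, ShopPermissionOffset.BOOKKEEP, ShopPermissionOffset.EDITOR):
    mask |= 1 << p.value

print(mask)                                           # 7
print(has_perm(mask, ShopPermissionOffset.BOOKKEEP))  # True
print(has_perm(mask, ShopPermissionOffset.OWNER))     # False
```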
@@ -145,10 +148,13 @@ class FrontendServlet():
|
||||
Mount("/shop", routes=[
|
||||
Route("/", self.arcade.render_GET, methods=['GET']),
|
||||
Route("/{shop_id:int}", self.arcade.render_GET, methods=['GET']),
|
||||
Route("/{shop_id:int}/info.update", self.arcade.update_shop, methods=['POST']),
|
||||
]),
|
||||
Mount("/cab", routes=[
|
||||
Route("/", self.machine.render_GET, methods=['GET']),
|
||||
Route("/{machine_id:int}", self.machine.render_GET, methods=['GET']),
|
||||
Route("/{machine_id:int}/info.update", self.machine.update_cab, methods=['POST']),
|
||||
Route("/{machine_id:int}/reassign", self.machine.reassign_cab, methods=['POST']),
|
||||
]),
|
||||
Mount("/game", routes=g_routes),
|
||||
Route("/robots.txt", self.robots)
|
||||
@@ -435,7 +441,7 @@ class FE_User(FE_Base):
|
||||
|
||||
if user_id:
|
||||
if not self.test_perm(usr_sesh.permissions, PermissionOffset.USERMOD) and user_id != usr_sesh.user_id:
|
||||
self.logger.warn(f"User {usr_sesh.user_id} does not have permission to view user {user_id}")
|
||||
self.logger.warning(f"User {usr_sesh.user_id} does not have permission to view user {user_id}")
|
||||
return RedirectResponse("/user/", 303)
|
||||
|
||||
else:
|
||||
@@ -451,6 +457,16 @@ class FE_User(FE_Base):
|
||||
card_data = []
|
||||
arcade_data = []
|
||||
|
||||
managed_arcades = await self.data.arcade.get_arcades_managed_by_user(user_id)
|
||||
if managed_arcades:
|
||||
for arcade in managed_arcades:
|
||||
ac = await self.data.arcade.get_arcade(arcade['id'])
|
||||
if ac:
|
||||
arcade_data.append({
|
||||
"id": ac['id'],
|
||||
"name": ac['name'],
|
||||
})
|
||||
|
||||
for c in cards:
|
||||
if c['is_locked']:
|
||||
status = 'Locked'
|
||||
@@ -857,14 +873,16 @@ class FE_System(FE_Base):
|
||||
name = frm.get("shopName", None)
|
||||
country = frm.get("shopCountry", "JPN")
|
||||
ip = frm.get("shopIp", None)
|
||||
owner = frm.get("shopOwner", None)
|
||||
|
||||
acid = await self.data.arcade.create_arcade(name if name else None, name if name else None, country)
|
||||
if not acid:
|
||||
return RedirectResponse("/sys/?e=99", 303)
|
||||
|
||||
if ip:
|
||||
# TODO: set IP
|
||||
pass
|
||||
await self.data.arcade.set_arcade_vpn_ip(acid, ip if ip else None)
|
||||
|
||||
if owner:
|
||||
await self.data.arcade.add_arcade_owner(acid, int(owner), 255)
|
||||
|
||||
return Response(template.render(
|
||||
title=f"{self.core_config.server.name} | System",
|
||||
@@ -892,10 +910,17 @@ class FE_System(FE_Base):
|
||||
generated = await self.data.arcade.get_num_generated_keychips()
|
||||
if not generated:
|
||||
generated = 0
|
||||
serial = self.data.arcade.format_serial("A69A", 1, "A", generated + 1, int(append))
|
||||
serial_dash = self.data.arcade.format_serial("A69A", 1, "A", generated + 1, int(append), True)
|
||||
|
||||
rollover = generated // 9999
|
||||
serial_num = (generated % 9999) + 1
|
||||
serial_letter = SERIAL_LETTERS[rollover]
|
||||
|
||||
serial_dash = self.data.arcade.format_serial(ARTEMIS_SERIAL_PREFIX, 1, serial_letter, serial_num, int(append), True)
|
||||
serial = serial_dash.replace("-", "")
|
||||
|
||||
cab_id = await self.data.arcade.create_machine(int(shopid), serial, None, game_code if game_code else None)
|
||||
if cab_id is None:
|
||||
return RedirectResponse("/sys/?e=4", 303)
|
||||
|
||||
return Response(template.render(
|
||||
title=f"{self.core_config.server.name} | System",
|
||||
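The serial counter above wraps every 9999 generated keychips and advances the letter (I and O are skipped, matching the `[A-HJ-NP-Z]` class in the validator). A standalone sketch of that arithmetic, with `next_serial_parts` as an illustrative name only:

```python
# Same list as SERIAL_LETTERS above: A-H, J-N, P-Z.
SERIAL_LETTERS = ['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'J', 'K', 'L', 'M',
                  'N', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z']

def next_serial_parts(generated: int) -> tuple[str, int]:
    # generated == keychips minted so far; the next one gets counter
    # (generated % 9999) + 1 and letter SERIAL_LETTERS[generated // 9999].
    rollover = generated // 9999
    serial_num = (generated % 9999) + 1
    return SERIAL_LETTERS[rollover], serial_num

print(next_serial_parts(0))      # ('A', 1)
print(next_serial_parts(9998))   # ('A', 9999)
print(next_serial_parts(9999))   # ('B', 1)   counter wrapped, letter advanced
```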
@@ -938,15 +963,20 @@ class FE_Arcade(FE_Base):
|
||||
shop_id = request.path_params.get('shop_id', None)
|
||||
|
||||
usr_sesh = self.validate_session(request)
|
||||
if not usr_sesh or not self.test_perm(usr_sesh.permissions, PermissionOffset.ACMOD):
|
||||
self.logger.warn(f"User {usr_sesh.user_id} does not have permission to view shops!")
|
||||
if not usr_sesh:
|
||||
return RedirectResponse("/gate/", 303)
|
||||
|
||||
|
||||
if not shop_id:
|
||||
return Response(template.render(
|
||||
title=f"{self.core_config.server.name} | Arcade",
|
||||
sesh=vars(usr_sesh),
|
||||
), media_type="text/html; charset=utf-8")
|
||||
return Response('Not Found', status_code=404)
|
||||
|
||||
is_acmod = self.test_perm(usr_sesh.permissions, PermissionOffset.ACMOD)
|
||||
if not is_acmod:
|
||||
usr_shop_perm = await self.data.arcade.get_manager_permissions(usr_sesh.user_id, shop_id)
|
||||
if usr_shop_perm is None or usr_shop_perm == 0:
|
||||
self.logger.warning(f"User {usr_sesh.user_id} does not have permission to view shop {shop_id}!")
|
||||
return RedirectResponse("/", 303)
|
||||
else:
|
||||
usr_shop_perm = 15 # view, bookkeep, edit
|
||||
|
||||
sinfo = await self.data.arcade.get_arcade(shop_id)
|
||||
if not sinfo:
|
||||
@@ -965,38 +995,204 @@ class FE_Arcade(FE_Base):
|
||||
"game": x['game'],
|
||||
})
|
||||
|
||||
managers = []
|
||||
if (usr_shop_perm & 1 << ShopPermissionOffset.OWNER.value) or is_acmod:
|
||||
mgrs = await self.data.arcade.get_arcade_owners(sinfo['id'])
|
||||
if mgrs:
|
||||
for mgr in mgrs:
|
||||
usr = await self.data.user.get_user(mgr['user'])
|
||||
managers.append({
|
||||
'user': mgr['user'],
|
||||
'name': usr['username'] if usr['username'] else 'No Name Set',
|
||||
'is_view': bool(mgr['permissions'] & 1 << ShopPermissionOffset.VIEW.value),
|
||||
'is_bookkeep': bool(mgr['permissions'] & 1 << ShopPermissionOffset.BOOKKEEP.value),
|
||||
'is_edit': bool(mgr['permissions'] & 1 << ShopPermissionOffset.EDITOR.value),
|
||||
'is_owner': bool(mgr['permissions'] & 1 << ShopPermissionOffset.OWNER.value),
|
||||
})
|
||||
|
||||
if request.query_params.get("e", None):
|
||||
err = int(request.query_params.get("e"))
|
||||
else:
|
||||
err = 0
|
||||
|
||||
if request.query_params.get("s", None):
|
||||
suc = int(request.query_params.get("s"))
|
||||
else:
|
||||
suc = 0
|
||||
|
||||
return Response(template.render(
|
||||
title=f"{self.core_config.server.name} | Arcade",
|
||||
sesh=vars(usr_sesh),
|
||||
arcade={
|
||||
"name": sinfo['name'],
|
||||
"id": sinfo['id'],
|
||||
"cabs": cablst
|
||||
}
|
||||
|
||||
cablst=cablst,
|
||||
arcade=sinfo._asdict(),
|
||||
can_bookkeep=bool(usr_shop_perm & 1 << ShopPermissionOffset.BOOKKEEP.value) or is_acmod,
|
||||
can_edit=bool(usr_shop_perm & 1 << ShopPermissionOffset.EDITOR.value) or is_acmod,
|
||||
is_owner=usr_shop_perm & 1 << ShopPermissionOffset.OWNER.value,
|
||||
is_acmod=is_acmod,
|
||||
managers=managers,
|
||||
error=err,
|
||||
success=suc
|
||||
), media_type="text/html; charset=utf-8")
|
||||
|
||||
async def update_shop(self, request: Request):
|
||||
shop_id = request.path_params.get('shop_id', None)
|
||||
usr_sesh = self.validate_session(request)
|
||||
if not usr_sesh:
|
||||
return RedirectResponse("/gate/", 303)
|
||||
|
||||
sinfo = await self.data.arcade.get_arcade(shop_id)
|
||||
|
||||
if not shop_id or not sinfo:
|
||||
return RedirectResponse("/", 303)
|
||||
|
||||
if not self.test_perm(usr_sesh.permissions, PermissionOffset.ACMOD):
|
||||
usr_shop_perm = await self.data.arcade.get_manager_permissions(usr_sesh.user_id, sinfo['id'])
|
||||
if usr_shop_perm is None or usr_shop_perm == 0:
|
||||
self.logger.warning(f"User {usr_sesh.user_id} does not have permission to view shop {sinfo['id']}!")
|
||||
return RedirectResponse("/", 303)
|
||||
|
||||
frm = await request.form()
|
||||
new_name = frm.get('name', None)
|
||||
new_nickname = frm.get('nickname', None)
|
||||
new_country = frm.get('country', None)
|
||||
new_region1 = frm.get('region1', None)
|
||||
new_region2 = frm.get('region2', None)
|
||||
new_tz = frm.get('tz', None)
|
||||
new_ip = frm.get('ip', None)
|
||||
|
||||
try:
|
||||
AllnetCountryCode(new_country)
|
||||
except ValueError:
|
||||
new_country = 'JPN'
|
||||
|
||||
did_name = await self.data.arcade.set_arcade_name_nickname(sinfo['id'], new_name if new_name else f'Arcade{sinfo["id"]}', new_nickname if new_nickname else None)
|
||||
did_region = await self.data.arcade.set_arcade_region_info(sinfo['id'], new_country, new_region1 if new_region1 else None, new_region2 if new_region2 else None, None, None)
|
||||
did_timezone = await self.data.arcade.set_arcade_timezone(sinfo['id'], new_tz if new_tz else None)
|
||||
did_vpn = await self.data.arcade.set_arcade_vpn_ip(sinfo['id'], new_ip if new_ip else None)
|
||||
|
||||
if not did_name or not did_region or not did_timezone or not did_vpn:
|
||||
self.logger.error(f"Failed to update some shop into: Name: {did_name} Region: {did_region} TZ: {did_timezone} VPN: {did_vpn}")
|
||||
return RedirectResponse(f"/shop/{shop_id}?e=15", 303)
|
||||
|
||||
return RedirectResponse(f"/shop/{shop_id}?s=1", 303)
|
||||
|
||||
class FE_Machine(FE_Base):
|
||||
async def render_GET(self, request: Request):
|
||||
template = self.environment.get_template("core/templates/machine/index.jinja")
|
||||
cab_id = request.path_params.get('cab_id', None)
|
||||
cab_id = request.path_params.get('machine_id', None)
|
||||
|
||||
usr_sesh = self.validate_session(request)
|
||||
if not usr_sesh or not self.test_perm(usr_sesh.permissions, PermissionOffset.ACMOD):
|
||||
self.logger.warn(f"User {usr_sesh.user_id} does not have permission to view shops!")
|
||||
if not usr_sesh:
|
||||
return RedirectResponse("/gate/", 303)
|
||||
|
||||
cab = await self.data.arcade.get_machine(id=cab_id)
|
||||
|
||||
if not cab_id:
|
||||
return Response(template.render(
|
||||
title=f"{self.core_config.server.name} | Machine",
|
||||
sesh=vars(usr_sesh),
|
||||
), media_type="text/html; charset=utf-8")
|
||||
if not cab_id or not cab:
|
||||
return Response('Not Found', status_code=404)
|
||||
|
||||
shop = await self.data.arcade.get_arcade(cab['arcade'])
|
||||
|
||||
is_acmod = self.test_perm(usr_sesh.permissions, PermissionOffset.ACMOD)
|
||||
if not is_acmod:
|
||||
usr_shop_perm = await self.data.arcade.get_manager_permissions(usr_sesh.user_id, shop['id'])
|
||||
if usr_shop_perm is None or usr_shop_perm == 0:
|
||||
self.logger.warning(f"User {usr_sesh.user_id} does not have permission to view shop {shop['id']}!")
|
||||
return RedirectResponse("/", 303)
|
||||
else:
|
||||
usr_shop_perm = 15 # view, bookkeep, edit
|
||||
|
||||
if request.query_params.get("e", None):
|
||||
err = int(request.query_params.get("e"))
|
||||
else:
|
||||
err = 0
|
||||
|
||||
if request.query_params.get("s", None):
|
||||
suc = int(request.query_params.get("s"))
|
||||
else:
|
||||
suc = 0
|
||||
|
||||
return Response(template.render(
|
||||
title=f"{self.core_config.server.name} | Machine",
|
||||
sesh=vars(usr_sesh),
|
||||
arcade={}
|
||||
arcade=shop._asdict(),
|
||||
machine=cab._asdict(),
|
||||
can_bookkeep=bool(usr_shop_perm & 1 << ShopPermissionOffset.BOOKKEEP.value) or is_acmod,
|
||||
can_edit=bool(usr_shop_perm & 1 << ShopPermissionOffset.EDITOR.value) or is_acmod,
|
||||
is_owner=usr_shop_perm & 1 << ShopPermissionOffset.OWNER.value,
|
||||
is_acmod=is_acmod,
|
||||
error=err,
|
||||
success=suc
|
||||
), media_type="text/html; charset=utf-8")
|
||||
|
||||
async def update_cab(self, request: Request):
|
||||
cab_id = request.path_params.get('machine_id', None)
|
||||
usr_sesh = self.validate_session(request)
|
||||
if not usr_sesh:
|
||||
return RedirectResponse("/gate/", 303)
|
||||
|
||||
cab = await self.data.arcade.get_machine(id=cab_id)
|
||||
|
||||
if not cab_id or not cab:
|
||||
return RedirectResponse("/", 303)
|
||||
|
||||
if not self.test_perm(usr_sesh.permissions, PermissionOffset.ACMOD):
|
||||
usr_shop_perm = await self.data.arcade.get_manager_permissions(usr_sesh.user_id, cab['arcade'])
|
||||
if usr_shop_perm is None or usr_shop_perm == 0:
|
||||
self.logger.warning(f"User {usr_sesh.user_id} does not have permission to view shop {cab['arcade']}!")
|
||||
return RedirectResponse("/", 303)
|
||||
|
||||
frm = await request.form()
|
||||
new_game = frm.get('game', None)
|
||||
new_country = frm.get('country', None)
|
||||
new_tz = frm.get('tz', None)
|
||||
new_is_cab = frm.get('is_cab', False) == 'on'
|
||||
new_is_ota = frm.get('is_ota', False) == 'on'
|
||||
new_memo = frm.get('memo', None)
|
||||
|
||||
try:
|
||||
AllnetCountryCode(new_country)
|
||||
except ValueError:
|
||||
new_country = None
|
||||
|
||||
did_game = await self.data.arcade.set_machine_game(cab['id'], new_game if new_game else None)
|
||||
did_country = await self.data.arcade.set_machine_country(cab['id'], new_country if new_country else None)
|
||||
did_timezone = await self.data.arcade.set_machine_timezone(cab['id'], new_tz if new_tz else None)
|
||||
did_real_cab = await self.data.arcade.set_machine_real_cabinet(cab['id'], new_is_cab)
|
||||
did_ota = await self.data.arcade.set_machine_can_ota(cab['id'], new_is_ota)
|
||||
did_memo = await self.data.arcade.set_machine_memo(cab['id'], new_memo if new_memo else None)
|
||||
|
||||
if not did_game or not did_country or not did_timezone or not did_real_cab or not did_ota or not did_memo:
|
||||
self.logger.error(f"Failed to update some shop into: Game: {did_game} Country: {did_country} TZ: {did_timezone} Real: {did_real_cab} OTA: {did_ota} Memo: {did_memo}")
|
||||
return RedirectResponse(f"/cab/{cab['id']}?e=15", 303)
|
||||
|
||||
return RedirectResponse(f"/cab/{cab_id}?s=1", 303)
|
||||
|
||||
async def reassign_cab(self, request: Request):
|
||||
cab_id = request.path_params.get('machine_id', None)
|
||||
usr_sesh = self.validate_session(request)
|
||||
if not usr_sesh:
|
||||
return RedirectResponse("/gate/", 303)
|
||||
|
||||
cab = await self.data.arcade.get_machine(id=cab_id)
|
||||
|
||||
if not cab_id or not cab:
|
||||
return RedirectResponse("/", 303)
|
||||
|
||||
frm = await request.form()
|
||||
new_shop = frm.get('new_arcade', None)
|
||||
|
||||
if not self.test_perm(usr_sesh.permissions, PermissionOffset.ACMOD):
|
||||
self.logger.warning(f"User {usr_sesh.user_id} does not have permission to reassign cab {cab['id']} to arcade !")
|
||||
return RedirectResponse(f"/cab/{cab_id}?e=11", 303)
|
||||
|
||||
new_sinfo = await self.data.arcade.get_arcade(new_shop)
|
||||
if not new_sinfo:
|
||||
return RedirectResponse(f"/cab/{cab_id}?e=14", 303)
|
||||
|
||||
if not await self.data.arcade.set_machine_arcade(cab['id'], new_sinfo['id']):
|
||||
return RedirectResponse(f"/cab/{cab_id}?e=99", 303)
|
||||
|
||||
return RedirectResponse(f"/cab/{cab_id}?s=2", 303)
|
||||
|
||||
cfg_dir = environ.get("ARTEMIS_CFG_DIR", "config")
|
||||
cfg: CoreConfig = CoreConfig()
|
||||
|
||||
@@ -64,7 +64,7 @@ class MuchaServlet:
|
||||
self.logger.debug(f"Mucha request {vars(req)}")
|
||||
|
||||
if not req.gameCd or not req.gameVer or not req.sendDate or not req.countryCd or not req.serialNum:
|
||||
self.logger.warn(f"Missing required fields - {vars(req)}")
|
||||
self.logger.warning(f"Missing required fields - {vars(req)}")
|
||||
return PlainTextResponse("RESULTS=000")
|
||||
|
||||
minfo = self.mucha_registry.get(req.gameCd, {})
|
||||
@@ -133,7 +133,7 @@ class MuchaServlet:
|
||||
self.logger.info(f"Allow unknown serial {netid} ({sn_decrypt}) to auth")
|
||||
|
||||
else:
|
||||
self.logger.warn(f'Auth failed for NetID {netid}')
|
||||
self.logger.warning(f'Auth failed for NetID {netid}')
|
||||
return PlainTextResponse("RESULTS=000")
|
||||
|
||||
self.logger.debug(f"Mucha response {vars(resp)}")
|
||||
|
||||
@@ -2,18 +2,104 @@
|
||||
{% block content %}
|
||||
{% if arcade is defined %}
|
||||
<h1>{{ arcade.name }}</h1>
|
||||
<h2>PCBs assigned to this arcade <button class="btn btn-success" id="btn_add_cab" onclick="toggle_add_cab_form()">Add</button></h2>
|
||||
<h2>Assigned Machines</h2>
|
||||
{% if success is defined and success == 3 %}
|
||||
<div style="background-color: #00AA00; padding: 20px; margin-bottom: 10px; width: 15%;">
|
||||
Cab added successfully
|
||||
</div>
|
||||
{% endif %}
|
||||
{% if success is defined and success == 1 %}
|
||||
<div style="background-color: #00AA00; padding: 20px; margin-bottom: 10px; width: 15%;">
|
||||
Info Updated
|
||||
</div>
|
||||
{% endif %}
|
||||
{% include "core/templates/widgets/err_banner.jinja" %}
|
||||
<ul style="font-size: 20px;">
|
||||
{% for c in arcade.cabs %}
|
||||
<li><a href="/cab/{{ c.id }}">{{ c.serial }}</a> ({{ c.game if c.game else "Any" }}) <button class="btn btn-secondary" onclick="prep_edit_form()">Edit</button> <button class="btn-danger btn">Delete</button></li>
|
||||
{% for c in cablst %}
|
||||
<li><a href="/cab/{{ c.id }}">{{ c.serial }}</a> ({{ c.game if c.game else "Any" }})</li>
|
||||
{% endfor %}
|
||||
</ul>
|
||||
Info
|
||||
<form style="max-width: 50%;" action="/shop/{{ arcade.id }}/info.update" method="post" id="shop_info">
|
||||
<div class="row">
|
||||
<div class="col mb-3">
|
||||
<label for="name" class="form-label">Name</label>
|
||||
<input type="text" class="form-control" id="name" name="name" maxlength="255" value="{{ arcade.name if arcade.name is not none else "" }}">
|
||||
</div>
|
||||
<div class="col mb-3">
|
||||
<label for="nickname" class="form-label">Nickname</label>
|
||||
<input type="text" class="form-control" id="nickname" name="nickname" maxlength="255" value="{{ arcade.nickname if arcade.nickname is not none else "" }}">
|
||||
</div>
|
||||
</div>
|
||||
<div class="row">
|
||||
<div class="col mb-3">
|
||||
<label for="country" class="form-label">Country</label>
|
||||
<select id="country" name="country" class="form-select bg-dark text-white">
|
||||
<option value="JPN" {{ 'selected' if arcade.country == 'JPN' else ''}}>Japan</option>
|
||||
<option value="USA" {{ 'selected' if arcade.country == 'USA' else ''}}>USA</option>
|
||||
<option value="HKG" {{ 'selected' if arcade.country == 'HKG' else ''}}>Hong Kong</option>
|
||||
<option value="SGP" {{ 'selected' if arcade.country == 'SGP' else ''}}>Singapore</option>
|
||||
<option value="KOR" {{ 'selected' if arcade.country == 'KOR' else ''}}>South Korea</option>
|
||||
<option value="TWN" {{ 'selected' if arcade.country == 'TWN' else ''}}>Taiwan</option>
|
||||
<option value="CHN" {{ 'selected' if arcade.country == 'CHN' else ''}}>China</option>
|
||||
<option value="AUS" {{ 'selected' if arcade.country == 'AUS' else ''}}>Australia</option>
|
||||
<option value="IDN" {{ 'selected' if arcade.country == 'IDN' else ''}}>Indonesia</option>
|
||||
<option value="MMR" {{ 'selected' if arcade.country == 'MMR' else ''}}>Myanmar</option>
|
||||
<option value="MYS" {{ 'selected' if arcade.country == 'MYS' else ''}}>Malaysia</option>
|
||||
<option value="NZL" {{ 'selected' if arcade.country == 'NZL' else ''}}>New Zealand</option>
|
||||
<option value="PHL" {{ 'selected' if arcade.country == 'PHL' else ''}}>Philippines</option>
|
||||
<option value="THA" {{ 'selected' if arcade.country == 'THA' else ''}}>Thailand</option>
|
||||
<option value="VNM" {{ 'selected' if arcade.country == 'VNM' else ''}}>Vietnam</option>
|
||||
</select>
|
||||
</div>
|
||||
<div class="col mb-3">
|
||||
<label for="region1" class="form-label">Region 1</label>
|
||||
<input type="text" class="form-control" id="region1" name="region1" maxlength="255" value="{{ arcade.state if arcade.state is not none else "" }}">
|
||||
</div>
|
||||
<div class="col mb-3">
|
||||
<label for="region2" class="form-label">Region 2</label>
|
||||
<input type="text" class="form-control" id="region2" name="region2" maxlength="255" value="{{ arcade.city if arcade.city is not none else "" }}">
|
||||
</div>
|
||||
<div class="col mb-3">
|
||||
<label for="tz" class="form-label">Timezone</label>
|
||||
<input type="text" class="form-control" id="tz" name="tz" placeholder="+09:00" maxlength="255" value="{{ arcade.timezone if arcade.timezone is not none else "" }}">
|
||||
</div>
|
||||
</div>
|
||||
<div class="row">
|
||||
<div class="col mb-3">
|
||||
<label for="ip" class="form-label">VPN IP</label>
|
||||
<input type="text" class="form-control" id="ip" name="ip" maxlength="39" value="{{ arcade.ip if arcade.ip is not none else "" }}">
|
||||
</div>
|
||||
</div>
|
||||
{% if can_edit %}
|
||||
<div class="row">
|
||||
<div class="col mb-3">
|
||||
<input type="submit" value="Update" class="btn btn-primary">
|
||||
</div>
|
||||
</div>
|
||||
{% endif %}
|
||||
</form>
|
||||
{% if is_owner or is_acmod %}
|
||||
<br>
|
||||
<h2>Arcade Managers <button type="button" class="btn btn-success">Add</button></h2>
|
||||
<ul style="font-size: 20px;">
|
||||
{% for u in managers %}
|
||||
<li>{{ u.name }}:
|
||||
<label for="is_view_{{ u.user }}" class="form-label">View Arcade</label>
|
||||
<input type="checkbox" class="form-control-check" id="is_view_{{ u.user }}" name="is_view" {{ 'checked' if u.is_view else ''}}> |
|
||||
<label for="is_bookkeep_{{ u.user }}" class="form-label">View Bookkeeping</label>
|
||||
<input type="checkbox" class="form-control-check" id="is_bookkeep_{{ u.user }}" name="is_bookkeep" {{ 'checked' if u.is_bookkeep else ''}}> |
|
||||
<label for="is_edit_{{ u.user }}" class="form-label">Edit Arcade</label>
|
||||
<input type="checkbox" class="form-control-check" id="is_edit_{{ u.user }}" name="is_edit" {{ 'checked' if u.is_edit else ''}}> |
|
||||
<label for="is_owner_{{ u.user }}" class="form-label">Owner</label>
|
||||
<input type="checkbox" class="form-control-check" id="is_owner_{{ u.user }}" name="is_owner" {{ 'checked' if u.is_owner else ''}}> |
|
||||
<button type="submit" class="btn btn-primary">Update</button>
|
||||
<button type="button" class="btn btn-danger">Delete</button>
|
||||
</li>
|
||||
{% endfor %}
|
||||
</ul>
|
||||
{% endif %}
|
||||
{% else %}
|
||||
<h3>Arcade Not Found</h3>
|
||||
{% endif %}
|
||||
{% endblock content %}
|
||||
{% endblock content %}
|
||||
|
||||
@@ -1,4 +1,113 @@
|
||||
{% extends "core/templates/index.jinja" %}
|
||||
{% block content %}
|
||||
<h1>Machine Management</h1>
|
||||
{% endblock content %}
|
||||
<script type="text/javascript">
|
||||
function swap_ota() {
|
||||
let is_cab = document.getElementById("is_cab").checked;
|
||||
let cbx_ota = document.getElementById("is_ota");
|
||||
|
||||
cbx_ota.disabled = !is_cab;
|
||||
|
||||
if (cbx_ota.disabled) {
|
||||
cbx_ota.checked = false;
|
||||
}
|
||||
}
|
||||
</script>
|
||||
<h1>Machine: {{machine.serial}}</h1>
|
||||
<h3>Arcade: <a href=/shop/{{ arcade.id }}>{{ arcade.name }}</a>{% if is_acmod %} <button class="btn btn-danger" data-bs-toggle="modal" data-bs-target="#reassign_modal">Reassign</button>{% endif %}</h3>
|
||||
{% include "core/templates/widgets/err_banner.jinja" %}
|
||||
{% if success is defined and success == 1 %}
|
||||
<div style="background-color: #00AA00; padding: 20px; margin-bottom: 10px; width: 15%;">
|
||||
Info Updated
|
||||
</div>
|
||||
{% endif %}
|
||||
{% if success is defined and success == 2 %}
|
||||
<div style="background-color: #00AA00; padding: 20px; margin-bottom: 10px; width: 15%;">
|
||||
Machine successfully reassigned
|
||||
</div>
|
||||
{% endif %}
|
||||
Info
|
||||
<form style="max-width: 50%;" action="/cab/{{ machine.id }}/info.update" method="post" id="mech_info">
|
||||
<div class="row">
|
||||
<div class="col mb-3">
|
||||
<label for="game" class="form-label">Game</label>
|
||||
<input type="text" class="form-control" id="game" name="game" placeholder="SXXX" maxlength="4" value="{{ machine.game if machine.game is not none else "" }}">
|
||||
</div>
|
||||
<div class="col mb-3">
|
||||
<label for="country" class="form-label">Country Override</label>
|
||||
<select id="country" name="country" class="form-select bg-dark text-white">
|
||||
<option value="" {{ 'selected' if machine.country is none else ''}}>Same As Arcade</option>
|
||||
<option value="JPN" {{ 'selected' if machine.country == 'JPN' else ''}}>Japan</option>
|
||||
<option value="USA" {{ 'selected' if machine.country == 'USA' else ''}}>USA</option>
|
||||
<option value="HKG" {{ 'selected' if machine.country == 'HKG' else ''}}>Hong Kong</option>
|
||||
<option value="SGP" {{ 'selected' if machine.country == 'SGP' else ''}}>Singapore</option>
|
||||
<option value="KOR" {{ 'selected' if machine.country == 'KOR' else ''}}>South Korea</option>
|
||||
<option value="TWN" {{ 'selected' if machine.country == 'TWN' else ''}}>Taiwan</option>
|
||||
<option value="CHN" {{ 'selected' if machine.country == 'CHN' else ''}}>China</option>
|
||||
<option value="AUS" {{ 'selected' if machine.country == 'AUS' else ''}}>Australia</option>
|
||||
<option value="IDN" {{ 'selected' if machine.country == 'IDN' else ''}}>Indonesia</option>
|
||||
<option value="MMR" {{ 'selected' if machine.country == 'MMR' else ''}}>Myanmar</option>
|
||||
<option value="MYS" {{ 'selected' if machine.country == 'MYS' else ''}}>Malaysia</option>
|
||||
<option value="NZL" {{ 'selected' if machine.country == 'NZL' else ''}}>New Zealand</option>
|
||||
<option value="PHL" {{ 'selected' if machine.country == 'PHL' else ''}}>Philippines</option>
|
||||
<option value="THA" {{ 'selected' if machine.country == 'THA' else ''}}>Thailand</option>
|
||||
<option value="VNM" {{ 'selected' if machine.country == 'VNM' else ''}}>Vietnam</option>
|
||||
</select>
|
||||
</div>
|
||||
<div class="col mb-3">
|
||||
<label for="tz" class="form-label">Timezone Override</label>
|
||||
<input type="text" class="form-control" id="tz" name="tz" placeholder="+09:00" maxlength="6" value="{{ machine.timezone if machine.timezone is not none else "" }}">
|
||||
</div>
|
||||
</div>
|
||||
<div class="row">
|
||||
<div class="col mb-3">
|
||||
<input type="checkbox" class="form-control-check" id="is_cab" name="is_cab" {{ 'checked' if machine.is_cab else ''}} onchange="swap_ota()">
|
||||
<label for="is_cab" class="form-label">Real Cabinet</label>
|
||||
</div>
|
||||
<div class="col mb-3">
|
||||
<input type="checkbox" class="form-control-check" id="is_ota" name="is_ota" {{ 'checked' if machine.ota_enable else ''}}>
|
||||
<label for="is_ota" class="form-label">Allow OTA updates</label>
|
||||
</div>
|
||||
<div class="col mb-3">
|
||||
</div>
|
||||
</div>
|
||||
<div class="row">
|
||||
<div class="col mb-3">
|
||||
<label for="memo" class="form-label">Memo</label>
|
||||
<input type="text" class="form-control" id="memo" name="memo" maxlength="255" value="{{ machine.memo if machine.memo is not none else "" }}">
|
||||
</div>
|
||||
</div>
|
||||
{% if can_edit %}
|
||||
<div class="row">
|
||||
<div class="col mb-3">
|
||||
<input type="submit" value="Update" class="btn btn-primary">
|
||||
</div>
|
||||
</div>
|
||||
{% endif %}
|
||||
</form>
|
||||
{% if is_acmod %}
|
||||
<form id="frm_reassign" method="post" action="/cab/{{ machine.id }}/reassign" style="outline: 0px;">
|
||||
<div class="modal" tabindex="-1" id="reassign_modal">
|
||||
<div class="modal-dialog">
|
||||
<div class="modal-content">
|
||||
<div class="modal-header">
|
||||
<h5 class="modal-title">Reassign {{ machine.serial }}</h5>
|
||||
<button type="button" class="btn-close" data-bs-dismiss="modal" aria-label="Close"></button>
|
||||
</div>
|
||||
<div class="modal-body">
|
||||
<p>This will reassign this cabinet from the current arcade "{{ arcade.name }}" to the arcade whose ID you enter below.</p>
|
||||
<label for="new_arcade" class="form-label">New Arcade</label>
|
||||
<input type="text" class="form-control" id="new_arcade" name="new_arcade" value="{{ arcade.id }}">
|
||||
</div>
|
||||
<div class="modal-footer">
|
||||
<button type="button" class="btn btn-secondary" data-bs-dismiss="modal">Close</button>
|
||||
<button type="submit" class="btn btn-primary">Save changes</button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</form>
|
||||
{% endif %}
|
||||
<script type="text/javascript">
|
||||
swap_ota();
|
||||
</script>
|
||||
{% endblock content %}
|
||||
|
||||
@@ -137,6 +137,11 @@
|
||||
<input type="text" class="form-control" id="shopIp" name="shopIp">
|
||||
</div>
|
||||
<br />
|
||||
<div class="form-group">
|
||||
<label for="shopOwner">Owner User ID</label>
|
||||
<input type="text" class="form-control" id="shopOwner" name="shopOwner">
|
||||
</div>
|
||||
<br />
|
||||
<button type="submit" class="btn btn-primary">Add</button>
|
||||
</form>
|
||||
</div>
|
||||
|
||||
@@ -159,19 +159,10 @@ Update successful
|
||||
</form>
|
||||
|
||||
{% if arcades is defined and arcades|length > 0 %}
|
||||
<h2>Arcades</h2>
|
||||
<h2>Arcades you manage</h2>
|
||||
<ul>
|
||||
{% for a in arcades %}
|
||||
<li><h3>{{ a.name }}</h3>
|
||||
{% if a.machines|length > 0 %}
|
||||
<table>
|
||||
<tr><th>Serial</th><th>Game</th><th>Last Seen</th></tr>
|
||||
{% for m in a.machines %}
|
||||
<tr><td>{{ m.serial }}</td><td>{{ m.game }}</td><td>{{ m.last_seen }}</td></tr>
|
||||
{% endfor %}
|
||||
</table>
|
||||
{% endif %}
|
||||
</li>
|
||||
<li><h3><a href=/shop/{{a.id}}>{{ a.name }}</a></h3></li>
|
||||
{% endfor %}
|
||||
</ul>
|
||||
{% endif %}
|
||||
|
||||
@@ -27,6 +27,10 @@ Access Denied
|
||||
Card already registered
|
||||
{% elif error == 13 %}
|
||||
AmusementIC Access Codes beginning with 5 must have IDm
|
||||
{% elif error == 14 %}
|
||||
Arcade does not exist
|
||||
{% elif error == 15 %}
|
||||
Some info failed to update
|
||||
{% else %}
|
||||
An unknown error occurred
|
||||
{% endif %}
|
||||
|
||||
@@ -86,11 +86,11 @@ class BaseServlet:
|
||||
return (False, [], [])
|
||||
|
||||
async def render_POST(self, request: Request) -> bytes:
|
||||
self.logger.warn(f"Game Does not dispatch POST")
|
||||
self.logger.warning(f"Game Does not dispatch POST")
|
||||
return Response()
|
||||
|
||||
async def render_GET(self, request: Request) -> bytes:
|
||||
self.logger.warn(f"Game Does not dispatch GET")
|
||||
self.logger.warning(f"Game Does not dispatch GET")
|
||||
return Response()
|
||||
|
||||
class TitleServlet:
|
||||
|
||||
107
core/utils.py
@@ -1,18 +1,48 @@
|
||||
from typing import Dict, Any, Optional
|
||||
from types import ModuleType
|
||||
from starlette.requests import Request
|
||||
import logging
|
||||
import importlib
|
||||
from os import walk
|
||||
import jwt
|
||||
import logging
|
||||
from base64 import b64decode
|
||||
from datetime import datetime, timezone
|
||||
from os import walk
|
||||
from types import ModuleType
|
||||
from typing import Any, Dict, Optional
|
||||
import math
|
||||
|
||||
import jwt
|
||||
from starlette.requests import Request
|
||||
|
||||
from .config import CoreConfig
|
||||
|
||||
|
||||
class _MissingSentinel:
|
||||
__slots__: tuple[str, ...] = ()
|
||||
|
||||
def __eq__(self, other) -> bool:
|
||||
return False
|
||||
|
||||
def __bool__(self) -> bool:
|
||||
return False
|
||||
|
||||
def __hash__(self) -> int:
|
||||
return 0
|
||||
|
||||
def __repr__(self):
|
||||
return "..."
|
||||
|
||||
|
||||
MISSING: Any = _MissingSentinel()
|
||||
"""This is different from `None` in that its type is `Any`, and so it can be used
|
||||
as a placeholder for values that are *definitely* going to be initialized,
|
||||
so they don't have to be typed as `T | None`, which makes type checkers
|
||||
angry when an attribute is accessed.
|
||||
|
||||
This can also be used for when `None` has actual meaning as a value, and so a
|
||||
separate value is needed to mean "unset"."""
|
||||
|
||||
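The distinction the docstring describes is easiest to see with a tiny hypothetical helper where `None` is a real value ("clear this field") and `MISSING` means "the caller never passed it". The sketch assumes the `MISSING` sentinel defined above is in scope; `apply_memo_update` is not an ARTEMiS function.

```python
from typing import Any, Optional

def apply_memo_update(current: Optional[str], memo: Any = MISSING) -> Optional[str]:
    # MISSING (the default) -> caller didn't touch the field, keep what we have.
    # Explicit None          -> caller wants the memo cleared.
    if memo is MISSING:
        return current
    return memo

print(apply_memo_update("old note"))          # old note  (unchanged)
print(apply_memo_update("old note", None))    # None      (explicitly cleared)
print(apply_memo_update("old note", "new"))   # new
```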
|
||||
class Utils:
|
||||
real_title_port = None
|
||||
real_title_port_ssl = None
|
||||
|
||||
@classmethod
|
||||
def get_all_titles(cls) -> Dict[str, ModuleType]:
|
||||
ret: Dict[str, Any] = {}
|
||||
@@ -36,27 +66,58 @@ class Utils:
|
||||
def get_ip_addr(cls, req: Request) -> str:
|
||||
ip = req.headers.get("x-forwarded-for", req.client.host)
|
||||
return ip.split(", ")[0]
|
||||
|
||||
|
||||
@classmethod
|
||||
def get_title_port(cls, cfg: CoreConfig):
|
||||
if cls.real_title_port is not None: return cls.real_title_port
|
||||
if cls.real_title_port is not None:
|
||||
return cls.real_title_port
|
||||
|
||||
cls.real_title_port = (
|
||||
cfg.server.proxy_port
|
||||
if cfg.server.is_using_proxy and cfg.server.proxy_port
|
||||
else cfg.server.port
|
||||
)
|
||||
|
||||
cls.real_title_port = cfg.server.proxy_port if cfg.server.is_using_proxy and cfg.server.proxy_port else cfg.server.port
|
||||
|
||||
return cls.real_title_port
|
||||
|
||||
|
||||
@classmethod
|
||||
def get_title_port_ssl(cls, cfg: CoreConfig):
|
||||
if cls.real_title_port_ssl is not None: return cls.real_title_port_ssl
|
||||
if cls.real_title_port_ssl is not None:
|
||||
return cls.real_title_port_ssl
|
||||
|
||||
cls.real_title_port_ssl = (
|
||||
cfg.server.proxy_port_ssl
|
||||
if cfg.server.is_using_proxy and cfg.server.proxy_port_ssl
|
||||
else 443
|
||||
)
|
||||
|
||||
cls.real_title_port_ssl = cfg.server.proxy_port_ssl if cfg.server.is_using_proxy and cfg.server.proxy_port_ssl else 443
|
||||
|
||||
return cls.real_title_port_ssl
|
||||
|
||||
def create_sega_auth_key(aime_id: int, game: str, place_id: int, keychip_id: str, b64_secret: str, exp_seconds: int = 86400, err_logger: str = 'aimedb') -> Optional[str]:
|
||||
def floor_to_nearest_005(version: int) -> int:
|
||||
return (version // 5) * 5
|
||||
|
||||
def create_sega_auth_key(
|
||||
aime_id: int,
|
||||
game: str,
|
||||
place_id: int,
|
||||
keychip_id: str,
|
||||
b64_secret: str,
|
||||
exp_seconds: int = 86400,
|
||||
err_logger: str = "aimedb",
|
||||
) -> Optional[str]:
|
||||
logger = logging.getLogger(err_logger)
|
||||
try:
|
||||
return jwt.encode({ "aime_id": aime_id, "game": game, "place_id": place_id, "keychip_id": keychip_id, "exp": int(datetime.now(tz=timezone.utc).timestamp()) + exp_seconds }, b64decode(b64_secret), algorithm="HS256")
|
||||
return jwt.encode(
|
||||
{
|
||||
"aime_id": aime_id,
|
||||
"game": game,
|
||||
"place_id": place_id,
|
||||
"keychip_id": keychip_id,
|
||||
"exp": int(datetime.now(tz=timezone.utc).timestamp()) + exp_seconds,
|
||||
},
|
||||
b64decode(b64_secret),
|
||||
algorithm="HS256",
|
||||
)
|
||||
except jwt.InvalidKeyError:
|
||||
logger.error("Failed to encode Sega Auth Key because the secret is invalid!")
|
||||
return None
|
||||
@@ -64,10 +125,19 @@ def create_sega_auth_key(aime_id: int, game: str, place_id: int, keychip_id: str
|
||||
logger.error(f"Unknown exception occoured when encoding Sega Auth Key! {e}")
|
||||
return None
|
||||
|
||||
def decode_sega_auth_key(token: str, b64_secret: str, err_logger: str = 'aimedb') -> Optional[Dict]:
|
||||
|
||||
def decode_sega_auth_key(
|
||||
token: str, b64_secret: str, err_logger: str = "aimedb"
|
||||
) -> Optional[Dict]:
|
||||
logger = logging.getLogger(err_logger)
|
||||
try:
|
||||
return jwt.decode(token, "secret", b64decode(b64_secret), algorithms=["HS256"], options={"verify_signature": True})
|
||||
return jwt.decode(
|
||||
token,
|
||||
"secret",
|
||||
b64decode(b64_secret),
|
||||
algorithms=["HS256"],
|
||||
options={"verify_signature": True},
|
||||
)
|
||||
except jwt.ExpiredSignatureError:
|
||||
logger.error("Sega Auth Key failed to validate due to an expired signature!")
|
||||
return None
|
||||
@@ -83,4 +153,3 @@ def decode_sega_auth_key(token: str, b64_secret: str, err_logger: str = 'aimedb'
|
||||
except Exception as e:
|
||||
logger.error(f"Unknown exception occoured when decoding Sega Auth Key! {e}")
|
||||
return None
|
||||
|
||||
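A round-trip through these helpers might look like the sketch below. The import path, IDs, game code, and keychip serial are placeholders rather than values from this PR, and the secret is throwaway; a real server reads it from `core.yaml` as `id_secret`.

```python
import secrets
from base64 import b64encode

from core.utils import create_sega_auth_key, decode_sega_auth_key  # assumed import path

# Throwaway 32-byte secret, base64 encoded the same way id_secret is stored.
b64_secret = b64encode(secrets.token_bytes(32)).decode()

token = create_sega_auth_key(
    aime_id=1,
    game="SDBT",
    place_id=1,
    keychip_id="A69A01A00010001",
    b64_secret=b64_secret,
)
if token:
    claims = decode_sega_auth_key(token, b64_secret)
    print(claims)  # {'aime_id': 1, 'game': 'SDBT', 'place_id': 1, 'keychip_id': ..., 'exp': ...}
```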
30
dbutils.py
@@ -1,14 +1,15 @@
|
||||
#!/usr/bin/env python3
|
||||
import argparse
|
||||
import logging
|
||||
from os import mkdir, path, access, W_OK
|
||||
import yaml
|
||||
import asyncio
|
||||
import logging
|
||||
from os import W_OK, access, environ, mkdir, path
|
||||
|
||||
import yaml
|
||||
|
||||
from core.data import Data
|
||||
from core.config import CoreConfig
|
||||
from core.data import Data
|
||||
|
||||
if __name__ == "__main__":
|
||||
async def main():
|
||||
parser = argparse.ArgumentParser(description="Database utilities")
|
||||
parser.add_argument(
|
||||
"--config", "-c", type=str, help="Config folder to use", default="config"
|
||||
@@ -25,10 +26,11 @@ if __name__ == "__main__":
|
||||
parser.add_argument("action", type=str, help="create, upgrade, downgrade, create-owner, migrate, create-revision, create-autorevision")
|
||||
args = parser.parse_args()
|
||||
|
||||
environ["ARTEMIS_CFG_DIR"] = args.config
|
||||
|
||||
cfg = CoreConfig()
|
||||
if path.exists(f"{args.config}/core.yaml"):
|
||||
cfg_dict = yaml.safe_load(open(f"{args.config}/core.yaml"))
|
||||
cfg_dict.get("database", {})["loglevel"] = "info"
|
||||
cfg.update(cfg_dict)
|
||||
|
||||
if not path.exists(cfg.server.log_dir):
|
||||
@@ -42,10 +44,8 @@ if __name__ == "__main__":
|
||||
|
||||
data = Data(cfg)
|
||||
|
||||
loop = asyncio.get_event_loop()
|
||||
|
||||
if args.action == "create":
|
||||
data.create_database()
|
||||
await data.create_database()
|
||||
|
||||
elif args.action == "upgrade":
|
||||
data.schema_upgrade(args.version)
|
||||
@@ -57,16 +57,20 @@ if __name__ == "__main__":
|
||||
data.schema_downgrade(args.version)
|
||||
|
||||
elif args.action == "create-owner":
|
||||
loop.run_until_complete(data.create_owner(args.email, args.access_code))
|
||||
await data.create_owner(args.email, args.access_code)
|
||||
|
||||
elif args.action == "migrate":
|
||||
loop.run_until_complete(data.migrate())
|
||||
await data.migrate()
|
||||
|
||||
elif args.action == "create-revision":
|
||||
loop.run_until_complete(data.create_revision(args.message))
|
||||
await data.create_revision(args.message)
|
||||
|
||||
elif args.action == "create-autorevision":
|
||||
loop.run_until_complete(data.create_revision_auto(args.message))
|
||||
await data.create_revision_auto(args.message)
|
||||
|
||||
else:
|
||||
logging.getLogger("database").info(f"Unknown action {args.action}")
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
asyncio.run(main())
|
||||
|
||||
@@ -26,6 +26,7 @@
|
||||
- `name`: Name of the database the server should expect. Default `aime`
|
||||
- `port`: Port the database server is listening on. Default `3306`
|
||||
- `protocol`: Protocol used in the connection string, i.e. `mysql` would result in `mysql://...` (see the sketch after this list). Default `mysql`
|
||||
- `ssl_enabled`: Enforce SSL to be used in the connection string. Default `False`
|
||||
- `sha2_password`: Whether or not the password in the connection string should be hashed via SHA2. Default `False`
|
||||
- `loglevel`: Logging level for the database. Default `info`
|
||||
- `memcached_host`: Host of the memcached server. Default `localhost`
|
||||
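Roughly speaking, these options are assembled into a database URL of the following shape. This is only an illustration of how the pieces relate: the driver suffix and any extra query arguments ARTEMiS appends (for `ssl_enabled`, `sha2_password`, etc.) are not documented here, and the username/password values below are placeholders.

```python
protocol, username, password = "mysql", "aime", "changeme"   # placeholders
host, port, name = "localhost", 3306, "aime"                 # defaults listed above

url = f"{protocol}://{username}:{password}@{host}:{port}/{name}"
print(url)   # mysql://aime:changeme@localhost:3306/aime
```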
@@ -40,6 +41,13 @@
|
||||
- `loglevel`: Logging level for the allnet server. Default `info`
|
||||
- `allow_online_updates`: Allow allnet to distribute online updates via DownloadOrders. This system is currently non-functional, so leave it disabled. Default `False`
|
||||
- `update_cfg_folder`: Folder where delivery INI files will be checked for. Ignored if `allow_online_updates` is `False`. Default `""`
|
||||
- `allnet_lite_keys`: Allnet Lite (Chinese Allnet) PowerOn/DownloadOrder unique keys. Default ` `
|
||||
```yaml
|
||||
allnet_lite_keys:
|
||||
"SDJJ": [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
|
||||
"SDHJ": [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
|
||||
"SDGB": [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
|
||||
```
|
||||
## Billing
|
||||
- `standalone`: Whether the billing server should launch its own servlet on its own port, or be part of the main servlet on the default port. Setting this to `True` requires that you have `ssl_key` and `ssl_cert` set. Default `False`
|
||||
- `loglevel`: Logging level for the billing server. Default `info`
|
||||
@@ -55,3 +63,8 @@
|
||||
- `key`: Key to encrypt/decrypt aimedb requests and responses. MUST be set or the server will not start. If set incorrectly, your server will not properly handle aimedb requests. Default `""`
|
||||
- `id_secret`: Base64-encoded JWT secret for Sega Auth IDs. Leaving this blank disables this feature; see the sketch after this list for one way to generate a value. Default `""`
|
||||
- `id_lifetime_seconds`: Number of seconds a generated JWT should be valid for. Default `86400` (1 day)
|
||||
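Any base64-encoded random blob works as `id_secret`. One way to generate a value (the 32-byte length is a reasonable choice, not a requirement stated by this document):

```python
import secrets
from base64 import b64encode

# Paste the output into core.yaml as id_secret (the option described above).
print(b64encode(secrets.token_bytes(32)).decode())
```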
## Chimedb
|
||||
- `enable`: Whether or not chimedb should run. Default `False`
|
||||
- `loglevel`: Logging level for the chimedb server. Default `info`
|
||||
- `key`: Key to hash chimedb requests and responses. MUST be set or the server will not start. If set incorrectly, your server will not properly handle chimedb requests. Default `""`
|
||||
|
||||
|
||||
@@ -26,11 +26,14 @@ python dbutils.py migrate
|
||||
- [CHUNITHM](#chunithm)
|
||||
- [crossbeats REV.](#crossbeats-rev)
|
||||
- [maimai DX](#maimai-dx)
|
||||
- [Project Diva](#hatsune-miku-project-diva)
|
||||
- [O.N.G.E.K.I.](#o-n-g-e-k-i)
|
||||
- [Card Maker](#card-maker)
|
||||
- [WACCA](#wacca)
|
||||
- [Sword Art Online Arcade](#sao)
|
||||
- [Initial D Zero](#initial-d-zero)
|
||||
- [Initial D THE ARCADE](#initial-d-the-arcade)
|
||||
- [Pokken Tournament](#pokken)
|
||||
|
||||
|
||||
# Supported Games
|
||||
@@ -57,13 +60,14 @@ Games listed below have been tested and confirmed working.
|
||||
|
||||
### SDHD/SDBT
|
||||
|
||||
| Version ID | Version Name |
|
||||
| ---------- | ------------------- |
|
||||
| 11 | CHUNITHM NEW!! |
|
||||
| 12 | CHUNITHM NEW PLUS!! |
|
||||
| 13 | CHUNITHM SUN |
|
||||
| 14 | CHUNITHM SUN PLUS |
|
||||
| 15 | CHUNITHM LUMINOUS |
|
||||
| Version ID | Version Name |
|
||||
| ---------- | ---------------------- |
|
||||
| 11 | CHUNITHM NEW!! |
|
||||
| 12 | CHUNITHM NEW PLUS!! |
|
||||
| 13 | CHUNITHM SUN |
|
||||
| 14 | CHUNITHM SUN PLUS |
|
||||
| 15 | CHUNITHM LUMINOUS |
|
||||
| 16 | CHUNITHM LUMINOUS PLUS |
|
||||
|
||||
|
||||
### Importer
|
||||
@@ -74,18 +78,21 @@ In order to use the importer locate your game installation folder and execute:
|
||||
python read.py --game SDBT --version <version ID> --binfolder /path/to/game/folder --optfolder /path/to/game/option/folder
|
||||
```
|
||||
|
||||
The importer for Chunithm will import: Events, Music, Charge Items and Avatar Accesories.
|
||||
The importer for Chunithm will import: Events, Music, Charge Items, Avatar Accessories, Nameplates, Characters, Trophies, Map Icons, and System Voices.
|
||||
|
||||
### Config
|
||||
|
||||
Config file is located in `config/chuni.yaml`.
|
||||
|
||||
| Option | Info |
|
||||
|------------------|----------------------------------------------------------------------------------------------------------------|
|
||||
| `news_msg` | If this is set, the news at the top of the main screen will be displayed (up to Chunithm Paradise Lost) |
|
||||
| `name` | If this is set, all players that are not on a team will use this one by default. |
|
||||
| `use_login_bonus`| This is used to enable the login bonuses |
|
||||
| `crypto` | This option is used to enable the TLS Encryption |
|
||||
| Option | Info |
|
||||
|-----------------------|-------------------------------------------------------------------------------------------------------------------------------------------|
|
||||
| `news_msg` | If this is set, the news at the top of the main screen will be displayed (up to Chunithm Paradise Lost) |
|
||||
| `name` | If this is set, all players that are not on a team will use this one by default. |
|
||||
| `use_login_bonus` | This is used to enable the login bonuses |
|
||||
| `stock_tickets` | If this is set, specifies tickets to auto-stock at login. Format is a comma-delimited list of IDs. Defaults to None |
|
||||
| `stock_count` | Ignored if stock_tickets is not specified. Number to stock of each ticket. Defaults to 99 |
|
||||
| `forced_item_unlocks` | Frontend UI customization overrides that allow all items of given types to be used (instead of just those unlocked/purchased by the user) |
|
||||
| `crypto` | This option is used to enable the TLS Encryption |
|
||||
|
||||
|
||||
If you would like to use network encryption, add the keys to the `keys` section under `crypto`, where the key
|
||||
@@ -101,6 +108,7 @@ crypto:
|
||||
keys:
|
||||
13: ["0000000000000000000000000000000000000000000000000000000000000000", "00000000000000000000000000000000", "0000000000000000"]
|
||||
"13_int": ["0000000000000000000000000000000000000000000000000000000000000000", "00000000000000000000000000000000", "0000000000000000", 42]
|
||||
"13_chn": ["0000000000000000000000000000000000000000000000000000000000000000", "00000000000000000000000000000000", "0000000000000000", 8]
|
||||
```
|
||||
|
||||
### Database upgrade
|
||||
@@ -148,12 +156,15 @@ INSERT INTO aime.chuni_profile_team (teamName) VALUES (<teamName>);
|
||||
Team names can be regular ASCII, and they will be displayed ingame.
|
||||
|
||||
### Favorite songs
|
||||
You can set the songs that will be in a user's Favorite Songs category using the following SQL entries:
|
||||
Favorites can be set through the Frontend Web UI for songs previously played. Alternatively, you can set the songs that will be in a user's Favorite Songs category using the following SQL entries:
|
||||
```sql
|
||||
INSERT INTO aime.chuni_item_favorite (user, version, favId, favKind) VALUES (<user>, <version>, <songId>, 1);
|
||||
```
|
||||
The songId is based on the actual ID within your version of Chunithm.
|
||||
|
||||
### Profile Customization
|
||||
The Frontend Web UI supports configuration of the userbox, avatar (NEW!! and newer), map icon (AMAZON and newer), and system voice (AMAZON and newer).
|
||||
|
||||
|
||||
## crossbeats REV.
|
||||
|
||||
@@ -185,15 +196,15 @@ Config file is located in `config/cxb.yaml`.
|
||||
### Presents
|
||||
Presents are items given to the user when they log in, with a little animation (for example, the KOP song was given to the finalists as a present). To add a present, you must insert it into the `mai2_item_present` table. In that table, a NULL version means any version, a NULL user means any user, a NULL start date means always open, and a NULL end date means it never expires. Below is a list of presents one might wish to add:
|
||||
|
||||
| Game Version | Item ID | Item Kind | Item Description | Present Description |
|
||||
|--------------|---------|-----------|-------------------------------------------------|------------------------------------------------|
|
||||
| BUDDiES (21) | 409505 | Icon (3) | 旅行スタンプ(月面基地) (Travel Stamp - Moon Base) | Officially obtained on the webui with a serial |
|
||||
| | | | | number, for project raputa |
|
||||
| Game Version | Item ID | Item Kind | Item Description | Present Description |
|
||||
|--------------|---------|----------------------|--------------------------------------------|----------------------------------------------------------------------------|
|
||||
| BUDDiES (21) | 409505 | Icon (3) | 旅行スタンプ(月面基地) (Travel Stamp - Moon Base) | Officially obtained on the webui with a serial number, for project raputa |
|
||||
| PRiSM (23) | 3 | KaleidxScopeKey (15) | 紫の鍵 (Purple Key) | Officially obtained on the webui with a serial number, for KaleidxScope |
|
||||
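As a worked (and deliberately hedged) example, the sketch below grants the BUDDiES travel stamp from the table above to every user on every version. The column names beyond those described in the paragraph above are assumptions, as are the connection details - verify them against the actual `mai2_item_present` schema before running anything like this:

```python
# Hypothetical sketch: add a present row for the Travel Stamp icon (itemKind 3, itemId 409505)
# for any user/version, never expiring. Column names are assumptions - check your schema.
import MySQLdb

conn = MySQLdb.connect(host="localhost", user="aime", passwd="aime", db="aime")
try:
    with conn.cursor() as cur:
        cur.execute(
            "INSERT INTO mai2_item_present "
            "(version, user, itemKind, itemId, startDate, endDate) "
            "VALUES (NULL, NULL, %s, %s, NULL, NULL)",
            (3, 409505),
        )
    conn.commit()
finally:
    conn.close()
```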
|
||||
### Versions
|
||||
|
||||
| Game Code | Version ID | Version Name |
|
||||
| --------- | ---------- | ----------------------- |
|
||||
|-----------|------------|-------------------------|
|
||||
| SBXL | 0 | maimai |
|
||||
| SBXL | 1 | maimai PLUS |
|
||||
| SBZF | 2 | maimai GreeN |
|
||||
@@ -216,6 +227,9 @@ Presents are items given to the user when they login, with a little animation (f
|
||||
| SDEZ | 19 | maimai DX FESTiVAL |
|
||||
| SDEZ | 20 | maimai DX FESTiVAL PLUS |
|
||||
| SDEZ | 21 | maimai DX BUDDiES |
|
||||
| SDEZ | 22 | maimai DX BUDDiES PLUS |
|
||||
| SDEZ | 23 | maimai DX PRiSM |
|
||||
|
||||
|
||||
### Importer
|
||||
|
||||
@@ -290,6 +304,23 @@ Always make sure your database (tables) are up-to-date:
|
||||
python dbutils.py upgrade
|
||||
```
|
||||
|
||||
### Using NGINX
|
||||
|
||||
Diva's netcode does not send a `Host` header with its network requests. This makes it incompatible with NGINX as configured in the example config, because NGINX relies on that header to determine how to proxy the request. If you'd still like to use NGINX with Diva, see the sample config below (a quick way to test it follows the config).
|
||||
|
||||
```conf
|
||||
server {
|
||||
listen 80 default_server;
|
||||
server_name _;
|
||||
|
||||
location / {
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_pass_request_headers on;
|
||||
proxy_pass http://127.0.0.1:8080/;
|
||||
}
|
||||
}
|
||||
```
|
||||
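To check that the config above really accepts a Host-less request (the way Diva's netcode sends them), a raw HTTP/1.0 probe is enough; the address and path are placeholders, and you should get an answer from the title server rather than an NGINX name-matching error:

```python
# Send a Host-less HTTP/1.0 request, mimicking Diva's netcode, and print whatever comes back.
# Address and path are placeholders for your proxy.
import socket

raw_request = b"GET / HTTP/1.0\r\n\r\n"   # note: deliberately no Host header

with socket.create_connection(("127.0.0.1", 80), timeout=5) as sock:
    sock.sendall(raw_request)
    print(sock.recv(4096).decode(errors="replace"))
```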
|
||||
## O.N.G.E.K.I.
|
||||
|
||||
### SDDT
|
||||
@@ -435,7 +466,10 @@ After that, on next login the present should be received (or whenever it suppose
|
||||
* FESTiVAL: Yes (added in A031)
|
||||
* FESTiVAL PLUS: Yes (added in A035)
|
||||
* BUDDiES: Yes (added in A039)
|
||||
* O.N.G.E.K.I. bright MEMORY: Yes
|
||||
* BUDDiES PLUS: Yes (added in A047)
|
||||
* O.N.G.E.K.I.:
|
||||
* bright MEMORY: Yes
|
||||
* bright MEMORY Act.3: Yes (added in A046)
|
||||
|
||||
|
||||
### Importer
|
||||
@@ -648,21 +682,32 @@ python dbutils.py upgrade
|
||||
```
|
||||
|
||||
### Notes
|
||||
- Defrag Match will crash at loading
|
||||
- Co-Op Online is not supported
|
||||
- Shop is displayed, but heroes and items cannot be purchased
|
||||
- Defrag Match and online co-op require a cloud instance of Photon and a working application ID
|
||||
- Player title is currently static and cannot be changed in-game
|
||||
- QR Card Scanning currently only loads a static hero
|
||||
- Ex-quest progression is not supported yet
|
||||
- QR Card Scanning of existing cards requires them to be registered on the webui
|
||||
- Daily Missions are not implemented
|
||||
- EX TOWER 1,2 & 3 are not yet supported
|
||||
- Daily Yui coin not yet fixed
|
||||
- Terminal functionality is almost entirely untested
|
||||
|
||||
### Credits for SAO support:
|
||||
|
||||
- Midorica - Network Support
|
||||
- Dniel97 - Helping with network base
|
||||
- tungnotpunk - Source
|
||||
- Hay1tsme - fixing many issues with the original implementation
|
||||
|
||||
## Initial D Zero
|
||||
### SDDF
|
||||
|
||||
| Version ID | Version Name |
|
||||
| ---------- | -------------------- |
|
||||
| 0 | Initial D Zero v1.10 |
|
||||
| 1 | Initial D Zero v1.30 |
|
||||
| 2 | Initial D Zero v2.10 |
|
||||
| 3 | Initial D Zero v2.30 |
|
||||
|
||||
### Info
|
||||
|
||||
TODO, probably just leave disabled unless you're doing development things for it.
|
||||
|
||||
## Initial D THE ARCADE
|
||||
|
||||
@@ -794,3 +839,82 @@ python dbutils.py upgrade
|
||||
|
||||
A huge thanks to all people who helped shaping this project to what it is now and don't want to be mentioned here.
|
||||
|
||||
## Pokken
|
||||
|
||||
### SDAK
|
||||
|
||||
| Version ID | Version Name |
|
||||
| ---------- | ------------ |
|
||||
| 0 | Pokken |
|
||||
|
||||
### Config
|
||||
|
||||
Config file is `pokken.yaml`
|
||||
|
||||
#### server
|
||||
|
||||
| Option | Info | Default |
|
||||
| ------ | ---- | ------- |
|
||||
| `hostname` | Hostname override for allnet to tell the game where to connect. Useful for local setups that need to use a different hostname for pokken's proxy. Otherwise, it should match `server`->`hostname` in `core.yaml`. | `localhost` |
|
||||
| `enable` | `True` if the pokken service should be enabled. `False` otherwise. | `True` |
|
||||
| `loglevel` | String indicating how verbose pokken logs should be. Acceptable values are `debug`, `info`, `warn`, and `error`. | `info` |
|
||||
| `auto_register` | For games that don't use aimedb, this controls whether connecting cards that aren't registered should automatically be registered when making a profile. Set to `False` to require cards to be registered before they can be used with Pokken. | `True` |
|
||||
| `enable_matching` | If `True`, allow non-local matching. This doesn't currently work because BIWA, the matching protocol the game uses, is not understood, so this should be set to `False`. | `False` |
|
||||
| `stun_server_host` | Hostname of the STUN server the game will use for matching. | `stunserver.stunprotocol.org` (may no longer work; recommend changing) |
|
||||
| `stun_server_port` | Port for the external STUN server. Will probably be moved to the `ports` section in the future. | `3478` |
|
||||
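Since the default `stun_server_host` may be dead, a quick reachability check can save some head-scratching. This sketch sends a bare STUN Binding Request to the host/port from the table above (swap in your own values):

```python
# Rough reachability check for the configured STUN server.
# Sends a STUN Binding Request over UDP and just checks that *something* comes back.
import os
import socket

HOST, PORT = "stunserver.stunprotocol.org", 3478

# 20-byte STUN header: Binding Request (0x0001), length 0, magic cookie, random transaction id
request = b"\x00\x01\x00\x00" + b"\x21\x12\xa4\x42" + os.urandom(12)

with socket.socket(socket.AF_INET, socket.SOCK_DGRAM) as sock:
    sock.settimeout(3)
    sock.sendto(request, (HOST, PORT))
    try:
        data, _ = sock.recvfrom(2048)
        print("STUN server responded:", data[:4].hex())
    except socket.timeout:
        print("No response - consider pointing stun_server_host elsewhere")
```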
|
||||
#### ports
|
||||
| Option | Info | Default |
|
||||
| ------ | ---- | ------- |
|
||||
| `game` | Override for the title server port sent by allnet. Useful for local setups utilizing NGINX. | `9000` |
|
||||
| `admission` | Port for the admission server used in global matching. May be obsoleted later. | `9001` |
|
||||
|
||||
### Connecting to Artemis
|
||||
|
||||
Pokken is a bit tricky to get working because it has a hard requirement that the connection be HTTPS. This is simplified somewhat by Pokken not validating the certificate in any way, shape or form (it can be self-signed, expired, for a different domain, etc.), but it does have to be there. The workaround is to spin up a local NGINX (or other proxy) instance and point traffic back to Artemis. See below for a sample NGINX config, followed by a sketch for generating a throwaway certificate:
|
||||
`nginx.conf`
|
||||
```conf
|
||||
# This example assumes your artemis instance is configured to listen on port 8080, and your certs exist at /path/to/cert and are called title.crt and title.key.
|
||||
server {
|
||||
listen 443 ssl;
|
||||
server_name your.hostname.here;
|
||||
|
||||
ssl_certificate /path/to/cert/title.crt;
|
||||
ssl_certificate_key /path/to/cert/title.key;
|
||||
ssl_session_timeout 1d;
|
||||
ssl_session_cache shared:MozSSL:10m;
|
||||
ssl_session_tickets off;
|
||||
|
||||
ssl_protocols TLSv1 TLSv1.1 TLSv1.2 TLSv1.3;
|
||||
ssl_ciphers "ALL:@SECLEVEL=0";
|
||||
ssl_prefer_server_ciphers off;
|
||||
|
||||
location / {
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_pass_request_headers on;
|
||||
proxy_pass http://127.0.0.1:8080/;
|
||||
}
|
||||
}
|
||||
```
|
||||
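The `ssl_certificate` paths above can point at a throwaway self-signed pair, since the game accepts anything. A minimal sketch for generating one, assuming `openssl` is on your PATH; the hostname and output paths are the same placeholders as in the config above:

```python
# Generate a throwaway self-signed certificate for the Pokken title proxy.
# Hostname and output paths are placeholders - match them to your nginx config.
import subprocess

HOSTNAME = "your.hostname.here"
subprocess.run(
    [
        "openssl", "req", "-x509", "-newkey", "rsa:2048",
        "-keyout", "/path/to/cert/title.key",
        "-out", "/path/to/cert/title.crt",
        "-days", "3650", "-nodes",
        "-subj", f"/CN={HOSTNAME}",
    ],
    check=True,
)
```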
`pokken.yaml`
|
||||
```yaml
|
||||
server:
|
||||
hostname: "your.hostname.here"
|
||||
enable: True
|
||||
loglevel: "info"
|
||||
auto_register: True
|
||||
enable_matching: False
|
||||
stun_server_host: "stunserver.stunprotocol.org"
|
||||
stun_server_port: 3478
|
||||
|
||||
ports:
|
||||
game: 443
|
||||
admission: 9001
|
||||
```
|
||||
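Once NGINX and `pokken.yaml` are in place, a quick end-to-end check of the 443 -> 8080 path might look like the sketch below. Certificate verification is disabled on purpose because the cert is self-signed; the hostname is the placeholder from above:

```python
# Probe the HTTPS proxy the way the game would (ignoring the certificate entirely).
# Hostname is a placeholder - use the one from your nginx/pokken config.
import ssl
import urllib.error
import urllib.request

ctx = ssl.create_default_context()
ctx.check_hostname = False
ctx.verify_mode = ssl.CERT_NONE

try:
    with urllib.request.urlopen("https://your.hostname.here/", context=ctx, timeout=5) as resp:
        print("Proxy reachable, status", resp.status)
except urllib.error.HTTPError as e:
    print("Proxy reachable, title server answered with", e.code)
```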
|
||||
### Info
|
||||
|
||||
The arcade release is missing a few fighters and supports compared to the Switch version. It may be possible to mod these in eventually, but as far as I know not much headway has been made on that. Mercifully, the game uses the pokedex number (illustration_book_no) wherever possible when referring to both fighters and supports. Customization is entirely done on the webui. Artemis currently only supports changing your name, gender, and support teams, but more is planned for the future.
|
||||
|
||||
### Credits
|
||||
Special thanks to Pocky for pointing me in the right direction in terms of getting this game to function at all, and Lightning and other pokken cab owners for doing testing and reporting bugs/issues.
|
||||
|
||||
@@ -8,6 +8,6 @@ version:
|
||||
chuni: 2.00.00
|
||||
maimai: 1.20.00
|
||||
1:
|
||||
ongeki: 1.35.03
|
||||
chuni: 2.10.00
|
||||
maimai: 1.30.00
|
||||
ongeki: 1.45.01
|
||||
chuni: 2.25.00
|
||||
maimai: 1.45.00
|
||||
|
||||
@@ -8,7 +8,22 @@ team:
|
||||
|
||||
mods:
|
||||
use_login_bonus: True
|
||||
# stock_tickets allows specified ticket IDs to be auto-stocked at login. Format is a comma-delimited string of ticket IDs
|
||||
# note: quantity is not refreshed on "continue" after set - only on subsequent login
|
||||
stock_tickets:
|
||||
stock_count: 99
|
||||
|
||||
# Allow use of all available customization items in frontend web ui
|
||||
# note: This effectively makes every available item appear to be in the user's inventory. It does _not_ override the "disableFlag" setting on individual items
|
||||
# warning: This can result in pushing a lot of data, especially the userbox items. Recommended for local network use only.
|
||||
forced_item_unlocks:
|
||||
map_icons: False
|
||||
system_voices: False
|
||||
avatar_accessories: False
|
||||
nameplates: False
|
||||
trophies: False
|
||||
character_icons: False
|
||||
|
||||
version:
|
||||
11:
|
||||
rom: 2.00.00
|
||||
@@ -25,6 +40,9 @@ version:
|
||||
15:
|
||||
rom: 2.20.00
|
||||
data: 2.20.00
|
||||
16:
|
||||
rom: 2.25.00
|
||||
data: 2.25.00
|
||||
|
||||
crypto:
|
||||
encrypted_only: False
|
||||
|
||||
@@ -27,6 +27,7 @@ database:
|
||||
name: "aime"
|
||||
port: 3306
|
||||
protocol: "mysql"
|
||||
ssl_enabled: False
|
||||
sha2_password: False
|
||||
loglevel: "info"
|
||||
enable_memcached: True
|
||||
@@ -44,6 +45,8 @@ allnet:
|
||||
loglevel: "info"
|
||||
allow_online_updates: False
|
||||
update_cfg_folder: ""
|
||||
save_billing: True
|
||||
allnet_lite_key: []
|
||||
|
||||
billing:
|
||||
standalone: True
|
||||
@@ -62,5 +65,10 @@ aimedb:
|
||||
id_secret: ""
|
||||
id_lifetime_seconds: 86400
|
||||
|
||||
chimedb:
|
||||
enable: False
|
||||
loglevel: "info"
|
||||
key: ""
|
||||
|
||||
mucha:
|
||||
loglevel: "info"
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
server:
|
||||
enable: True
|
||||
enable: False
|
||||
loglevel: "info"
|
||||
hostname: ""
|
||||
news: ""
|
||||
|
||||
@@ -7,12 +7,12 @@ gachas:
|
||||
enabled_gachas:
|
||||
- 1011
|
||||
- 1012
|
||||
- 1043
|
||||
- 1067
|
||||
- 1068
|
||||
- 1069
|
||||
- 1070
|
||||
- 1071
|
||||
# - 1043
|
||||
# - 1067
|
||||
# - 1068
|
||||
# - 1069
|
||||
# - 1070
|
||||
# - 1071
|
||||
- 1072
|
||||
- 1073
|
||||
- 1074
|
||||
@@ -30,12 +30,22 @@ gachas:
|
||||
- 1156
|
||||
- 1163
|
||||
- 1164
|
||||
# 5th anniversary gacha
|
||||
- 1165
|
||||
# 2024 gacha
|
||||
- 1166
|
||||
# 6th anniversary gacha
|
||||
- 1167
|
||||
# 2025 gacha
|
||||
- 1168
|
||||
|
||||
version:
|
||||
6:
|
||||
card_maker: 1.30.01
|
||||
7:
|
||||
card_maker: 1.35.03
|
||||
8:
|
||||
card_maker: 1.45.01
|
||||
|
||||
crypto:
|
||||
encrypted_only: False
|
||||
index.py
@@ -6,7 +6,8 @@ import uvicorn
|
||||
import logging
|
||||
import asyncio
|
||||
|
||||
from core import CoreConfig, AimedbServlette
|
||||
from core.config import CoreConfig
|
||||
from core.aimedb import AimedbServlette
|
||||
|
||||
async def launch_main(cfg: CoreConfig, ssl: bool) -> None:
|
||||
if ssl:
|
||||
|
||||
read.py
@@ -1,16 +1,16 @@
|
||||
#!/usr/bin/env python3
|
||||
import argparse
|
||||
import re
|
||||
import os
|
||||
import yaml
|
||||
from os import path
|
||||
import logging
|
||||
import coloredlogs
|
||||
import asyncio
|
||||
|
||||
import logging
|
||||
import os
|
||||
import re
|
||||
from logging.handlers import TimedRotatingFileHandler
|
||||
from os import path
|
||||
from typing import List, Optional
|
||||
|
||||
import coloredlogs
|
||||
import yaml
|
||||
|
||||
from core import CoreConfig, Utils
|
||||
|
||||
|
||||
@@ -44,7 +44,7 @@ class BaseReader:
|
||||
pass
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
async def main():
|
||||
parser = argparse.ArgumentParser(description="Import Game Information")
|
||||
parser.add_argument(
|
||||
"--game",
|
||||
@@ -140,8 +140,12 @@ if __name__ == "__main__":
|
||||
for dir, mod in titles.items():
|
||||
if args.game in mod.game_codes:
|
||||
handler = mod.reader(config, args.version, bin_arg, opt_arg, args.extra)
|
||||
loop = asyncio.get_event_loop()
|
||||
loop.run_until_complete(handler.read())
|
||||
|
||||
await handler.read()
|
||||
|
||||
|
||||
logger.info("Done")
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
asyncio.run(main())
|
||||
|
||||
readme.md
@@ -8,6 +8,11 @@ Games listed below have been tested and confirmed working. Only game versions ol
|
||||
+ 1.30
|
||||
+ 1.35
|
||||
|
||||
+ CHUNITHM CHINA
|
||||
+ NEW
|
||||
+ 2024 (NEW)
|
||||
+ 2024 (LUMINOUS)
|
||||
|
||||
+ CHUNITHM INTL
|
||||
+ SUPERSTAR
|
||||
+ SUPERSTAR PLUS
|
||||
@@ -15,6 +20,8 @@ Games listed below have been tested and confirmed working. Only game versions ol
|
||||
+ NEW PLUS
|
||||
+ SUN
|
||||
+ SUN PLUS
|
||||
+ LUMINOUS
|
||||
+ LUMINOUS PLUS
|
||||
|
||||
+ CHUNITHM JP
|
||||
+ AIR
|
||||
@@ -30,6 +37,7 @@ Games listed below have been tested and confirmed working. Only game versions ol
|
||||
+ SUN
|
||||
+ SUN PLUS
|
||||
+ LUMINOUS
|
||||
+ LUMINOUS PLUS
|
||||
|
||||
+ crossbeats REV.
|
||||
+ Crossbeats REV.
|
||||
@@ -42,7 +50,16 @@ Games listed below have been tested and confirmed working. Only game versions ol
|
||||
+ Initial D THE ARCADE
|
||||
+ Season 2
|
||||
|
||||
+ maimai DX
|
||||
+ maimai DX CHINA
|
||||
+ DX (Muji)
|
||||
+ 2021 (Muji)
|
||||
+ 2022 (Muji)
|
||||
+ 2023 (FESTiVAL)
|
||||
+ 2024 (BUDDiES)
|
||||
|
||||
+ maimai DX INTL
|
||||
+ DX
|
||||
+ DX Plus
|
||||
+ Splash
|
||||
+ Splash Plus
|
||||
+ UNiVERSE
|
||||
@@ -50,6 +67,21 @@ Games listed below have been tested and confirmed working. Only game versions ol
|
||||
+ FESTiVAL
|
||||
+ FESTiVAL PLUS
|
||||
+ BUDDiES
|
||||
+ BUDDiES PLUS
|
||||
+ PRiSM
|
||||
|
||||
+ maimai DX
|
||||
+ DX
|
||||
+ DX Plus
|
||||
+ Splash
|
||||
+ Splash Plus
|
||||
+ UNiVERSE
|
||||
+ UNiVERSE PLUS
|
||||
+ FESTiVAL
|
||||
+ FESTiVAL PLUS
|
||||
+ BUDDiES
|
||||
+ BUDDiES PLUS
|
||||
+ PRiSM
|
||||
|
||||
+ O.N.G.E.K.I.
|
||||
+ SUMMER
|
||||
@@ -58,6 +90,7 @@ Games listed below have been tested and confirmed working. Only game versions ol
|
||||
+ R.E.D. PLUS
|
||||
+ bright
|
||||
+ bright MEMORY
|
||||
+ bright MEMORY Act.3
|
||||
|
||||
+ POKKÉN TOURNAMENT
|
||||
+ Final Online
|
||||
@@ -83,3 +116,6 @@ Read [Games specific info](docs/game_specific_info.md) for all supported games,
|
||||
|
||||
## Production guide
|
||||
See the [production guide](docs/prod.md) for running a production server.
|
||||
|
||||
## Text User Interface
|
||||
Invoke `tui.py` (with optional `-c <command dir>` parameter) for an interactive TUI to perform management actions (add, edit or delete users, cards, arcades and machines) without needing to spin up the frontend. Requires asciimatics, installed via `pip install asciimatics`.
|
||||
|
||||
@@ -3,7 +3,7 @@ wheel
|
||||
pytz
|
||||
pyyaml
|
||||
sqlalchemy==1.4.46
|
||||
mysqlclient
|
||||
aiomysql
|
||||
pyopenssl
|
||||
service_identity
|
||||
PyCryptodome
|
||||
@@ -21,4 +21,4 @@ starlette
|
||||
asyncio
|
||||
uvicorn
|
||||
alembic
|
||||
python-multipart
|
||||
python-multipart
|
||||
|
||||
@@ -8,4 +8,4 @@ index = ChuniServlet
|
||||
database = ChuniData
|
||||
reader = ChuniReader
|
||||
frontend = ChuniFrontend
|
||||
game_codes = [ChuniConstants.GAME_CODE, ChuniConstants.GAME_CODE_NEW, ChuniConstants.GAME_CODE_INT]
|
||||
game_codes = [ChuniConstants.GAME_CODE, ChuniConstants.GAME_CODE_NEW, ChuniConstants.GAME_CODE_INT, ChuniConstants.GAME_CODE_CHN]
|
||||
|
||||
@@ -1,16 +1,16 @@
|
||||
import logging
|
||||
import itertools
|
||||
import json
|
||||
import logging
|
||||
from datetime import datetime, timedelta
|
||||
from time import strftime
|
||||
from typing import Any, Dict, List
|
||||
|
||||
import pytz
|
||||
from typing import Dict, Any, List
|
||||
|
||||
from core.config import CoreConfig
|
||||
from titles.chuni.const import ChuniConstants
|
||||
from titles.chuni.database import ChuniData
|
||||
from titles.chuni.config import ChuniConfig
|
||||
SCORE_BUFFER = {}
|
||||
from titles.chuni.const import ChuniConstants, FavoriteItemKind, ItemKind
|
||||
from titles.chuni.database import ChuniData
|
||||
|
||||
|
||||
class ChuniBase:
|
||||
def __init__(self, core_cfg: CoreConfig, game_cfg: ChuniConfig) -> None:
|
||||
@@ -24,20 +24,35 @@ class ChuniBase:
|
||||
|
||||
async def handle_game_login_api_request(self, data: Dict) -> Dict:
|
||||
"""
|
||||
Handles the login bonus logic, required for the game because
|
||||
getUserLoginBonus gets called after getUserItem and therefore the
|
||||
Handles the login bonus and ticket stock logic, required for the game
|
||||
because getUserLoginBonus gets called after getUserItem; therefore the
|
||||
items needs to be inserted in the database before they get requested.
|
||||
|
||||
Adds a bonusCount after a user logged in after 24 hours, makes sure
|
||||
loginBonus 30 gets looped, only show the login banner every 24 hours,
|
||||
adds the bonus to items (itemKind 6)
|
||||
- Adds a stock for each specified ticket (itemKind 5)
|
||||
- Adds a bonusCount after a user logged in after 24 hours, makes sure
|
||||
loginBonus 30 gets looped, only show the login banner every 24 hours,
|
||||
adds the bonus to items (itemKind 6)
|
||||
"""
|
||||
|
||||
user_id = data["userId"]
|
||||
|
||||
# If we want to make certain tickets always available, stock them now
|
||||
if self.game_cfg.mods.stock_tickets:
|
||||
for ticket in self.game_cfg.mods.stock_tickets.split(","):
|
||||
await self.data.item.put_item(
|
||||
user_id,
|
||||
{
|
||||
"itemId": ticket.strip(),
|
||||
"itemKind": ItemKind.TICKET.value,
|
||||
"stock": self.game_cfg.mods.stock_count,
|
||||
"isValid": True,
|
||||
},
|
||||
)
|
||||
|
||||
# ignore the login bonus if disabled in config
|
||||
if not self.game_cfg.mods.use_login_bonus:
|
||||
return {"returnCode": 1}
|
||||
|
||||
user_id = data["userId"]
|
||||
login_bonus_presets = await self.data.static.get_login_bonus_presets(self.version)
|
||||
|
||||
for preset in login_bonus_presets:
|
||||
@@ -101,7 +116,7 @@ class ChuniBase:
|
||||
user_id,
|
||||
{
|
||||
"itemId": login_item["presentId"],
|
||||
"itemKind": 6,
|
||||
"itemKind": ItemKind.PRESENT.value,
|
||||
"stock": login_item["itemNum"],
|
||||
"isValid": True,
|
||||
},
|
||||
@@ -262,35 +277,39 @@ class ChuniBase:
|
||||
}
|
||||
|
||||
async def handle_get_user_character_api_request(self, data: Dict) -> Dict:
|
||||
characters = await self.data.item.get_characters(data["userId"])
|
||||
if characters is None:
|
||||
user_id = int(data["userId"])
|
||||
next_idx = int(data["nextIndex"])
|
||||
max_ct = int(data["maxCount"])
|
||||
|
||||
# add one to the limit so we know if there's a next page of items
|
||||
rows = await self.data.item.get_characters(
|
||||
user_id, limit=max_ct + 1, offset=next_idx
|
||||
)
|
||||
|
||||
if rows is None or len(rows) == 0:
|
||||
return {
|
||||
"userId": data["userId"],
|
||||
"userId": user_id,
|
||||
"length": 0,
|
||||
"nextIndex": -1,
|
||||
"userCharacterList": [],
|
||||
}
|
||||
|
||||
character_list = []
|
||||
next_idx = int(data["nextIndex"])
|
||||
max_ct = int(data["maxCount"])
|
||||
|
||||
for x in range(next_idx, len(characters)):
|
||||
tmp = characters[x]._asdict()
|
||||
tmp.pop("user")
|
||||
for row in rows[:max_ct]:
|
||||
tmp = row._asdict()
|
||||
tmp.pop("id")
|
||||
tmp.pop("user")
|
||||
|
||||
character_list.append(tmp)
|
||||
|
||||
if len(character_list) >= max_ct:
|
||||
break
|
||||
|
||||
if len(characters) >= next_idx + max_ct:
|
||||
if len(rows) > max_ct:
|
||||
next_idx += max_ct
|
||||
else:
|
||||
next_idx = -1
|
||||
|
||||
return {
|
||||
"userId": data["userId"],
|
||||
"userId": user_id,
|
||||
"length": len(character_list),
|
||||
"nextIndex": next_idx,
|
||||
"userCharacterList": character_list,
|
||||
@@ -320,29 +339,31 @@ class ChuniBase:
|
||||
}
|
||||
|
||||
async def handle_get_user_course_api_request(self, data: Dict) -> Dict:
|
||||
user_course_list = await self.data.score.get_courses(data["userId"])
|
||||
if user_course_list is None:
|
||||
user_id = int(data["userId"])
|
||||
next_idx = int(data["nextIndex"])
|
||||
max_ct = int(data["maxCount"])
|
||||
|
||||
rows = await self.data.score.get_courses(
|
||||
user_id, limit=max_ct + 1, offset=next_idx
|
||||
)
|
||||
|
||||
if rows is None or len(rows) == 0:
|
||||
return {
|
||||
"userId": data["userId"],
|
||||
"userId": user_id,
|
||||
"length": 0,
|
||||
"nextIndex": -1,
|
||||
"userCourseList": [],
|
||||
}
|
||||
|
||||
course_list = []
|
||||
next_idx = int(data.get("nextIndex", 0))
|
||||
max_ct = int(data.get("maxCount", 300))
|
||||
|
||||
for x in range(next_idx, len(user_course_list)):
|
||||
tmp = user_course_list[x]._asdict()
|
||||
for row in rows[:max_ct]:
|
||||
tmp = row._asdict()
|
||||
tmp.pop("user")
|
||||
tmp.pop("id")
|
||||
course_list.append(tmp)
|
||||
|
||||
if len(user_course_list) >= max_ct:
|
||||
break
|
||||
|
||||
if len(user_course_list) >= next_idx + max_ct:
|
||||
if len(rows) > max_ct:
|
||||
next_idx += max_ct
|
||||
else:
|
||||
next_idx = -1
|
||||
@@ -410,75 +431,94 @@ class ChuniBase:
|
||||
}
|
||||
|
||||
async def handle_get_user_rival_music_api_request(self, data: Dict) -> Dict:
|
||||
rival_id = data["rivalId"]
|
||||
next_index = int(data["nextIndex"])
|
||||
max_count = int(data["maxCount"])
|
||||
user_rival_music_list = []
|
||||
user_id = int(data["userId"])
|
||||
rival_id = int(data["rivalId"])
|
||||
next_idx = int(data["nextIndex"])
|
||||
max_ct = int(data["maxCount"])
|
||||
rival_levels = [int(x["level"]) for x in data["userRivalMusicLevelList"]]
|
||||
|
||||
# Fetch all the rival music entries for the user
|
||||
all_entries = await self.data.score.get_rival_music(rival_id)
|
||||
rows = await self.data.score.get_scores(
|
||||
rival_id,
|
||||
levels=rival_levels,
|
||||
limit=max_ct + 1,
|
||||
offset=next_idx,
|
||||
)
|
||||
|
||||
# Process the entries based on max_count and nextIndex
|
||||
for music in all_entries:
|
||||
music_id = music["musicId"]
|
||||
level = music["level"]
|
||||
score = music["scoreMax"]
|
||||
rank = music["scoreRank"]
|
||||
if rows is None or len(rows) == 0:
|
||||
return {
|
||||
"userId": user_id,
|
||||
"rivalId": rival_id,
|
||||
"nextIndex": -1,
|
||||
"userRivalMusicList": [],
|
||||
}
|
||||
|
||||
# Create a music entry for the current music_id if it's unique
|
||||
music_entry = next((entry for entry in user_rival_music_list if entry["musicId"] == music_id), None)
|
||||
if music_entry is None:
|
||||
music_entry = {
|
||||
"musicId": music_id,
|
||||
"length": 0,
|
||||
"userRivalMusicDetailList": []
|
||||
}
|
||||
user_rival_music_list.append(music_entry)
|
||||
music_details = [x._asdict() for x in rows]
|
||||
returned_music_details_count = 0
|
||||
music_list = []
|
||||
|
||||
# Create a level entry for the current level if it's unique or has a higher score
|
||||
level_entry = next((entry for entry in music_entry["userRivalMusicDetailList"] if entry["level"] == level), None)
|
||||
if level_entry is None:
|
||||
level_entry = {
|
||||
"level": level,
|
||||
"scoreMax": score,
|
||||
"scoreRank": rank
|
||||
}
|
||||
music_entry["userRivalMusicDetailList"].append(level_entry)
|
||||
elif score > level_entry["scoreMax"]:
|
||||
level_entry["scoreMax"] = score
|
||||
level_entry["scoreRank"] = rank
|
||||
# note that itertools.groupby will only work on sorted keys, which is already sorted by
|
||||
# the query in get_scores
|
||||
for music_id, details_iter in itertools.groupby(music_details, key=lambda x: x["musicId"]):
|
||||
details: list[dict[Any, Any]] = [
|
||||
{"level": d["level"], "scoreMax": d["scoreMax"]}
|
||||
for d in details_iter
|
||||
]
|
||||
|
||||
# Calculate the length for each "musicId" by counting the unique levels
|
||||
for music_entry in user_rival_music_list:
|
||||
music_entry["length"] = len(music_entry["userRivalMusicDetailList"])
|
||||
music_list.append({"musicId": music_id, "length": len(details), "userRivalMusicDetailList": details})
|
||||
returned_music_details_count += len(details)
|
||||
|
||||
# Prepare the result dictionary with user rival music data
|
||||
result = {
|
||||
"userId": data["userId"],
|
||||
"rivalId": data["rivalId"],
|
||||
"nextIndex": str(next_index + len(user_rival_music_list[next_index: next_index + max_count]) if max_count <= len(user_rival_music_list[next_index: next_index + max_count]) else -1),
|
||||
"userRivalMusicList": user_rival_music_list[next_index: next_index + max_count]
|
||||
if len(music_list) >= max_ct:
|
||||
break
|
||||
|
||||
# if we returned fewer PBs than we originally asked for from the database, that means
|
||||
# we queried for the PBs of max_ct + 1 songs.
|
||||
if returned_music_details_count < len(rows):
|
||||
next_idx += max_ct
|
||||
else:
|
||||
next_idx = -1
|
||||
|
||||
return {
|
||||
"userId": user_id,
|
||||
"rivalId": rival_id,
|
||||
"length": len(music_list),
|
||||
"nextIndex": next_idx,
|
||||
"userRivalMusicList": music_list,
|
||||
}
|
||||
return result
|
||||
|
||||
|
||||
async def handle_get_user_favorite_item_api_request(self, data: Dict) -> Dict:
|
||||
user_id = int(data["userId"])
|
||||
next_idx = int(data["nextIndex"])
|
||||
max_ct = int(data["maxCount"])
|
||||
kind = int(data["kind"])
|
||||
is_all_favorite_item = str(data["isAllFavoriteItem"]) == "true"
|
||||
|
||||
user_fav_item_list = []
|
||||
|
||||
# still needs to be implemented on WebUI
|
||||
# 1: Music, 2: User, 3: Character
|
||||
fav_list = await self.data.item.get_all_favorites(
|
||||
data["userId"], self.version, fav_kind=int(data["kind"])
|
||||
rows = await self.data.item.get_all_favorites(
|
||||
user_id,
|
||||
self.version,
|
||||
fav_kind=kind,
|
||||
limit=max_ct + 1,
|
||||
offset=next_idx,
|
||||
)
|
||||
if fav_list is not None:
|
||||
for fav in fav_list:
|
||||
|
||||
if rows is not None:
|
||||
for fav in rows[:max_ct]:
|
||||
user_fav_item_list.append({"id": fav["favId"]})
|
||||
|
||||
if rows is None or len(rows) <= max_ct:
|
||||
next_idx = -1
|
||||
else:
|
||||
next_idx += max_ct
|
||||
|
||||
return {
|
||||
"userId": data["userId"],
|
||||
"userId": user_id,
|
||||
"length": len(user_fav_item_list),
|
||||
"kind": data["kind"],
|
||||
"nextIndex": -1,
|
||||
"kind": kind,
|
||||
"nextIndex": next_idx,
|
||||
"userFavoriteItemList": user_fav_item_list,
|
||||
}
|
||||
|
||||
@@ -490,36 +530,39 @@ class ChuniBase:
|
||||
return {"userId": data["userId"], "length": 0, "userFavoriteMusicList": []}
|
||||
|
||||
async def handle_get_user_item_api_request(self, data: Dict) -> Dict:
|
||||
kind = int(int(data["nextIndex"]) / 10000000000)
|
||||
next_idx = int(int(data["nextIndex"]) % 10000000000)
|
||||
user_item_list = await self.data.item.get_items(data["userId"], kind)
|
||||
user_id = int(data["userId"])
|
||||
next_idx = int(data["nextIndex"])
|
||||
max_ct = int(data["maxCount"])
|
||||
|
||||
if user_item_list is None or len(user_item_list) == 0:
|
||||
kind = next_idx // 10000000000
|
||||
next_idx = next_idx % 10000000000
|
||||
rows = await self.data.item.get_items(
|
||||
user_id, kind, limit=max_ct + 1, offset=next_idx
|
||||
)
|
||||
|
||||
if rows is None or len(rows) == 0:
|
||||
return {
|
||||
"userId": data["userId"],
|
||||
"userId": user_id,
|
||||
"nextIndex": -1,
|
||||
"itemKind": kind,
|
||||
"userItemList": [],
|
||||
}
|
||||
|
||||
items: List[Dict[str, Any]] = []
|
||||
for i in range(next_idx, len(user_item_list)):
|
||||
tmp = user_item_list[i]._asdict()
|
||||
|
||||
for row in rows[:max_ct]:
|
||||
tmp = row._asdict()
|
||||
tmp.pop("user")
|
||||
tmp.pop("id")
|
||||
items.append(tmp)
|
||||
if len(items) >= int(data["maxCount"]):
|
||||
break
|
||||
|
||||
xout = kind * 10000000000 + next_idx + len(items)
|
||||
|
||||
if len(items) < int(data["maxCount"]):
|
||||
next_idx = 0
|
||||
if len(rows) > max_ct:
|
||||
next_idx = kind * 10000000000 + next_idx + max_ct
|
||||
else:
|
||||
next_idx = xout
|
||||
next_idx = -1
|
||||
|
||||
return {
|
||||
"userId": data["userId"],
|
||||
"userId": user_id,
|
||||
"nextIndex": next_idx,
|
||||
"itemKind": kind,
|
||||
"length": len(items),
|
||||
@@ -571,62 +614,55 @@ class ChuniBase:
|
||||
}
|
||||
|
||||
async def handle_get_user_music_api_request(self, data: Dict) -> Dict:
|
||||
music_detail = await self.data.score.get_scores(data["userId"])
|
||||
if music_detail is None:
|
||||
user_id = int(data["userId"])
|
||||
next_idx = int(data["nextIndex"])
|
||||
max_ct = int(data["maxCount"])
|
||||
|
||||
rows = await self.data.score.get_scores(
|
||||
user_id, limit=max_ct + 1, offset=next_idx
|
||||
)
|
||||
|
||||
if rows is None or len(rows) == 0:
|
||||
return {
|
||||
"userId": data["userId"],
|
||||
"userId": user_id,
|
||||
"length": 0,
|
||||
"nextIndex": -1,
|
||||
"userMusicList": [], # 240
|
||||
}
|
||||
|
||||
song_list = []
|
||||
next_idx = int(data["nextIndex"])
|
||||
max_ct = int(data["maxCount"])
|
||||
music_details = [x._asdict() for x in rows]
|
||||
returned_music_details_count = 0
|
||||
music_list = []
|
||||
|
||||
for x in range(next_idx, len(music_detail)):
|
||||
found = False
|
||||
tmp = music_detail[x]._asdict()
|
||||
tmp.pop("user")
|
||||
tmp.pop("id")
|
||||
# note that itertools.groupby will only work on sorted keys, which is already sorted by
|
||||
# the query in get_scores
|
||||
for _music_id, details_iter in itertools.groupby(music_details, key=lambda x: x["musicId"]):
|
||||
details: list[dict[Any, Any]] = []
|
||||
|
||||
for song in song_list:
|
||||
score_buf = SCORE_BUFFER.get(str(data["userId"])) or []
|
||||
if song["userMusicDetailList"][0]["musicId"] == tmp["musicId"]:
|
||||
found = True
|
||||
song["userMusicDetailList"].append(tmp)
|
||||
song["length"] = len(song["userMusicDetailList"])
|
||||
score_buf.append(tmp["musicId"])
|
||||
SCORE_BUFFER[str(data["userId"])] = score_buf
|
||||
for d in details_iter:
|
||||
d.pop("id")
|
||||
d.pop("user")
|
||||
|
||||
score_buf = SCORE_BUFFER.get(str(data["userId"])) or []
|
||||
if not found and tmp["musicId"] not in score_buf:
|
||||
song_list.append({"length": 1, "userMusicDetailList": [tmp]})
|
||||
score_buf.append(tmp["musicId"])
|
||||
SCORE_BUFFER[str(data["userId"])] = score_buf
|
||||
details.append(d)
|
||||
|
||||
if len(song_list) >= max_ct:
|
||||
music_list.append({"length": len(details), "userMusicDetailList": details})
|
||||
returned_music_details_count += len(details)
|
||||
|
||||
if len(music_list) >= max_ct:
|
||||
break
|
||||
|
||||
for songIdx in range(len(song_list)):
|
||||
for recordIdx in range(x+1, len(music_detail)):
|
||||
if song_list[songIdx]["userMusicDetailList"][0]["musicId"] == music_detail[recordIdx]["musicId"]:
|
||||
music = music_detail[recordIdx]._asdict()
|
||||
music.pop("user")
|
||||
music.pop("id")
|
||||
song_list[songIdx]["userMusicDetailList"].append(music)
|
||||
song_list[songIdx]["length"] += 1
|
||||
|
||||
if len(song_list) >= max_ct:
|
||||
next_idx += len(song_list)
|
||||
|
||||
# if we returned fewer PBs than we originally asked for from the database, that means
|
||||
# we queried for the PBs of max_ct + 1 songs.
|
||||
if returned_music_details_count < len(rows):
|
||||
next_idx += max_ct
|
||||
else:
|
||||
next_idx = -1
|
||||
SCORE_BUFFER[str(data["userId"])] = []
|
||||
|
||||
return {
|
||||
"userId": data["userId"],
|
||||
"length": len(song_list),
|
||||
"userId": user_id,
|
||||
"length": len(music_list),
|
||||
"nextIndex": next_idx,
|
||||
"userMusicList": song_list, # 240
|
||||
"userMusicList": music_list,
|
||||
}
|
||||
|
||||
async def handle_get_user_option_api_request(self, data: Dict) -> Dict:
|
||||
@@ -978,6 +1014,32 @@ class ChuniBase:
|
||||
)
|
||||
await self.data.profile.put_net_battle(user_id, net_battle)
|
||||
|
||||
# New in LUMINOUS PLUS
|
||||
if "userFavoriteMusicList" in upsert:
|
||||
# musicId, orderId
|
||||
music_ids = set(
|
||||
int(m["musicId"])
|
||||
for m in upsert["userFavoriteMusicList"]
|
||||
if m["musicId"] != "-1"
|
||||
)
|
||||
current_favorites = await self.data.item.get_all_favorites(
|
||||
user_id, self.version, fav_kind=FavoriteItemKind.MUSIC.value
|
||||
)
|
||||
|
||||
if current_favorites is None:
|
||||
current_favorites = []
|
||||
|
||||
current_favorite_ids = set(x.favId for x in current_favorites)
|
||||
keep_ids = current_favorite_ids.intersection(music_ids)
|
||||
deleted_ids = current_favorite_ids - keep_ids
|
||||
added_ids = music_ids - keep_ids
|
||||
|
||||
for fav_id in deleted_ids:
|
||||
await self.data.item.delete_favorite_music(user_id, self.version, fav_id)
|
||||
|
||||
for fav_id in added_ids:
|
||||
await self.data.item.put_favorite_music(user_id, self.version, fav_id)
|
||||
|
||||
return {"returnCode": "1"}
|
||||
|
||||
async def handle_upsert_user_chargelog_api_request(self, data: Dict) -> Dict:
|
||||
|
||||
@@ -53,6 +53,29 @@ class ChuniModsConfig:
|
||||
self.__config, "chuni", "mods", "use_login_bonus", default=True
|
||||
)
|
||||
|
||||
@property
|
||||
def stock_tickets(self) -> str:
|
||||
return CoreConfig.get_config_field(
|
||||
self.__config, "chuni", "mods", "stock_tickets", default=None
|
||||
)
|
||||
|
||||
@property
|
||||
def stock_count(self) -> int:
|
||||
return CoreConfig.get_config_field(
|
||||
self.__config, "chuni", "mods", "stock_count", default=99
|
||||
)
|
||||
|
||||
def forced_item_unlocks(self, item: str) -> bool:
|
||||
forced_item_unlocks = CoreConfig.get_config_field(
|
||||
self.__config, "chuni", "mods", "forced_item_unlocks", default={}
|
||||
)
|
||||
|
||||
if item not in forced_item_unlocks.keys():
|
||||
# default to no forced unlocks
|
||||
return False
|
||||
|
||||
return forced_item_unlocks[item]
|
||||
|
||||
|
||||
class ChuniVersionConfig:
|
||||
def __init__(self, parent_config: "ChuniConfig") -> None:
|
||||
@@ -63,9 +86,14 @@ class ChuniVersionConfig:
|
||||
in the form of:
|
||||
11: {"rom": 2.00.00, "data": 2.00.00}
|
||||
"""
|
||||
return CoreConfig.get_config_field(
|
||||
versions = CoreConfig.get_config_field(
|
||||
self.__config, "chuni", "version", default={}
|
||||
)[version]
|
||||
)
|
||||
|
||||
if version not in versions.keys():
|
||||
return None
|
||||
|
||||
return versions[version]
|
||||
|
||||
|
||||
class ChuniCryptoConfig:
|
||||
|
||||
@@ -1,10 +1,12 @@
|
||||
from enum import Enum
|
||||
|
||||
from enum import Enum, IntEnum
|
||||
from typing import Optional
|
||||
from core.utils import floor_to_nearest_005
|
||||
|
||||
class ChuniConstants:
|
||||
GAME_CODE = "SDBT"
|
||||
GAME_CODE_NEW = "SDHD"
|
||||
GAME_CODE_INT = "SDGS"
|
||||
GAME_CODE_CHN = "SDHJ"
|
||||
|
||||
CONFIG_NAME = "chuni.yaml"
|
||||
|
||||
@@ -25,6 +27,7 @@ class ChuniConstants:
|
||||
VER_CHUNITHM_SUN = 13
|
||||
VER_CHUNITHM_SUN_PLUS = 14
|
||||
VER_CHUNITHM_LUMINOUS = 15
|
||||
VER_CHUNITHM_LUMINOUS_PLUS = 16
|
||||
|
||||
VERSION_NAMES = [
|
||||
"CHUNITHM",
|
||||
@@ -43,6 +46,7 @@ class ChuniConstants:
|
||||
"CHUNITHM SUN",
|
||||
"CHUNITHM SUN PLUS",
|
||||
"CHUNITHM LUMINOUS",
|
||||
"CHUNITHM LUMINOUS PLUS",
|
||||
]
|
||||
|
||||
SCORE_RANK_INTERVALS_OLD = [
|
||||
@@ -76,18 +80,106 @@ class ChuniConstants:
|
||||
( 0, "D"),
|
||||
]
|
||||
|
||||
VERSION_LUT = {
|
||||
"100": VER_CHUNITHM,
|
||||
"105": VER_CHUNITHM_PLUS,
|
||||
"110": VER_CHUNITHM_AIR,
|
||||
"115": VER_CHUNITHM_AIR_PLUS,
|
||||
"120": VER_CHUNITHM_STAR,
|
||||
"125": VER_CHUNITHM_STAR_PLUS,
|
||||
"130": VER_CHUNITHM_AMAZON,
|
||||
"135": VER_CHUNITHM_AMAZON_PLUS,
|
||||
"140": VER_CHUNITHM_CRYSTAL,
|
||||
"145": VER_CHUNITHM_CRYSTAL_PLUS,
|
||||
"150": VER_CHUNITHM_PARADISE,
|
||||
"200": VER_CHUNITHM_NEW,
|
||||
"205": VER_CHUNITHM_NEW_PLUS,
|
||||
"210": VER_CHUNITHM_SUN,
|
||||
"215": VER_CHUNITHM_SUN_PLUS,
|
||||
"220": VER_CHUNITHM_LUMINOUS,
|
||||
"225": VER_CHUNITHM_LUMINOUS_PLUS,
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def game_ver_to_string(cls, ver: int):
|
||||
return cls.VERSION_NAMES[ver]
|
||||
|
||||
@classmethod
|
||||
def int_ver_to_game_ver(cls, ver: int) -> Optional[int]:
|
||||
""" Takes an int ver (ex 100 for 1.00) and returns an internal game version """
|
||||
return cls.VERSION_LUT.get(str(floor_to_nearest_005(ver)), None)
|
||||
|
||||
class MapAreaConditionType(Enum):
|
||||
UNLOCKED = 0
|
||||
class MapAreaConditionType(IntEnum):
|
||||
"""Condition types for the GetGameMapAreaConditionApi endpoint. Incomplete.
|
||||
|
||||
For the MAP_CLEARED/MAP_AREA_CLEARED/TROPHY_OBTAINED conditions, the conditionId
|
||||
is the map/map area/trophy.
|
||||
|
||||
For the RANK_*/ALL_JUSTICE conditions, the conditionId is songId * 100 + difficultyId.
|
||||
For example, Halcyon [ULTIMA] would be 173 * 100 + 4 = 17304.
|
||||
"""
|
||||
|
||||
ALWAYS_UNLOCKED = 0
|
||||
|
||||
MAP_CLEARED = 1
|
||||
MAP_AREA_CLEARED = 2
|
||||
|
||||
TROPHY_OBTAINED = 3
|
||||
|
||||
RANK_SSSP = 18
|
||||
RANK_SSS = 19
|
||||
RANK_SSP = 20
|
||||
RANK_SS = 21
|
||||
RANK_SP = 22
|
||||
RANK_S = 23
|
||||
|
||||
ALL_JUSTICE = 28
|
||||
|
||||
|
||||
class MapAreaConditionLogicalOperator(Enum):
|
||||
AND = 1
|
||||
OR = 2
|
||||
|
||||
|
||||
class AvatarCategory(Enum):
|
||||
WEAR = 1
|
||||
HEAD = 2
|
||||
FACE = 3
|
||||
SKIN = 4
|
||||
ITEM = 5
|
||||
FRONT = 6
|
||||
BACK = 7
|
||||
|
||||
class ItemKind(IntEnum):
|
||||
NAMEPLATE = 1
|
||||
|
||||
FRAME = 2
|
||||
"""
|
||||
"Frame" is the background for the gauge/score/max combo display
|
||||
shown during gameplay. This item cannot be equipped (as of LUMINOUS PLUS)
|
||||
and is hardcoded to the current game's version.
|
||||
"""
|
||||
|
||||
TROPHY = 3
|
||||
SKILL = 4
|
||||
|
||||
TICKET = 5
|
||||
"""A statue is also a ticket."""
|
||||
|
||||
PRESENT = 6
|
||||
MUSIC_UNLOCK = 7
|
||||
MAP_ICON = 8
|
||||
SYSTEM_VOICE = 9
|
||||
SYMBOL_CHAT = 10
|
||||
AVATAR_ACCESSORY = 11
|
||||
|
||||
ULTIMA_UNLOCK = 12
|
||||
"""This only applies to ULTIMA difficulties that are *not* unlocked by
|
||||
reaching S rank on EXPERT difficulty or above.
|
||||
"""
|
||||
|
||||
|
||||
class FavoriteItemKind(IntEnum):
|
||||
MUSIC = 1
|
||||
RIVAL = 2
|
||||
CHARACTER = 3
|
||||
|
||||
@@ -1,13 +1,17 @@
|
||||
from core.data import Data
|
||||
from core.config import CoreConfig
|
||||
from titles.chuni.schema import *
|
||||
|
||||
from .config import ChuniConfig
|
||||
|
||||
class ChuniData(Data):
|
||||
def __init__(self, cfg: CoreConfig) -> None:
|
||||
def __init__(self, cfg: CoreConfig, chuni_cfg: ChuniConfig = None) -> None:
|
||||
super().__init__(cfg)
|
||||
|
||||
self.item = ChuniItemData(cfg, self.session)
|
||||
self.profile = ChuniProfileData(cfg, self.session)
|
||||
self.score = ChuniScoreData(cfg, self.session)
|
||||
self.static = ChuniStaticData(cfg, self.session)
|
||||
|
||||
# init rom versioning for use with score playlog data
|
||||
if chuni_cfg:
|
||||
ChuniRomVersion.init_versions(chuni_cfg)
|
||||
|
||||
@@ -1,7 +1,9 @@
|
||||
from typing import List
|
||||
from typing import List, Tuple, Dict
|
||||
from starlette.routing import Route, Mount
|
||||
from starlette.requests import Request
|
||||
from starlette.responses import Response, RedirectResponse
|
||||
from starlette.staticfiles import StaticFiles
|
||||
from sqlalchemy.engine import Row
|
||||
from os import path
|
||||
import yaml
|
||||
import jinja2
|
||||
@@ -10,7 +12,7 @@ from core.frontend import FE_Base, UserSession
|
||||
from core.config import CoreConfig
|
||||
from .database import ChuniData
|
||||
from .config import ChuniConfig
|
||||
from .const import ChuniConstants
|
||||
from .const import ChuniConstants, AvatarCategory, ItemKind
|
||||
|
||||
|
||||
def pairwise(iterable):
|
||||
@@ -81,12 +83,12 @@ class ChuniFrontend(FE_Base):
|
||||
self, cfg: CoreConfig, environment: jinja2.Environment, cfg_dir: str
|
||||
) -> None:
|
||||
super().__init__(cfg, environment)
|
||||
self.data = ChuniData(cfg)
|
||||
self.game_cfg = ChuniConfig()
|
||||
if path.exists(f"{cfg_dir}/{ChuniConstants.CONFIG_NAME}"):
|
||||
self.game_cfg.update(
|
||||
yaml.safe_load(open(f"{cfg_dir}/{ChuniConstants.CONFIG_NAME}"))
|
||||
)
|
||||
self.data = ChuniData(cfg, self.game_cfg)
|
||||
self.nav_name = "Chunithm"
|
||||
|
||||
def get_routes(self) -> List[Route]:
|
||||
@@ -97,8 +99,18 @@ class ChuniFrontend(FE_Base):
|
||||
Route("/", self.render_GET_playlog, methods=['GET']),
|
||||
Route("/{index}", self.render_GET_playlog, methods=['GET']),
|
||||
]),
|
||||
Route("/favorites", self.render_GET_favorites, methods=['GET']),
|
||||
Route("/userbox", self.render_GET_userbox, methods=['GET']),
|
||||
Route("/avatar", self.render_GET_avatar, methods=['GET']),
|
||||
Route("/update.map-icon", self.update_map_icon, methods=['POST']),
|
||||
Route("/update.system-voice", self.update_system_voice, methods=['POST']),
|
||||
Route("/update.userbox", self.update_userbox, methods=['POST']),
|
||||
Route("/update.avatar", self.update_avatar, methods=['POST']),
|
||||
Route("/update.name", self.update_name, methods=['POST']),
|
||||
Route("/update.favorite_music_playlog", self.update_favorite_music_playlog, methods=['POST']),
|
||||
Route("/update.favorite_music_favorites", self.update_favorite_music_favorites, methods=['POST']),
|
||||
Route("/version.change", self.version_change, methods=['POST']),
|
||||
Mount('/img', app=StaticFiles(directory='titles/chuni/img'), name="img")
|
||||
]
|
||||
|
||||
async def render_GET(self, request: Request) -> bytes:
|
||||
@@ -111,22 +123,35 @@ class ChuniFrontend(FE_Base):
|
||||
|
||||
if usr_sesh.user_id > 0:
|
||||
versions = await self.data.profile.get_all_profile_versions(usr_sesh.user_id)
|
||||
profile = []
|
||||
profile = None
|
||||
if versions:
|
||||
# chunithm_version is -1 means it is not initialized yet, select a default version from existing.
|
||||
if usr_sesh.chunithm_version < 0:
|
||||
usr_sesh.chunithm_version = versions[0]
|
||||
profile = await self.data.profile.get_profile_data(usr_sesh.user_id, usr_sesh.chunithm_version)
|
||||
|
||||
user_id = usr_sesh.user_id
|
||||
version = usr_sesh.chunithm_version
|
||||
|
||||
# While map icons and system voices weren't present prior to AMAZON, we don't need to bother checking
|
||||
# version here - it'll just end up being empty sets and the jinja will ignore the variables anyway.
|
||||
map_icons, total_map_icons = await self.get_available_map_icons(version, profile)
|
||||
system_voices, total_system_voices = await self.get_available_system_voices(version, profile)
|
||||
|
||||
resp = Response(template.render(
|
||||
title=f"{self.core_config.server.name} | {self.nav_name}",
|
||||
game_list=self.environment.globals["game_list"],
|
||||
sesh=vars(usr_sesh),
|
||||
user_id=usr_sesh.user_id,
|
||||
user_id=user_id,
|
||||
profile=profile,
|
||||
version_list=ChuniConstants.VERSION_NAMES,
|
||||
versions=versions,
|
||||
cur_version=usr_sesh.chunithm_version
|
||||
cur_version=version,
|
||||
cur_version_name=ChuniConstants.game_ver_to_string(version),
|
||||
map_icons=map_icons,
|
||||
system_voices=system_voices,
|
||||
total_map_icons=total_map_icons,
|
||||
total_system_voices=total_system_voices
|
||||
), media_type="text/html; charset=utf-8")
|
||||
|
||||
if usr_sesh.chunithm_version >= 0:
|
||||
@@ -184,6 +209,8 @@ class ChuniFrontend(FE_Base):
|
||||
profile=profile,
|
||||
hot_list=hot_list,
|
||||
base_list=base_list,
|
||||
cur_version=usr_sesh.chunithm_version,
|
||||
cur_version_name=ChuniConstants.game_ver_to_string(usr_sesh.chunithm_version)
|
||||
), media_type="text/html; charset=utf-8")
|
||||
else:
|
||||
return RedirectResponse("/gate/", 303)
|
||||
@@ -205,43 +232,470 @@ class ChuniFrontend(FE_Base):
|
||||
else:
|
||||
index = int(path_index) - 1 # 0 and 1 are 1st page
|
||||
user_id = usr_sesh.user_id
|
||||
playlog_count = await self.data.score.get_user_playlogs_count(user_id)
|
||||
version = usr_sesh.chunithm_version
|
||||
playlog_count = await self.data.score.get_user_playlogs_count(user_id, version)
|
||||
if playlog_count < index * 20 :
|
||||
return Response(template.render(
|
||||
title=f"{self.core_config.server.name} | {self.nav_name}",
|
||||
game_list=self.environment.globals["game_list"],
|
||||
sesh=vars(usr_sesh),
|
||||
playlog_count=0
|
||||
playlog_count=0,
|
||||
cur_version=version,
|
||||
cur_version_name=ChuniConstants.game_ver_to_string(version)
|
||||
), media_type="text/html; charset=utf-8")
|
||||
playlog = await self.data.score.get_playlogs_limited(user_id, index, 20)
|
||||
playlog = await self.data.score.get_playlogs_limited(user_id, version, index, 20)
|
||||
playlog_with_title = []
|
||||
for record in playlog:
|
||||
music_chart = await self.data.static.get_music_chart(usr_sesh.chunithm_version, record.musicId, record.level)
|
||||
for idx,record in enumerate(playlog):
|
||||
music_chart = await self.data.static.get_music_chart(version, record.musicId, record.level)
|
||||
if music_chart:
|
||||
difficultyNum=music_chart.level
|
||||
artist=music_chart.artist
|
||||
title=music_chart.title
|
||||
(jacket, ext) = path.splitext(music_chart.jacketPath)
|
||||
jacket += ".png"
|
||||
else:
|
||||
difficultyNum=0
|
||||
artist="unknown"
|
||||
title="musicid: " + str(record.musicId)
|
||||
jacket = "unknown.png"
|
||||
|
||||
# Check if this song is a favorite so we can populate the add/remove button
|
||||
is_favorite = await self.data.item.is_favorite(user_id, version, record.musicId)
|
||||
|
||||
playlog_with_title.append({
|
||||
# Values for the actual readable results
|
||||
"raw": record,
|
||||
"title": title,
|
||||
"difficultyNum": difficultyNum,
|
||||
"artist": artist,
|
||||
"jacket": jacket,
|
||||
# Values used solely for favorite updates
|
||||
"idx": idx,
|
||||
"musicId": record.musicId,
|
||||
"isFav": is_favorite
|
||||
})
|
||||
return Response(template.render(
|
||||
title=f"{self.core_config.server.name} | {self.nav_name}",
|
||||
game_list=self.environment.globals["game_list"],
|
||||
sesh=vars(usr_sesh),
|
||||
user_id=usr_sesh.user_id,
|
||||
user_id=user_id,
|
||||
playlog=playlog_with_title,
|
||||
playlog_count=playlog_count
|
||||
playlog_count=playlog_count,
|
||||
cur_version=version,
|
||||
cur_version_name=ChuniConstants.game_ver_to_string(version)
|
||||
), media_type="text/html; charset=utf-8")
|
||||
else:
|
||||
return RedirectResponse("/gate/", 303)
|
||||
|
||||
async def render_GET_favorites(self, request: Request) -> bytes:
|
||||
template = self.environment.get_template(
|
||||
"titles/chuni/templates/chuni_favorites.jinja"
|
||||
)
|
||||
usr_sesh = self.validate_session(request)
|
||||
if not usr_sesh:
|
||||
usr_sesh = UserSession()
|
||||
|
||||
if usr_sesh.user_id > 0:
|
||||
if usr_sesh.chunithm_version < 0:
|
||||
return RedirectResponse("/game/chuni/", 303)
|
||||
|
||||
user_id = usr_sesh.user_id
|
||||
version = usr_sesh.chunithm_version
|
||||
favorites = await self.data.item.get_all_favorites(user_id, version, 1)
|
||||
favorites_count = len(favorites)
|
||||
favorites_with_title = []
|
||||
favorites_by_genre = dict()
|
||||
for idx,favorite in enumerate(favorites):
|
||||
song = await self.data.static.get_song(favorite.favId)
|
||||
if song:
|
||||
# we likely got multiple results - one for each chart. Just use the first
|
||||
artist=song.artist
|
||||
title=song.title
|
||||
genre=song.genre
|
||||
(jacket, ext) = path.splitext(song.jacketPath)
|
||||
jacket += ".png"
|
||||
else:
|
||||
artist="unknown"
|
||||
title="musicid: " + str(favorite.favId)
|
||||
genre="unknown"
|
||||
jacket = "unknown.png"
|
||||
|
||||
# add a new collection for the genre if this is our first time seeing it
|
||||
if genre not in favorites_by_genre:
|
||||
favorites_by_genre[genre] = []
|
||||
|
||||
# add the song to the appropriate genre collection
|
||||
favorites_by_genre[genre].append({
|
||||
"idx": idx,
|
||||
"title": title,
|
||||
"artist": artist,
|
||||
"jacket": jacket,
|
||||
"favId": favorite.favId
|
||||
})
|
||||
|
||||
# Sort favorites by title before rendering the page
|
||||
for g in favorites_by_genre:
|
||||
favorites_by_genre[g].sort(key=lambda x: x["title"].lower())
|
||||
|
||||
return Response(template.render(
|
||||
title=f"{self.core_config.server.name} | {self.nav_name}",
|
||||
game_list=self.environment.globals["game_list"],
|
||||
sesh=vars(usr_sesh),
|
||||
user_id=user_id,
|
||||
favorites_by_genre=favorites_by_genre,
|
||||
favorites_count=favorites_count,
|
||||
cur_version=version,
|
||||
cur_version_name=ChuniConstants.game_ver_to_string(version)
|
||||
), media_type="text/html; charset=utf-8")
|
||||
else:
|
||||
return RedirectResponse("/gate/", 303)
|
||||
|
||||
async def get_available_map_icons(self, version: int, profile: Row) -> Tuple[List[Dict], int]:
|
||||
if profile is None:
|
||||
return ([], 0)
|
||||
items = dict()
|
||||
rows = await self.data.static.get_map_icons(version)
|
||||
if rows is None:
|
||||
return (items, 0) # can only happen with old db
|
||||
|
||||
force_unlocked = self.game_cfg.mods.forced_item_unlocks("map_icons")
|
||||
|
||||
user_map_icons = []
|
||||
if not force_unlocked:
|
||||
user_map_icons = await self.data.item.get_items(profile.user, ItemKind.MAP_ICON.value)
|
||||
user_map_icons = [icon["itemId"] for icon in user_map_icons] + [profile.mapIconId]
|
||||
|
||||
for row in rows:
|
||||
if force_unlocked or row["defaultHave"] or row["mapIconId"] in user_map_icons:
|
||||
item = dict()
|
||||
item["id"] = row["mapIconId"]
|
||||
item["name"] = row["name"]
|
||||
item["iconPath"] = path.splitext(row["iconPath"])[0] + ".png"
|
||||
items[row["mapIconId"]] = item
|
||||
|
||||
return (items, len(rows))
|
||||
|
||||
async def get_available_system_voices(self, version: int, profile: Row) -> Tuple[List[Dict], int]:
|
||||
if profile is None:
|
||||
return ([], 0)
|
||||
items = dict()
|
||||
rows = await self.data.static.get_system_voices(version)
|
||||
if rows is None:
|
||||
return (items, 0) # can only happen with old db
|
||||
|
||||
force_unlocked = self.game_cfg.mods.forced_item_unlocks("system_voices")
|
||||
|
||||
user_system_voices = []
|
||||
if not force_unlocked:
|
||||
user_system_voices = await self.data.item.get_items(profile.user, ItemKind.SYSTEM_VOICE.value)
|
||||
user_system_voices = [icon["itemId"] for icon in user_system_voices] + [profile.voiceId]
|
||||
|
||||
for row in rows:
|
||||
if force_unlocked or row["defaultHave"] or row["voiceId"] in user_system_voices:
|
||||
item = dict()
|
||||
item["id"] = row["voiceId"]
|
||||
item["name"] = row["name"]
|
||||
item["imagePath"] = path.splitext(row["imagePath"])[0] + ".png"
|
||||
items[row["voiceId"]] = item
|
||||
|
||||
return (items, len(rows))
|
||||
|
||||
async def get_available_nameplates(self, version: int, profile: Row) -> Tuple[List[Dict], int]:
|
||||
items = dict()
|
||||
rows = await self.data.static.get_nameplates(version)
|
||||
if rows is None:
|
||||
return (items, 0) # can only happen with old db
|
||||
|
||||
force_unlocked = self.game_cfg.mods.forced_item_unlocks("nameplates")
|
||||
|
||||
user_nameplates = []
|
||||
if not force_unlocked:
|
||||
user_nameplates = await self.data.item.get_items(profile.user, ItemKind.NAMEPLATE.value)
|
||||
user_nameplates = [item["itemId"] for item in user_nameplates] + [profile.nameplateId]
|
||||
|
||||
for row in rows:
|
||||
if force_unlocked or row["defaultHave"] or row["nameplateId"] in user_nameplates:
|
||||
item = dict()
|
||||
item["id"] = row["nameplateId"]
|
||||
item["name"] = row["name"]
|
||||
item["texturePath"] = path.splitext(row["texturePath"])[0] + ".png"
|
||||
items[row["nameplateId"]] = item
|
||||
|
||||
return (items, len(rows))
|
||||
|
||||
async def get_available_trophies(self, version: int, profile: Row) -> Tuple[List[Dict], int]:
|
||||
items = dict()
|
||||
rows = await self.data.static.get_trophies(version)
|
||||
if rows is None:
|
||||
return (items, 0) # can only happen with old db
|
||||
|
||||
force_unlocked = self.game_cfg.mods.forced_item_unlocks("trophies")
|
||||
|
||||
user_trophies = []
|
||||
if not force_unlocked:
|
||||
user_trophies = await self.data.item.get_items(profile.user, ItemKind.TROPHY.value)
|
||||
user_trophies = [item["itemId"] for item in user_trophies] + [profile.trophyId]
|
||||
|
||||
for row in rows:
|
||||
if force_unlocked or row["defaultHave"] or row["trophyId"] in user_trophies:
|
||||
item = dict()
|
||||
item["id"] = row["trophyId"]
|
||||
item["name"] = row["name"]
|
||||
item["rarity"] = row["rareType"]
|
||||
items[row["trophyId"]] = item
|
||||
|
||||
return (items, len(rows))
|
||||
|
||||
async def get_available_characters(self, version: int, profile: Row) -> Tuple[List[Dict], int]:
|
||||
items = dict()
|
||||
rows = await self.data.static.get_characters(version)
|
||||
if rows is None:
|
||||
return (items, 0) # can only happen with old db
|
||||
|
||||
force_unlocked = self.game_cfg.mods.forced_item_unlocks("character_icons")
|
||||
|
||||
user_characters = []
|
||||
if not force_unlocked:
|
||||
user_characters = await self.data.item.get_characters(profile.user)
|
||||
user_characters = [chara["characterId"] for chara in user_characters] + [profile.characterId]
|
||||
|
||||
for row in rows:
|
||||
if force_unlocked or row["defaultHave"] or row["characterId"] in user_characters:
|
||||
item = dict()
|
||||
item["id"] = row["characterId"]
|
||||
item["name"] = row["name"]
|
||||
item["iconPath"] = path.splitext(row["imagePath3"])[0] + ".png"
|
||||
items[row["characterId"]] = item
|
||||
|
||||
return (items, len(rows))
|
||||
|
||||
async def get_available_avatar_items(self, version: int, category: AvatarCategory, user_unlocked_items: List[int]) -> Tuple[List[Dict], int]:
|
||||
items = dict()
|
||||
rows = await self.data.static.get_avatar_items(version, category.value)
|
||||
if rows is None:
|
||||
return (items, 0) # can only happen with old db
|
||||
|
||||
force_unlocked = self.game_cfg.mods.forced_item_unlocks("avatar_accessories")
|
||||
|
||||
for row in rows:
|
||||
if force_unlocked or row["defaultHave"] or row["avatarAccessoryId"] in user_unlocked_items:
|
||||
item = dict()
|
||||
item["id"] = row["avatarAccessoryId"]
|
||||
item["name"] = row["name"]
|
||||
item["iconPath"] = path.splitext(row["iconPath"])[0] + ".png"
|
||||
item["texturePath"] = path.splitext(row["texturePath"])[0] + ".png"
|
||||
items[row["avatarAccessoryId"]] = item
|
||||
|
||||
return (items, len(rows))
|
||||
|
||||
async def render_GET_userbox(self, request: Request) -> bytes:
|
||||
template = self.environment.get_template(
|
||||
"titles/chuni/templates/chuni_userbox.jinja"
|
||||
)
|
||||
usr_sesh = self.validate_session(request)
|
||||
if not usr_sesh:
|
||||
usr_sesh = UserSession()
|
||||
|
||||
if usr_sesh.user_id > 0:
|
||||
if usr_sesh.chunithm_version < 0:
|
||||
return RedirectResponse("/game/chuni/", 303)
|
||||
|
||||
user_id = usr_sesh.user_id
|
||||
version = usr_sesh.chunithm_version
|
||||
|
||||
# Get the user profile so we know how the userbox is currently configured
|
||||
profile = await self.data.profile.get_profile_data(user_id, version)
|
||||
|
||||
# Build up lists of available userbox components
|
||||
nameplates, total_nameplates = await self.get_available_nameplates(version, profile)
|
||||
trophies, total_trophies = await self.get_available_trophies(version, profile)
|
||||
characters, total_characters = await self.get_available_characters(version, profile)
|
||||
|
||||
# Get the user's team
|
||||
team_name = "ARTEMiS"
|
||||
if profile["teamId"]:
|
||||
team = await self.data.profile.get_team_by_id(profile["teamId"])
|
||||
team_name = team["teamName"]
|
||||
# Figure out the rating color we should use (rank maps to the stylesheet)
|
||||
rating = profile.playerRating / 100
|
||||
rating_rank = 0
|
||||
if rating >= 16:
|
||||
rating_rank = 8
|
||||
elif rating >= 15.25:
|
||||
rating_rank = 7
|
||||
elif rating >= 14.5:
|
||||
rating_rank = 6
|
||||
elif rating >= 13.25:
|
||||
rating_rank = 5
|
||||
elif rating >= 12:
|
||||
rating_rank = 4
|
||||
elif rating >= 10:
|
||||
rating_rank = 3
|
||||
elif rating >= 7:
|
||||
rating_rank = 2
|
||||
elif rating >= 4:
|
||||
rating_rank = 1
|
||||
|
||||
return Response(template.render(
|
||||
title=f"{self.core_config.server.name} | {self.nav_name}",
|
||||
game_list=self.environment.globals["game_list"],
|
||||
sesh=vars(usr_sesh),
|
||||
user_id=user_id,
|
||||
cur_version=version,
|
||||
cur_version_name=ChuniConstants.game_ver_to_string(version),
|
||||
profile=profile,
|
||||
team_name=team_name,
|
||||
rating_rank=rating_rank,
|
||||
nameplates=nameplates,
|
||||
trophies=trophies,
|
||||
characters=characters,
|
||||
total_nameplates=total_nameplates,
|
||||
total_trophies=total_trophies,
|
||||
total_characters=total_characters
|
||||
), media_type="text/html; charset=utf-8")
|
||||
else:
|
||||
return RedirectResponse("/gate/", 303)
|
||||
|
||||
async def render_GET_avatar(self, request: Request) -> bytes:
|
||||
template = self.environment.get_template(
|
||||
"titles/chuni/templates/chuni_avatar.jinja"
|
||||
)
|
||||
usr_sesh = self.validate_session(request)
|
||||
if not usr_sesh:
|
||||
usr_sesh = UserSession()
|
||||
|
||||
if usr_sesh.user_id > 0:
|
||||
if usr_sesh.chunithm_version < 11:
|
||||
# Avatar configuration only for NEW!! and newer
|
||||
return RedirectResponse("/game/chuni/", 303)
|
||||
|
||||
user_id = usr_sesh.user_id
|
||||
version = usr_sesh.chunithm_version
|
||||
|
||||
# Get the user profile so we know what avatar items are currently in use
|
||||
profile = await self.data.profile.get_profile_data(user_id, version)
|
||||
# Get all the user avatar accessories so we know what to populate
|
||||
user_accessories = await self.data.item.get_items(user_id, ItemKind.AVATAR_ACCESSORY.value)
|
||||
user_accessories = [item["itemId"] for item in user_accessories] + \
|
||||
[profile.avatarBack, profile.avatarItem, profile.avatarWear, \
|
||||
profile.avatarFront, profile.avatarSkin, profile.avatarHead, profile.avatarFace]
|
||||
|
||||
# Build up available list of items for each avatar category
|
||||
wears, total_wears = await self.get_available_avatar_items(version, AvatarCategory.WEAR, user_accessories)
|
||||
faces, total_faces = await self.get_available_avatar_items(version, AvatarCategory.FACE, user_accessories)
|
||||
heads, total_heads = await self.get_available_avatar_items(version, AvatarCategory.HEAD, user_accessories)
|
||||
skins, total_skins = await self.get_available_avatar_items(version, AvatarCategory.SKIN, user_accessories)
|
||||
items, total_items = await self.get_available_avatar_items(version, AvatarCategory.ITEM, user_accessories)
|
||||
fronts, total_fronts = await self.get_available_avatar_items(version, AvatarCategory.FRONT, user_accessories)
|
||||
backs, total_backs = await self.get_available_avatar_items(version, AvatarCategory.BACK, user_accessories)
|
||||
|
||||
return Response(template.render(
|
||||
title=f"{self.core_config.server.name} | {self.nav_name}",
|
||||
game_list=self.environment.globals["game_list"],
|
||||
sesh=vars(usr_sesh),
|
||||
user_id=user_id,
|
||||
cur_version=version,
|
||||
cur_version_name=ChuniConstants.game_ver_to_string(version),
|
||||
profile=profile,
|
||||
wears=wears,
|
||||
faces=faces,
|
||||
heads=heads,
|
||||
skins=skins,
|
||||
items=items,
|
||||
fronts=fronts,
|
||||
backs=backs,
|
||||
total_wears=total_wears,
|
||||
total_faces=total_faces,
|
||||
total_heads=total_heads,
|
||||
total_skins=total_skins,
|
||||
total_items=total_items,
|
||||
total_fronts=total_fronts,
|
||||
total_backs=total_backs
|
||||
), media_type="text/html; charset=utf-8")
|
||||
else:
|
||||
return RedirectResponse("/gate/", 303)
|
||||
|
||||
async def update_map_icon(self, request: Request) -> bytes:
|
||||
usr_sesh = self.validate_session(request)
|
||||
if not usr_sesh:
|
||||
return RedirectResponse("/gate/", 303)
|
||||
|
||||
form_data = await request.form()
|
||||
new_map_icon: str = form_data.get("id")
|
||||
|
||||
if not new_map_icon:
|
||||
return RedirectResponse("/gate/?e=4", 303)
|
||||
|
||||
if not await self.data.profile.update_map_icon(usr_sesh.user_id, usr_sesh.chunithm_version, new_map_icon):
|
||||
return RedirectResponse("/gate/?e=999", 303)
|
||||
|
||||
return RedirectResponse("/game/chuni/", 303)
|
||||
|
||||
async def update_system_voice(self, request: Request) -> bytes:
|
||||
usr_sesh = self.validate_session(request)
|
||||
if not usr_sesh:
|
||||
return RedirectResponse("/gate/", 303)
|
||||
|
||||
form_data = await request.form()
|
||||
new_system_voice: str = form_data.get("id")
|
||||
|
||||
if not new_system_voice:
|
||||
return RedirectResponse("/gate/?e=4", 303)
|
||||
|
||||
if not await self.data.profile.update_system_voice(usr_sesh.user_id, usr_sesh.chunithm_version, new_system_voice):
|
||||
return RedirectResponse("/gate/?e=999", 303)
|
||||
|
||||
return RedirectResponse("/game/chuni/", 303)
|
||||
|
||||
async def update_userbox(self, request: Request) -> bytes:
|
||||
usr_sesh = self.validate_session(request)
|
||||
if not usr_sesh:
|
||||
return RedirectResponse("/gate/", 303)
|
||||
|
||||
form_data = await request.form()
|
||||
new_nameplate: str = form_data.get("nameplate")
|
||||
new_trophy: str = form_data.get("trophy")
|
||||
new_character: str = form_data.get("character")
|
||||
|
||||
if not new_nameplate or \
|
||||
not new_trophy or \
|
||||
not new_character:
|
||||
return RedirectResponse("/game/chuni/userbox?e=4", 303)
|
||||
|
||||
if not await self.data.profile.update_userbox(usr_sesh.user_id, usr_sesh.chunithm_version, new_nameplate, new_trophy, new_character):
|
||||
return RedirectResponse("/gate/?e=999", 303)
|
||||
|
||||
return RedirectResponse("/game/chuni/userbox", 303)
|
||||
|
||||
async def update_avatar(self, request: Request) -> bytes:
|
||||
usr_sesh = self.validate_session(request)
|
||||
if not usr_sesh:
|
||||
return RedirectResponse("/gate/", 303)
|
||||
|
||||
form_data = await request.form()
|
||||
new_wear: str = form_data.get("wear")
|
||||
new_face: str = form_data.get("face")
|
||||
new_head: str = form_data.get("head")
|
||||
new_skin: str = form_data.get("skin")
|
||||
new_item: str = form_data.get("item")
|
||||
new_front: str = form_data.get("front")
|
||||
new_back: str = form_data.get("back")
|
||||
|
||||
if not new_wear or \
|
||||
not new_face or \
|
||||
not new_head or \
|
||||
not new_skin or \
|
||||
not new_item or \
|
||||
not new_front or \
|
||||
not new_back:
|
||||
return RedirectResponse("/game/chuni/avatar?e=4", 303)
|
||||
|
||||
if not await self.data.profile.update_avatar(usr_sesh.user_id, usr_sesh.chunithm_version, new_wear, new_face, new_head, new_skin, new_item, new_front, new_back):
|
||||
return RedirectResponse("/gate/?e=999", 303)
|
||||
|
||||
return RedirectResponse("/game/chuni/avatar", 303)
|
||||
|
||||
|
||||
async def update_name(self, request: Request) -> bytes:
|
||||
usr_sesh = self.validate_session(request)
|
||||
if not usr_sesh:
|
||||
@@ -265,7 +719,7 @@ class ChuniFrontend(FE_Base):
|
||||
elif o < 0x7F and o > 0x20:
|
||||
new_name_full += chr(o + 0xFEE0)
|
||||
elif o <= 0x7F:
|
||||
self.logger.warn(f"Invalid ascii character {o:02X}")
|
||||
self.logger.warning(f"Invalid ascii character {o:02X}")
|
||||
return RedirectResponse("/gate/?e=4", 303)
|
||||
else:
|
||||
new_name_full += x
|
||||
@@ -279,6 +733,32 @@ class ChuniFrontend(FE_Base):
|
||||
|
||||
return RedirectResponse("/game/chuni/?s=1", 303)
|
||||
|
||||
async def update_favorite_music(self, request: Request, retPage: str):
|
||||
usr_sesh = self.validate_session(request)
|
||||
if not usr_sesh:
|
||||
return RedirectResponse(retPage, 303)
|
||||
|
||||
user_id = usr_sesh.user_id
|
||||
version = usr_sesh.chunithm_version
|
||||
form_data = await request.form()
|
||||
music_id: str = form_data.get("musicId")
|
||||
isAdd: int = int(form_data.get("isAdd"))
|
||||
|
||||
if isAdd:
|
||||
if await self.data.item.put_favorite_music(user_id, version, music_id) is None:
|
||||
return RedirectResponse("/gate/?e=999", 303)
|
||||
else:
|
||||
if await self.data.item.delete_favorite_music(user_id, version, music_id) is None:
|
||||
return RedirectResponse("/gate/?e=999", 303)
|
||||
|
||||
return RedirectResponse(retPage, 303)
|
||||
|
||||
async def update_favorite_music_playlog(self, request: Request):
|
||||
return await self.update_favorite_music(request, "/game/chuni/playlog")
|
||||
|
||||
async def update_favorite_music_favorites(self, request: Request):
|
||||
return await self.update_favorite_music(request, "/game/chuni/favorites")
|
||||
|
||||
async def version_change(self, request: Request):
|
||||
usr_sesh = self.validate_session(request)
|
||||
if not usr_sesh:
|
||||
|
||||
BIN titles/chuni/img/avatar-common.png (new file, 34 KiB)
BIN titles/chuni/img/avatar-platform.png (new file, 25 KiB)

titles/chuni/img/avatar/.gitignore (vendored, new file)
@@ -0,0 +1,4 @@
# Ignore everything in this directory
*
# Except this file
!.gitignore

BIN titles/chuni/img/character-bg.png (new file, 30 KiB)

titles/chuni/img/character/.gitignore (vendored, new file)
@@ -0,0 +1,4 @@
# Ignore everything in this directory
*
# Except this file
!.gitignore

titles/chuni/img/jacket/.gitignore (vendored, new file)
@@ -0,0 +1,5 @@
# Ignore everything in this directory
*
# Except this file and default unknown
!.gitignore
!unknown.png

BIN titles/chuni/img/jacket/unknown.png (new file, 27 KiB)

titles/chuni/img/mapIcon/.gitignore (vendored, new file)
@@ -0,0 +1,4 @@
# Ignore everything in this directory
*
# Except this file
!.gitignore

titles/chuni/img/nameplate/.gitignore (vendored, new file)
@@ -0,0 +1,4 @@
# Ignore everything in this directory
*
# Except this file
!.gitignore

BIN titles/chuni/img/rank/rank0.png (new file, 21 KiB)
BIN titles/chuni/img/rank/rank1.png (new file, 33 KiB)
BIN titles/chuni/img/rank/rank10.png (new file, 30 KiB)
BIN titles/chuni/img/rank/rank11.png (new file, 25 KiB)
BIN titles/chuni/img/rank/rank2.png (new file, 26 KiB)
BIN titles/chuni/img/rank/rank3.png (new file, 30 KiB)
BIN titles/chuni/img/rank/rank4.png (new file, 30 KiB)
BIN titles/chuni/img/rank/rank5.png (new file, 28 KiB)
BIN titles/chuni/img/rank/rank6.png (new file, 28 KiB)
BIN titles/chuni/img/rank/rank7.png (new file, 42 KiB)
BIN titles/chuni/img/rank/rank8.png (new file, 42 KiB)
BIN titles/chuni/img/rank/rank9.png (new file, 21 KiB)
BIN titles/chuni/img/rank/rating0.png (new file, 20 KiB)
BIN titles/chuni/img/rank/team3.png (new file, 44 KiB)

titles/chuni/img/systemVoice/.gitignore (vendored, new file)
@@ -0,0 +1,4 @@
# Ignore everything in this directory
*
# Except this file
!.gitignore
|
||||
@@ -36,6 +36,7 @@ from .newplus import ChuniNewPlus
|
||||
from .sun import ChuniSun
|
||||
from .sunplus import ChuniSunPlus
|
||||
from .luminous import ChuniLuminous
|
||||
from .luminousplus import ChuniLuminousPlus
|
||||
|
||||
class ChuniServlet(BaseServlet):
|
||||
def __init__(self, core_cfg: CoreConfig, cfg_dir: str) -> None:
|
||||
@@ -64,6 +65,7 @@ class ChuniServlet(BaseServlet):
|
||||
ChuniSun,
|
||||
ChuniSunPlus,
|
||||
ChuniLuminous,
|
||||
ChuniLuminousPlus,
|
||||
]
|
||||
|
||||
self.logger = logging.getLogger("chuni")
|
||||
@@ -99,14 +101,18 @@ class ChuniServlet(BaseServlet):
|
||||
f"{ChuniConstants.VER_CHUNITHM_PARADISE}_int": 51, # SUPERSTAR PLUS
|
||||
ChuniConstants.VER_CHUNITHM_NEW: 54,
|
||||
f"{ChuniConstants.VER_CHUNITHM_NEW}_int": 49,
|
||||
f"{ChuniConstants.VER_CHUNITHM_NEW}_chn": 37,
|
||||
ChuniConstants.VER_CHUNITHM_NEW_PLUS: 25,
|
||||
f"{ChuniConstants.VER_CHUNITHM_NEW_PLUS}_int": 31,
|
||||
f"{ChuniConstants.VER_CHUNITHM_NEW_PLUS}_chn": 35, # NEW
|
||||
ChuniConstants.VER_CHUNITHM_SUN: 70,
|
||||
f"{ChuniConstants.VER_CHUNITHM_SUN}_int": 35,
|
||||
ChuniConstants.VER_CHUNITHM_SUN_PLUS: 36,
|
||||
f"{ChuniConstants.VER_CHUNITHM_SUN_PLUS}_int": 36,
|
||||
ChuniConstants.VER_CHUNITHM_LUMINOUS: 8,
|
||||
f"{ChuniConstants.VER_CHUNITHM_LUMINOUS}_int": 8,
|
||||
f"{ChuniConstants.VER_CHUNITHM_LUMINOUS}_chn": 8,
|
||||
ChuniConstants.VER_CHUNITHM_LUMINOUS_PLUS: 56,
|
||||
}
|
||||
|
||||
for version, keys in self.game_cfg.crypto.keys.items():
|
||||
@@ -147,6 +153,11 @@ class ChuniServlet(BaseServlet):
|
||||
and version_idx >= ChuniConstants.VER_CHUNITHM_NEW
|
||||
):
|
||||
method_fixed += "C3Exp"
|
||||
elif (
|
||||
isinstance(version, str)
|
||||
and version.endswith("_chn")
|
||||
):
|
||||
method_fixed += "Chn"
|
||||
|
||||
hash = PBKDF2(
|
||||
method_fixed,
|
||||
@@ -235,8 +246,10 @@ class ChuniServlet(BaseServlet):
|
||||
internal_ver = ChuniConstants.VER_CHUNITHM_SUN
|
||||
elif version >= 215 and version < 220: # SUN PLUS
|
||||
internal_ver = ChuniConstants.VER_CHUNITHM_SUN_PLUS
|
||||
elif version >= 220: # LUMINOUS
|
||||
elif version >= 220 and version < 225: # LUMINOUS
|
||||
internal_ver = ChuniConstants.VER_CHUNITHM_LUMINOUS
|
||||
elif version >= 225: # LUMINOUS PLUS
|
||||
internal_ver = ChuniConstants.VER_CHUNITHM_LUMINOUS_PLUS
|
||||
elif game_code == "SDGS": # Int
|
||||
if version < 105: # SUPERSTAR
|
||||
internal_ver = ChuniConstants.VER_CHUNITHM_CRYSTAL_PLUS
|
||||
@@ -250,7 +263,16 @@ class ChuniServlet(BaseServlet):
|
||||
internal_ver = ChuniConstants.VER_CHUNITHM_SUN
|
||||
elif version >= 125 and version < 130: # SUN PLUS
|
||||
internal_ver = ChuniConstants.VER_CHUNITHM_SUN_PLUS
|
||||
elif version >= 130: # LUMINOUS
|
||||
elif version >= 130 and version < 135: # LUMINOUS
|
||||
internal_ver = ChuniConstants.VER_CHUNITHM_LUMINOUS
|
||||
elif version >= 135: # LUMINOUS PLUS
|
||||
internal_ver = ChuniConstants.VER_CHUNITHM_LUMINOUS_PLUS
|
||||
elif game_code == "SDHJ": # Chn
|
||||
if version < 110: # NEW
|
||||
internal_ver = ChuniConstants.VER_CHUNITHM_NEW
|
||||
elif version >= 110 and version < 120: # NEW *Cursed but needed due to different encryption key
|
||||
internal_ver = ChuniConstants.VER_CHUNITHM_NEW_PLUS
|
||||
elif version >= 120: # LUMINOUS
|
||||
internal_ver = ChuniConstants.VER_CHUNITHM_LUMINOUS
|
||||
|
||||
if all(c in string.hexdigits for c in endpoint) and len(endpoint) == 32:
|
||||
@@ -261,6 +283,9 @@ class ChuniServlet(BaseServlet):
|
||||
if game_code == "SDGS":
|
||||
crypto_cfg_key = f"{internal_ver}_int"
|
||||
hash_table_key = f"{internal_ver}_int"
|
||||
elif game_code == "SDHJ":
|
||||
crypto_cfg_key = f"{internal_ver}_chn"
|
||||
hash_table_key = f"{internal_ver}_chn"
|
||||
else:
|
||||
crypto_cfg_key = internal_ver
|
||||
hash_table_key = internal_ver
|
||||
@@ -311,8 +336,10 @@ class ChuniServlet(BaseServlet):
|
||||
return Response(zlib.compress(b'{"stat": "0"}'))
|
||||
|
||||
try:
|
||||
unzip = zlib.decompress(req_raw)
|
||||
|
||||
if request.headers.get("x-debug") is not None:
|
||||
unzip = req_raw
|
||||
else:
|
||||
unzip = zlib.decompress(req_raw)
|
||||
except zlib.error as e:
|
||||
self.logger.error(
|
||||
f"Failed to decompress v{version} {endpoint} request -> {e}"
|
||||
@@ -328,6 +355,8 @@ class ChuniServlet(BaseServlet):
|
||||
endpoint = endpoint.replace("C3Exp", "")
|
||||
elif game_code == "SDGS" and version < 110:
|
||||
endpoint = endpoint.replace("Exp", "")
|
||||
elif game_code == "SDHJ":
|
||||
endpoint = endpoint.replace("Chn", "")
|
||||
else:
|
||||
endpoint = endpoint
|
||||
|
||||
@@ -352,6 +381,9 @@ class ChuniServlet(BaseServlet):
|
||||
|
||||
self.logger.debug(f"Response {resp}")
|
||||
|
||||
if request.headers.get("x-debug") is not None:
|
||||
return Response(json.dumps(resp, ensure_ascii=False).encode("utf-8"))
|
||||
|
||||
zipped = zlib.compress(json.dumps(resp, ensure_ascii=False).encode("utf-8"))
|
||||
|
||||
if not encrtped:
|
||||
|
||||
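For operators poking at the x-debug path added above (raw JSON request in, raw JSON response out, with zlib skipped on both sides), a round trip might look like the sketch below. This is hypothetical: the URL, endpoint name, and payload are placeholders, it assumes the title is configured without request encryption, and requests is used only for illustration.

import json
import requests  # any HTTP client works the same way

def debug_call(base_url: str, endpoint: str, payload: dict) -> dict:
    # The servlet only checks that the header is present, so any value works.
    resp = requests.post(
        f"{base_url}/{endpoint}",
        data=json.dumps(payload).encode("utf-8"),
        headers={"x-debug": "1"},
    )
    # With x-debug set, the response body is plain JSON rather than zlib-compressed.
    return resp.json()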
@@ -2,9 +2,13 @@ from datetime import timedelta
|
||||
from typing import Dict
|
||||
|
||||
from core.config import CoreConfig
|
||||
from titles.chuni.sunplus import ChuniSunPlus
|
||||
from titles.chuni.const import ChuniConstants, MapAreaConditionLogicalOperator, MapAreaConditionType
|
||||
from titles.chuni.config import ChuniConfig
|
||||
from titles.chuni.const import (
|
||||
ChuniConstants,
|
||||
MapAreaConditionLogicalOperator,
|
||||
MapAreaConditionType,
|
||||
)
|
||||
from titles.chuni.sunplus import ChuniSunPlus
|
||||
|
||||
|
||||
class ChuniLuminous(ChuniSunPlus):
|
||||
@@ -18,7 +22,7 @@ class ChuniLuminous(ChuniSunPlus):
|
||||
# Does CARD MAKER 1.35 work this far up?
|
||||
user_data["lastDataVersion"] = "2.20.00"
|
||||
return user_data
|
||||
|
||||
|
||||
async def handle_get_user_c_mission_api_request(self, data: Dict) -> Dict:
|
||||
user_id = data["userId"]
|
||||
mission_id = data["missionId"]
|
||||
@@ -28,7 +32,7 @@ class ChuniLuminous(ChuniSunPlus):
|
||||
|
||||
mission_data = await self.data.item.get_cmission(user_id, mission_id)
|
||||
progress_data = await self.data.item.get_cmission_progress(user_id, mission_id)
|
||||
|
||||
|
||||
if mission_data and progress_data:
|
||||
point = mission_data["point"]
|
||||
|
||||
@@ -48,12 +52,14 @@ class ChuniLuminous(ChuniSunPlus):
|
||||
"userCMissionProgressList": progress_list,
|
||||
}
|
||||
|
||||
async def handle_get_user_net_battle_ranking_info_api_request(self, data: Dict) -> Dict:
|
||||
async def handle_get_user_net_battle_ranking_info_api_request(
|
||||
self, data: Dict
|
||||
) -> Dict:
|
||||
user_id = data["userId"]
|
||||
|
||||
net_battle = {}
|
||||
net_battle_data = await self.data.profile.get_net_battle(user_id)
|
||||
|
||||
|
||||
if net_battle_data:
|
||||
net_battle = {
|
||||
"isRankUpChallengeFailed": net_battle_data["isRankUpChallengeFailed"],
|
||||
@@ -94,131 +100,135 @@ class ChuniLuminous(ChuniSunPlus):
|
||||
# (event ID 14214) was imported into ARTEMiS, we disable the requirement
|
||||
# for this trophy.
|
||||
if 14214 in event_by_id:
|
||||
mission_in_progress_end_date = (event_by_id[14214]["startDate"] - timedelta(hours=2)).strftime(self.date_time_format)
|
||||
|
||||
conditions.extend([
|
||||
{
|
||||
"mapAreaId": 2206201, # BlythE ULTIMA
|
||||
"length": 1,
|
||||
# Obtain the trophy "MISSION in progress".
|
||||
"mapAreaConditionList": [
|
||||
{
|
||||
"type": MapAreaConditionType.TROPHY_OBTAINED.value,
|
||||
"conditionId": 6832,
|
||||
"logicalOpe": MapAreaConditionLogicalOperator.AND.value,
|
||||
"startDate": start_date,
|
||||
"endDate": mission_in_progress_end_date,
|
||||
}
|
||||
],
|
||||
},
|
||||
{
|
||||
"mapAreaId": 2206202, # PRIVATE SERVICE ULTIMA
|
||||
"length": 1,
|
||||
# Obtain the trophy "MISSION in progress".
|
||||
"mapAreaConditionList": [
|
||||
{
|
||||
"type": MapAreaConditionType.TROPHY_OBTAINED.value,
|
||||
"conditionId": 6832,
|
||||
"logicalOpe": MapAreaConditionLogicalOperator.AND.value,
|
||||
"startDate": start_date,
|
||||
"endDate": mission_in_progress_end_date,
|
||||
}
|
||||
],
|
||||
},
|
||||
{
|
||||
"mapAreaId": 2206203, # New York Back Raise
|
||||
"length": 1,
|
||||
# SS NightTheater's EXPERT chart and get the title
|
||||
# "今宵、劇場に映し出される景色とは――――。"
|
||||
"mapAreaConditionList": [
|
||||
{
|
||||
"type": MapAreaConditionType.TROPHY_OBTAINED.value,
|
||||
"conditionId": 6833,
|
||||
"logicalOpe": MapAreaConditionLogicalOperator.AND.value,
|
||||
"startDate": start_date,
|
||||
"endDate": "2099-12-31 00:00:00.0",
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
"mapAreaId": 2206204, # Spasmodic
|
||||
"length": 2,
|
||||
# - Get 1 miss on Random (any difficulty) and get the title "当たり待ち"
|
||||
# - Get 1 miss on 花たちに希望を (any difficulty) and get the title "花たちに希望を"
|
||||
"mapAreaConditionList": [
|
||||
{
|
||||
"type": MapAreaConditionType.TROPHY_OBTAINED.value,
|
||||
"conditionId": 6834,
|
||||
"logicalOpe": MapAreaConditionLogicalOperator.AND.value,
|
||||
"startDate": start_date,
|
||||
"endDate": "2099-12-31 00:00:00.0",
|
||||
},
|
||||
{
|
||||
"type": MapAreaConditionType.TROPHY_OBTAINED.value,
|
||||
"conditionId": 6835,
|
||||
"logicalOpe": MapAreaConditionLogicalOperator.AND.value,
|
||||
"startDate": start_date,
|
||||
"endDate": "2099-12-31 00:00:00.0",
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
"mapAreaId": 2206205, # ΩΩPARTS
|
||||
"length": 2,
|
||||
# - S Sage EXPERT to get the title "マターリ進行キボンヌ"
|
||||
# - Equip this title and play cab-to-cab with another person with this title
|
||||
# to get "マターリしようよ". Disabled because it is difficult to play cab2cab
|
||||
# on data setups. A network operator may consider re-enabling it by uncommenting
|
||||
# the second condition.
|
||||
"mapAreaConditionList": [
|
||||
{
|
||||
"type": MapAreaConditionType.TROPHY_OBTAINED.value,
|
||||
"conditionId": 6836,
|
||||
"logicalOpe": MapAreaConditionLogicalOperator.AND.value,
|
||||
"startDate": start_date,
|
||||
"endDate": "2099-12-31 00:00:00.0",
|
||||
},
|
||||
# {
|
||||
# "type": MapAreaConditionType.TROPHY_OBTAINED.value,
|
||||
# "conditionId": 6837,
|
||||
# "logicalOpe": MapAreaConditionLogicalOperator.AND.value,
|
||||
# "startDate": start_date,
|
||||
# "endDate": "2099-12-31 00:00:00.0",
|
||||
# },
|
||||
],
|
||||
},
|
||||
{
|
||||
"mapAreaId": 2206206, # Blow My Mind
|
||||
"length": 1,
|
||||
# SS on CHAOS EXPERT, Hydra EXPERT, Surive EXPERT and Jakarta PROGRESSION EXPERT
|
||||
# to get the title "Can you hear me?"
|
||||
"mapAreaConditionList": [
|
||||
{
|
||||
"type": MapAreaConditionType.TROPHY_OBTAINED.value,
|
||||
"conditionId": 6838,
|
||||
"logicalOpe": MapAreaConditionLogicalOperator.AND.value,
|
||||
"startDate": start_date,
|
||||
"endDate": "2099-12-31 00:00:00.0",
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
"mapAreaId": 2206207, # VALLIS-NERIA
|
||||
"length": 6,
|
||||
# Finish the 6 other areas
|
||||
"mapAreaConditionList": [
|
||||
{
|
||||
"type": MapAreaConditionType.MAP_AREA_CLEARED.value,
|
||||
"conditionId": x,
|
||||
"logicalOpe": MapAreaConditionLogicalOperator.AND.value,
|
||||
"startDate": start_date,
|
||||
"endDate": "2099-12-31 00:00:00.0",
|
||||
}
|
||||
for x in range(2206201, 2206207)
|
||||
],
|
||||
},
|
||||
])
|
||||
|
||||
mission_in_progress_end_date = (
|
||||
event_by_id[14214]["startDate"] - timedelta(hours=2)
|
||||
).strftime(self.date_time_format)
|
||||
|
||||
conditions.extend(
|
||||
[
|
||||
{
|
||||
"mapAreaId": 2206201, # BlythE ULTIMA
|
||||
"length": 1,
|
||||
# Obtain the trophy "MISSION in progress".
|
||||
"mapAreaConditionList": [
|
||||
{
|
||||
"type": MapAreaConditionType.TROPHY_OBTAINED.value,
|
||||
"conditionId": 6832,
|
||||
"logicalOpe": MapAreaConditionLogicalOperator.AND.value,
|
||||
"startDate": start_date,
|
||||
"endDate": mission_in_progress_end_date,
|
||||
}
|
||||
],
|
||||
},
|
||||
{
|
||||
"mapAreaId": 2206202, # PRIVATE SERVICE ULTIMA
|
||||
"length": 1,
|
||||
# Obtain the trophy "MISSION in progress".
|
||||
"mapAreaConditionList": [
|
||||
{
|
||||
"type": MapAreaConditionType.TROPHY_OBTAINED.value,
|
||||
"conditionId": 6832,
|
||||
"logicalOpe": MapAreaConditionLogicalOperator.AND.value,
|
||||
"startDate": start_date,
|
||||
"endDate": mission_in_progress_end_date,
|
||||
}
|
||||
],
|
||||
},
|
||||
{
|
||||
"mapAreaId": 2206203, # New York Back Raise
|
||||
"length": 1,
|
||||
# SS NightTheater's EXPERT chart and get the title
|
||||
# "今宵、劇場に映し出される景色とは――――。"
|
||||
"mapAreaConditionList": [
|
||||
{
|
||||
"type": MapAreaConditionType.TROPHY_OBTAINED.value,
|
||||
"conditionId": 6833,
|
||||
"logicalOpe": MapAreaConditionLogicalOperator.AND.value,
|
||||
"startDate": start_date,
|
||||
"endDate": "2099-12-31 00:00:00.0",
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
"mapAreaId": 2206204, # Spasmodic
|
||||
"length": 2,
|
||||
# - Get 1 miss on Random (any difficulty) and get the title "当たり待ち"
|
||||
# - Get 1 miss on 花たちに希望を (any difficulty) and get the title "花たちに希望を"
|
||||
"mapAreaConditionList": [
|
||||
{
|
||||
"type": MapAreaConditionType.TROPHY_OBTAINED.value,
|
||||
"conditionId": 6834,
|
||||
"logicalOpe": MapAreaConditionLogicalOperator.AND.value,
|
||||
"startDate": start_date,
|
||||
"endDate": "2099-12-31 00:00:00.0",
|
||||
},
|
||||
{
|
||||
"type": MapAreaConditionType.TROPHY_OBTAINED.value,
|
||||
"conditionId": 6835,
|
||||
"logicalOpe": MapAreaConditionLogicalOperator.AND.value,
|
||||
"startDate": start_date,
|
||||
"endDate": "2099-12-31 00:00:00.0",
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
"mapAreaId": 2206205, # ΩΩPARTS
|
||||
"length": 2,
|
||||
# - S Sage EXPERT to get the title "マターリ進行キボンヌ"
|
||||
# - Equip this title and play cab-to-cab with another person with this title
|
||||
# to get "マターリしようよ". Disabled because it is difficult to play cab2cab
|
||||
# on data setups. A network operator may consider re-enabling it by uncommenting
|
||||
# the second condition.
|
||||
"mapAreaConditionList": [
|
||||
{
|
||||
"type": MapAreaConditionType.TROPHY_OBTAINED.value,
|
||||
"conditionId": 6836,
|
||||
"logicalOpe": MapAreaConditionLogicalOperator.AND.value,
|
||||
"startDate": start_date,
|
||||
"endDate": "2099-12-31 00:00:00.0",
|
||||
},
|
||||
# {
|
||||
# "type": MapAreaConditionType.TROPHY_OBTAINED.value,
|
||||
# "conditionId": 6837,
|
||||
# "logicalOpe": MapAreaConditionLogicalOperator.AND.value,
|
||||
# "startDate": start_date,
|
||||
# "endDate": "2099-12-31 00:00:00.0",
|
||||
# },
|
||||
],
|
||||
},
|
||||
{
|
||||
"mapAreaId": 2206206, # Blow My Mind
|
||||
"length": 1,
|
||||
# SS on CHAOS EXPERT, Hydra EXPERT, Surive EXPERT and Jakarta PROGRESSION EXPERT
|
||||
# to get the title "Can you hear me?"
|
||||
"mapAreaConditionList": [
|
||||
{
|
||||
"type": MapAreaConditionType.TROPHY_OBTAINED.value,
|
||||
"conditionId": 6838,
|
||||
"logicalOpe": MapAreaConditionLogicalOperator.AND.value,
|
||||
"startDate": start_date,
|
||||
"endDate": "2099-12-31 00:00:00.0",
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
"mapAreaId": 2206207, # VALLIS-NERIA
|
||||
"length": 6,
|
||||
# Finish the 6 other areas
|
||||
"mapAreaConditionList": [
|
||||
{
|
||||
"type": MapAreaConditionType.MAP_AREA_CLEARED.value,
|
||||
"conditionId": x,
|
||||
"logicalOpe": MapAreaConditionLogicalOperator.AND.value,
|
||||
"startDate": start_date,
|
||||
"endDate": "2099-12-31 00:00:00.0",
|
||||
}
|
||||
for x in range(2206201, 2206207)
|
||||
],
|
||||
},
|
||||
]
|
||||
)
|
||||
|
||||
# LUMINOUS ep. I
|
||||
if 14005 in event_by_id:
|
||||
start_date = event_by_id[14005]["startDate"].strftime(self.date_time_format)
|
||||
@@ -226,7 +236,7 @@ class ChuniLuminous(ChuniSunPlus):
|
||||
if not mystic_area_1_added:
|
||||
conditions.append(mystic_area_1_conditions)
|
||||
mystic_area_1_added = True
|
||||
|
||||
|
||||
mystic_area_1_conditions["length"] += 1
|
||||
mystic_area_1_conditions["mapAreaConditionList"].append(
|
||||
{
|
||||
@@ -254,15 +264,15 @@ class ChuniLuminous(ChuniSunPlus):
|
||||
],
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
# LUMINOUS ep. II
|
||||
if 14251 in event_by_id:
|
||||
start_date = event_by_id[14251]["startDate"].strftime(self.date_time_format)
|
||||
|
||||
|
||||
if not mystic_area_1_added:
|
||||
conditions.append(mystic_area_1_conditions)
|
||||
mystic_area_1_added = True
|
||||
|
||||
|
||||
mystic_area_1_conditions["length"] += 1
|
||||
mystic_area_1_conditions["mapAreaConditionList"].append(
|
||||
{
|
||||
@@ -291,6 +301,203 @@ class ChuniLuminous(ChuniSunPlus):
|
||||
}
|
||||
)
|
||||
|
||||
# LUMINOUS ep. III
|
||||
if 14481 in event_by_id:
|
||||
start_date = event_by_id[14481]["startDate"].strftime(self.date_time_format)
|
||||
|
||||
if not mystic_area_1_added:
|
||||
conditions.append(mystic_area_1_conditions)
|
||||
mystic_area_1_added = True
|
||||
|
||||
mystic_area_1_conditions["length"] += 1
|
||||
mystic_area_1_conditions["mapAreaConditionList"].append(
|
||||
{
|
||||
"type": MapAreaConditionType.MAP_CLEARED.value,
|
||||
"conditionId": 3020703,
|
||||
"logicalOpe": MapAreaConditionLogicalOperator.OR.value,
|
||||
"startDate": start_date,
|
||||
"endDate": "2099-12-31 00:00:00.0",
|
||||
}
|
||||
)
|
||||
|
||||
conditions.append(
|
||||
{
|
||||
"mapAreaId": 3229304, # Mystic Rainbow of LUMINOUS Area 4,
|
||||
"length": 1,
|
||||
# Unlocks when LUMINOUS ep. III is completed.
|
||||
"mapAreaConditionList": [
|
||||
{
|
||||
"type": MapAreaConditionType.MAP_CLEARED.value,
|
||||
"conditionId": 3020703,
|
||||
"logicalOpe": MapAreaConditionLogicalOperator.AND.value,
|
||||
"startDate": start_date,
|
||||
"endDate": "2099-12-31 00:00:00.0",
|
||||
},
|
||||
],
|
||||
}
|
||||
)
|
||||
|
||||
# 1UM1N0U5 ep. 111
|
||||
if 14483 in event_by_id:
|
||||
start_date = event_by_id[14483]["startDate"].replace(
|
||||
hour=0, minute=0, second=0
|
||||
)
|
||||
|
||||
# conditions to unlock the 6 "Key of ..." area in the map
|
||||
# for the first 14 days: Defandour MASTER AJ, crazy (about you) MASTER AJ, Halcyon ULTIMA SSS
|
||||
title_conditions = [
|
||||
{
|
||||
"type": MapAreaConditionType.ALL_JUSTICE.value,
|
||||
"conditionId": 258103, # Defandour MASTER
|
||||
"logicalOpe": MapAreaConditionLogicalOperator.AND.value,
|
||||
"startDate": start_date.strftime(self.date_time_format),
|
||||
"endDate": (
|
||||
start_date + timedelta(days=14) - timedelta(seconds=1)
|
||||
).strftime(self.date_time_format),
|
||||
},
|
||||
{
|
||||
"type": MapAreaConditionType.ALL_JUSTICE.value,
|
||||
"conditionId": 258003, # crazy (about you) MASTER
|
||||
"logicalOpe": MapAreaConditionLogicalOperator.AND.value,
|
||||
"startDate": start_date.strftime(self.date_time_format),
|
||||
"endDate": (
|
||||
start_date + timedelta(days=14) - timedelta(seconds=1)
|
||||
).strftime(self.date_time_format),
|
||||
},
|
||||
{
|
||||
"type": MapAreaConditionType.RANK_SSS.value,
|
||||
"conditionId": 17304, # Halcyon ULTIMA
|
||||
"logicalOpe": MapAreaConditionLogicalOperator.AND.value,
|
||||
"startDate": start_date.strftime(self.date_time_format),
|
||||
"endDate": (
|
||||
start_date + timedelta(days=14) - timedelta(seconds=1)
|
||||
).strftime(self.date_time_format),
|
||||
},
|
||||
]
|
||||
|
||||
# For each next 14 days, the conditions are lowered to SS+, S+, S, and then always unlocked
|
||||
for i, typ in enumerate(
|
||||
[
|
||||
MapAreaConditionType.RANK_SSP.value,
|
||||
MapAreaConditionType.RANK_SP.value,
|
||||
MapAreaConditionType.RANK_S.value,
|
||||
MapAreaConditionType.ALWAYS_UNLOCKED.value,
|
||||
]
|
||||
):
|
||||
start = (start_date + timedelta(days=14 * (i + 1))).strftime(
|
||||
self.date_time_format
|
||||
)
|
||||
|
||||
if typ != MapAreaConditionType.ALWAYS_UNLOCKED.value:
|
||||
end = (
|
||||
start_date + timedelta(days=14 * (i + 2)) - timedelta(seconds=1)
|
||||
).strftime(self.date_time_format)
|
||||
|
||||
title_conditions.extend(
|
||||
[
|
||||
{
|
||||
"type": typ,
|
||||
"conditionId": condition_id,
|
||||
"logicalOpe": MapAreaConditionLogicalOperator.AND.value,
|
||||
"startDate": start,
|
||||
"endDate": end,
|
||||
}
|
||||
for condition_id in {17304, 258003, 258103}
|
||||
]
|
||||
)
|
||||
else:
|
||||
end = "2099-12-31 00:00:00"
|
||||
|
||||
title_conditions.append(
|
||||
{
|
||||
"type": typ,
|
||||
"conditionId": 0,
|
||||
"logicalOpe": MapAreaConditionLogicalOperator.AND.value,
|
||||
"startDate": start,
|
||||
"endDate": end,
|
||||
}
|
||||
)
|
||||
|
||||
# actually add all the conditions
|
||||
for map_area_id in range(3229201, 3229207):
|
||||
conditions.append(
|
||||
{
|
||||
"mapAreaId": map_area_id,
|
||||
"length": len(title_conditions),
|
||||
"mapAreaConditionList": title_conditions,
|
||||
}
|
||||
)
|
||||
|
||||
# Ultimate Force
|
||||
# For the first 14 days, the condition is to obtain all 9 "Key of ..." titles
|
||||
# Afterwards, the condition is the 6 "Key of ..." titles that you can obtain
|
||||
# by playing the 6 areas, as well as obtaining specific ranks on
|
||||
# [CRYSTAL_ACCESS] / Strange Love / βlαnoir
|
||||
ultimate_force_conditions = []
|
||||
|
||||
# Trophies obtained by playing the 6 areas
|
||||
for trophy_id in {6851, 6853, 6855, 6857, 6858, 6860}:
|
||||
ultimate_force_conditions.append(
|
||||
{
|
||||
"type": MapAreaConditionType.TROPHY_OBTAINED.value,
|
||||
"conditionId": trophy_id,
|
||||
"logicalOpe": MapAreaConditionLogicalOperator.AND.value,
|
||||
"startDate": start_date.strftime(self.date_time_format),
|
||||
"endDate": "2099-12-31 00:00:00",
|
||||
}
|
||||
)
|
||||
|
||||
# βlαnoir MASTER SSS+ / Strange Love MASTER SSS+ / [CRYSTAL_ACCESS] MASTER SSS+
|
||||
for trophy_id in {6852, 6854, 6856}:
|
||||
ultimate_force_conditions.append(
|
||||
{
|
||||
"type": MapAreaConditionType.TROPHY_OBTAINED.value,
|
||||
"conditionId": trophy_id,
|
||||
"logicalOpe": MapAreaConditionLogicalOperator.AND.value,
|
||||
"startDate": start_date.strftime(self.date_time_format),
|
||||
"endDate": (
|
||||
start_date + timedelta(days=14) - timedelta(seconds=1)
|
||||
).strftime(self.date_time_format),
|
||||
}
|
||||
)
|
||||
|
||||
# For each next 14 days, the rank conditions for the 3 songs lowers
|
||||
# Finally, the Ultimate Force area is unlocked as soon as you finish the 6 other areas.
|
||||
for i, typ in enumerate(
|
||||
[
|
||||
MapAreaConditionType.RANK_SSS.value,
|
||||
MapAreaConditionType.RANK_SS.value,
|
||||
MapAreaConditionType.RANK_S.value,
|
||||
]
|
||||
):
|
||||
start = (start_date + timedelta(days=14 * (i + 1))).strftime(
|
||||
self.date_time_format
|
||||
)
|
||||
|
||||
end = (
|
||||
start_date + timedelta(days=14 * (i + 2)) - timedelta(seconds=1)
|
||||
).strftime(self.date_time_format)
|
||||
|
||||
ultimate_force_conditions.extend(
|
||||
[
|
||||
{
|
||||
"type": typ,
|
||||
"conditionId": condition_id,
|
||||
"logicalOpe": MapAreaConditionLogicalOperator.AND.value,
|
||||
"startDate": start,
|
||||
"endDate": end,
|
||||
}
|
||||
for condition_id in {109403, 212103, 244203}
|
||||
]
|
||||
)
|
||||
|
||||
conditions.append(
|
||||
{
|
||||
"mapAreaId": 3229207,
|
||||
"length": len(ultimate_force_conditions),
|
||||
"mapAreaConditionList": ultimate_force_conditions,
|
||||
}
|
||||
)
|
||||
|
||||
return {
|
||||
"length": len(conditions),
|
||||
|
||||
titles/chuni/luminousplus.py (new file, 170 lines)
@@ -0,0 +1,170 @@
|
||||
from datetime import timedelta
|
||||
from typing import Dict
|
||||
|
||||
from core.config import CoreConfig
|
||||
from titles.chuni.config import ChuniConfig
|
||||
from titles.chuni.const import ChuniConstants, MapAreaConditionLogicalOperator, MapAreaConditionType
|
||||
from titles.chuni.luminous import ChuniLuminous
|
||||
|
||||
|
||||
class ChuniLuminousPlus(ChuniLuminous):
|
||||
def __init__(self, core_cfg: CoreConfig, game_cfg: ChuniConfig) -> None:
|
||||
super().__init__(core_cfg, game_cfg)
|
||||
self.version = ChuniConstants.VER_CHUNITHM_LUMINOUS_PLUS
|
||||
|
||||
async def handle_cm_get_user_preview_api_request(self, data: Dict) -> Dict:
|
||||
user_data = await super().handle_cm_get_user_preview_api_request(data)
|
||||
|
||||
# Does CARD MAKER 1.35 work this far up?
|
||||
user_data["lastDataVersion"] = "2.25.00"
|
||||
return user_data
|
||||
|
||||
async def handle_get_user_c_mission_list_api_request(self, data: Dict) -> Dict:
|
||||
user_id = int(data["userId"])
|
||||
user_mission_list_request = data["userCMissionList"]
|
||||
|
||||
user_mission_list = []
|
||||
|
||||
for request in user_mission_list_request:
|
||||
user_id = int(request["userId"])
|
||||
mission_id = int(request["missionId"])
|
||||
point = int(request["point"])
|
||||
|
||||
mission_data = await self.data.item.get_cmission(user_id, mission_id)
|
||||
progress_data = await self.data.item.get_cmission_progress(user_id, mission_id)
|
||||
|
||||
if mission_data is None or progress_data is None:
|
||||
continue
|
||||
|
||||
point = mission_data.point
|
||||
user_mission_progress_list = [
|
||||
{
|
||||
"order": progress.order,
|
||||
"stage": progress.stage,
|
||||
"progress": progress.progress,
|
||||
}
|
||||
for progress in progress_data
|
||||
]
|
||||
|
||||
user_mission_list.append(
|
||||
{
|
||||
"userId": user_id,
|
||||
"missionId": mission_id,
|
||||
"point": point,
|
||||
"userCMissionProgressList": user_mission_progress_list,
|
||||
},
|
||||
)
|
||||
|
||||
return {
|
||||
"userId": user_id,
|
||||
"userCMissionList": user_mission_list,
|
||||
}
|
||||
|
||||
async def handle_get_game_map_area_condition_api_request(self, data: Dict) -> Dict:
|
||||
# There is no game data for this, everything is server side.
|
||||
# However, we can selectively show/hide events as data is imported into the server.
|
||||
events = await self.data.static.get_enabled_events(self.version)
|
||||
event_by_id = {evt["eventId"]: evt for evt in events}
|
||||
conditions = []
|
||||
|
||||
# LUMINOUS ep. Ascension
|
||||
if ep_ascension := event_by_id.get(15512):
|
||||
start_date = ep_ascension["startDate"].replace(hour=0, minute=0, second=0)
|
||||
|
||||
# Finish LUMINOUS ep. VII to unlock LUMINOUS ep. Ascension.
|
||||
task_track_map_conditions = [
|
||||
{
|
||||
"type": MapAreaConditionType.MAP_CLEARED.value,
|
||||
"conditionId": 3020707,
|
||||
"logicalOpe": MapAreaConditionLogicalOperator.AND.value,
|
||||
"startDate": start_date.strftime(self.date_time_format),
|
||||
"endDate": "2099-12-31 00:00:00",
|
||||
}
|
||||
]
|
||||
|
||||
# You also need to reach a specific rank on Acid God MASTER.
|
||||
# This condition lowers every 7 days.
|
||||
# After the first 4 weeks, you only need to finish ep. VII.
|
||||
for i, typ in enumerate([
|
||||
MapAreaConditionType.RANK_SSSP.value,
|
||||
MapAreaConditionType.RANK_SSS.value,
|
||||
MapAreaConditionType.RANK_SS.value,
|
||||
MapAreaConditionType.RANK_S.value,
|
||||
]):
|
||||
start = start_date + timedelta(days=7 * i)
|
||||
end = start_date + timedelta(days=7 * (i + 1)) - timedelta(seconds=1)
|
||||
|
||||
task_track_map_conditions.append(
|
||||
{
|
||||
"type": typ,
|
||||
"conditionId": 265103,
|
||||
"logicalOpe": MapAreaConditionLogicalOperator.AND.value,
|
||||
"startDate": start.strftime(self.date_time_format),
|
||||
"endDate": end.strftime(self.date_time_format),
|
||||
}
|
||||
)
|
||||
|
||||
conditions.extend(
|
||||
[
|
||||
{
|
||||
"mapAreaId": map_area_id,
|
||||
"length": len(task_track_map_conditions),
|
||||
"mapAreaConditionList": task_track_map_conditions,
|
||||
}
|
||||
for map_area_id in {3220801, 3220802, 3220803, 3220804}
|
||||
]
|
||||
)
|
||||
|
||||
# To unlock the final map area (Forsaken Tale), achieve a specific rank
|
||||
# on the 4 task tracks in the previous map areas. This condition also lowers
|
||||
# every 7 days, similar to Acid God.
|
||||
# After 28 days, you only need to finish the other 4 areas in ep. Ascension.
|
||||
forsaken_tale_conditions = []
|
||||
|
||||
for i, typ in enumerate([
|
||||
MapAreaConditionType.RANK_SSSP.value,
|
||||
MapAreaConditionType.RANK_SSS.value,
|
||||
MapAreaConditionType.RANK_SS.value,
|
||||
MapAreaConditionType.RANK_S.value,
|
||||
]):
|
||||
start = start_date + timedelta(days=7 * i)
|
||||
end = start_date + timedelta(days=7 * (i + 1)) - timedelta(seconds=1)
|
||||
|
||||
forsaken_tale_conditions.extend(
|
||||
[
|
||||
{
|
||||
"type": typ,
|
||||
"conditionId": condition_id,
|
||||
"logicalOpe": MapAreaConditionLogicalOperator.AND.value,
|
||||
"startDate": start.strftime(self.date_time_format),
|
||||
"endDate": end.strftime(self.date_time_format),
|
||||
}
|
||||
for condition_id in {98203, 108603, 247503, 233903}
|
||||
]
|
||||
)
|
||||
|
||||
forsaken_tale_conditions.extend(
|
||||
[
|
||||
{
|
||||
"type": MapAreaConditionType.MAP_AREA_CLEARED.value,
|
||||
"conditionId": map_area_id,
|
||||
"logicalOpe": MapAreaConditionLogicalOperator.AND.value,
|
||||
"startDate": (start_date + timedelta(days=28)).strftime(self.date_time_format),
|
||||
"endDate": "2099-12-31 00:00:00",
|
||||
}
|
||||
for map_area_id in {3220801, 3220802, 3220803, 3220804}
|
||||
]
|
||||
)
|
||||
|
||||
conditions.append(
|
||||
{
|
||||
"mapAreaId": 3220805,
|
||||
"length": len(forsaken_tale_conditions),
|
||||
"mapAreaConditionList": forsaken_tale_conditions,
|
||||
}
|
||||
)
|
||||
|
||||
return {
|
||||
"length": len(conditions),
|
||||
"gameMapAreaConditionList": conditions,
|
||||
}
|
||||
@@ -4,12 +4,14 @@ from random import randint
|
||||
from typing import Dict
|
||||
|
||||
import pytz
|
||||
|
||||
from core.config import CoreConfig
|
||||
from core.utils import Utils
|
||||
from titles.chuni.const import ChuniConstants
|
||||
from titles.chuni.database import ChuniData
|
||||
from titles.chuni.base import ChuniBase
|
||||
from titles.chuni.config import ChuniConfig
|
||||
from titles.chuni.const import ChuniConstants
|
||||
from titles.chuni.database import ChuniData
|
||||
|
||||
|
||||
class ChuniNew(ChuniBase):
|
||||
ITEM_TYPE = {"character": 20, "story": 21, "card": 22}
|
||||
@@ -34,6 +36,8 @@ class ChuniNew(ChuniBase):
|
||||
return "215"
|
||||
if self.version == ChuniConstants.VER_CHUNITHM_LUMINOUS:
|
||||
return "220"
|
||||
if self.version == ChuniConstants.VER_CHUNITHM_LUMINOUS_PLUS:
|
||||
return "225"
|
||||
|
||||
async def handle_get_game_setting_api_request(self, data: Dict) -> Dict:
|
||||
# use UTC time and convert it to JST time by adding +9
|
||||
@@ -104,7 +108,8 @@ class ChuniNew(ChuniBase):
|
||||
return {"returnCode": "1"}
|
||||
|
||||
async def handle_get_user_map_area_api_request(self, data: Dict) -> Dict:
|
||||
user_map_areas = await self.data.item.get_map_areas(data["userId"])
|
||||
map_area_ids = [int(area["mapAreaId"]) for area in data["mapAreaIdList"]]
|
||||
user_map_areas = await self.data.item.get_map_areas(data["userId"], map_area_ids)
|
||||
|
||||
map_areas = []
|
||||
for map_area in user_map_areas:
|
||||
@@ -284,35 +289,37 @@ class ChuniNew(ChuniBase):
|
||||
}
|
||||
|
||||
async def handle_get_user_printed_card_api_request(self, data: Dict) -> Dict:
|
||||
user_print_list = await self.data.item.get_user_print_states(
|
||||
data["userId"], has_completed=True
|
||||
user_id = int(data["userId"])
|
||||
next_idx = int(data["nextIndex"])
|
||||
max_ct = int(data["maxCount"])
|
||||
|
||||
rows = await self.data.item.get_user_print_states(
|
||||
user_id,
|
||||
has_completed=True,
|
||||
limit=max_ct + 1,
|
||||
offset=next_idx,
|
||||
)
|
||||
if user_print_list is None:
|
||||
if rows is None or len(rows) == 0:
|
||||
return {
|
||||
"userId": data["userId"],
|
||||
"userId": user_id,
|
||||
"length": 0,
|
||||
"nextIndex": -1,
|
||||
"userPrintedCardList": [],
|
||||
}
|
||||
|
||||
print_list = []
|
||||
next_idx = int(data["nextIndex"])
|
||||
max_ct = int(data["maxCount"])
|
||||
|
||||
for x in range(next_idx, len(user_print_list)):
|
||||
tmp = user_print_list[x]._asdict()
|
||||
for row in rows[:max_ct]:
|
||||
tmp = row._asdict()
|
||||
print_list.append(tmp["cardId"])
|
||||
|
||||
if len(print_list) >= max_ct:
|
||||
break
|
||||
|
||||
if len(print_list) >= max_ct:
|
||||
next_idx = next_idx + max_ct
|
||||
if len(rows) > max_ct:
|
||||
next_idx += max_ct
|
||||
else:
|
||||
next_idx = -1
|
||||
|
||||
return {
|
||||
"userId": data["userId"],
|
||||
"userId": user_id,
|
||||
"length": len(print_list),
|
||||
"nextIndex": next_idx,
|
||||
"userPrintedCardList": print_list,
|
||||
|
||||
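The rework above fetches one row more than the requested page size so the handler can tell whether another page exists without a separate count query. A minimal sketch of that limit-plus-one pattern, with fetch_rows standing in for self.data.item.get_user_print_states:

from typing import Callable, List, Optional, Tuple

def page(
    fetch_rows: Callable[..., Optional[List[dict]]],
    next_idx: int,
    max_ct: int,
) -> Tuple[List[dict], int]:
    # Ask for one extra row; its presence means the client should request another page.
    rows = fetch_rows(limit=max_ct + 1, offset=next_idx) or []
    page_rows = rows[:max_ct]
    new_next_idx = next_idx + max_ct if len(rows) > max_ct else -1
    return page_rows, new_next_idx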
@@ -2,10 +2,13 @@ from typing import Optional
|
||||
from os import walk, path
|
||||
import xml.etree.ElementTree as ET
|
||||
from read import BaseReader
|
||||
from PIL import Image
|
||||
import configparser
|
||||
|
||||
from core.config import CoreConfig
|
||||
from titles.chuni.database import ChuniData
|
||||
from titles.chuni.const import ChuniConstants
|
||||
from titles.chuni.schema.static import music as MusicTable
|
||||
|
||||
|
||||
class ChuniReader(BaseReader):
|
||||
@@ -35,16 +38,32 @@ class ChuniReader(BaseReader):
|
||||
|
||||
if self.opt_dir is not None:
|
||||
data_dirs += self.get_data_directories(self.opt_dir)
|
||||
|
||||
we_diff = "4"
|
||||
if self.version >= ChuniConstants.VER_CHUNITHM_NEW:
|
||||
we_diff = "5"
|
||||
|
||||
# character images could be stored anywhere across all the data dirs. Map them first
|
||||
self.logger.info(f"Mapping DDS image files...")
|
||||
dds_images = dict()
|
||||
for dir in data_dirs:
|
||||
self.map_dds_images(dds_images, f"{dir}/ddsImage")
|
||||
|
||||
for dir in data_dirs:
|
||||
self.logger.info(f"Read from {dir}")
|
||||
await self.read_events(f"{dir}/event")
|
||||
await self.read_music(f"{dir}/music")
|
||||
await self.read_charges(f"{dir}/chargeItem")
|
||||
await self.read_avatar(f"{dir}/avatarAccessory")
|
||||
await self.read_login_bonus(f"{dir}/")
|
||||
this_opt_id = await self.read_opt_info(dir) # this also treats A000 as an opt, which is intended
|
||||
await self.read_events(f"{dir}/event", this_opt_id)
|
||||
await self.read_music(f"{dir}/music", we_diff, this_opt_id)
|
||||
await self.read_charges(f"{dir}/chargeItem", this_opt_id)
|
||||
await self.read_avatar(f"{dir}/avatarAccessory", this_opt_id)
|
||||
await self.read_login_bonus(f"{dir}/", this_opt_id)
|
||||
await self.read_nameplate(f"{dir}/namePlate", this_opt_id)
|
||||
await self.read_trophy(f"{dir}/trophy", this_opt_id)
|
||||
await self.read_character(f"{dir}/chara", dds_images, this_opt_id)
|
||||
await self.read_map_icon(f"{dir}/mapIcon", this_opt_id)
|
||||
await self.read_system_voice(f"{dir}/systemVoice", this_opt_id)
|
||||
|
||||
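read_opt_info, called at the top of the loop above and defined later in this file, reads each data directory's data.conf with configparser and bails out with a warning if the Version section is incomplete. A minimal sketch of the shape it checks for, using illustrative values only:

import configparser

# Illustrative contents; the reader only requires a Version section containing
# Name, VerMajor and VerMinor entries.
EXAMPLE_DATA_CONF = """
[Version]
Name = A001
VerMajor = 2
VerMinor = 25
"""

parser = configparser.ConfigParser()
parser.read_string(EXAMPLE_DATA_CONF)
assert "Version" in parser
assert all(key in parser["Version"] for key in ("Name", "VerMajor", "VerMinor"))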
async def read_login_bonus(self, root_dir: str) -> None:
|
||||
async def read_login_bonus(self, root_dir: str, opt_id: Optional[int] = None) -> None:
|
||||
for root, dirs, files in walk(f"{root_dir}loginBonusPreset"):
|
||||
for dir in dirs:
|
||||
if path.exists(f"{root}/{dir}/LoginBonusPreset.xml"):
|
||||
@@ -55,12 +74,11 @@ class ChuniReader(BaseReader):
|
||||
for name in xml_root.findall("name"):
|
||||
id = name.find("id").text
|
||||
name = name.find("str").text
|
||||
is_enabled = (
|
||||
True if xml_root.find("disableFlag").text == "false" else False
|
||||
)
|
||||
disableFlag = xml_root.find("disableFlag") # may not exist in older data
|
||||
is_enabled = True if (disableFlag is None or disableFlag.text == "false") else False
|
||||
|
||||
result = await self.data.static.put_login_bonus_preset(
|
||||
self.version, id, name, is_enabled
|
||||
self.version, id, name, is_enabled, opt_id
|
||||
)
|
||||
|
||||
if result is not None:
|
||||
@@ -107,6 +125,7 @@ class ChuniReader(BaseReader):
|
||||
item_num,
|
||||
need_login_day_count,
|
||||
login_bonus_category_type,
|
||||
opt_id
|
||||
)
|
||||
|
||||
if result is not None:
|
||||
@@ -116,7 +135,7 @@ class ChuniReader(BaseReader):
|
||||
f"Failed to insert login bonus {bonus_id}"
|
||||
)
|
||||
|
||||
async def read_events(self, evt_dir: str) -> None:
|
||||
async def read_events(self, evt_dir: str, opt_id: Optional[int] = None) -> None:
|
||||
for root, dirs, files in walk(evt_dir):
|
||||
for dir in dirs:
|
||||
if path.exists(f"{root}/{dir}/Event.xml"):
|
||||
@@ -131,14 +150,17 @@ class ChuniReader(BaseReader):
|
||||
event_type = substances.find("type").text
|
||||
|
||||
result = await self.data.static.put_event(
|
||||
self.version, id, event_type, name
|
||||
self.version, id, event_type, name, opt_id
|
||||
)
|
||||
if result is not None:
|
||||
self.logger.info(f"Inserted event {id}")
|
||||
else:
|
||||
self.logger.warning(f"Failed to insert event {id}")
|
||||
|
||||
async def read_music(self, music_dir: str) -> None:
|
||||
async def read_music(self, music_dir: str, we_diff: str = "4", opt_id: Optional[int] = None) -> None:
|
||||
max_title_len = MusicTable.columns["title"].type.length
|
||||
max_artist_len = MusicTable.columns["artist"].type.length
|
||||
|
||||
for root, dirs, files in walk(music_dir):
|
||||
for dir in dirs:
|
||||
if path.exists(f"{root}/{dir}/Music.xml"):
|
||||
@@ -149,9 +171,15 @@ class ChuniReader(BaseReader):
|
||||
for name in xml_root.findall("name"):
|
||||
song_id = name.find("id").text
|
||||
title = name.find("str").text
|
||||
if len(title) > max_title_len:
|
||||
self.logger.warning(f"Truncating music {song_id} song title")
|
||||
title = title[:max_title_len]
|
||||
|
||||
for artistName in xml_root.findall("artistName"):
|
||||
artist = artistName.find("str").text
|
||||
if len(artist) > max_artist_len:
|
||||
self.logger.warning(f"Truncating music {song_id} artist name")
|
||||
artist = artist[:max_artist_len]
|
||||
|
||||
for genreNames in xml_root.findall("genreNames"):
|
||||
for list_ in genreNames.findall("list"):
|
||||
@@ -160,6 +188,8 @@ class ChuniReader(BaseReader):
|
||||
|
||||
for jaketFile in xml_root.findall("jaketFile"): # nice typo, SEGA
|
||||
jacket_path = jaketFile.find("path").text
|
||||
# Save off image for use in frontend
|
||||
self.copy_image(jacket_path, f"{root}/{dir}", "titles/chuni/img/jacket/")
|
||||
|
||||
for fumens in xml_root.findall("fumens"):
|
||||
for MusicFumenData in fumens.findall("MusicFumenData"):
|
||||
@@ -169,7 +199,7 @@ class ChuniReader(BaseReader):
|
||||
chart_type = MusicFumenData.find("type")
|
||||
chart_id = chart_type.find("id").text
|
||||
chart_diff = chart_type.find("str").text
|
||||
if chart_diff == "WorldsEnd" and (chart_id == "4" or chart_id == "5"): # 4 in SDBT, 5 in SDHD
|
||||
if chart_diff == "WorldsEnd" and chart_id == we_diff: # 4 in SDBT, 5 in SDHD
|
||||
level = float(xml_root.find("starDifType").text)
|
||||
we_chara = (
|
||||
xml_root.find("worldsEndTagName")
|
||||
@@ -192,6 +222,7 @@ class ChuniReader(BaseReader):
|
||||
genre,
|
||||
jacket_path,
|
||||
we_chara,
|
||||
opt_id
|
||||
)
|
||||
|
||||
if result is not None:
|
||||
@@ -203,7 +234,7 @@ class ChuniReader(BaseReader):
|
||||
f"Failed to insert music {song_id} chart {chart_id}"
|
||||
)
|
||||
|
||||
async def read_charges(self, charge_dir: str) -> None:
|
||||
async def read_charges(self, charge_dir: str, opt_id: Optional[int] = None) -> None:
|
||||
for root, dirs, files in walk(charge_dir):
|
||||
for dir in dirs:
|
||||
if path.exists(f"{root}/{dir}/ChargeItem.xml"):
|
||||
@@ -225,6 +256,7 @@ class ChuniReader(BaseReader):
|
||||
expirationDays,
|
||||
consumeType,
|
||||
sellingAppeal,
|
||||
opt_id
|
||||
)
|
||||
|
||||
if result is not None:
|
||||
@@ -232,7 +264,7 @@ class ChuniReader(BaseReader):
|
||||
else:
|
||||
self.logger.warning(f"Failed to insert charge {id}")
|
||||
|
||||
async def read_avatar(self, avatar_dir: str) -> None:
|
||||
async def read_avatar(self, avatar_dir: str, opt_id: Optional[int] = None) -> None:
|
||||
for root, dirs, files in walk(avatar_dir):
|
||||
for dir in dirs:
|
||||
if path.exists(f"{root}/{dir}/AvatarAccessory.xml"):
|
||||
@@ -243,17 +275,260 @@ class ChuniReader(BaseReader):
|
||||
for name in xml_root.findall("name"):
|
||||
id = name.find("id").text
|
||||
name = name.find("str").text
|
||||
sortName = xml_root.find("sortName").text
|
||||
category = xml_root.find("category").text
|
||||
defaultHave = xml_root.find("defaultHave").text == 'true'
|
||||
disableFlag = xml_root.find("disableFlag") # may not exist in older data
|
||||
is_enabled = True if (disableFlag is None or disableFlag.text == "false") else False
|
||||
|
||||
for image in xml_root.findall("image"):
|
||||
iconPath = image.find("path").text
|
||||
self.copy_image(iconPath, f"{root}/{dir}", "titles/chuni/img/avatar/")
|
||||
for texture in xml_root.findall("texture"):
|
||||
texturePath = texture.find("path").text
|
||||
self.copy_image(texturePath, f"{root}/{dir}", "titles/chuni/img/avatar/")
|
||||
|
||||
result = await self.data.static.put_avatar(
|
||||
self.version, id, name, category, iconPath, texturePath
|
||||
self.version, id, name, category, iconPath, texturePath, is_enabled, defaultHave, sortName, opt_id
|
||||
)
|
||||
|
||||
if result is not None:
|
||||
self.logger.info(f"Inserted avatarAccessory {id}")
|
||||
else:
|
||||
self.logger.warning(f"Failed to insert avatarAccessory {id}")
|
||||
|
||||
async def read_nameplate(self, nameplate_dir: str, opt_id: Optional[int] = None) -> None:
|
||||
for root, dirs, files in walk(nameplate_dir):
|
||||
for dir in dirs:
|
||||
if path.exists(f"{root}/{dir}/NamePlate.xml"):
|
||||
with open(f"{root}/{dir}/NamePlate.xml", "r", encoding='utf-8') as fp:
|
||||
strdata = fp.read()
|
||||
|
||||
xml_root = ET.fromstring(strdata)
|
||||
for name in xml_root.findall("name"):
|
||||
id = name.find("id").text
|
||||
name = name.find("str").text
|
||||
sortName = name if xml_root.find("sortName") is None else xml_root.find("sortName").text
|
||||
defaultHave = xml_root.find("defaultHave").text == 'true'
|
||||
disableFlag = xml_root.find("disableFlag") # may not exist in older data
|
||||
is_enabled = True if (disableFlag is None or disableFlag.text == "false") else False
|
||||
|
||||
for image in xml_root.findall("image"):
|
||||
texturePath = image.find("path").text
|
||||
self.copy_image(texturePath, f"{root}/{dir}", "titles/chuni/img/nameplate/")
|
||||
|
||||
result = await self.data.static.put_nameplate(
|
||||
self.version, id, name, texturePath, is_enabled, defaultHave, sortName, opt_id
|
||||
)
|
||||
|
||||
if result is not None:
|
||||
self.logger.info(f"Inserted nameplate {id}")
|
||||
else:
|
||||
self.logger.warning(f"Failed to insert nameplate {id}")
|
||||
|
||||
async def read_trophy(self, trophy_dir: str, opt_id: Optional[int] = None) -> None:
|
||||
for root, dirs, files in walk(trophy_dir):
|
||||
for dir in dirs:
|
||||
if path.exists(f"{root}/{dir}/Trophy.xml"):
|
||||
with open(f"{root}/{dir}/Trophy.xml", "r", encoding='utf-8') as fp:
|
||||
strdata = fp.read()
|
||||
|
||||
xml_root = ET.fromstring(strdata)
|
||||
for name in xml_root.findall("name"):
|
||||
id = name.find("id").text
|
||||
name = name.find("str").text
|
||||
rareType = xml_root.find("rareType").text
|
||||
disableFlag = xml_root.find("disableFlag") # may not exist in older data
|
||||
is_enabled = True if (disableFlag is None or disableFlag.text == "false") else False
|
||||
defaultHave = xml_root.find("defaultHave").text == 'true'
|
||||
|
||||
result = await self.data.static.put_trophy(
|
||||
self.version, id, name, rareType, is_enabled, defaultHave, opt_id
|
||||
)
|
||||
|
||||
if result is not None:
|
||||
self.logger.info(f"Inserted trophy {id}")
|
||||
else:
|
||||
self.logger.warning(f"Failed to insert trophy {id}")
|
||||
|
||||
async def read_character(self, chara_dir: str, dds_images: dict, opt_id: Optional[int] = None) -> None:
|
||||
for root, dirs, files in walk(chara_dir):
|
||||
for dir in dirs:
|
||||
if path.exists(f"{root}/{dir}/Chara.xml"):
|
||||
with open(f"{root}/{dir}/Chara.xml", "r", encoding='utf-8') as fp:
|
||||
strdata = fp.read()
|
||||
# ET may choke if there is a & symbol (which is present in some character xml)
|
||||
if "&" in strdata:
|
||||
strdata = strdata.replace("&", "&")
|
||||
|
||||
xml_root = ET.fromstring(strdata)
|
||||
for name in xml_root.findall("name"):
|
||||
id = name.find("id").text
|
||||
name = name.find("str").text
|
||||
sortName = name if xml_root.find("sortName") is None else xml_root.find("sortName").text
|
||||
for work in xml_root.findall("works"):
|
||||
worksName = work.find("str").text
|
||||
rareType = xml_root.find("rareType").text
|
||||
defaultHave = xml_root.find("defaultHave").text == 'true'
|
||||
disableFlag = xml_root.find("disableFlag") # may not exist in older data
|
||||
is_enabled = True if (disableFlag is None or disableFlag.text == "false") else False
|
||||
|
||||
# character images are not stored alongside
|
||||
for image in xml_root.findall("defaultImages"):
|
||||
imageKey = image.find("str").text
|
||||
if imageKey in dds_images.keys():
|
||||
(imageDir, imagePaths) = dds_images[imageKey]
|
||||
imagePath1 = imagePaths[0] if len(imagePaths) > 0 else ""
|
||||
imagePath2 = imagePaths[1] if len(imagePaths) > 1 else ""
|
||||
imagePath3 = imagePaths[2] if len(imagePaths) > 2 else ""
|
||||
# @note the third image is the image needed for the user box ui
|
||||
if imagePath3:
|
||||
self.copy_image(imagePath3, imageDir, "titles/chuni/img/character/")
|
||||
else:
|
||||
self.logger.warning(f"Character {id} only has {len(imagePaths)} images. Expected 3")
|
||||
else:
|
||||
self.logger.warning(f"Unable to location character {id} images")
|
||||
|
||||
result = await self.data.static.put_character(
|
||||
self.version, id, name, sortName, worksName, rareType, imagePath1, imagePath2, imagePath3, is_enabled, defaultHave, opt_id
|
||||
)
|
||||
|
||||
if result is not None:
|
||||
self.logger.info(f"Inserted character {id}")
|
||||
else:
|
||||
self.logger.warning(f"Failed to insert character {id}")
|
||||
|
||||
async def read_map_icon(self, mapicon_dir: str, opt_id: Optional[int] = None) -> None:
|
||||
for root, dirs, files in walk(mapicon_dir):
|
||||
for dir in dirs:
|
||||
if path.exists(f"{root}/{dir}/MapIcon.xml"):
|
||||
with open(f"{root}/{dir}/MapIcon.xml", "r", encoding='utf-8') as fp:
|
||||
strdata = fp.read()
|
||||
|
||||
xml_root = ET.fromstring(strdata)
|
||||
for name in xml_root.findall("name"):
|
||||
id = name.find("id").text
|
||||
name = name.find("str").text
|
||||
sortName = name if xml_root.find("sortName") is None else xml_root.find("sortName").text
|
||||
for image in xml_root.findall("image"):
|
||||
iconPath = image.find("path").text
|
||||
self.copy_image(iconPath, f"{root}/{dir}", "titles/chuni/img/mapIcon/")
|
||||
defaultHave = xml_root.find("defaultHave").text == 'true'
|
||||
disableFlag = xml_root.find("disableFlag") # may not exist in older data
|
||||
is_enabled = True if (disableFlag is None or disableFlag.text == "false") else False
|
||||
|
||||
result = await self.data.static.put_map_icon(
|
||||
self.version, id, name, sortName, iconPath, is_enabled, defaultHave, opt_id
|
||||
)
|
||||
|
||||
if result is not None:
|
||||
self.logger.info(f"Inserted map icon {id}")
|
||||
else:
|
||||
self.logger.warning(f"Failed to map icon {id}")
|
||||
|
||||
async def read_system_voice(self, voice_dir: str, opt_id: Optional[int] = None) -> None:
|
||||
for root, dirs, files in walk(voice_dir):
|
||||
for dir in dirs:
|
||||
if path.exists(f"{root}/{dir}/SystemVoice.xml"):
|
||||
with open(f"{root}/{dir}/SystemVoice.xml", "r", encoding='utf-8') as fp:
|
||||
strdata = fp.read()
|
||||
|
||||
xml_root = ET.fromstring(strdata)
|
||||
for name in xml_root.findall("name"):
|
||||
id = name.find("id").text
|
||||
name = name.find("str").text
|
||||
sortName = name if xml_root.find("sortName") is None else xml_root.find("sortName").text
|
||||
for image in xml_root.findall("image"):
|
||||
imagePath = image.find("path").text
|
||||
self.copy_image(imagePath, f"{root}/{dir}", "titles/chuni/img/systemVoice/")
|
||||
defaultHave = xml_root.find("defaultHave").text == 'true'
|
||||
disableFlag = xml_root.find("disableFlag") # may not exist in older data
|
||||
is_enabled = True if (disableFlag is None or disableFlag.text == "false") else False
|
||||
|
||||
result = await self.data.static.put_system_voice(
|
||||
self.version, id, name, sortName, imagePath, is_enabled, defaultHave, opt_id
|
||||
)
|
||||
|
||||
if result is not None:
|
||||
self.logger.info(f"Inserted system voice {id}")
|
||||
else:
|
||||
self.logger.warning(f"Failed to system voice {id}")
|
||||
|
||||
async def read_opt_info(self, directory: str) -> Optional[int]:
|
||||
if not path.exists(f"{directory}/data.conf"):
|
||||
self.logger.warning(f"{directory} does not contain data.conf, opt info will not be read")
|
||||
return None
|
||||
|
||||
data_config = configparser.ConfigParser()
|
||||
if not data_config.read(f"{directory}/data.conf", 'utf-8'):
|
||||
self.logger.warning(f"{directory}/data.conf failed to read or parse, opt info will not be read")
|
||||
return None
|
||||
|
||||
if 'Version' not in data_config:
|
||||
self.logger.warning(f"{directory}/data.conf contains no Version section, opt info will not be read")
|
||||
return None
|
||||
|
||||
if 'Name' not in data_config['Version']: # Probably not worth checking that the other sections exist
|
||||
self.logger.warning(f"{directory}/data.conf contains no Name item in the Version section, opt info will not be read")
|
||||
return None
|
||||
|
||||
if 'VerMajor' not in data_config['Version']: # Probably not worth checking that the other sections exist
|
||||
self.logger.warning(f"{directory}/data.conf contains no VerMajor item in the Version section, opt info will not be read")
|
||||
return None
|
||||
|
||||
if 'VerMinor' not in data_config['Version']: # Probably not worth checking that the other sections exist
|
||||
self.logger.warning(f"{directory}/data.conf contains no VerMinor item in the Version section, opt info will not be read")
|
||||
return None
|
||||
|
||||
if 'VerRelease' not in data_config['Version']: # Probably not worth checking that the other sections exist
|
||||
self.logger.warning(f"{directory}/data.conf contains no VerRelease item in the Version section, opt info will not be read")
|
||||
return None
|
||||
|
||||
opt_seq = data_config['Version']['VerRelease']
|
||||
opt_folder = path.basename(path.normpath(directory))
|
||||
opt_id = await self.data.static.get_opt_by_version_folder(self.version, opt_folder)
|
||||
|
||||
if not opt_id:
|
||||
opt_id = await self.data.static.put_opt(self.version, opt_folder, opt_seq)
|
||||
if not opt_id:
|
||||
self.logger.error(f"Failed to put opt folder info for {opt_folder}")
|
||||
return None
|
||||
else:
|
||||
opt_id = opt_id['id']
|
||||
|
||||
self.logger.info(f"Opt folder {opt_folder} (Database ID {opt_id}) contains {data_config['Version']['Name']} v{data_config['Version']['VerMajor']}.{data_config['Version']['VerMinor']}.{opt_seq}")
|
||||
return opt_id
|
||||
|
||||
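# For reference, read_opt_info only needs the [Version] section of an option
# package's data.conf. A purely illustrative example (values are made up) that
# would pass the checks above:
#
#   [Version]
#   Name=SomeGameTitle
#   VerMajor=2
#   VerMinor=15
#   VerRelease=1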
def copy_image(self, filename: str, src_dir: str, dst_dir: str) -> None:
|
||||
# Convert the image to png so we can easily display it in the frontend
|
||||
file_src = path.join(src_dir, filename)
|
||||
(basename, ext) = path.splitext(filename)
|
||||
file_dst = path.join(dst_dir, basename) + ".png"
|
||||
|
||||
if path.exists(file_src) and not path.exists(file_dst):
|
||||
try:
|
||||
im = Image.open(file_src)
|
||||
im.save(file_dst)
|
||||
except Exception:
|
||||
self.logger.warning(f"Failed to convert {filename} to png")
|
||||
|
||||
def map_dds_images(self, image_dict: dict, dds_dir: str) -> None:
|
||||
for root, dirs, files in walk(dds_dir):
|
||||
for dir in dirs:
|
||||
directory = f"{root}/{dir}"
|
||||
if path.exists(f"{directory}/DDSImage.xml"):
|
||||
with open(f"{directory}/DDSImage.xml", "r", encoding='utf-8') as fp:
|
||||
strdata = fp.read()
|
||||
|
||||
xml_root = ET.fromstring(strdata)
|
||||
for name in xml_root.findall("name"):
|
||||
name = name.find("str").text
|
||||
|
||||
images = []
|
||||
i = 0
|
||||
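# DDSImage.xml lists its textures as ddsFile0, ddsFile1, ... so walk the indices until one is missing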
while xml_root.findall(f"ddsFile{i}"):
|
||||
for ddsFile in xml_root.findall(f"ddsFile{i}"):
|
||||
images += [ddsFile.find("path").text]
|
||||
i += 1
|
||||
|
||||
image_dict[name] = (directory, images)
|
||||
@@ -1,6 +1,6 @@
|
||||
from titles.chuni.schema.profile import ChuniProfileData
|
||||
from titles.chuni.schema.score import ChuniScoreData
|
||||
from titles.chuni.schema.score import ChuniScoreData, ChuniRomVersion
|
||||
from titles.chuni.schema.item import ChuniItemData
|
||||
from titles.chuni.schema.static import ChuniStaticData
|
||||
|
||||
__all__ = ["ChuniProfileData", "ChuniScoreData", "ChuniItemData", "ChuniStaticData"]
|
||||
__all__ = ["ChuniProfileData", "ChuniScoreData", "ChuniRomVersion", "ChuniItemData", "ChuniStaticData"]
|
||||
|
||||
@@ -1,22 +1,22 @@
|
||||
from typing import Dict, List, Optional
|
||||
|
||||
from sqlalchemy import (
|
||||
Table,
|
||||
Column,
|
||||
UniqueConstraint,
|
||||
PrimaryKeyConstraint,
|
||||
Table,
|
||||
UniqueConstraint,
|
||||
and_,
|
||||
delete,
|
||||
)
|
||||
from sqlalchemy.types import Integer, String, TIMESTAMP, Boolean, JSON
|
||||
from sqlalchemy.engine.base import Connection
|
||||
from sqlalchemy.schema import ForeignKey
|
||||
from sqlalchemy.sql import func, select
|
||||
from sqlalchemy.dialects.mysql import insert
|
||||
from sqlalchemy.engine import Row
|
||||
from sqlalchemy.schema import ForeignKey
|
||||
from sqlalchemy.sql import func, select
|
||||
from sqlalchemy.types import JSON, TIMESTAMP, Boolean, Integer, String
|
||||
|
||||
from core.data.schema import BaseData, metadata
|
||||
|
||||
character = Table(
|
||||
character: Table = Table(
|
||||
"chuni_item_character",
|
||||
metadata,
|
||||
Column("id", Integer, primary_key=True, nullable=False),
|
||||
@@ -40,7 +40,7 @@ character = Table(
|
||||
mysql_charset="utf8mb4",
|
||||
)
|
||||
|
||||
item = Table(
|
||||
item: Table = Table(
|
||||
"chuni_item_item",
|
||||
metadata,
|
||||
Column("id", Integer, primary_key=True, nullable=False),
|
||||
@@ -141,7 +141,7 @@ gacha = Table(
|
||||
mysql_charset="utf8mb4",
|
||||
)
|
||||
|
||||
print_state = Table(
|
||||
print_state: Table = Table(
|
||||
"chuni_item_print_state",
|
||||
metadata,
|
||||
Column("id", Integer, primary_key=True, nullable=False),
|
||||
@@ -210,7 +210,7 @@ login_bonus = Table(
|
||||
mysql_charset="utf8mb4",
|
||||
)
|
||||
|
||||
favorite = Table(
|
||||
favorite: Table = Table(
|
||||
"chuni_item_favorite",
|
||||
metadata,
|
||||
Column("id", Integer, primary_key=True, nullable=False),
|
||||
@@ -359,10 +359,34 @@ class ChuniItemData(BaseData):
|
||||
return None
|
||||
return result.lastrowid
|
||||
|
||||
async def get_all_favorites(
|
||||
self, user_id: int, version: int, fav_kind: int = 1
|
||||
) -> Optional[List[Row]]:
|
||||
async def is_favorite(
|
||||
self, user_id: int, version: int, fav_id: int, fav_kind: int = 1
|
||||
) -> bool:
|
||||
|
||||
sql = favorite.select(
|
||||
and_(
|
||||
favorite.c.version == version,
|
||||
favorite.c.user == user_id,
|
||||
favorite.c.favId == fav_id,
|
||||
favorite.c.favKind == fav_kind,
|
||||
)
|
||||
)
|
||||
|
||||
result = await self.execute(sql)
|
||||
if result is None:
|
||||
return False
|
||||
|
||||
return True if len(result.all()) else False
|
||||
|
||||
async def get_all_favorites(
|
||||
self,
|
||||
user_id: int,
|
||||
version: int,
|
||||
fav_kind: int = 1,
|
||||
limit: Optional[int] = None,
|
||||
offset: Optional[int] = None,
|
||||
) -> Optional[List[Row]]:
|
||||
sql = select(favorite).where(
|
||||
and_(
|
||||
favorite.c.version == version,
|
||||
favorite.c.user == user_id,
|
||||
@@ -370,6 +394,13 @@ class ChuniItemData(BaseData):
|
||||
)
|
||||
)
|
||||
|
||||
if limit is not None or offset is not None:
|
||||
sql = sql.order_by(favorite.c.id)
|
||||
if limit is not None:
|
||||
sql = sql.limit(limit)
|
||||
if offset is not None:
|
||||
sql = sql.offset(offset)
|
||||
|
||||
result = await self.execute(sql)
|
||||
if result is None:
|
||||
return None
|
||||
@@ -421,6 +452,31 @@ class ChuniItemData(BaseData):
|
||||
return None
|
||||
return result.fetchone()
|
||||
|
||||
async def put_favorite_music(self, user_id: int, version: int, music_id: int) -> Optional[int]:
|
||||
sql = insert(favorite).values(user=user_id, version=version, favId=music_id, favKind=1)
|
||||
|
||||
conflict = sql.on_duplicate_key_update(user=user_id, version=version, favId=music_id, favKind=1)
|
||||
|
||||
result = await self.execute(conflict)
|
||||
if result is None:
|
||||
return None
|
||||
return result.lastrowid
|
||||
|
||||
async def delete_favorite_music(self, user_id: int, version: int, music_id: int) -> Optional[int]:
|
||||
sql = delete(favorite).where(
|
||||
and_(
|
||||
favorite.c.user==user_id,
|
||||
favorite.c.version==version,
|
||||
favorite.c.favId==music_id,
|
||||
favorite.c.favKind==1
|
||||
)
|
||||
)
|
||||
|
||||
result = await self.execute(sql)
|
||||
if result is None:
|
||||
return None
|
||||
return result.lastrowid
|
||||
|
||||
async def put_character(self, user_id: int, character_data: Dict) -> Optional[int]:
|
||||
character_data["user"] = user_id
|
||||
|
||||
@@ -444,9 +500,18 @@ class ChuniItemData(BaseData):
|
||||
return None
|
||||
return result.fetchone()
|
||||
|
||||
async def get_characters(self, user_id: int) -> Optional[List[Row]]:
|
||||
async def get_characters(
|
||||
self, user_id: int, limit: Optional[int] = None, offset: Optional[int] = None
|
||||
) -> Optional[List[Row]]:
|
||||
sql = select(character).where(character.c.user == user_id)
|
||||
|
||||
if limit is not None or offset is not None:
|
||||
sql = sql.order_by(character.c.id)
|
||||
if limit is not None:
|
||||
sql = sql.limit(limit)
|
||||
if offset is not None:
|
||||
sql = sql.offset(offset)
|
||||
|
||||
result = await self.execute(sql)
|
||||
if result is None:
|
||||
return None
|
||||
@@ -465,13 +530,26 @@ class ChuniItemData(BaseData):
|
||||
return None
|
||||
return result.lastrowid
|
||||
|
||||
async def get_items(self, user_id: int, kind: int = None) -> Optional[List[Row]]:
|
||||
if kind is None:
|
||||
sql = select(item).where(item.c.user == user_id)
|
||||
else:
|
||||
sql = select(item).where(
|
||||
and_(item.c.user == user_id, item.c.itemKind == kind)
|
||||
)
|
||||
async def get_items(
|
||||
self,
|
||||
user_id: int,
|
||||
kind: Optional[int] = None,
|
||||
limit: Optional[int] = None,
|
||||
offset: Optional[int] = None,
|
||||
) -> Optional[List[Row]]:
|
||||
cond = item.c.user == user_id
|
||||
|
||||
if kind is not None:
|
||||
cond &= item.c.itemKind == kind
|
||||
|
||||
sql = select(item).where(cond)
|
||||
|
||||
if limit is not None or offset is not None:
|
||||
sql = sql.order_by(item.c.id)
|
||||
if limit is not None:
|
||||
sql = sql.limit(limit)
|
||||
if offset is not None:
|
||||
sql = sql.offset(offset)
|
||||
|
||||
result = await self.execute(sql)
|
||||
if result is None:
|
||||
@@ -533,8 +611,8 @@ class ChuniItemData(BaseData):
|
||||
return None
|
||||
return result.lastrowid
|
||||
|
||||
async def get_map_areas(self, user_id: int) -> Optional[List[Row]]:
|
||||
sql = select(map_area).where(map_area.c.user == user_id)
|
||||
async def get_map_areas(self, user_id: int, map_area_ids: List[int]) -> Optional[List[Row]]:
|
||||
sql = select(map_area).where(map_area.c.user == user_id, map_area.c.mapAreaId.in_(map_area_ids))
|
||||
|
||||
result = await self.execute(sql)
|
||||
if result is None:
|
||||
@@ -565,15 +643,26 @@ class ChuniItemData(BaseData):
|
||||
return result.lastrowid
|
||||
|
||||
async def get_user_print_states(
|
||||
self, aime_id: int, has_completed: bool = False
|
||||
self,
|
||||
aime_id: int,
|
||||
has_completed: bool = False,
|
||||
limit: Optional[int] = None,
|
||||
offset: Optional[int] = None,
|
||||
) -> Optional[List[Row]]:
|
||||
sql = print_state.select(
|
||||
sql = select(print_state).where(
|
||||
and_(
|
||||
print_state.c.user == aime_id,
|
||||
print_state.c.hasCompleted == has_completed,
|
||||
)
|
||||
)
|
||||
|
||||
if limit is not None or offset is not None:
|
||||
sql = sql.order_by(print_state.c.id)
|
||||
if limit is not None:
|
||||
sql = sql.limit(limit)
|
||||
if offset is not None:
|
||||
sql = sql.offset(offset)
|
||||
|
||||
result = await self.execute(sql)
|
||||
if result is None:
|
||||
return None
|
||||
|
||||
@@ -35,13 +35,13 @@ profile = Table(
|
||||
Column("friendCount", Integer),
|
||||
Column("lastPlaceId", Integer),
|
||||
Column("nameplateId", Integer),
|
||||
Column("totalMapNum", Integer),
|
||||
Column("totalMapNum", BigInteger),
|
||||
Column("lastAllNetId", Integer),
|
||||
Column("lastClientId", String(25)),
|
||||
Column("lastPlayDate", String(25)),
|
||||
Column("lastRegionId", Integer),
|
||||
Column("playerRating", Integer),
|
||||
Column("totalHiScore", Integer),
|
||||
Column("totalHiScore", BigInteger),
|
||||
Column("webLimitDate", String(25)),
|
||||
Column("firstPlayDate", String(25)),
|
||||
Column("highestRating", Integer),
|
||||
@@ -59,12 +59,12 @@ profile = Table(
|
||||
Column("firstDataVersion", String(25)),
|
||||
Column("reincarnationNum", Integer),
|
||||
Column("playedTutorialBit", Integer),
|
||||
Column("totalBasicHighScore", Integer),
|
||||
Column("totalExpertHighScore", Integer),
|
||||
Column("totalMasterHighScore", Integer),
|
||||
Column("totalRepertoireCount", Integer),
|
||||
Column("totalBasicHighScore", BigInteger),
|
||||
Column("totalExpertHighScore", BigInteger),
|
||||
Column("totalMasterHighScore", BigInteger),
|
||||
Column("totalRepertoireCount", BigInteger),
|
||||
Column("firstTutorialCancelNum", Integer),
|
||||
Column("totalAdvancedHighScore", Integer),
|
||||
Column("totalAdvancedHighScore", BigInteger),
|
||||
Column("masterTutorialCancelNum", Integer),
|
||||
Column("ext1", Integer), # Added in chunew
|
||||
Column("ext2", Integer),
|
||||
@@ -111,7 +111,7 @@ profile = Table(
|
||||
Column("classEmblemBase", Integer, server_default="0"),
|
||||
Column("battleRankPoint", Integer, server_default="0"),
|
||||
Column("netBattle2ndCount", Integer, server_default="0"),
|
||||
Column("totalUltimaHighScore", Integer, server_default="0"),
|
||||
Column("totalUltimaHighScore", BigInteger, server_default="0"),
|
||||
Column("skillId", Integer, server_default="0"),
|
||||
Column("lastCountryCode", String(5), server_default="JPN"),
|
||||
Column("isNetBattleHost", Boolean, server_default="0"),
|
||||
@@ -439,6 +439,58 @@ class ChuniProfileData(BaseData):
|
||||
return False
|
||||
return True
|
||||
|
||||
async def update_map_icon(self, user_id: int, version: int, new_map_icon: int) -> bool:
|
||||
sql = profile.update((profile.c.user == user_id) & (profile.c.version == version)).values(
|
||||
mapIconId=new_map_icon
|
||||
)
|
||||
result = await self.execute(sql)
|
||||
|
||||
if result is None:
|
||||
self.logger.warning(f"Failed to set user {user_id} map icon")
|
||||
return False
|
||||
return True
|
||||
|
||||
async def update_system_voice(self, user_id: int, version: int, new_system_voice: int) -> bool:
|
||||
sql = profile.update((profile.c.user == user_id) & (profile.c.version == version)).values(
|
||||
voiceId=new_system_voice
|
||||
)
|
||||
result = await self.execute(sql)
|
||||
|
||||
if result is None:
|
||||
self.logger.warning(f"Failed to set user {user_id} system voice")
|
||||
return False
|
||||
return True
|
||||
|
||||
async def update_userbox(self, user_id: int, version: int, new_nameplate: int, new_trophy: int, new_character: int) -> bool:
|
||||
sql = profile.update((profile.c.user == user_id) & (profile.c.version == version)).values(
|
||||
nameplateId=new_nameplate,
|
||||
trophyId=new_trophy,
|
||||
charaIllustId=new_character
|
||||
)
|
||||
result = await self.execute(sql)
|
||||
|
||||
if result is None:
|
||||
self.logger.warning(f"Failed to set user {user_id} userbox")
|
||||
return False
|
||||
return True
|
||||
|
||||
async def update_avatar(self, user_id: int, version: int, new_wear: int, new_face: int, new_head: int, new_skin: int, new_item: int, new_front: int, new_back: int) -> bool:
|
||||
sql = profile.update((profile.c.user == user_id) & (profile.c.version == version)).values(
|
||||
avatarWear=new_wear,
|
||||
avatarFace=new_face,
|
||||
avatarHead=new_head,
|
||||
avatarSkin=new_skin,
|
||||
avatarItem=new_item,
|
||||
avatarFront=new_front,
|
||||
avatarBack=new_back
|
||||
)
|
||||
result = await self.execute(sql)
|
||||
|
||||
if result is None:
|
||||
self.logger.warning(f"Failed to set user {user_id} avatar")
|
||||
return False
|
||||
return True
|
||||
|
||||
async def put_profile_data(
|
||||
self, aime_id: int, version: int, profile_data: Dict
|
||||
) -> Optional[int]:
|
||||
@@ -713,7 +765,7 @@ class ChuniProfileData(BaseData):
|
||||
|
||||
existing_team = self.get_team_by_id(team_id)
|
||||
if existing_team is None or "userTeamPoint" not in existing_team:
|
||||
self.logger.warn(
|
||||
self.logger.warning(
|
||||
f"update_team: Failed to update team! team id: {team_id}. Existing team data not found."
|
||||
)
|
||||
return False
|
||||
@@ -743,7 +795,7 @@ class ChuniProfileData(BaseData):
|
||||
result = await self.execute(conflict)
|
||||
|
||||
if result is None:
|
||||
self.logger.warn(
|
||||
self.logger.warning(
|
||||
f"update_team: Failed to update team! team id: {team_id}"
|
||||
)
|
||||
return False
|
||||
@@ -756,12 +808,13 @@ class ChuniProfileData(BaseData):
|
||||
if result is None:
|
||||
return None
|
||||
return result.fetchone()
|
||||
|
||||
async def get_overview(self) -> Dict:
|
||||
# Fetch and add up all the playcounts
|
||||
playcount_sql = await self.execute(select(profile.c.playCount))
|
||||
|
||||
if playcount_sql is None:
|
||||
self.logger.warn(
|
||||
self.logger.warning(
|
||||
f"get_overview: Couldn't pull playcounts"
|
||||
)
|
||||
return 0
|
||||
@@ -790,7 +843,7 @@ class ChuniProfileData(BaseData):
|
||||
result = await self.execute(sql)
|
||||
|
||||
if result is None:
|
||||
self.logger.warn(
|
||||
self.logger.warning(
|
||||
f"put_profile_rating: Could not insert {rating_type}, aime_id: {aime_id}",
|
||||
)
|
||||
return
|
||||
|
||||
@@ -1,15 +1,17 @@
|
||||
from typing import Dict, List, Optional
|
||||
from sqlalchemy import Table, Column, UniqueConstraint, PrimaryKeyConstraint, and_
|
||||
from sqlalchemy.types import Integer, String, TIMESTAMP, Boolean, JSON, BigInteger
|
||||
from sqlalchemy.engine.base import Connection
|
||||
from sqlalchemy.schema import ForeignKey
|
||||
from sqlalchemy.engine import Row
|
||||
from sqlalchemy.sql import func, select
|
||||
|
||||
from sqlalchemy import Column, Table, UniqueConstraint
|
||||
from sqlalchemy.dialects.mysql import insert
|
||||
from sqlalchemy.sql.expression import exists
|
||||
from sqlalchemy.engine import Row
|
||||
from sqlalchemy.schema import ForeignKey
|
||||
from sqlalchemy.sql import func, select
|
||||
from sqlalchemy.types import Boolean, Integer, String
|
||||
|
||||
from core.data.schema import BaseData, metadata
|
||||
|
||||
course = Table(
|
||||
from ..config import ChuniConfig
|
||||
|
||||
course: Table = Table(
|
||||
"chuni_score_course",
|
||||
metadata,
|
||||
Column("id", Integer, primary_key=True, nullable=False),
|
||||
@@ -40,7 +42,7 @@ course = Table(
|
||||
mysql_charset="utf8mb4",
|
||||
)
|
||||
|
||||
best_score = Table(
|
||||
best_score: Table = Table(
|
||||
"chuni_score_best",
|
||||
metadata,
|
||||
Column("id", Integer, primary_key=True, nullable=False),
|
||||
@@ -140,11 +142,116 @@ playlog = Table(
|
||||
mysql_charset="utf8mb4"
|
||||
)
|
||||
|
||||
class ChuniRomVersion():
|
||||
"""
|
||||
Class used to easily compare rom version strings and map back to the internal integer version.
|
||||
Used with methods that touch the playlog table.
|
||||
"""
|
||||
Versions = {}
|
||||
def init_versions(cfg: ChuniConfig):
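# assumed to be called once as ChuniRomVersion.init_versions(cfg) (note: no self) to populate the shared Versions map before any lookups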
|
||||
if len(ChuniRomVersion.Versions) > 0:
|
||||
# don't bother with re-init
|
||||
return
|
||||
|
||||
# Build up an easily comparable list of versions. Used when deriving romVersion from the playlog
|
||||
all_versions = {
|
||||
10: ChuniRomVersion("1.50.0"),
|
||||
9: ChuniRomVersion("1.45.0"),
|
||||
8: ChuniRomVersion("1.40.0"),
|
||||
7: ChuniRomVersion("1.35.0"),
|
||||
6: ChuniRomVersion("1.30.0"),
|
||||
5: ChuniRomVersion("1.25.0"),
|
||||
4: ChuniRomVersion("1.20.0"),
|
||||
3: ChuniRomVersion("1.15.0"),
|
||||
2: ChuniRomVersion("1.10.0"),
|
||||
1: ChuniRomVersion("1.05.0"),
|
||||
0: ChuniRomVersion("1.00.0")
|
||||
}
|
||||
|
||||
# add the versions from the config
|
||||
for ver in range(11,999):
|
||||
cfg_ver = cfg.version.version(ver)
|
||||
if cfg_ver:
|
||||
all_versions[ver] = ChuniRomVersion(cfg_ver["rom"])
|
||||
else:
|
||||
break
|
||||
|
||||
# sort it by version number for easy iteration
|
||||
ChuniRomVersion.Versions = dict(sorted(all_versions.items()))
|
||||
|
||||
def __init__(self, rom_version: Optional[str] = None) -> None:
|
||||
if rom_version is None:
|
||||
self.major = 0
|
||||
self.minor = 0
|
||||
self.maint = 0
|
||||
self.version = "0.00.00"
|
||||
return
|
||||
|
||||
(major, minor, maint) = rom_version.split('.')
|
||||
self.major = int(major)
|
||||
self.minor = int(minor)
|
||||
self.maint = int(maint)
|
||||
self.version = rom_version
|
||||
|
||||
def __str__(self) -> str:
|
||||
return self.version
|
||||
|
||||
def __eq__(self, other) -> bool:
|
||||
return (self.major == other.major and
|
||||
self.minor == other.minor and
|
||||
self.maint == other.maint)
|
||||
|
||||
def __lt__(self, other) -> bool:
|
||||
return (self.major < other.major) or \
|
||||
(self.major == other.major and self.minor < other.minor) or \
|
||||
(self.major == other.major and self.minor == other.minor and self.maint < other.maint)
|
||||
|
||||
def __gt__(self, other) -> bool:
|
||||
return (self.major > other.major) or \
|
||||
(self.major == other.major and self.minor > other.minor) or \
|
||||
(self.major == other.major and self.minor == other.minor and self.maint > other.maint)
|
||||
|
||||
def get_int_version(self) -> int:
|
||||
"""
|
||||
Used when displaying the playlog to walk backwards from the recorded romVersion to our internal version number.
|
||||
This is effectively a workaround to avoid recording our internal version number along with the romVersion in the db at insert time.
|
||||
"""
|
||||
for ver,rom in ChuniRomVersion.Versions.items():
|
||||
# if the version matches exactly, great!
|
||||
if self == rom:
|
||||
return ver
|
||||
|
||||
# If this isn't the last version, use the next as an upper bound
|
||||
if ver + 1 < len(ChuniRomVersion.Versions):
|
||||
if self > rom and self < ChuniRomVersion.Versions[ver + 1]:
|
||||
# this version fits in the middle! It must be a revision of the version
|
||||
# e.g. 2.15.00 vs 2.16.00
|
||||
return ver
|
||||
else:
|
||||
# this is the last version in the list.
|
||||
# If it's greater than this one and still the same major version, call it a match
|
||||
if self.major == rom.major and self > rom:
|
||||
return ver
|
||||
|
||||
# The only way we get here is if the version started with "0.", which is definitely invalid
|
||||
return -1
|
||||
|
||||
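# A minimal usage sketch for ChuniRomVersion (illustrative only; assumes a loaded ChuniConfig instance named cfg,
# and the exact mapping depends on the rom versions configured):
#
#   ChuniRomVersion.init_versions(cfg)               # populate the shared version table once at startup
#   recorded = ChuniRomVersion("2.16.00")            # wrap a romVersion string recorded in the playlog
#   internal_version = recorded.get_int_version()    # -> internal version number, or -1 if nothing matches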
class ChuniScoreData(BaseData):
|
||||
async def get_courses(self, aime_id: int) -> Optional[Row]:
|
||||
async def get_courses(
|
||||
self,
|
||||
aime_id: int,
|
||||
limit: Optional[int] = None,
|
||||
offset: Optional[int] = None,
|
||||
) -> Optional[List[Row]]:
|
||||
sql = select(course).where(course.c.user == aime_id)
|
||||
|
||||
if limit is not None or offset is not None:
|
||||
sql = sql.order_by(course.c.id)
|
||||
if limit is not None:
|
||||
sql = sql.limit(limit)
|
||||
if offset is not None:
|
||||
sql = sql.offset(offset)
|
||||
|
||||
result = await self.execute(sql)
|
||||
if result is None:
|
||||
return None
|
||||
@@ -162,8 +269,45 @@ class ChuniScoreData(BaseData):
|
||||
return None
|
||||
return result.lastrowid
|
||||
|
||||
async def get_scores(self, aime_id: int) -> Optional[Row]:
|
||||
sql = select(best_score).where(best_score.c.user == aime_id)
|
||||
async def get_scores(
|
||||
self,
|
||||
aime_id: int,
|
||||
levels: Optional[list[int]] = None,
|
||||
limit: Optional[int] = None,
|
||||
offset: Optional[int] = None,
|
||||
) -> Optional[List[Row]]:
|
||||
condition = best_score.c.user == aime_id
|
||||
|
||||
if levels is not None:
|
||||
condition &= best_score.c.level.in_(levels)
|
||||
|
||||
if limit is None and offset is None:
|
||||
sql = (
|
||||
select(best_score)
|
||||
.where(condition)
|
||||
.order_by(best_score.c.musicId.asc(), best_score.c.level.asc())
|
||||
)
|
||||
else:
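# paginate over distinct musicIds (not raw rows) so every difficulty of a selected song is returned together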
|
||||
subq = (
|
||||
select(best_score.c.musicId)
|
||||
.distinct()
|
||||
.where(condition)
|
||||
.order_by(best_score.c.musicId)
|
||||
)
|
||||
|
||||
if limit is not None:
|
||||
subq = subq.limit(limit)
|
||||
if offset is not None:
|
||||
subq = subq.offset(offset)
|
||||
|
||||
subq = subq.subquery()
|
||||
|
||||
sql = (
|
||||
select(best_score)
|
||||
.join(subq, best_score.c.musicId == subq.c.musicId)
|
||||
.where(condition)
|
||||
.order_by(best_score.c.musicId, best_score.c.level)
|
||||
)
|
||||
|
||||
result = await self.execute(sql)
|
||||
if result is None:
|
||||
@@ -190,76 +334,86 @@ class ChuniScoreData(BaseData):
|
||||
return None
|
||||
return result.fetchall()
|
||||
|
||||
async def get_playlogs_limited(self, aime_id: int, index: int, count: int) -> Optional[Row]:
|
||||
sql = select(playlog).where(playlog.c.user == aime_id).order_by(playlog.c.id.desc()).limit(count).offset(index * count)
|
||||
async def get_playlog_rom_versions_by_int_version(self, version: int, aime_id: int = -1) -> Optional[str]:
|
||||
# Get a set of all romVersion values present
|
||||
sql = select([playlog.c.romVersion])
|
||||
if aime_id != -1:
|
||||
# limit results to a specific user
|
||||
sql = sql.where(playlog.c.user == aime_id)
|
||||
sql = sql.distinct()
|
||||
|
||||
result = await self.execute(sql)
|
||||
if result is None:
|
||||
self.logger.warning(f" aime_id {aime_id} has no playlog ")
|
||||
return None
|
||||
record_versions = result.fetchall()
|
||||
|
||||
# for each romVersion recorded, check if it maps back the current version we are operating on
|
||||
matching_rom_versions = []
|
||||
for v in record_versions:
|
||||
# Do this to prevent null romVersion from causing an error in ChuniRomVersion.__init__()
|
||||
if v[0] is None:
|
||||
continue
|
||||
|
||||
if ChuniRomVersion(v[0]).get_int_version() == version:
|
||||
matching_rom_versions += [v[0]]
|
||||
|
||||
self.logger.debug(f"romVersions {matching_rom_versions} map to version {version}")
|
||||
return matching_rom_versions
|
||||
|
||||
async def get_playlogs_limited(self, aime_id: int, version: int, index: int, count: int) -> Optional[Row]:
|
||||
# Get a list of all the recorded romVersions in the playlog
|
||||
# for this user that map to the given version.
|
||||
rom_versions = await self.get_playlog_rom_versions_by_int_version(version, aime_id)
|
||||
if rom_versions is None:
|
||||
return None
|
||||
|
||||
# Query results that have the matching romVersions
|
||||
sql = select(playlog).where((playlog.c.user == aime_id) & (playlog.c.romVersion.in_(rom_versions))).order_by(playlog.c.id.desc()).limit(count).offset(index * count)
|
||||
|
||||
result = await self.execute(sql)
|
||||
if result is None:
|
||||
self.logger.info(f" aime_id {aime_id} has no playlog for version {version}")
|
||||
return None
|
||||
return result.fetchall()
|
||||
|
||||
async def get_user_playlogs_count(self, aime_id: int) -> Optional[Row]:
|
||||
sql = select(func.count()).where(playlog.c.user == aime_id)
|
||||
async def get_user_playlogs_count(self, aime_id: int, version: int) -> Optional[Row]:
|
||||
# Get a list of all the recorded romVersions in the playlog
|
||||
# for this user that map to the given version.
|
||||
rom_versions = await self.get_playlog_rom_versions_by_int_version(version, aime_id)
|
||||
if rom_versions is None:
|
||||
return None
|
||||
|
||||
# Query results that have the matching romVersions
|
||||
sql = select(func.count()).where((playlog.c.user == aime_id) & (playlog.c.romVersion.in_(rom_versions)))
|
||||
|
||||
result = await self.execute(sql)
|
||||
if result is None:
|
||||
self.logger.warning(f" aime_id {aime_id} has no playlog ")
|
||||
return None
|
||||
self.logger.info(f" aime_id {aime_id} has no playlog for version {version}")
|
||||
return 0
|
||||
return result.scalar()
|
||||
|
||||
async def put_playlog(self, aime_id: int, playlog_data: Dict, version: int) -> Optional[int]:
|
||||
# Calculate the ROM version that should be inserted into the DB, based on the version of the game being inserted
|
||||
# We only need this for version 10 (Plost) and earlier, as newer versions include romVersion in their upsert
|
||||
# This matters both for gameRankings and for a future DB update to keep version data separate
|
||||
romVer = {
|
||||
10: "1.50.0",
|
||||
9: "1.45.0",
|
||||
8: "1.40.0",
|
||||
7: "1.35.0",
|
||||
6: "1.30.0",
|
||||
5: "1.25.0",
|
||||
4: "1.20.0",
|
||||
3: "1.15.0",
|
||||
2: "1.10.0",
|
||||
1: "1.05.0",
|
||||
0: "1.00.0"
|
||||
}
|
||||
|
||||
playlog_data["user"] = aime_id
|
||||
playlog_data = self.fix_bools(playlog_data)
|
||||
# If the romVersion is not in the data (Version 10 and earlier), look it up from our internal mapping
|
||||
if "romVersion" not in playlog_data:
|
||||
playlog_data["romVersion"] = romVer.get(version, "1.00.0")
|
||||
playlog_data["romVersion"] = ChuniRomVersion.Versions[version]
|
||||
|
||||
sql = insert(playlog).values(**playlog_data)
|
||||
conflict = sql.on_duplicate_key_update(**playlog_data)
|
||||
|
||||
result = await self.execute(conflict)
|
||||
result = await self.execute(sql)
|
||||
if result is None:
|
||||
return None
|
||||
return result.lastrowid
|
||||
|
||||
async def get_rankings(self, version: int) -> Optional[List[Dict]]:
|
||||
# Calculates the ROM version that should be fetched for rankings, based on the game version being retrieved
|
||||
# This prevents tracks that are not accessible in your version from counting towards the 10 results
|
||||
romVer = {
|
||||
15: "2.20%",
|
||||
14: "2.15%",
|
||||
13: "2.10%",
|
||||
12: "2.05%",
|
||||
11: "2.00%",
|
||||
10: "1.50%",
|
||||
9: "1.45%",
|
||||
8: "1.40%",
|
||||
7: "1.35%",
|
||||
6: "1.30%",
|
||||
5: "1.25%",
|
||||
4: "1.20%",
|
||||
3: "1.15%",
|
||||
2: "1.10%",
|
||||
1: "1.05%",
|
||||
0: "1.00%"
|
||||
}
|
||||
sql = select([playlog.c.musicId.label('id'), func.count(playlog.c.musicId).label('point')]).where((playlog.c.level != 4) & (playlog.c.romVersion.like(romVer.get(version, "%")))).group_by(playlog.c.musicId).order_by(func.count(playlog.c.musicId).desc()).limit(10)
|
||||
# Get a list of all the recorded romVersions in the playlog for the given version
|
||||
rom_versions = await self.get_playlog_rom_versions_by_int_version(version)
|
||||
if rom_versions is None:
|
||||
return None
|
||||
|
||||
# Query results that have the matching romVersions
|
||||
sql = select([playlog.c.musicId.label('id'), func.count(playlog.c.musicId).label('point')]).where((playlog.c.level != 4) & (playlog.c.romVersion.in_(rom_versions))).group_by(playlog.c.musicId).order_by(func.count(playlog.c.musicId).desc()).limit(10)
|
||||
result = await self.execute(sql)
|
||||
|
||||
if result is None:
|
||||
@@ -267,11 +421,3 @@ class ChuniScoreData(BaseData):
|
||||
|
||||
rows = result.fetchall()
|
||||
return [dict(row) for row in rows]
|
||||
|
||||
async def get_rival_music(self, rival_id: int) -> Optional[List[Dict]]:
|
||||
sql = select(best_score).where(best_score.c.user == rival_id)
|
||||
|
||||
result = await self.execute(sql)
|
||||
if result is None:
|
||||
return None
|
||||
return result.fetchall()
|
||||
|
||||
@@ -7,16 +7,29 @@ from sqlalchemy import (
|
||||
PrimaryKeyConstraint,
|
||||
and_,
|
||||
)
|
||||
from sqlalchemy.types import Integer, String, TIMESTAMP, Boolean, JSON, Float
|
||||
from sqlalchemy.engine.base import Connection
|
||||
from sqlalchemy.types import Integer, String, TIMESTAMP, Boolean, BIGINT, Float, INTEGER, VARCHAR, BOOLEAN
|
||||
from sqlalchemy.engine import Row
|
||||
from sqlalchemy.schema import ForeignKey
|
||||
from sqlalchemy.sql import func, select
|
||||
from sqlalchemy.dialects.mysql import insert
|
||||
from datetime import datetime
|
||||
from sqlalchemy.sql.functions import coalesce
|
||||
|
||||
from core.data.schema import BaseData, metadata
|
||||
|
||||
opts = Table(
|
||||
"chuni_static_opt",
|
||||
metadata,
|
||||
Column("id", BIGINT, primary_key=True, nullable=False),
|
||||
Column("version", INTEGER, nullable=False),
|
||||
Column("name", VARCHAR(4), nullable=False), # Axxx
|
||||
Column("sequence", INTEGER, nullable=False), # VerRelease in data.conf
|
||||
Column("whenRead", TIMESTAMP, nullable=False, server_default=func.now()),
|
||||
Column("isEnable", BOOLEAN, nullable=False, server_default="1"),
|
||||
UniqueConstraint("version", "name", name="chuni_static_opt_uk"),
|
||||
mysql_charset="utf8mb4",
|
||||
)
|
||||
|
||||
events = Table(
|
||||
"chuni_static_events",
|
||||
metadata,
|
||||
@@ -27,6 +40,7 @@ events = Table(
|
||||
Column("name", String(255)),
|
||||
Column("startDate", TIMESTAMP, server_default=func.now()),
|
||||
Column("enabled", Boolean, server_default="1"),
|
||||
Column("opt", ForeignKey("chuni_static_opt.id", ondelete="SET NULL", onupdate="cascade")),
|
||||
UniqueConstraint("version", "eventId", name="chuni_static_events_uk"),
|
||||
mysql_charset="utf8mb4",
|
||||
)
|
||||
@@ -44,6 +58,7 @@ music = Table(
|
||||
Column("genre", String(255)),
|
||||
Column("jacketPath", String(255)),
|
||||
Column("worldsEndTag", String(7)),
|
||||
Column("opt", ForeignKey("chuni_static_opt.id", ondelete="SET NULL", onupdate="cascade")),
|
||||
UniqueConstraint("version", "songId", "chartId", name="chuni_static_music_uk"),
|
||||
mysql_charset="utf8mb4",
|
||||
)
|
||||
@@ -59,6 +74,7 @@ charge = Table(
|
||||
Column("consumeType", Integer),
|
||||
Column("sellingAppeal", Boolean),
|
||||
Column("enabled", Boolean, server_default="1"),
|
||||
Column("opt", ForeignKey("chuni_static_opt.id", ondelete="SET NULL", onupdate="cascade")),
|
||||
UniqueConstraint("version", "chargeId", name="chuni_static_charge_uk"),
|
||||
mysql_charset="utf8mb4",
|
||||
)
|
||||
@@ -73,10 +89,97 @@ avatar = Table(
|
||||
Column("category", Integer),
|
||||
Column("iconPath", String(255)),
|
||||
Column("texturePath", String(255)),
|
||||
Column("isEnabled", Boolean, server_default="1"),
|
||||
Column("defaultHave", Boolean, server_default="0"),
|
||||
Column("sortName", String(255)),
|
||||
Column("opt", ForeignKey("chuni_static_opt.id", ondelete="SET NULL", onupdate="cascade")),
|
||||
UniqueConstraint("version", "avatarAccessoryId", name="chuni_static_avatar_uk"),
|
||||
mysql_charset="utf8mb4",
|
||||
)
|
||||
|
||||
nameplate = Table(
|
||||
"chuni_static_nameplate",
|
||||
metadata,
|
||||
Column("id", Integer, primary_key=True, nullable=False),
|
||||
Column("version", Integer, nullable=False),
|
||||
Column("nameplateId", Integer),
|
||||
Column("name", String(255)),
|
||||
Column("texturePath", String(255)),
|
||||
Column("isEnabled", Boolean, server_default="1"),
|
||||
Column("defaultHave", Boolean, server_default="0"),
|
||||
Column("sortName", String(255)),
|
||||
Column("opt", ForeignKey("chuni_static_opt.id", ondelete="SET NULL", onupdate="cascade")),
|
||||
UniqueConstraint("version", "nameplateId", name="chuni_static_nameplate_uk"),
|
||||
mysql_charset="utf8mb4",
|
||||
)
|
||||
|
||||
character = Table(
|
||||
"chuni_static_character",
|
||||
metadata,
|
||||
Column("id", Integer, primary_key=True, nullable=False),
|
||||
Column("version", Integer, nullable=False),
|
||||
Column("characterId", Integer),
|
||||
Column("name", String(255)),
|
||||
Column("sortName", String(255)),
|
||||
Column("worksName", String(255)),
|
||||
Column("rareType", Integer),
|
||||
Column("imagePath1", String(255)),
|
||||
Column("imagePath2", String(255)),
|
||||
Column("imagePath3", String(255)),
|
||||
Column("isEnabled", Boolean, server_default="1"),
|
||||
Column("defaultHave", Boolean, server_default="0"),
|
||||
Column("opt", ForeignKey("chuni_static_opt.id", ondelete="SET NULL", onupdate="cascade")),
|
||||
UniqueConstraint("version", "characterId", name="chuni_static_character_uk"),
|
||||
mysql_charset="utf8mb4",
|
||||
)
|
||||
|
||||
trophy = Table(
|
||||
"chuni_static_trophy",
|
||||
metadata,
|
||||
Column("id", Integer, primary_key=True, nullable=False),
|
||||
Column("version", Integer, nullable=False),
|
||||
Column("trophyId", Integer),
|
||||
Column("name", String(255)),
|
||||
Column("rareType", Integer),
|
||||
Column("isEnabled", Boolean, server_default="1"),
|
||||
Column("defaultHave", Boolean, server_default="0"),
|
||||
Column("opt", ForeignKey("chuni_static_opt.id", ondelete="SET NULL", onupdate="cascade")),
|
||||
UniqueConstraint("version", "trophyId", name="chuni_static_trophy_uk"),
|
||||
mysql_charset="utf8mb4",
|
||||
)
|
||||
|
||||
map_icon = Table(
|
||||
"chuni_static_map_icon",
|
||||
metadata,
|
||||
Column("id", Integer, primary_key=True, nullable=False),
|
||||
Column("version", Integer, nullable=False),
|
||||
Column("mapIconId", Integer),
|
||||
Column("name", String(255)),
|
||||
Column("sortName", String(255)),
|
||||
Column("iconPath", String(255)),
|
||||
Column("isEnabled", Boolean, server_default="1"),
|
||||
Column("defaultHave", Boolean, server_default="0"),
|
||||
Column("opt", ForeignKey("chuni_static_opt.id", ondelete="SET NULL", onupdate="cascade")),
|
||||
UniqueConstraint("version", "mapIconId", name="chuni_static_mapicon_uk"),
|
||||
mysql_charset="utf8mb4",
|
||||
)
|
||||
|
||||
system_voice = Table(
|
||||
"chuni_static_system_voice",
|
||||
metadata,
|
||||
Column("id", Integer, primary_key=True, nullable=False),
|
||||
Column("version", Integer, nullable=False),
|
||||
Column("voiceId", Integer),
|
||||
Column("name", String(255)),
|
||||
Column("sortName", String(255)),
|
||||
Column("imagePath", String(255)),
|
||||
Column("isEnabled", Boolean, server_default="1"),
|
||||
Column("defaultHave", Boolean, server_default="0"),
|
||||
Column("opt", ForeignKey("chuni_static_opt.id", ondelete="SET NULL", onupdate="cascade")),
|
||||
UniqueConstraint("version", "voiceId", name="chuni_static_systemvoice_uk"),
|
||||
mysql_charset="utf8mb4",
|
||||
)
|
||||
|
||||
gachas = Table(
|
||||
"chuni_static_gachas",
|
||||
metadata,
|
||||
@@ -94,6 +197,7 @@ gachas = Table(
|
||||
Column("endDate", TIMESTAMP, server_default="2038-01-01 00:00:00.0"),
|
||||
Column("noticeStartDate", TIMESTAMP, server_default="2018-01-01 00:00:00.0"),
|
||||
Column("noticeEndDate", TIMESTAMP, server_default="2038-01-01 00:00:00.0"),
|
||||
Column("opt", ForeignKey("cm_static_opts.id", ondelete="SET NULL", onupdate="cascade")),
|
||||
UniqueConstraint("version", "gachaId", "gachaName", name="chuni_static_gachas_uk"),
|
||||
mysql_charset="utf8mb4",
|
||||
)
|
||||
@@ -114,6 +218,7 @@ cards = Table(
|
||||
Column("combo", Integer, nullable=False),
|
||||
Column("chain", Integer, nullable=False),
|
||||
Column("skillName", String(255), nullable=False),
|
||||
Column("opt", ForeignKey("cm_static_opts.id", ondelete="SET NULL", onupdate="cascade")),
|
||||
UniqueConstraint("version", "cardId", name="chuni_static_cards_uk"),
|
||||
mysql_charset="utf8mb4",
|
||||
)
|
||||
@@ -138,6 +243,7 @@ login_bonus_preset = Table(
|
||||
Column("version", Integer, nullable=False),
|
||||
Column("presetName", String(255), nullable=False),
|
||||
Column("isEnabled", Boolean, server_default="1"),
|
||||
Column("opt", ForeignKey("chuni_static_opt.id", ondelete="SET NULL", onupdate="cascade")),
|
||||
PrimaryKeyConstraint(
|
||||
"presetId", "version", name="chuni_static_login_bonus_preset_pk"
|
||||
),
|
||||
@@ -157,6 +263,7 @@ login_bonus = Table(
|
||||
Column("itemNum", Integer, nullable=False),
|
||||
Column("needLoginDayCount", Integer, nullable=False),
|
||||
Column("loginBonusCategoryType", Integer, nullable=False),
|
||||
Column("opt", BIGINT),
|
||||
UniqueConstraint(
|
||||
"version", "presetId", "loginBonusId", name="chuni_static_login_bonus_uk"
|
||||
),
|
||||
@@ -170,10 +277,18 @@ login_bonus = Table(
|
||||
ondelete="CASCADE",
|
||||
name="chuni_static_login_bonus_ibfk_1",
|
||||
),
|
||||
ForeignKeyConstraint(
|
||||
["opt"],
|
||||
[
|
||||
"chuni_static_opt.id",
|
||||
],
|
||||
onupdate="SET NULL",
|
||||
ondelete="CASCADE",
|
||||
name="chuni_static_login_bonus_ibfk_2",
|
||||
),
|
||||
mysql_charset="utf8mb4",
|
||||
)
|
||||
|
||||
|
||||
class ChuniStaticData(BaseData):
|
||||
async def put_login_bonus(
|
||||
self,
|
||||
@@ -186,6 +301,7 @@ class ChuniStaticData(BaseData):
|
||||
item_num: int,
|
||||
need_login_day_count: int,
|
||||
login_bonus_category_type: int,
|
||||
opt_id: int = None
|
||||
) -> Optional[int]:
|
||||
sql = insert(login_bonus).values(
|
||||
version=version,
|
||||
@@ -197,6 +313,7 @@ class ChuniStaticData(BaseData):
|
||||
itemNum=item_num,
|
||||
needLoginDayCount=need_login_day_count,
|
||||
loginBonusCategoryType=login_bonus_category_type,
|
||||
opt=coalesce(login_bonus.c.opt, opt_id)
|
||||
)
|
||||
|
||||
conflict = sql.on_duplicate_key_update(
|
||||
@@ -205,6 +322,7 @@ class ChuniStaticData(BaseData):
|
||||
itemNum=item_num,
|
||||
needLoginDayCount=need_login_day_count,
|
||||
loginBonusCategoryType=login_bonus_category_type,
|
||||
opt=coalesce(login_bonus.c.opt, opt_id)
|
||||
)
|
||||
|
||||
result = await self.execute(conflict)
|
||||
@@ -246,17 +364,19 @@ class ChuniStaticData(BaseData):
|
||||
return result.fetchone()
|
||||
|
||||
async def put_login_bonus_preset(
|
||||
self, version: int, preset_id: int, preset_name: str, is_enabled: bool
|
||||
self, version: int, preset_id: int, preset_name: str, isEnabled: bool, opt_id: int = None
|
||||
) -> Optional[int]:
|
||||
sql = insert(login_bonus_preset).values(
|
||||
presetId=preset_id,
|
||||
version=version,
|
||||
presetName=preset_name,
|
||||
isEnabled=is_enabled,
|
||||
isEnabled=isEnabled,
|
||||
opt=coalesce(login_bonus_preset.c.opt, opt_id)
|
||||
)
|
||||
|
||||
conflict = sql.on_duplicate_key_update(
|
||||
presetName=preset_name, isEnabled=is_enabled
|
||||
|
||||
# Chuni has a habit of including duplicates in its opt files, so only update opt if it's null
|
||||
conflict = sql.on_duplicate_key_update(
|
||||
presetName=preset_name, isEnabled=isEnabled, opt=coalesce(login_bonus_preset.c.opt, opt_id)
|
||||
)
|
||||
|
||||
result = await self.execute(conflict)
|
||||
@@ -265,12 +385,12 @@ class ChuniStaticData(BaseData):
|
||||
return result.lastrowid
|
||||
|
||||
async def get_login_bonus_presets(
|
||||
self, version: int, is_enabled: bool = True
|
||||
self, version: int, isEnabled: bool = True
|
||||
) -> Optional[List[Row]]:
|
||||
sql = login_bonus_preset.select(
|
||||
and_(
|
||||
login_bonus_preset.c.version == version,
|
||||
login_bonus_preset.c.isEnabled == is_enabled,
|
||||
login_bonus_preset.c.isEnabled == isEnabled,
|
||||
)
|
||||
)
|
||||
|
||||
@@ -280,13 +400,13 @@ class ChuniStaticData(BaseData):
|
||||
return result.fetchall()
|
||||
|
||||
async def put_event(
|
||||
self, version: int, event_id: int, type: int, name: str
|
||||
self, version: int, event_id: int, type: int, name: str, opt_id: int = None
|
||||
) -> Optional[int]:
|
||||
sql = insert(events).values(
|
||||
version=version, eventId=event_id, type=type, name=name
|
||||
version=version, eventId=event_id, type=type, name=name, opt=coalesce(events.c.opt, opt_id)
|
||||
)
|
||||
|
||||
conflict = sql.on_duplicate_key_update(name=name)
|
||||
conflict = sql.on_duplicate_key_update(name=name, opt=coalesce(events.c.opt, opt_id))
|
||||
|
||||
result = await self.execute(conflict)
|
||||
if result is None:
|
||||
@@ -354,6 +474,7 @@ class ChuniStaticData(BaseData):
|
||||
genre: str,
|
||||
jacketPath: str,
|
||||
we_tag: str,
|
||||
opt_id: int = None
|
||||
) -> Optional[int]:
|
||||
sql = insert(music).values(
|
||||
version=version,
|
||||
@@ -365,6 +486,7 @@ class ChuniStaticData(BaseData):
|
||||
genre=genre,
|
||||
jacketPath=jacketPath,
|
||||
worldsEndTag=we_tag,
|
||||
opt=coalesce(music.c.opt, opt_id)
|
||||
)
|
||||
|
||||
conflict = sql.on_duplicate_key_update(
|
||||
@@ -374,6 +496,7 @@ class ChuniStaticData(BaseData):
|
||||
genre=genre,
|
||||
jacketPath=jacketPath,
|
||||
worldsEndTag=we_tag,
|
||||
opt=coalesce(music.c.opt, opt_id)
|
||||
)
|
||||
|
||||
result = await self.execute(conflict)
|
||||
@@ -389,6 +512,7 @@ class ChuniStaticData(BaseData):
|
||||
expiration_days: int,
|
||||
consume_type: int,
|
||||
selling_appeal: bool,
|
||||
opt_id: int = None
|
||||
) -> Optional[int]:
|
||||
sql = insert(charge).values(
|
||||
version=version,
|
||||
@@ -397,6 +521,7 @@ class ChuniStaticData(BaseData):
|
||||
expirationDays=expiration_days,
|
||||
consumeType=consume_type,
|
||||
sellingAppeal=selling_appeal,
|
||||
opt=coalesce(charge.c.opt, opt_id)
|
||||
)
|
||||
|
||||
conflict = sql.on_duplicate_key_update(
|
||||
@@ -404,6 +529,7 @@ class ChuniStaticData(BaseData):
|
||||
expirationDays=expiration_days,
|
||||
consumeType=consume_type,
|
||||
sellingAppeal=selling_appeal,
|
||||
opt=coalesce(charge.c.opt, opt_id)
|
||||
)
|
||||
|
||||
result = await self.execute(conflict)
|
||||
@@ -454,14 +580,13 @@ class ChuniStaticData(BaseData):
|
||||
return result.fetchone()
|
||||
|
||||
async def get_song(self, music_id: int) -> Optional[Row]:
|
||||
sql = music.select(music.c.id == music_id)
|
||||
sql = music.select(music.c.songId == music_id)
|
||||
|
||||
result = await self.execute(sql)
|
||||
if result is None:
|
||||
return None
|
||||
return result.fetchone()
|
||||
|
||||
|
||||
async def put_avatar(
|
||||
self,
|
||||
version: int,
|
||||
@@ -470,6 +595,10 @@ class ChuniStaticData(BaseData):
|
||||
category: int,
|
||||
iconPath: str,
|
||||
texturePath: str,
|
||||
isEnabled: int,
|
||||
defaultHave: int,
|
||||
sortName: str,
|
||||
opt_id: int = None
|
||||
) -> Optional[int]:
|
||||
sql = insert(avatar).values(
|
||||
version=version,
|
||||
@@ -478,6 +607,10 @@ class ChuniStaticData(BaseData):
|
||||
category=category,
|
||||
iconPath=iconPath,
|
||||
texturePath=texturePath,
|
||||
isEnabled=isEnabled,
|
||||
defaultHave=defaultHave,
|
||||
sortName=sortName,
|
||||
opt=coalesce(avatar.c.opt, opt_id)
|
||||
)
|
||||
|
||||
conflict = sql.on_duplicate_key_update(
|
||||
@@ -485,6 +618,10 @@ class ChuniStaticData(BaseData):
|
||||
category=category,
|
||||
iconPath=iconPath,
|
||||
texturePath=texturePath,
|
||||
isEnabled=isEnabled,
|
||||
defaultHave=defaultHave,
|
||||
sortName=sortName,
|
||||
opt=coalesce(avatar.c.opt, opt_id)
|
||||
)
|
||||
|
||||
result = await self.execute(conflict)
|
||||
@@ -492,17 +629,274 @@ class ChuniStaticData(BaseData):
|
||||
return None
|
||||
return result.lastrowid
|
||||
|
||||
async def get_avatar_items(self, version: int, category: int, enabled_only: bool = True) -> Optional[List[Dict]]:
|
||||
if enabled_only:
|
||||
sql = select(avatar).where((avatar.c.version == version) & (avatar.c.category == category) & (avatar.c.isEnabled)).order_by(avatar.c.sortName)
|
||||
else:
|
||||
sql = select(avatar).where((avatar.c.version == version) & (avatar.c.category == category)).order_by(avatar.c.sortName)
|
||||
result = await self.execute(sql)
|
||||
|
||||
if result is None:
|
||||
return None
|
||||
return result.fetchall()
|
||||
|
||||
async def put_nameplate(
|
||||
self,
|
||||
version: int,
|
||||
nameplateId: int,
|
||||
name: str,
|
||||
texturePath: str,
|
||||
isEnabled: int,
|
||||
defaultHave: int,
|
||||
sortName: str,
|
||||
opt_id: int = None
|
||||
) -> Optional[int]:
|
||||
sql = insert(nameplate).values(
|
||||
version=version,
|
||||
nameplateId=nameplateId,
|
||||
name=name,
|
||||
texturePath=texturePath,
|
||||
isEnabled=isEnabled,
|
||||
defaultHave=defaultHave,
|
||||
sortName=sortName,
|
||||
opt=coalesce(nameplate.c.opt, opt_id)
|
||||
)
|
||||
|
||||
conflict = sql.on_duplicate_key_update(
|
||||
name=name,
|
||||
texturePath=texturePath,
|
||||
isEnabled=isEnabled,
|
||||
defaultHave=defaultHave,
|
||||
sortName=sortName,
|
||||
opt=coalesce(nameplate.c.opt, opt_id)
|
||||
)
|
||||
|
||||
result = await self.execute(conflict)
|
||||
if result is None:
|
||||
return None
|
||||
return result.lastrowid
|
||||
|
||||
async def get_nameplates(self, version: int, enabled_only: bool = True) -> Optional[List[Dict]]:
|
||||
if enabled_only:
|
||||
sql = select(nameplate).where((nameplate.c.version == version) & (nameplate.c.isEnabled)).order_by(nameplate.c.sortName)
|
||||
else:
|
||||
sql = select(nameplate).where(nameplate.c.version == version).order_by(nameplate.c.sortName)
|
||||
result = await self.execute(sql)
|
||||
|
||||
if result is None:
|
||||
return None
|
||||
return result.fetchall()
|
||||
|
||||
async def put_trophy(
|
||||
self,
|
||||
version: int,
|
||||
trophyId: int,
|
||||
name: str,
|
||||
rareType: int,
|
||||
isEnabled: int,
|
||||
defaultHave: int,
|
||||
opt_id: int = None
|
||||
) -> Optional[int]:
|
||||
sql = insert(trophy).values(
|
||||
version=version,
|
||||
trophyId=trophyId,
|
||||
name=name,
|
||||
rareType=rareType,
|
||||
isEnabled=isEnabled,
|
||||
defaultHave=defaultHave,
|
||||
opt=coalesce(trophy.c.opt, opt_id)
|
||||
)
|
||||
|
||||
conflict = sql.on_duplicate_key_update(
|
||||
name=name,
|
||||
rareType=rareType,
|
||||
isEnabled=isEnabled,
|
||||
defaultHave=defaultHave,
|
||||
opt=coalesce(trophy.c.opt, opt_id)
|
||||
)
|
||||
|
||||
result = await self.execute(conflict)
|
||||
if result is None:
|
||||
return None
|
||||
return result.lastrowid
|
||||
|
||||
async def get_trophies(self, version: int, enabled_only: bool = True) -> Optional[List[Dict]]:
|
||||
if enabled_only:
|
||||
sql = select(trophy).where((trophy.c.version == version) & (trophy.c.isEnabled)).order_by(trophy.c.name)
|
||||
else:
|
||||
sql = select(trophy).where(trophy.c.version == version).order_by(trophy.c.name)
|
||||
result = await self.execute(sql)
|
||||
|
||||
if result is None:
|
||||
return None
|
||||
return result.fetchall()
|
||||
|
||||
async def put_map_icon(
|
||||
self,
|
||||
version: int,
|
||||
mapIconId: int,
|
||||
name: str,
|
||||
sortName: str,
|
||||
iconPath: str,
|
||||
isEnabled: int,
|
||||
defaultHave: int,
|
||||
opt_id: int = None
|
||||
) -> Optional[int]:
|
||||
sql = insert(map_icon).values(
|
||||
version=version,
|
||||
mapIconId=mapIconId,
|
||||
name=name,
|
||||
sortName=sortName,
|
||||
iconPath=iconPath,
|
||||
isEnabled=isEnabled,
|
||||
defaultHave=defaultHave,
|
||||
opt=coalesce(map_icon.c.opt, opt_id)
|
||||
)
|
||||
|
||||
conflict = sql.on_duplicate_key_update(
|
||||
name=name,
|
||||
sortName=sortName,
|
||||
iconPath=iconPath,
|
||||
isEnabled=isEnabled,
|
||||
defaultHave=defaultHave,
|
||||
opt=coalesce(map_icon.c.opt, opt_id)
|
||||
)
|
||||
|
||||
result = await self.execute(conflict)
|
||||
if result is None:
|
||||
return None
|
||||
return result.lastrowid
|
||||
|
||||
async def get_map_icons(self, version: int, enabled_only: bool = True) -> Optional[List[Dict]]:
|
||||
if enabled_only:
|
||||
sql = select(map_icon).where((map_icon.c.version == version) & (map_icon.c.isEnabled)).order_by(map_icon.c.sortName)
|
||||
else:
|
||||
sql = select(map_icon).where(map_icon.c.version == version).order_by(map_icon.c.sortName)
|
||||
result = await self.execute(sql)
|
||||
|
||||
if result is None:
|
||||
return None
|
||||
return result.fetchall()
|
||||
|
||||
async def put_system_voice(
|
||||
self,
|
||||
version: int,
|
||||
voiceId: int,
|
||||
name: str,
|
||||
sortName: str,
|
||||
imagePath: str,
|
||||
isEnabled: int,
|
||||
defaultHave: int,
|
||||
opt_id: int = None
|
||||
) -> Optional[int]:
|
||||
sql = insert(system_voice).values(
|
||||
version=version,
|
||||
voiceId=voiceId,
|
||||
name=name,
|
||||
sortName=sortName,
|
||||
imagePath=imagePath,
|
||||
isEnabled=isEnabled,
|
||||
defaultHave=defaultHave,
|
||||
opt=coalesce(system_voice.c.opt, opt_id)
|
||||
)
|
||||
|
||||
conflict = sql.on_duplicate_key_update(
|
||||
name=name,
|
||||
sortName=sortName,
|
||||
imagePath=imagePath,
|
||||
isEnabled=isEnabled,
|
||||
defaultHave=defaultHave,
|
||||
opt=coalesce(system_voice.c.opt, opt_id)
|
||||
)
|
||||
|
||||
result = await self.execute(conflict)
|
||||
if result is None:
|
||||
return None
|
||||
return result.lastrowid
|
||||
|
||||
async def get_system_voices(self, version: int, enabled_only: bool = True) -> Optional[List[Dict]]:
|
||||
if enabled_only:
|
||||
sql = select(system_voice).where((system_voice.c.version == version) & (system_voice.c.isEnabled)).order_by(system_voice.c.sortName)
|
||||
else:
|
||||
sql = select(system_voice).where(system_voice.c.version == version).order_by(system_voice.c.sortName)
|
||||
result = await self.execute(sql)
|
||||
|
||||
if result is None:
|
||||
return None
|
||||
return result.fetchall()
|
||||
|
||||
async def put_character(
|
||||
self,
|
||||
version: int,
|
||||
characterId: int,
|
||||
name: str,
|
||||
sortName: str,
|
||||
worksName: str,
|
||||
rareType: int,
|
||||
imagePath1: str,
|
||||
imagePath2: str,
|
||||
imagePath3: str,
|
||||
isEnabled: int,
|
||||
defaultHave: int,
|
||||
opt_id: int = None
|
||||
) -> Optional[int]:
|
||||
sql = insert(character).values(
|
||||
version=version,
|
||||
characterId=characterId,
|
||||
name=name,
|
||||
sortName=sortName,
|
||||
worksName=worksName,
|
||||
rareType=rareType,
|
||||
imagePath1=imagePath1,
|
||||
imagePath2=imagePath2,
|
||||
imagePath3=imagePath3,
|
||||
isEnabled=isEnabled,
|
||||
defaultHave=defaultHave,
|
||||
opt=coalesce(character.c.opt, opt_id)
|
||||
)
|
||||
|
||||
conflict = sql.on_duplicate_key_update(
|
||||
name=name,
|
||||
sortName=sortName,
|
||||
worksName=worksName,
|
||||
rareType=rareType,
|
||||
imagePath1=imagePath1,
|
||||
imagePath2=imagePath2,
|
||||
imagePath3=imagePath3,
|
||||
isEnabled=isEnabled,
|
||||
defaultHave=defaultHave,
|
||||
opt=coalesce(character.c.opt, opt_id)
|
||||
)
|
||||
|
||||
result = await self.execute(conflict)
|
||||
if result is None:
|
||||
return None
|
||||
return result.lastrowid
|
||||
|
||||
async def get_characters(self, version: int, enabled_only: bool = True) -> Optional[List[Dict]]:
|
||||
if enabled_only:
|
||||
sql = select(character).where((character.c.version == version) & (character.c.isEnabled)).order_by(character.c.sortName)
|
||||
else:
|
||||
sql = select(character).where(character.c.version == version).order_by(character.c.sortName)
|
||||
result = await self.execute(sql)
|
||||
|
||||
if result is None:
|
||||
return None
|
||||
return result.fetchall()
|
||||
|
||||
async def put_gacha(
|
||||
self,
|
||||
version: int,
|
||||
gacha_id: int,
|
||||
gacha_name: int,
|
||||
opt_id: int = None,
|
||||
**gacha_data,
|
||||
) -> Optional[int]:
|
||||
sql = insert(gachas).values(
|
||||
version=version,
|
||||
gachaId=gacha_id,
|
||||
gachaName=gacha_name,
|
||||
opt=coalesce(gachas.c.opt, opt_id),
|
||||
**gacha_data,
|
||||
)
|
||||
|
||||
@@ -510,6 +904,7 @@ class ChuniStaticData(BaseData):
|
||||
version=version,
|
||||
gachaId=gacha_id,
|
||||
gachaName=gacha_name,
|
||||
opt=coalesce(gachas.c.opt, opt_id),
|
||||
**gacha_data,
|
||||
)
|
||||
|
||||
@@ -579,10 +974,10 @@ class ChuniStaticData(BaseData):
|
||||
return None
|
||||
return result.fetchone()
|
||||
|
||||
async def put_card(self, version: int, card_id: int, **card_data) -> Optional[int]:
|
||||
sql = insert(cards).values(version=version, cardId=card_id, **card_data)
|
||||
async def put_card(self, version: int, card_id: int, opt_id: int = None, **card_data) -> Optional[int]:
|
||||
sql = insert(cards).values(version=version, cardId=card_id, opt=coalesce(cards.c.opt, opt_id), **card_data)
|
||||
|
||||
conflict = sql.on_duplicate_key_update(**card_data)
|
||||
conflict = sql.on_duplicate_key_update(opt=coalesce(cards.c.opt, opt_id), **card_data)
|
||||
|
||||
result = await self.execute(conflict)
|
||||
if result is None:
|
||||
@@ -596,4 +991,86 @@ class ChuniStaticData(BaseData):
|
||||
result = await self.execute(sql)
|
||||
if result is None:
|
||||
return None
|
||||
return result.fetchone()
|
||||
|
||||
async def put_opt(self, version: int, folder: str, sequence: int) -> Optional[int]:
|
||||
sql = insert(opts).values(version=version, name=folder, sequence=sequence)
|
||||
|
||||
conflict = sql.on_duplicate_key_update(sequence=sequence, whenRead=datetime.now())
|
||||
|
||||
result = await self.execute(conflict)
|
||||
if result is None:
|
||||
self.logger.warning(f"Failed to insert opt! version {version} folder {folder} sequence {sequence}")
|
||||
return None
|
||||
return result.lastrowid
|
||||
|
||||
async def get_opt_by_version_folder(self, version: int, folder: str) -> Optional[Row]:
|
||||
result = await self.execute(opts.select(and_(
|
||||
opts.c.version == version,
|
||||
opts.c.name == folder,
|
||||
)))
|
||||
|
||||
if result is None:
|
||||
return None
|
||||
return result.fetchone()
|
||||
|
||||
async def get_opt_by_version_sequence(self, version: int, sequence: str) -> Optional[Row]:
|
||||
result = await self.execute(opts.select(and_(
|
||||
opts.c.version == version,
|
||||
opts.c.sequence == sequence,
|
||||
)))
|
||||
|
||||
if result is None:
|
||||
return None
|
||||
return result.fetchone()
|
||||
|
||||
async def get_opts_by_version(self, version: int) -> Optional[List[Row]]:
|
||||
result = await self.execute(opts.select(opts.c.version == version))
|
||||
|
||||
if result is None:
|
||||
return None
|
||||
return result.fetchall()
|
||||
|
||||
async def get_opts_enabled_by_version(self, version: int) -> Optional[List[Row]]:
|
||||
result = await self.execute(opts.select(and_(
|
||||
opts.c.version == version,
|
||||
opts.c.isEnable == True,
|
||||
)))
|
||||
|
||||
if result is None:
|
||||
return None
|
||||
return result.fetchall()
|
||||
|
||||
async def get_latest_enabled_opt_by_version(self, version: int) -> Optional[Row]:
|
||||
result = await self.execute(
|
||||
opts.select(and_(
|
||||
opts.c.version == version,
|
||||
opts.c.isEnable == True,
|
||||
)).order_by(opts.c.sequence.desc())
|
||||
)
|
||||
|
||||
if result is None:
|
||||
return None
|
||||
return result.fetchone()
|
||||
|
||||
async def get_opts(self) -> Optional[List[Row]]:
|
||||
result = await self.execute(opts.select())
|
||||
|
||||
if result is None:
|
||||
return None
|
||||
return result.fetchall()
|
||||
|
||||
async def set_opt_enabled(self, opt_id: int, enabled: bool) -> bool:
|
||||
result = await self.execute(opts.update(opts.c.id == opt_id).values(isEnable=enabled))
|
||||
|
||||
if result is None:
|
||||
self.logger.error(f"Failed to set opt enabled status to {enabled} for opt {opt_id}")
|
||||
return False
|
||||
return True
|
||||
|
||||
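The opt helpers added above are the database side of the new option-package tracking; a minimal usage sketch follows (hypothetical calling code, not part of the patch — it assumes a ChuniStaticData instance named static_data, CHUNITHM version 14, and an option folder named "A123"):

    # Sketch only: register an option folder, then resolve the newest enabled one.
    async def example_opt_flow(static_data) -> None:
        # The importer records folder A123 as sequence 123 for version 14; the
        # returned row id is what the put_* helpers above accept as opt_id.
        opt_id = await static_data.put_opt(14, "A123", 123)
        if opt_id is None:
            return

        # The title server can later pick the newest enabled option package.
        latest = await static_data.get_latest_enabled_opt_by_version(14)
        if latest is not None:
            print(latest.name, latest.sequence)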
309
titles/chuni/templates/chuni_avatar.jinja
Normal file
@@ -0,0 +1,309 @@
|
||||
{% extends "core/templates/index.jinja" %}
|
||||
{% block content %}
|
||||
<style>
|
||||
{% include 'titles/chuni/templates/css/chuni_style.css' %}
|
||||
</style>
|
||||
|
||||
<div class="container">
|
||||
{% include 'titles/chuni/templates/chuni_header.jinja' %}
|
||||
|
||||
<!-- AVATAR PREVIEW -->
|
||||
<div class="row">
|
||||
<div class="col-lg-8 m-auto mt-3">
|
||||
<div class="card bg-card rounded">
|
||||
<table class="table-large table-rowdistinct">
|
||||
<caption align="top">AVATAR</caption>
|
||||
<tr><td style="height:340px; width:50%" rowspan=8>
|
||||
<img class="avatar-preview avatar-preview-platform" src="img/avatar-platform.png">
|
||||
<img id="preview1_back" class="avatar-preview avatar-preview-back" src="">
|
||||
<img id="preview1_skin" class="avatar-preview avatar-preview-skin-rightfoot" src="">
|
||||
<img id="preview2_skin" class="avatar-preview avatar-preview-skin-leftfoot" src="">
|
||||
<img id="preview3_skin" class="avatar-preview avatar-preview-skin-body" src="">
|
||||
<img id="preview1_wear" class="avatar-preview avatar-preview-wear" src="">
|
||||
<img class="avatar-preview avatar-preview-common" src="img/avatar-common.png">
|
||||
<img id="preview1_head" class="avatar-preview avatar-preview-head" src="">
|
||||
<img id="preview1_face" class="avatar-preview avatar-preview-face" src="">
|
||||
<img id="preview1_item" class="avatar-preview avatar-preview-item-righthand" src="">
|
||||
<img id="preview2_item" class="avatar-preview avatar-preview-item-lefthand" src="">
|
||||
<img id="preview1_front" class="avatar-preview avatar-preview-front" src="">
|
||||
</td>
|
||||
</tr>
|
||||
<tr><td>Wear:</td><td><div id="name_wear"></div></td></tr>
|
||||
<tr><td>Face:</td><td><div id="name_face"></div></td></tr>
|
||||
<tr><td>Head:</td><td><div id="name_head"></div></td></tr>
|
||||
<tr><td>Skin:</td><td><div id="name_skin"></div></td></tr>
|
||||
<tr><td>Item:</td><td><div id="name_item"></div></td></tr>
|
||||
<tr><td>Front:</td><td><div id="name_front"></div></td></tr>
|
||||
<tr><td>Back:</td><td><div id="name_back"></div></td></tr>
|
||||
|
||||
<tr><td colspan=3 style="padding:8px 0px; text-align: center;">
|
||||
<button id="save-btn" class="btn btn-primary" style="width:140px;" onClick="saveAvatar()">SAVE</button>
|
||||
<button id="reset-btn" class="btn btn-danger" style="width:140px;" onClick="resetAvatar()">RESET</button>
|
||||
</td></tr>
|
||||
</table>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- ACCESSORY SELECTION -->
|
||||
<div class="row col-lg-8 m-auto mt-3 scrolling-lists-lg card bg-card rounded">
|
||||
|
||||
<!-- WEAR ACCESSORIES -->
|
||||
<button class="collapsible">Wear: {{ wears|length }}/{{ total_wears }} {{ "items" if total_wears > 1 else "item" }}</button>
|
||||
<div id="scrollable-wear" class="collapsible-content">
|
||||
{% for item in wears.values() %}
|
||||
<img id="{{ item["id"] }}" onclick="changeAccessory('wear', '{{ item["id"] }}', '{{ item["name"] }}', '{{ item["texturePath"] }}')" src="img/avatar/{{ item["iconPath"] }}" alt="{{ item["name"] }}">
|
||||
<span id="wear-br-{{ loop.index }}"></span>
|
||||
{% endfor %}
|
||||
</div>
|
||||
<hr>
|
||||
|
||||
<!-- FACE ACCESSORIES -->
|
||||
<button class="collapsible">Face: {{ faces|length }}/{{ total_faces }} {{ "items" if total_faces > 1 else "item" }}</button>
|
||||
<div id="scrollable-face" class="collapsible-content">
|
||||
{% for item in faces.values() %}
|
||||
<img id="{{ item["id"] }}" onclick="changeAccessory('face', '{{ item["id"] }}', '{{ item["name"] }}', '{{ item["texturePath"] }}')" src="img/avatar/{{ item["iconPath"] }}" alt="{{ item["name"] }}">
|
||||
<span id="face-br-{{ loop.index }}"></span>
|
||||
{% endfor %}
|
||||
</div>
|
||||
<hr>
|
||||
|
||||
<!-- HEAD ACCESSORIES -->
|
||||
<button class="collapsible">Head: {{ heads|length }}/{{ total_heads }} {{ "items" if total_heads > 1 else "item" }}</button>
|
||||
<div id="scrollable-head" class="collapsible-content">
|
||||
{% for item in heads.values() %}
|
||||
<img id="{{ item["id"] }}" onclick="changeAccessory('head', '{{ item["id"] }}', '{{ item["name"] }}', '{{ item["texturePath"] }}')" src="img/avatar/{{ item["iconPath"] }}" alt="{{ item["name"] }}">
|
||||
<span id="head-br-{{ loop.index }}"></span>
|
||||
{% endfor %}
|
||||
</div>
|
||||
<hr>
|
||||
|
||||
<!-- SKIN ACCESSORIES -->
|
||||
<button class="collapsible">Skin: {{ skins|length }}/{{ total_skins }} {{ "items" if total_skins > 1 else "item" }}</button>
|
||||
<div id="scrollable-skin" class="collapsible-content">
|
||||
{% for item in skins.values() %}
|
||||
<img id="{{ item["id"] }}" onclick="changeAccessory('skin', '{{ item["id"] }}', '{{ item["name"] }}', '{{ item["texturePath"] }}')" src="img/avatar/{{ item["iconPath"] }}" alt="{{ item["name"] }}">
|
||||
<span id="skin-br-{{ loop.index }}"></span>
|
||||
{% endfor %}
|
||||
</div>
|
||||
<hr>
|
||||
|
||||
<!-- ITEM ACCESSORIES -->
|
||||
<button class="collapsible">Item: {{ items|length }}/{{ total_items }} {{ "items" if total_items > 1 else "item" }}</button>
|
||||
<div id="scrollable-item" class="collapsible-content">
|
||||
{% for item in items.values() %}
|
||||
<img id="{{ item["id"] }}" onclick="changeAccessory('item', '{{ item["id"] }}', '{{ item["name"] }}', '{{ item["texturePath"] }}')" src="img/avatar/{{ item["iconPath"] }}" alt="{{ item["name"] }}">
|
||||
<span id="item-br-{{ loop.index }}"></span>
|
||||
{% endfor %}
|
||||
</div>
|
||||
<hr>
|
||||
|
||||
<!-- FRONT ACCESSORIES -->
|
||||
<button class="collapsible">Front: {{ fronts|length }}/{{ total_fronts }} {{ "items" if total_fronts > 1 else "item" }}</button>
|
||||
<div id="scrollable-front" class="collapsible-content">
|
||||
{% for item in fronts.values() %}
|
||||
<img id="{{ item["id"] }}" onclick="changeAccessory('front', '{{ item["id"] }}', '{{ item["name"] }}', '{{ item["texturePath"] }}')" src="img/avatar/{{ item["iconPath"] }}" alt="{{ item["name"] }}">
|
||||
<span id="front-br-{{ loop.index }}"></span>
|
||||
{% endfor %}
|
||||
</div>
|
||||
<hr>
|
||||
|
||||
<!-- BACK ACCESSORIES -->
|
||||
<button class="collapsible">Back: {{ backs|length }}/{{ total_backs }} {{ "items" if total_backs > 1 else "item" }}</button>
|
||||
<div id="scrollable-back" class="collapsible-content">
|
||||
{% for item in backs.values() %}
|
||||
<img id="{{ item["id"] }}" onclick="changeAccessory('back', '{{ item["id"] }}', '{{ item["name"] }}', '{{ item["texturePath"] }}')" src="img/avatar/{{ item["iconPath"] }}" alt="{{ item["name"] }}">
|
||||
<span id="back-br-{{ loop.index }}"></span>
|
||||
{% endfor %}
|
||||
</div>
|
||||
|
||||
</div>
|
||||
|
||||
{% if error is defined %}
|
||||
{% include "core/templates/widgets/err_banner.jinja" %}
|
||||
{% endif %}
|
||||
</div>
|
||||
|
||||
{% if wears|length == 0 or faces|length == 0 or heads|length == 0 or skins|length == 0 or items|length == 0 or fronts|length == 0 or backs|length == 0 %}
|
||||
<script>
|
||||
// Server DB lacks necessary info. Maybe the importer was never run for this version?
|
||||
document.getElementById("name_wear").innerHTML = "Server DB needs upgraded or is not populated with necessary data";
|
||||
</script>
|
||||
{% else %}
|
||||
<script>
|
||||
{% include 'titles/chuni/templates/scripts/collapsibles.js' %}
|
||||
|
||||
///
|
||||
/// This script handles all updates to the avatar
|
||||
///
|
||||
total_items = 0;
|
||||
orig_id = 1;
|
||||
orig_name = 2;
|
||||
orig_img = 3;
|
||||
curr_id = 4;
|
||||
curr_name = 5;
|
||||
curr_img = 6;
|
||||
accessories = {
|
||||
// [total_items, orig_id, orig_name, orig_img, curr_id, curr_name, curr_img]
|
||||
"wear":["{{ wears|length }}",
|
||||
"{{ profile.avatarWear }}",
|
||||
"{{ wears[profile.avatarWear]["name"] }}",
|
||||
"{{ wears[profile.avatarWear]["texturePath"] }}", "", "", "" ],
|
||||
|
||||
"face":["{{ faces|length }}",
|
||||
"{{ profile.avatarFace }}",
|
||||
"{{ faces[profile.avatarFace]["name"] }}",
|
||||
"{{ faces[profile.avatarFace]["texturePath"] }}", "", "", "" ],
|
||||
|
||||
"head":["{{ heads|length }}",
|
||||
"{{ profile.avatarHead }}",
|
||||
"{{ heads[profile.avatarHead]["name"] }}",
|
||||
"{{ heads[profile.avatarHead]["texturePath"] }}", "", "", "" ],
|
||||
|
||||
"skin":["{{ skins|length }}",
|
||||
"{{ profile.avatarSkin }}",
|
||||
"{{ skins[profile.avatarSkin]["name"] }}",
|
||||
"{{ skins[profile.avatarSkin]["texturePath"] }}", "", "", "" ],
|
||||
|
||||
"item":["{{ items|length }}",
|
||||
"{{ profile.avatarItem }}",
|
||||
"{{ items[profile.avatarItem]["name"] }}",
|
||||
"{{ items[profile.avatarItem]["texturePath"] }}", "", "", "" ],
|
||||
|
||||
"front":["{{ fronts|length }}",
|
||||
"{{ profile.avatarFront }}",
|
||||
"{{ fronts[profile.avatarFront]["name"] }}",
|
||||
"{{ fronts[profile.avatarFront]["texturePath"] }}", "", "", "" ],
|
||||
|
||||
"back":["{{ backs|length }}",
|
||||
"{{ profile.avatarBack }}",
|
||||
"{{ backs[profile.avatarBack]["name"] }}",
|
||||
"{{ backs[profile.avatarBack]["texturePath"] }}", "", "", "" ]
|
||||
};
|
||||
types = Object.keys(accessories);
|
||||
|
||||
function enableButtons(enabled) {
|
||||
document.getElementById("reset-btn").disabled = !enabled;
|
||||
document.getElementById("save-btn").disabled = !enabled;
|
||||
}
|
||||
|
||||
function changeAccessory(type, id, name, img) {
|
||||
// clear select style for old accessory
|
||||
var element = document.getElementById(accessories[type][curr_id]);
|
||||
if (element) {
|
||||
element.style.backgroundColor="inherit";
|
||||
}
|
||||
|
||||
// set new accessory
|
||||
accessories[type][curr_id] = id;
|
||||
accessories[type][curr_name] = name;
|
||||
accessories[type][curr_img] = img;
|
||||
|
||||
// update select style for new accessory
|
||||
element = document.getElementById(id);
|
||||
if (element) {
|
||||
element.style.backgroundColor="#5F5";
|
||||
}
|
||||
|
||||
// Update the avatar preview and enable buttons
|
||||
updatePreview();
|
||||
if (id != accessories[type][orig_id]) {
|
||||
enableButtons(true);
|
||||
}
|
||||
}
|
||||
|
||||
function resetAvatar() {
|
||||
for (const type of types) {
|
||||
changeAccessory(type, accessories[type][orig_id], accessories[type][orig_name], accessories[type][orig_img]);
|
||||
}
|
||||
// disable the save/reset buttons until something changes
|
||||
enableButtons(false);
|
||||
}
|
||||
|
||||
function getRandomInt(min, max) {
|
||||
min = Math.ceil(min);
|
||||
max = Math.floor(max);
|
||||
return Math.floor(Math.random() * (max - min + 1)) + min;
|
||||
}
|
||||
|
||||
function updatePreview() {
|
||||
for (const type of types) {
|
||||
for (let i = 1; i <= 3; i++) {
|
||||
var img = document.getElementById("preview" + i + "_" + type);
|
||||
if (img) {
|
||||
img.src = "img/avatar/" + accessories[type][curr_img];
|
||||
}
|
||||
}
|
||||
document.getElementById("name_" + type).innerHTML = accessories[type][curr_name];
|
||||
}
|
||||
}
|
||||
|
||||
function saveAvatar() {
|
||||
$.post("/game/chuni/update.avatar", { wear: accessories["wear"][curr_id],
|
||||
face: accessories["face"][curr_id],
|
||||
head: accessories["head"][curr_id],
|
||||
skin: accessories["skin"][curr_id],
|
||||
item: accessories["item"][curr_id],
|
||||
front: accessories["front"][curr_id],
|
||||
back: accessories["back"][curr_id] })
|
||||
.done(function (data) {
|
||||
// set the current as the original and disable buttons
|
||||
for (const type of types) {
|
||||
accessories[type][orig_id] = accessories[type][curr_id];
|
||||
accessories[type][orig_name] = accessories[type][curr_name];
|
||||
accessories[type][orig_img] = accessories[type][curr_img];
|
||||
}
|
||||
enableButtons(false);
|
||||
})
|
||||
.fail(function () {
|
||||
alert("Failed to save avatar.");
|
||||
});
|
||||
}
|
||||
|
||||
function resizePage() {
|
||||
//
|
||||
// Handles item organization in the collapsible scrollables to try to keep the items-per-row presentable
|
||||
//
|
||||
// @note Yes, we could simply let the div overflow like usual. This could however get really nasty looking
|
||||
// when dealing with something like userbox characters where there are 1000s of possible items to
|
||||
// display. This approach gives us full control over where items in the div wrap, allowing us to try
|
||||
// to keep things presentable.
|
||||
//
|
||||
for (const type of types) {
|
||||
var numPerRow = Math.floor(document.getElementById("scrollable-" + type).offsetWidth / 132);
|
||||
|
||||
// Dont put fewer than 4 per row
|
||||
numPerRow = Math.max(numPerRow, 4);
|
||||
|
||||
// Dont populate more than 8 rows
|
||||
numPerRow = Math.max(numPerRow, Math.ceil(accessories[type][total_items] / 8));
|
||||
|
||||
// update the locations of the <br>
|
||||
for (var i = 1; document.getElementById(type + "-br-" + i) != null; i++) {
|
||||
var spanBr = document.getElementById(type + "-br-" + i);
|
||||
if ( i % numPerRow == 0 ) {
|
||||
spanBr.innerHTML = "<br>";
|
||||
} else {
|
||||
spanBr.innerHTML = "";
|
||||
}
|
||||
}
|
||||
}
|
||||
// update the max height for any currently visible containers
|
||||
Collapsibles.updateAllHeights();
|
||||
}
|
||||
resizePage();
|
||||
window.addEventListener('resize', resizePage);
|
||||
|
||||
// Set initial preview for current avatar
|
||||
resetAvatar();
|
||||
// Initialize scroll on all current accessories so we can see the selected ones
|
||||
for (const type of types) {
|
||||
document.getElementById("scrollable-" + type).scrollLeft = document.getElementById(accessories[type][curr_id]).offsetLeft;
|
||||
}
|
||||
|
||||
// Expand the first collapsible so the user can get the gist of it.
|
||||
Collapsibles.expandFirst();
|
||||
</script>
|
||||
{% endif %}
|
||||
|
||||
{% endblock content %}
|
||||
61
titles/chuni/templates/chuni_favorites.jinja
Normal file
@@ -0,0 +1,61 @@
|
||||
{% extends "core/templates/index.jinja" %}
|
||||
{% block content %}
|
||||
<style>
|
||||
{% include 'titles/chuni/templates/css/chuni_style.css' %}
|
||||
</style>
|
||||
<div class="container">
|
||||
{% include 'titles/chuni/templates/chuni_header.jinja' %}
|
||||
{% if favorites_by_genre is defined and favorites_by_genre is not none %}
|
||||
<div class="row">
|
||||
<h4 style="text-align: center;">Favorite Count: {{ favorites_count }}</h4>
|
||||
{% for key, genre in favorites_by_genre.items() %}
|
||||
<h2 style="text-align: center; padding-top: 32px">{{ key }}</h2>
|
||||
{% for favorite in genre %}
|
||||
<div class="col-lg-6 mt-3">
|
||||
<div class="card bg-card rounded card-hover">
|
||||
<div class="card-body row">
|
||||
<div class="col-3" style="text-align: center;">
|
||||
<img src="img/jacket/{{ favorite.jacket }}" width="100%">
|
||||
</div>
|
||||
<div class="col scrolling-text">
|
||||
<h5 class="card-text"> {{ favorite.title }} </h5>
|
||||
<br>
|
||||
<h6 class="card-text"> {{ favorite.artist }} </h6>
|
||||
<br><br>
|
||||
<div style="text-align: right;">
|
||||
<button onclick="removeFavorite({{ favorite.favId }})" class="btn btn-secondary btn-fav-remove">Remove</button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
{% endfor %}
|
||||
{% endfor %}
|
||||
</div>
|
||||
{% endif %}
|
||||
</div>
|
||||
<script>
|
||||
$(document).ready(function () {
|
||||
$('.scrolling-text p, .scrolling-text h1, .scrolling-text h2, .scrolling-text h3, .scrolling-text h4, .scrolling-text h5, .scrolling-text h6').each(function () {
|
||||
var parentWidth = $(this).parent().width();
|
||||
var elementWidth = $(this).outerWidth();
|
||||
var elementWidthWithPadding = $(this).outerWidth(true);
|
||||
|
||||
if (elementWidthWithPadding > parentWidth) {
|
||||
$(this).addClass('scrolling');
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
// Remove Favorite
|
||||
function removeFavorite(musicId) {
|
||||
$.post("/game/chuni/update.favorite_music_favorites", { musicId: musicId, isAdd: 0 })
|
||||
.done(function (data) {
|
||||
location.reload();
|
||||
})
|
||||
.fail(function () {
|
||||
alert("Failed to remove favorite.");
|
||||
});
|
||||
}
|
||||
</script>
|
||||
{% endblock content %}
|
||||
@@ -1,11 +1,15 @@
|
||||
<div class="chuni-header">
|
||||
<h1>Chunithm</h1>
|
||||
<h1>{{ cur_version_name }}</h1>
|
||||
<ul class="chuni-navi">
|
||||
<li><a class="nav-link" href="/game/chuni">PROFILE</a></li>
|
||||
<li><a class="nav-link" href="/game/chuni/rating">RATING</a></li>
|
||||
<li><a class="nav-link" href="/game/chuni/playlog">RECORD</a></li>
|
||||
<li><a class="nav-link" href="/game/chuni/favorites">FAVORITES</a></li>
|
||||
<li><a class="nav-link" href="/game/chuni/musics">MUSICS</a></li>
|
||||
<li><a class="nav-link" href="/game/chuni/userbox">USER BOX</a></li>
|
||||
{% if cur_version >= 11 %} <!-- avatar config introduced in NEW!! -->
|
||||
<li><a class="nav-link" href="/game/chuni/avatar">AVATAR</a></li>
|
||||
{% endif %}
|
||||
</ul>
|
||||
</div>
|
||||
<script>
|
||||
@@ -17,8 +21,17 @@
|
||||
$('.nav-link[href="/game/chuni/playlog"]').addClass('active');
|
||||
} else if (currentPath.startsWith('/game/chuni/rating')) {
|
||||
$('.nav-link[href="/game/chuni/rating"]').addClass('active');
|
||||
} else if (currentPath.startsWith('/game/chuni/favorites')) {
|
||||
$('.nav-link[href="/game/chuni/favorites"]').addClass('active');
|
||||
} else if (currentPath.startsWith('/game/chuni/musics')) {
|
||||
$('.nav-link[href="/game/chuni/musics"]').addClass('active');
|
||||
} else if (currentPath.startsWith('/game/chuni/userbox')) {
|
||||
$('.nav-link[href="/game/chuni/userbox"]').addClass('active');
|
||||
}
|
||||
{% if cur_version >= 11 %} <!-- avatar config introduced in NEW!! -->
|
||||
else if (currentPath.startsWith('/game/chuni/avatar')) {
|
||||
$('.nav-link[href="/game/chuni/avatar"]').addClass('active');
|
||||
}
|
||||
{% endif %}
|
||||
});
|
||||
</script>
|
||||
@@ -69,9 +69,48 @@
|
||||
<td>Last Play Date:</td>
|
||||
<td>{{ profile.lastPlayDate }}</td>
|
||||
</tr>
|
||||
{% if cur_version >= 6 %} <!-- MAP ICON and SYSTEM VOICE introduced in AMAZON -->
|
||||
<tr>
|
||||
<td>Map Icon:</td>
|
||||
<td><div id="map-icon-name">{{ map_icons[profile.mapIconId]["name"] if map_icons|length > 0 else "Server DB needs upgraded or is not populated with necessary data" }}</div></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>System Voice:</td>
|
||||
<td><div id="system-voice-name">{{ system_voices[profile.voiceId]["name"] if system_voices|length > 0 else "Server DB needs upgraded or is not populated with necessary data" }}</div></td>
|
||||
</tr>
|
||||
{% endif %}
|
||||
</table>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{% if cur_version >= 6 %} <!-- MAP ICON and SYSTEM VOICE introduced in AMAZON -->
|
||||
<!-- MAP ICON SELECTION -->
|
||||
<div class="col-lg-8 m-auto mt-3 scrolling-lists">
|
||||
<div class="card bg-card rounded">
|
||||
<button class="collapsible">Map Icon: {{ map_icons|length }}/{{ total_map_icons }}</button>
|
||||
<div id="scrollable-map-icon" class="collapsible-content">
|
||||
{% for item in map_icons.values() %}
|
||||
<img id="map-icon-{{ item["id"] }}" style="padding: 8px 8px;" onclick="saveItem('map-icon', '{{ item["id"] }}', '{{ item["name"] }}')" src="img/mapIcon/{{ item["iconPath"] }}" alt="{{ item["name"] }}">
|
||||
<span id="map-icon-br-{{ loop.index }}"></span>
|
||||
{% endfor %}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- SYSTEM VOICE SELECTION -->
|
||||
<div class="col-lg-8 m-auto mt-3 scrolling-lists">
|
||||
<div class="card bg-card rounded">
|
||||
<button class="collapsible">System Voice: {{ system_voices|length }}/{{ total_system_voices }}</button>
|
||||
<div id="scrollable-system-voice" class="collapsible-content">
|
||||
{% for item in system_voices.values() %}
|
||||
<img id="system-voice-{{ item["id"] }}" style="padding: 8px 8px;" onclick="saveItem('system-voice', '{{ item["id"] }}', '{{ item["name"] }}')" src="img/systemVoice/{{ item["imagePath"] }}" alt="{{ item["name"] }}">
|
||||
<span id="system-voice-br-{{ loop.index }}"></span>
|
||||
{% endfor %}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
{% endif %}
|
||||
|
||||
<div class="col-lg-8 m-auto mt-3">
|
||||
<div class="card bg-card rounded">
|
||||
<table class="table-large table-rowdistinct">
|
||||
@@ -147,4 +186,93 @@
|
||||
});
|
||||
}
|
||||
</script>
|
||||
|
||||
{% if cur_version >= 6 %} <!-- MAP ICON and SYSTEM VOICE introduced in AMAZON -->
|
||||
<script>
|
||||
{% include 'titles/chuni/templates/scripts/collapsibles.js' %}
|
||||
|
||||
///
|
||||
/// This script handles all updates to the map icon and system voice
|
||||
///
|
||||
total_items = 0;
|
||||
curr_id = 1;
|
||||
items = {
|
||||
// [total_items, curr_id]
|
||||
"map-icon": ["{{ map_icons|length }}", "{{ profile.mapIconId }}"],
|
||||
"system-voice":["{{ system_voices|length }}", "{{ profile.voiceId }}"]
|
||||
};
|
||||
types = Object.keys(items);
|
||||
|
||||
function changeItem(type, id, name) {
|
||||
// clear select style for old selection
|
||||
var element = document.getElementById(type + "-" + items[type][curr_id]);
|
||||
if (element) {
|
||||
element.style.backgroundColor="inherit";
|
||||
}
|
||||
|
||||
// set new item
|
||||
items[type][curr_id] = id;
|
||||
document.getElementById(type + "-name").innerHTML = name;
|
||||
|
||||
// update select style for new accessory
|
||||
element = document.getElementById(type + "-" + id);
|
||||
if (element) {
|
||||
element.style.backgroundColor="#5F5";
|
||||
}
|
||||
}
|
||||
|
||||
function saveItem(type, id, name) {
|
||||
$.post("/game/chuni/update." + type, { id: id })
|
||||
.done(function (data) {
|
||||
changeItem(type, id, name);
|
||||
})
|
||||
.fail(function () {
|
||||
alert("Failed to set " + type + " to " + name);
|
||||
});
|
||||
}
|
||||
|
||||
function resizePage() {
|
||||
//
|
||||
// Handles item organization in the collapsible scrollables to try to keep the items-per-row presentable
|
||||
//
|
||||
// @note Yes, we could simply let the div overflow like usual. This could however get really nasty looking
|
||||
// when dealing with something like userbox characters where there are 1000s of possible items being
|
||||
//              displayed. This approach gives us full control over where items in the div wrap, allowing us to try
|
||||
// to keep things presentable.
|
||||
//
|
||||
for (const type of types) {
|
||||
var numPerRow = Math.floor(document.getElementById("scrollable-" + type).offsetWidth / 132);
|
||||
|
||||
// Dont put fewer than 4 per row
|
||||
numPerRow = Math.max(numPerRow, 4);
|
||||
|
||||
// Dont populate more than 6 rows
|
||||
numPerRow = Math.max(numPerRow, Math.ceil(items[type][total_items] / 6));
|
||||
|
||||
// update the locations of the <br>
|
||||
for (var i = 1; document.getElementById(type + "-br-" + i) != null; i++) {
|
||||
var spanBr = document.getElementById(type + "-br-" + i);
|
||||
if ( i % numPerRow == 0 ) {
|
||||
spanBr.innerHTML = "<br>";
|
||||
} else {
|
||||
spanBr.innerHTML = "";
|
||||
}
|
||||
}
|
||||
}
|
||||
// update the max height for any currently visible containers
|
||||
Collapsibles.updateAllHeights();
|
||||
}
|
||||
resizePage();
|
||||
window.addEventListener('resize', resizePage);
|
||||
|
||||
// Set initial style for current and scroll to selected
|
||||
for (const type of types) {
|
||||
changeItem(type, items[type][curr_id], document.getElementById(type + "-name").innerHTML);
|
||||
document.getElementById("scrollable-" + type).scrollLeft = document.getElementById(type + "-" + items[type][curr_id]).offsetLeft;
|
||||
}
|
||||
|
||||
Collapsibles.expandAll();
|
||||
</script>
|
||||
{% endif %}
|
||||
|
||||
{% endblock content %}
|
||||
@@ -7,25 +7,31 @@
|
||||
{% include 'titles/chuni/templates/chuni_header.jinja' %}
|
||||
{% if playlog is defined and playlog is not none %}
|
||||
<div class="row">
|
||||
<h4 style="text-align: center;">Playlog counts: {{ playlog_count }}</h4>
|
||||
<h4 style="text-align: center;">Playlog Count: {{ playlog_count }}</h4>
|
||||
{% set rankName = ['D', 'C', 'B', 'BB', 'BBB', 'A', 'AA', 'AAA', 'S', 'S+', 'SS', 'SS+', 'SSS', 'SSS+'] %}
|
||||
{% set difficultyName = ['normal', 'hard', 'expert', 'master', 'ultimate'] %}
|
||||
{% for record in playlog %}
|
||||
<div class="col-lg-6 mt-3">
|
||||
<div class="card bg-card rounded card-hover">
|
||||
<div class="card-header row">
|
||||
<div class="col-8 scrolling-text">
|
||||
<div class="col-auto fav" title="{{ ('Remove' if record.isFav else 'Add') + ' Favorite'}}">
|
||||
<h1><span id="{{ record.idx }}" class="fav {{ 'fav-set' if record.isFav else '' }}" onclick="updateFavorite({{ record.idx }}, {{ record.musicId }})">{{ '★' if record.isFav else '☆' }}</span></h1>
|
||||
</div>
|
||||
<div class="col scrolling-text">
|
||||
<h5 class="card-text"> {{ record.title }} </h5>
|
||||
<br>
|
||||
<h6 class="card-text"> {{ record.artist }} </h6>
|
||||
</div>
|
||||
<div class="col-4">
|
||||
<div class="col-auto">
|
||||
<h6 class="card-text">{{ record.raw.userPlayDate }}</h6>
|
||||
<h6 class="card-text">TRACK {{ record.raw.track }}</h6>
|
||||
</div>
|
||||
</div>
|
||||
<div class="card-body row">
|
||||
<div class="col-3" style="text-align: center;">
|
||||
<div class="col-sm" style="text-align: center;">
|
||||
<img src="../img/jacket/{{ record.jacket }}" width="100%">
|
||||
</div>
|
||||
<div class="col" style="text-align: center;">
|
||||
<h4 class="card-text">{{ record.raw.score }}</h4>
|
||||
<h2>{{ rankName[record.raw.rank] }}</h2>
|
||||
<h6
|
||||
@@ -33,10 +39,10 @@
|
||||
{{ difficultyName[record.raw.level] }}  {{ record.difficultyNum }}
|
||||
</h6>
|
||||
</div>
|
||||
<div class="col-6" style="text-align: center;">
|
||||
<div class="col-4" style="text-align: center;">
|
||||
<table class="table-small table-rowdistinc">
|
||||
<tr>
|
||||
<td>JUSTICE CRITIAL</td>
|
||||
<td>JUSTICE CRITICAL</td>
|
||||
<td>
|
||||
{{ record.raw.judgeCritical + record.raw.judgeHeaven }}
|
||||
</td>
|
||||
@@ -180,5 +186,23 @@
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
// Add/Remove Favorite
|
||||
function updateFavorite(elementId, musicId) {
|
||||
element = document.getElementById(elementId);
|
||||
isAdd = 1;
|
||||
if (element.classList.contains("fav-set"))
|
||||
{
|
||||
isAdd = 0;
|
||||
}
|
||||
|
||||
$.post("/game/chuni/update.favorite_music_favorites", { musicId: musicId, isAdd: isAdd })
|
||||
.done(function (data) {
|
||||
location.reload();
|
||||
})
|
||||
.fail(function () {
|
||||
alert("Failed to update favorite.");
|
||||
});
|
||||
}
|
||||
</script>
|
||||
{% endblock content %}
|
||||
262
titles/chuni/templates/chuni_userbox.jinja
Normal file
@@ -0,0 +1,262 @@
|
||||
{% extends "core/templates/index.jinja" %}
|
||||
{% block content %}
|
||||
<style>
|
||||
{% include 'titles/chuni/templates/css/chuni_style.css' %}
|
||||
</style>
|
||||
|
||||
<div class="container">
|
||||
{% include 'titles/chuni/templates/chuni_header.jinja' %}
|
||||
|
||||
<!-- USER BOX PREVIEW -->
|
||||
<div class="row">
|
||||
<div class="col-lg-8 m-auto mt-3">
|
||||
<div class="card bg-card rounded">
|
||||
<table class="table-large table-rowdistinct">
|
||||
<caption align="top">USER BOX</caption>
|
||||
<tr><td colspan=2 style="height:240px;">
|
||||
<!-- NAMEPLATE -->
|
||||
<img id="preview_nameplate" class="userbox userbox-nameplate" src="">
|
||||
|
||||
<!-- TEAM -->
|
||||
<img class="userbox userbox-teamframe" src="img/rank/team3.png">
|
||||
<div class="userbox userbox-teamname">{{team_name}}</div>
|
||||
|
||||
<!-- TROPHY/TITLE -->
|
||||
<img id="preview_trophy_rank" class="userbox userbox-trophy" src="">
|
||||
<div id="preview_trophy_name" class="userbox userbox-trophy userbox-trophy-name"></div>
|
||||
|
||||
<!-- NAME/RATING -->
|
||||
<img class="userbox userbox-ratingframe" src="img/rank/rating0.png">
|
||||
<div class="userbox userbox-name">
|
||||
<span class="userbox-name-level-label">Lv.</span>
|
||||
{{ profile.level }} {{ profile.userName }}
|
||||
</div>
|
||||
<div class="userbox userbox-rating rating rating-rank{{ rating_rank }}">
|
||||
<span class="userbox-rating-label">RATING</span>
|
||||
{{ profile.playerRating/100 }}
|
||||
</div>
|
||||
|
||||
<!-- CHARACTER -->
|
||||
<img class="userbox userbox-charaframe" src="img/character-bg.png">
|
||||
<img id="preview_character" class="userbox userbox-chara" src="">
|
||||
</td></tr>
|
||||
|
||||
<tr><td>Nameplate:</td><td style="width: 80%;"><div id="name_nameplate"></div></td></tr>
|
||||
|
||||
<tr><td>Trophy:</td><td><div id="name_trophy">
|
||||
<select name="trophy" id="trophy" onchange="changeTrophy()" style="width:100%;">
|
||||
{% for item in trophies.values() %}
|
||||
<option value="{{ item["id"] }}" class="trophy-rank{{ item["rarity"] }}">{{ item["name"] }}</option>
|
||||
{% endfor %}
|
||||
</select>
|
||||
</div></td></tr>
|
||||
|
||||
<tr><td>Character:</td><td><div id="name_character"></div></td></tr>
|
||||
|
||||
<tr><td colspan=2 style="padding:8px 0px; text-align: center;">
|
||||
<button id="save-btn" class="btn btn-primary" style="width:140px;" onClick="saveUserbox()">SAVE</button>
|
||||
<button id="reset-btn" class="btn btn-danger" style="width:140px;" onClick="resetUserbox()">RESET</button>
|
||||
</td></tr>
|
||||
</table>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- USERBOX SELECTION -->
|
||||
<div class="row col-lg-8 m-auto mt-3 scrolling-lists-lg card bg-card rounded">
|
||||
|
||||
<!-- NAMEPLATE -->
|
||||
<button class="collapsible">Nameplate: {{ nameplates|length }}/{{ total_nameplates }}</button>
|
||||
<div id="scrollable-nameplate" class="collapsible-content">
|
||||
{% for item in nameplates.values() %}
|
||||
<img id="nameplate-{{ item["id"] }}" style="padding: 8px 8px;" onclick="changeItem('nameplate', '{{ item["id"] }}', '{{ item["name"] }}', '{{ item["texturePath"] }}')" src="img/nameplate/{{ item["texturePath"] }}" alt="{{ item["name"] }}">
|
||||
<span id="nameplate-br-{{ loop.index }}"></span>
|
||||
{% endfor %}
|
||||
</div>
|
||||
<hr>
|
||||
|
||||
<!-- CHARACTER -->
|
||||
<button class="collapsible">Character: {{ characters|length }}/{{ total_characters }}</button>
|
||||
<div id="scrollable-character" class="collapsible-content">
|
||||
{% for item in characters.values() %}
|
||||
<img id="character-{{ item["id"] }}" onclick="changeItem('character', '{{ item["id"] }}', '{{ item["name"] }}', '{{ item["iconPath"] }}')" src="img/character/{{ item["iconPath"] }}" alt="{{ item["name"] }}">
|
||||
<span id="character-br-{{ loop.index }}"></span>
|
||||
{% endfor %}
|
||||
</div>
|
||||
|
||||
</div>
|
||||
|
||||
{% if error is defined %}
|
||||
{% include "core/templates/widgets/err_banner.jinja" %}
|
||||
{% endif %}
|
||||
</div>
|
||||
|
||||
{% if nameplates|length == 0 or characters|length == 0 %}
|
||||
<script>
|
||||
// Server DB lacks necessary info. Maybe the importer was never run for this version?
|
||||
document.getElementById("name_nameplate").innerHTML = "Server DB needs upgraded or is not populated with necessary data";
|
||||
</script>
|
||||
{% else %}
|
||||
<script>
|
||||
{% include 'titles/chuni/templates/scripts/collapsibles.js' %}
|
||||
|
||||
///
|
||||
/// This script handles all updates to the user box
|
||||
///
|
||||
total_items = 0;
|
||||
orig_id = 1;
|
||||
orig_name = 2;
|
||||
orig_img = 3;
|
||||
curr_id = 4;
|
||||
curr_name = 5;
|
||||
curr_img = 6;
|
||||
userbox_components = {
|
||||
// [total_items, orig_id, orig_name, orig_img, curr_id, curr_name, curr_img]
|
||||
"nameplate":["{{ nameplates|length }}",
|
||||
"{{ profile.nameplateId }}",
|
||||
"{{ nameplates[profile.nameplateId]["name"] }}",
|
||||
"{{ nameplates[profile.nameplateId]["texturePath"] }}", "", "", ""],
|
||||
|
||||
"character":["{{ characters|length }}",
|
||||
"{{ profile.characterId }}",
|
||||
"{{ characters[profile.characterId]["name"] }}",
|
||||
"{{ characters[profile.characterId]["iconPath"] }}", "", "", ""]
|
||||
};
|
||||
types = Object.keys(userbox_components);
|
||||
orig_trophy = curr_trophy = "{{ profile.trophyId }}";
|
||||
curr_trophy_img = "";
|
||||
|
||||
function enableButtons(enabled) {
|
||||
document.getElementById("reset-btn").disabled = !enabled;
|
||||
document.getElementById("save-btn").disabled = !enabled;
|
||||
}
|
||||
|
||||
function changeItem(type, id, name, img) {
|
||||
// clear select style for old component
|
||||
var element = document.getElementById(type + "-" + userbox_components[type][curr_id]);
|
||||
if (element) {
|
||||
element.style.backgroundColor="inherit";
|
||||
}
|
||||
|
||||
// set new component
|
||||
userbox_components[type][curr_id] = id;
|
||||
userbox_components[type][curr_name] = name;
|
||||
userbox_components[type][curr_img] = img;
|
||||
|
||||
// update select style for new accessory
|
||||
element = document.getElementById(type + "-" + id);
|
||||
if (element) {
|
||||
element.style.backgroundColor="#5F5";
|
||||
}
|
||||
|
||||
// Update the userbox preview and enable buttons
|
||||
updatePreview();
|
||||
if (id != userbox_components[type][orig_id]) {
|
||||
enableButtons(true);
|
||||
}
|
||||
}
|
||||
|
||||
function getRankImage(selected_rank) {
|
||||
for (const x of Array(12).keys()) {
|
||||
if (selected_rank.classList.contains("trophy-rank" + x.toString())) {
|
||||
return "rank" + x.toString() + ".png";
|
||||
}
|
||||
}
|
||||
return "rank0.png"; // shouldnt ever happen
|
||||
}
|
||||
|
||||
function changeTrophy() {
|
||||
var trophy_element = document.getElementById("trophy");
|
||||
|
||||
curr_trophy = trophy_element.value;
|
||||
curr_trophy_img = getRankImage(trophy_element[trophy_element.selectedIndex]);
|
||||
updatePreview();
|
||||
if (curr_trophy != orig_trophy) {
|
||||
enableButtons(true);
|
||||
}
|
||||
}
|
||||
|
||||
function resetUserbox() {
|
||||
for (const type of types) {
|
||||
changeItem(type, userbox_components[type][orig_id], userbox_components[type][orig_name], userbox_components[type][orig_img]);
|
||||
}
|
||||
// reset trophy
|
||||
document.getElementById("trophy").value = orig_trophy;
|
||||
changeTrophy();
|
||||
// disable the save/reset buttons until something changes
|
||||
enableButtons(false);
|
||||
}
|
||||
|
||||
function updatePreview() {
|
||||
for (const type of types) {
|
||||
document.getElementById("preview_" + type).src = "img/" + type + "/" + userbox_components[type][curr_img];
|
||||
document.getElementById("name_" + type).innerHTML = userbox_components[type][curr_name];
|
||||
}
|
||||
document.getElementById("preview_trophy_rank").src = "img/rank/" + curr_trophy_img;
|
||||
document.getElementById("preview_trophy_name").innerHTML = document.getElementById("trophy")[document.getElementById("trophy").selectedIndex].innerText;
|
||||
}
|
||||
|
||||
function saveUserbox() {
|
||||
$.post("/game/chuni/update.userbox", { nameplate: userbox_components["nameplate"][curr_id],
|
||||
trophy: curr_trophy,
|
||||
character: userbox_components["character"][curr_id] })
|
||||
.done(function (data) {
|
||||
// set the current as the original and disable buttons
|
||||
for (const type of types) {
|
||||
userbox_components[type][orig_id] = userbox_components[type][curr_id];
|
||||
userbox_components[type][orig_name] = userbox_components[type][curr_name];
|
||||
userbox_components[type][orig_img] = userbox_components[type][curr_img];
|
||||
}
|
||||
orig_trophy = curr_trophy;
|
||||
enableButtons(false);
|
||||
})
|
||||
.fail(function () {
|
||||
alert("Failed to save userbox.");
|
||||
});
|
||||
}
|
||||
|
||||
function resizePage() {
|
||||
//
|
||||
// Handles item organization in the collapsible scrollables to try to keep the items-per-row presentable
|
||||
//
|
||||
// @note Yes, we could simply let the div overflow like usual. This could however get really nasty looking
|
||||
// when dealing with something like userbox characters where there are 1000s of possible items being
|
||||
//              displayed. This approach gives us full control over where items in the div wrap, allowing us to try
|
||||
// to keep things presentable.
|
||||
//
|
||||
for (const type of types) {
|
||||
var numPerRow = Math.floor(document.getElementById("scrollable-" + type).offsetWidth / 132);
|
||||
|
||||
// Dont put fewer than 4 per row
|
||||
numPerRow = Math.max(numPerRow, 4);
|
||||
|
||||
// Dont populate more than 8 rows
|
||||
numPerRow = Math.max(numPerRow, Math.ceil(userbox_components[type][total_items] / 8));
|
||||
|
||||
// update the locations of the <br>
|
||||
for (var i = 1; document.getElementById(type + "-br-" + i) != null; i++) {
|
||||
var spanBr = document.getElementById(type + "-br-" + i);
|
||||
if ( i % numPerRow == 0 ) {
|
||||
spanBr.innerHTML = "<br>";
|
||||
} else {
|
||||
spanBr.innerHTML = "";
|
||||
}
|
||||
}
|
||||
}
|
||||
// update the max height for any currently visible containers
|
||||
Collapsibles.updateAllHeights();
|
||||
}
|
||||
resizePage();
|
||||
window.addEventListener('resize', resizePage);
|
||||
|
||||
// Set initial preview for current userbox
|
||||
resetUserbox();
|
||||
// Initialize scroll on all current items so we can see the selected ones
|
||||
for (const type of types) {
|
||||
document.getElementById("scrollable-" + type).scrollLeft = document.getElementById(type + "-" + userbox_components[type][curr_id]).offsetLeft;
|
||||
}
|
||||
|
||||
Collapsibles.expandAll();
|
||||
</script>
|
||||
{% endif %}
|
||||
{% endblock content %}
|
||||
@@ -159,6 +159,45 @@ caption {
|
||||
font-weight: bold;
|
||||
}
|
||||
|
||||
.rating {
|
||||
font-weight: bold;
|
||||
-webkit-text-stroke-width: 1px;
|
||||
-webkit-text-stroke-color: black;
|
||||
}
|
||||
|
||||
.rating-rank0 {
|
||||
color: #008000;
|
||||
}
|
||||
|
||||
.rating-rank1 {
|
||||
color: #ffa500;
|
||||
}
|
||||
.rating-rank2 {
|
||||
color: #ff0000;
|
||||
}
|
||||
.rating-rank3 {
|
||||
color: #800080;
|
||||
}
|
||||
.rating-rank4 {
|
||||
color: #cd853f;
|
||||
}
|
||||
.rating-rank5 {
|
||||
color: #c0c0c0;
|
||||
}
|
||||
.rating-rank6 {
|
||||
color: #ffd700;
|
||||
}
|
||||
.rating-rank7 {
|
||||
color: #a9a9a9;
|
||||
}
|
||||
|
||||
.rating-rank8 {
|
||||
background: linear-gradient(to right, red, yellow, lime, aqua, blue, fuchsia) 0 / 5em;
|
||||
background-clip: text;
|
||||
-webkit-background-clip: text;
|
||||
-webkit-text-fill-color: transparent;
|
||||
}
|
||||
|
||||
.scrolling-text {
|
||||
overflow: hidden;
|
||||
}
|
||||
@@ -192,4 +231,306 @@ caption {
|
||||
100% {
|
||||
transform: translateX(-100%);
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
Styles to support collapsible boxes (used for browsing images)
|
||||
*/
|
||||
.collapsible {
|
||||
background-color: #555;
|
||||
cursor: pointer;
|
||||
padding-bottom: 16px;
|
||||
width: 100%;
|
||||
border: none;
|
||||
text-align: left;
|
||||
outline: none;
|
||||
font-family: monospace;
|
||||
font-weight: bold;
|
||||
}
|
||||
|
||||
.collapsible:after {
|
||||
content: '[+]';
|
||||
float: right;
|
||||
}
|
||||
|
||||
.collapsible-active:after {
|
||||
content: "[-]";
|
||||
}
|
||||
|
||||
.collapsible-content {
|
||||
max-height: 0px;
|
||||
overflow: hidden;
|
||||
opacity: 0;
|
||||
transition: max-height 0.2s ease-out;
|
||||
background-color: #DDD;
|
||||
}
|
||||
|
||||
/*
|
||||
Styles for favorites star in /playlog
|
||||
*/
|
||||
.fav {
|
||||
padding: 0;
|
||||
padding-left: 4px;
|
||||
background-color: transparent;
|
||||
border: none;
|
||||
cursor: pointer;
|
||||
}
|
||||
|
||||
.fav-set {
|
||||
color: gold;
|
||||
}
|
||||
|
||||
/*
|
||||
Styles for favorites in /favorites
|
||||
*/
|
||||
.btn-fav-remove {
|
||||
padding:10px;
|
||||
width:100%;
|
||||
}
|
||||
|
||||
/*
|
||||
Styles for userbox configuration
|
||||
*/
|
||||
.userbox {
|
||||
position: absolute;
|
||||
}
|
||||
|
||||
.userbox-nameplate {
|
||||
top: 72px;
|
||||
left: 32px;
|
||||
}
|
||||
|
||||
.userbox-teamframe {
|
||||
top: 74px;
|
||||
left: 156px;
|
||||
}
|
||||
|
||||
.userbox-teamname {
|
||||
top: 72px;
|
||||
left: 254px;
|
||||
padding: 8px 20px;
|
||||
font-size: 22px;
|
||||
text-shadow: rgba(0,0,0,0.8) 2px 2px;
|
||||
color: #DDD;
|
||||
width: 588px;
|
||||
text-align: left;
|
||||
}
|
||||
|
||||
.userbox-trophy {
|
||||
top: 170px;
|
||||
left: 250px;
|
||||
zoom: 0.70;
|
||||
}
|
||||
|
||||
.userbox-trophy-name {
|
||||
top: 170px;
|
||||
left: 250px;
|
||||
padding: 8px 20px;
|
||||
font-size: 28px;
|
||||
font-weight: bold;
|
||||
color: #333;
|
||||
width: 588px;
|
||||
text-align: center;
|
||||
}
|
||||
|
||||
.userbox-ratingframe {
|
||||
top: 160px;
|
||||
left: 175px;
|
||||
}
|
||||
|
||||
.userbox-charaframe {
|
||||
top: 267px;
|
||||
left: 824px;
|
||||
zoom: 0.61;
|
||||
}
|
||||
|
||||
.userbox-chara {
|
||||
top: 266px;
|
||||
left: 814px;
|
||||
zoom: 0.62;
|
||||
}
|
||||
|
||||
.userbox-name {
|
||||
top: 160px;
|
||||
left: 162px;
|
||||
padding: 8px 20px;
|
||||
font-size: 32px;
|
||||
font-weight: bold;
|
||||
color: #333;
|
||||
text-align: left;
|
||||
}
|
||||
|
||||
.userbox-name-level-label {
|
||||
font-size: 24px;
|
||||
}
|
||||
|
||||
.userbox-rating {
|
||||
top: 204px;
|
||||
left: 166px;
|
||||
padding: 8px 20px;
|
||||
font-size: 24px;
|
||||
text-align: left;
|
||||
}
|
||||
|
||||
.userbox-rating-label {
|
||||
font-size: 16px;
|
||||
}
|
||||
|
||||
.trophy-rank0 {
|
||||
color: #111;
|
||||
background-color: #DDD;
|
||||
}
|
||||
.trophy-rank1 {
|
||||
color: #111;
|
||||
background-color: #D85;
|
||||
}
|
||||
.trophy-rank2 {
|
||||
color: #111;
|
||||
background-color: #ADF;
|
||||
}
|
||||
.trophy-rank3 {
|
||||
color: #111;
|
||||
background-color: #EB3;
|
||||
}
|
||||
.trophy-rank4 {
|
||||
color: #111;
|
||||
background-color: #EB3;
|
||||
}
|
||||
.trophy-rank5 {
|
||||
color: #111;
|
||||
background-color: #FFA;
|
||||
}
|
||||
.trophy-rank6 {
|
||||
color: #111;
|
||||
background-color: #FFA;
|
||||
}
|
||||
.trophy-rank7 {
|
||||
color: #111;
|
||||
background-color: #FCF;
|
||||
}
|
||||
.trophy-rank8 {
|
||||
color: #111;
|
||||
background-color: #FCF;
|
||||
}
|
||||
.trophy-rank9 {
|
||||
color: #111;
|
||||
background-color: #07C;
|
||||
}
|
||||
.trophy-rank10 {
|
||||
color: #111;
|
||||
background-color: #7FE;
|
||||
}
|
||||
.trophy-rank11 {
|
||||
color: #111;
|
||||
background-color: #8D7;
|
||||
}
|
||||
|
||||
/*
|
||||
Styles for scrollable divs (used for browsing images)
|
||||
*/
|
||||
.scrolling-lists {
|
||||
table-layout: fixed;
|
||||
}
|
||||
|
||||
.scrolling-lists div {
|
||||
overflow: auto;
|
||||
white-space: nowrap;
|
||||
}
|
||||
|
||||
.scrolling-lists img {
|
||||
width: 128px;
|
||||
}
|
||||
|
||||
.scrolling-lists-lg {
|
||||
table-layout: fixed;
|
||||
width: 100%;
|
||||
}
|
||||
|
||||
.scrolling-lists-lg div {
|
||||
overflow: auto;
|
||||
white-space: nowrap;
|
||||
padding: 4px;
|
||||
}
|
||||
|
||||
.scrolling-lists-lg img {
|
||||
padding: 4px;
|
||||
width: 128px;
|
||||
}
|
||||
|
||||
/*
|
||||
Styles for avatar configuration
|
||||
*/
|
||||
.avatar-preview {
|
||||
position:absolute;
|
||||
zoom:0.5;
|
||||
}
|
||||
|
||||
.avatar-preview-wear {
|
||||
top: 280px;
|
||||
left: 60px;
|
||||
}
|
||||
|
||||
.avatar-preview-face {
|
||||
top: 262px;
|
||||
left: 200px;
|
||||
}
|
||||
|
||||
.avatar-preview-head {
|
||||
top: 130px;
|
||||
left: 120px;
|
||||
}
|
||||
|
||||
.avatar-preview-skin-body {
|
||||
top: 250px;
|
||||
left: 190px;
|
||||
height: 406px;
|
||||
width: 256px;
|
||||
object-fit: cover;
|
||||
object-position: top;
|
||||
}
|
||||
|
||||
.avatar-preview-skin-leftfoot {
|
||||
top: 625px;
|
||||
left: 340px;
|
||||
object-position: -84px -406px;
|
||||
}
|
||||
|
||||
.avatar-preview-skin-rightfoot {
|
||||
top: 625px;
|
||||
left: 40px;
|
||||
object-position: 172px -406px;
|
||||
}
|
||||
|
||||
.avatar-preview-common {
|
||||
top: 250px;
|
||||
left: 135px;
|
||||
}
|
||||
|
||||
.avatar-preview-item-lefthand {
|
||||
top: 180px;
|
||||
left: 370px;
|
||||
height: 544px;
|
||||
width: 200px;
|
||||
object-fit: cover;
|
||||
object-position: right;
|
||||
}
|
||||
|
||||
.avatar-preview-item-righthand {
|
||||
top: 180px;
|
||||
left: 65px;
|
||||
height: 544px;
|
||||
width: 200px;
|
||||
object-fit: cover;
|
||||
object-position: left;
|
||||
}
|
||||
|
||||
.avatar-preview-back {
|
||||
top: 140px;
|
||||
left: 46px;
|
||||
}
|
||||
|
||||
.avatar-preview-platform {
|
||||
top: 310px;
|
||||
left: 55px;
|
||||
zoom: 1;
|
||||
}
|
||||
66
titles/chuni/templates/scripts/collapsibles.js
Normal file
@@ -0,0 +1,66 @@
|
||||
///
|
||||
/// Handles the collapsible behavior of each of the scrollable containers
|
||||
///
|
||||
/// @note Intent is to include this file via jinja in the same <script> tag as
|
||||
/// any page-specific logic that interacts with the collapsibles.
|
||||
///
|
||||
class Collapsibles {
|
||||
static setHeight(content) {
|
||||
// @note Add an extra 20% height buffer - the div will autosize but we
|
||||
// want to make sure we dont clip due to the horizontal scroll.
|
||||
content.style.maxHeight = (content.scrollHeight * 1.2) + "px";
|
||||
}
|
||||
|
||||
static updateAllHeights() {
|
||||
// Updates the height of all expanded collapsibles.
|
||||
// Intended for use when resolution changes cause the contents within the collapsible to move around.
|
||||
var coll = document.getElementsByClassName("collapsible");
|
||||
for (var i = 0; i < coll.length; i++) {
|
||||
var content = coll[i].nextElementSibling;
|
||||
if (content.style.maxHeight) {
|
||||
// currently visible. update height
|
||||
Collapsibles.setHeight(content);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
static expandFirst() {
|
||||
// Activate the first collapsible once loaded so the user can get an idea of how things work
|
||||
window.addEventListener('load', function () {
|
||||
var coll = document.getElementsByClassName("collapsible");
|
||||
if (coll && coll.length > 0) {
|
||||
coll[0].click();
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
static expandAll() {
|
||||
// Activate all collapsibles so everything is visible immediately
|
||||
window.addEventListener('load', function () {
|
||||
var coll = document.getElementsByClassName("collapsible");
|
||||
for (var i = 0; i < coll.length; i++) {
|
||||
coll[i].click();
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
//
|
||||
// Initial Collapsible Setup
|
||||
//
|
||||
|
||||
// Add an event listener for a click on any collapsible object. This will change the style to collapse/expand the content.
|
||||
var coll = document.getElementsByClassName("collapsible");
|
||||
for (var i = 0; i < coll.length; i++) {
|
||||
coll[i].addEventListener("click", function () {
|
||||
this.classList.toggle("collapsible-active");
|
||||
var content = this.nextElementSibling;
|
||||
if (content.style.maxHeight) {
|
||||
content.style.maxHeight = null;
|
||||
content.style.opacity = 0;
|
||||
} else {
|
||||
Collapsibles.setHeight(content);
|
||||
content.style.opacity = 1;
|
||||
}
|
||||
});
|
||||
}
|
||||
@@ -207,6 +207,8 @@ class CardMakerReader(BaseReader):
|
||||
"1.30": Mai2Constants.VER_MAIMAI_DX_FESTIVAL,
|
||||
"1.35": Mai2Constants.VER_MAIMAI_DX_FESTIVAL_PLUS,
|
||||
"1.40": Mai2Constants.VER_MAIMAI_DX_BUDDIES,
|
||||
"1.45": Mai2Constants.VER_MAIMAI_DX_BUDDIES_PLUS,
|
||||
"1.50": Mai2Constants.VER_MAIMAI_DX_PRISM
|
||||
}
|
||||
|
||||
for root, dirs, files in os.walk(base_dir):
|
||||
@@ -324,3 +326,39 @@ class CardMakerReader(BaseReader):
|
||||
maxSelectPoint=max_select_point,
|
||||
)
|
||||
self.logger.info(f"Added ongeki gacha {gacha_id}")
|
||||
|
||||
async def read_opt(self, base_dir: str) -> None:
|
||||
self.logger.info(f"Reading opt data from {base_dir}...")
|
||||
cm_data_cfg = None
|
||||
cm_data_cfg_file = os.path.join(base_dir, "DataConfig.xml")
|
||||
|
||||
geki_data_cfg = None
|
||||
geki_data_cfg_file = os.path.join(base_dir, "GEKI", "DataConfig.xml")
|
||||
|
||||
mai2_data_cfg = None
|
||||
mai2_data_cfg_file = os.path.join(base_dir, "MAI", "DataConfig.xml")
|
||||
|
||||
if os.path.exists(cm_data_cfg_file):
|
||||
with open(cm_data_cfg_file, "r") as f:
|
||||
cm_data_cfg = ET.fromstring(f.read())
|
||||
else:
|
||||
self.logger.info(f"No DataConfig.xml in {base_dir}, sequence will be null")
|
||||
|
||||
if os.path.exists(geki_data_cfg_file):
|
||||
with open(geki_data_cfg_file, "r") as f:
|
||||
geki_data_cfg = ET.fromstring(f.read())
|
||||
else:
|
||||
self.logger.info(f"Cannot find {geki_data_cfg_file}, gekiVersion and gekiReleaseVer will be null")
|
||||
|
||||
if os.path.exists(mai2_data_cfg_file):
|
||||
with open(mai2_data_cfg_file, "r") as f:
|
||||
mai2_data_cfg = ET.fromstring(f.read())
|
||||
else:
|
||||
self.logger.info(f"Cannot find {mai2_data_cfg_file}, mai2Version and mai2ReleaseVer will be null")
|
||||
|
||||
cm_rel_ver = int(cm_data_cfg.find("DataConfig/version/release").text)
|
||||
|
||||
geki_rel_ver = int(geki_data_cfg.find("DataConfig/version/release").text)
|
||||
|
||||
mai2_rel_ver = int(mai2_data_cfg.find("DataConfig/version/release").text)
|
||||
mai2_db_ver = Mai2Constants.int_ver_to_game_ver(mai2_data_cfg.find("DataConfig/version/major").text + mai2_data_cfg.find("DataConfig/version/minor").text)
|
||||
|
||||