Merge branch 'refs/heads/develop' into prism_plus_support
# Conflicts:
#	core/data/alembic/versions/16f34bf7b968_mai2_kaleidx_scope_support.py
#	core/data/alembic/versions/5cf98cfe52ad_mai2_prism_support.py
#	core/data/alembic/versions/5d7b38996e67_mai2_prism_support.py
#	core/data/alembic/versions/bdf710616ba4_mai2_add_prism_playlog_support.py
#	titles/mai2/index.py
#	titles/mai2/prism.py
#	titles/mai2/read.py
#	titles/mai2/schema/static.py
@@ -1,5 +1,6 @@
from enum import Enum, IntEnum

from typing import Optional
from core.utils import floor_to_nearest_005

class ChuniConstants:
    GAME_CODE = "SDBT"
@@ -78,10 +79,34 @@ class ChuniConstants:
        ( 0, "D"),
    ]

    VERSION_LUT = {
        "100": VER_CHUNITHM,
        "105": VER_CHUNITHM_PLUS,
        "110": VER_CHUNITHM_AIR,
        "115": VER_CHUNITHM_AIR_PLUS,
        "120": VER_CHUNITHM_STAR,
        "125": VER_CHUNITHM_STAR_PLUS,
        "130": VER_CHUNITHM_AMAZON,
        "135": VER_CHUNITHM_AMAZON_PLUS,
        "140": VER_CHUNITHM_CRYSTAL,
        "145": VER_CHUNITHM_CRYSTAL_PLUS,
        "150": VER_CHUNITHM_PARADISE,
        "200": VER_CHUNITHM_NEW,
        "205": VER_CHUNITHM_NEW_PLUS,
        "210": VER_CHUNITHM_SUN,
        "215": VER_CHUNITHM_SUN_PLUS,
        "220": VER_CHUNITHM_LUMINOUS,
        "225": VER_CHUNITHM_LUMINOUS_PLUS,
    }

    @classmethod
    def game_ver_to_string(cls, ver: int):
        return cls.VERSION_NAMES[ver]

    @classmethod
    def int_ver_to_game_ver(cls, ver: int) -> Optional[int]:
        """ Takes an int ver (ex 100 for 1.00) and returns an internal game version """
        return cls.VERSION_LUT.get(str(floor_to_nearest_005(ver)), None)

class MapAreaConditionType(IntEnum):
    """Condition types for the GetGameMapAreaConditionApi endpoint. Incomplete.

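A quick sanity check of the new lookup (illustrative only, assuming floor_to_nearest_005 rounds an int version such as 147 down to the nearest multiple of 5):

    ChuniConstants.int_ver_to_game_ver(215)   # -> VER_CHUNITHM_SUN_PLUS
    ChuniConstants.int_ver_to_game_ver(147)   # -> VER_CHUNITHM_CRYSTAL_PLUS (147 floored to 145)
    ChuniConstants.int_ver_to_game_ver(300)   # -> None, no LUT entry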
@@ -457,7 +457,7 @@ class ChuniFrontend(FE_Base):
        user_characters = []
        if not force_unlocked:
            user_characters = await self.data.item.get_characters(profile.user)
            user_characters = [chara["characterId"] for chara in user_characters] + [profile.characterId, profile.charaIllustId]
            user_characters = [chara["characterId"] for chara in user_characters] + [profile.characterId]

        for row in rows:
            if force_unlocked or row["defaultHave"] or row["characterId"] in user_characters:

@@ -3,6 +3,7 @@ from os import walk, path
import xml.etree.ElementTree as ET
from read import BaseReader
from PIL import Image
import configparser

from core.config import CoreConfig
from titles.chuni.database import ChuniData
@@ -50,18 +51,19 @@ class ChuniReader(BaseReader):

        for dir in data_dirs:
            self.logger.info(f"Read from {dir}")
            await self.read_events(f"{dir}/event")
            await self.read_music(f"{dir}/music", we_diff)
            await self.read_charges(f"{dir}/chargeItem")
            await self.read_avatar(f"{dir}/avatarAccessory")
            await self.read_login_bonus(f"{dir}/")
            await self.read_nameplate(f"{dir}/namePlate")
            await self.read_trophy(f"{dir}/trophy")
            await self.read_character(f"{dir}/chara", dds_images)
            await self.read_map_icon(f"{dir}/mapIcon")
            await self.read_system_voice(f"{dir}/systemVoice")
            this_opt_id = await self.read_opt_info(dir) # this also treats A000 as an opt, which is intended
            await self.read_events(f"{dir}/event", this_opt_id)
            await self.read_music(f"{dir}/music", we_diff, this_opt_id)
            await self.read_charges(f"{dir}/chargeItem", this_opt_id)
            await self.read_avatar(f"{dir}/avatarAccessory", this_opt_id)
            await self.read_login_bonus(f"{dir}/", this_opt_id)
            await self.read_nameplate(f"{dir}/namePlate", this_opt_id)
            await self.read_trophy(f"{dir}/trophy", this_opt_id)
            await self.read_character(f"{dir}/chara", dds_images, this_opt_id)
            await self.read_map_icon(f"{dir}/mapIcon", this_opt_id)
            await self.read_system_voice(f"{dir}/systemVoice", this_opt_id)

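Each entry in data_dirs is the root of one data package (the base A000 data as well as Axxx option folders), so a directory handed to this loop is expected to look roughly like the sketch below; the layout is illustrative, and only data.conf plus the listed subfolders are actually read:

    A001/
        data.conf
        event/
        music/
        chargeItem/
        avatarAccessory/
        loginBonusPreset/
        namePlate/
        trophy/
        chara/
        mapIcon/
        systemVoice/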
async def read_login_bonus(self, root_dir: str) -> None:
|
||||
async def read_login_bonus(self, root_dir: str, opt_id: Optional[int] = None) -> None:
|
||||
for root, dirs, files in walk(f"{root_dir}loginBonusPreset"):
|
||||
for dir in dirs:
|
||||
if path.exists(f"{root}/{dir}/LoginBonusPreset.xml"):
|
||||
@@ -76,7 +78,7 @@ class ChuniReader(BaseReader):
|
||||
is_enabled = True if (disableFlag is None or disableFlag.text == "false") else False
|
||||
|
||||
result = await self.data.static.put_login_bonus_preset(
|
||||
self.version, id, name, is_enabled
|
||||
self.version, id, name, is_enabled, opt_id
|
||||
)
|
||||
|
||||
if result is not None:
|
||||
@@ -123,6 +125,7 @@ class ChuniReader(BaseReader):
|
||||
item_num,
|
||||
need_login_day_count,
|
||||
login_bonus_category_type,
|
||||
opt_id
|
||||
)
|
||||
|
||||
if result is not None:
|
||||
@@ -132,7 +135,7 @@ class ChuniReader(BaseReader):
|
||||
f"Failed to insert login bonus {bonus_id}"
|
||||
)
|
||||
|
||||
async def read_events(self, evt_dir: str) -> None:
|
||||
async def read_events(self, evt_dir: str, opt_id: Optional[int] = None) -> None:
|
||||
for root, dirs, files in walk(evt_dir):
|
||||
for dir in dirs:
|
||||
if path.exists(f"{root}/{dir}/Event.xml"):
|
||||
@@ -147,14 +150,14 @@ class ChuniReader(BaseReader):
|
||||
event_type = substances.find("type").text
|
||||
|
||||
result = await self.data.static.put_event(
|
||||
self.version, id, event_type, name
|
||||
self.version, id, event_type, name, opt_id
|
||||
)
|
||||
if result is not None:
|
||||
self.logger.info(f"Inserted event {id}")
|
||||
else:
|
||||
self.logger.warning(f"Failed to insert event {id}")
|
||||
|
||||
async def read_music(self, music_dir: str, we_diff: str = "4") -> None:
|
||||
async def read_music(self, music_dir: str, we_diff: str = "4", opt_id: Optional[int] = None) -> None:
|
||||
max_title_len = MusicTable.columns["title"].type.length
|
||||
max_artist_len = MusicTable.columns["artist"].type.length
|
||||
|
||||
@@ -219,6 +222,7 @@ class ChuniReader(BaseReader):
|
||||
genre,
|
||||
jacket_path,
|
||||
we_chara,
|
||||
opt_id
|
||||
)
|
||||
|
||||
if result is not None:
|
||||
@@ -230,7 +234,7 @@ class ChuniReader(BaseReader):
|
||||
f"Failed to insert music {song_id} chart {chart_id}"
|
||||
)
|
||||
|
||||
async def read_charges(self, charge_dir: str) -> None:
|
||||
async def read_charges(self, charge_dir: str, opt_id: Optional[int] = None) -> None:
|
||||
for root, dirs, files in walk(charge_dir):
|
||||
for dir in dirs:
|
||||
if path.exists(f"{root}/{dir}/ChargeItem.xml"):
|
||||
@@ -252,6 +256,7 @@ class ChuniReader(BaseReader):
|
||||
expirationDays,
|
||||
consumeType,
|
||||
sellingAppeal,
|
||||
opt_id
|
||||
)
|
||||
|
||||
if result is not None:
|
||||
@@ -259,7 +264,7 @@ class ChuniReader(BaseReader):
|
||||
else:
|
||||
self.logger.warning(f"Failed to insert charge {id}")
|
||||
|
||||
async def read_avatar(self, avatar_dir: str) -> None:
|
||||
async def read_avatar(self, avatar_dir: str, opt_id: Optional[int] = None) -> None:
|
||||
for root, dirs, files in walk(avatar_dir):
|
||||
for dir in dirs:
|
||||
if path.exists(f"{root}/{dir}/AvatarAccessory.xml"):
|
||||
@@ -284,7 +289,7 @@ class ChuniReader(BaseReader):
|
||||
self.copy_image(texturePath, f"{root}/{dir}", "titles/chuni/img/avatar/")
|
||||
|
||||
result = await self.data.static.put_avatar(
|
||||
self.version, id, name, category, iconPath, texturePath, is_enabled, defaultHave, sortName
|
||||
self.version, id, name, category, iconPath, texturePath, is_enabled, defaultHave, sortName, opt_id
|
||||
)
|
||||
|
||||
if result is not None:
|
||||
@@ -292,7 +297,7 @@ class ChuniReader(BaseReader):
|
||||
else:
|
||||
self.logger.warning(f"Failed to insert avatarAccessory {id}")
|
||||
|
||||
async def read_nameplate(self, nameplate_dir: str) -> None:
|
||||
async def read_nameplate(self, nameplate_dir: str, opt_id: Optional[int] = None) -> None:
|
||||
for root, dirs, files in walk(nameplate_dir):
|
||||
for dir in dirs:
|
||||
if path.exists(f"{root}/{dir}/NamePlate.xml"):
|
||||
@@ -303,7 +308,7 @@ class ChuniReader(BaseReader):
|
||||
for name in xml_root.findall("name"):
|
||||
id = name.find("id").text
|
||||
name = name.find("str").text
|
||||
sortName = xml_root.find("sortName").text
|
||||
sortName = name if xml_root.find("sortName") is None else xml_root.find("sortName").text
|
||||
defaultHave = xml_root.find("defaultHave").text == 'true'
|
||||
disableFlag = xml_root.find("disableFlag") # may not exist in older data
|
||||
is_enabled = True if (disableFlag is None or disableFlag.text == "false") else False
|
||||
@@ -313,7 +318,7 @@ class ChuniReader(BaseReader):
|
||||
self.copy_image(texturePath, f"{root}/{dir}", "titles/chuni/img/nameplate/")
|
||||
|
||||
result = await self.data.static.put_nameplate(
|
||||
self.version, id, name, texturePath, is_enabled, defaultHave, sortName
|
||||
self.version, id, name, texturePath, is_enabled, defaultHave, sortName, opt_id
|
||||
)
|
||||
|
||||
if result is not None:
|
||||
@@ -321,7 +326,7 @@ class ChuniReader(BaseReader):
|
||||
else:
|
||||
self.logger.warning(f"Failed to insert nameplate {id}")
|
||||
|
||||
async def read_trophy(self, trophy_dir: str) -> None:
|
||||
async def read_trophy(self, trophy_dir: str, opt_id: Optional[int] = None) -> None:
|
||||
for root, dirs, files in walk(trophy_dir):
|
||||
for dir in dirs:
|
||||
if path.exists(f"{root}/{dir}/Trophy.xml"):
|
||||
@@ -338,7 +343,7 @@ class ChuniReader(BaseReader):
|
||||
defaultHave = xml_root.find("defaultHave").text == 'true'
|
||||
|
||||
result = await self.data.static.put_trophy(
|
||||
self.version, id, name, rareType, is_enabled, defaultHave
|
||||
self.version, id, name, rareType, is_enabled, defaultHave, opt_id
|
||||
)
|
||||
|
||||
if result is not None:
|
||||
@@ -346,18 +351,21 @@ class ChuniReader(BaseReader):
|
||||
else:
|
||||
self.logger.warning(f"Failed to insert trophy {id}")
|
||||
|
||||
async def read_character(self, chara_dir: str, dds_images: dict) -> None:
|
||||
async def read_character(self, chara_dir: str, dds_images: dict, opt_id: Optional[int] = None) -> None:
|
||||
for root, dirs, files in walk(chara_dir):
|
||||
for dir in dirs:
|
||||
if path.exists(f"{root}/{dir}/Chara.xml"):
|
||||
with open(f"{root}/{dir}/Chara.xml", "r", encoding='utf-8') as fp:
|
||||
strdata = fp.read()
|
||||
# ET may choke if there is a & symbol (which is present in some character xml)
|
||||
if "&" in strdata:
|
||||
strdata = strdata.replace("&", "&")
|
||||
|
||||
xml_root = ET.fromstring(strdata)
|
||||
for name in xml_root.findall("name"):
|
||||
id = name.find("id").text
|
||||
name = name.find("str").text
|
||||
sortName = xml_root.find("sortName").text
|
||||
sortName = name if xml_root.find("sortName") is None else xml_root.find("sortName").text
|
||||
for work in xml_root.findall("works"):
|
||||
worksName = work.find("str").text
|
||||
rareType = xml_root.find("rareType").text
|
||||
@@ -382,7 +390,7 @@ class ChuniReader(BaseReader):
|
||||
self.logger.warning(f"Unable to location character {id} images")
|
||||
|
||||
result = await self.data.static.put_character(
|
||||
self.version, id, name, sortName, worksName, rareType, imagePath1, imagePath2, imagePath3, is_enabled, defaultHave
|
||||
self.version, id, name, sortName, worksName, rareType, imagePath1, imagePath2, imagePath3, is_enabled, defaultHave, opt_id
|
||||
)
|
||||
|
||||
if result is not None:
|
||||
@@ -390,7 +398,7 @@ class ChuniReader(BaseReader):
|
||||
else:
|
||||
self.logger.warning(f"Failed to insert character {id}")
|
||||
|
||||
async def read_map_icon(self, mapicon_dir: str) -> None:
|
||||
async def read_map_icon(self, mapicon_dir: str, opt_id: Optional[int] = None) -> None:
|
||||
for root, dirs, files in walk(mapicon_dir):
|
||||
for dir in dirs:
|
||||
if path.exists(f"{root}/{dir}/MapIcon.xml"):
|
||||
@@ -401,7 +409,7 @@ class ChuniReader(BaseReader):
|
||||
for name in xml_root.findall("name"):
|
||||
id = name.find("id").text
|
||||
name = name.find("str").text
|
||||
sortName = xml_root.find("sortName").text
|
||||
sortName = name if xml_root.find("sortName") is None else xml_root.find("sortName").text
|
||||
for image in xml_root.findall("image"):
|
||||
iconPath = image.find("path").text
|
||||
self.copy_image(iconPath, f"{root}/{dir}", "titles/chuni/img/mapIcon/")
|
||||
@@ -410,7 +418,7 @@ class ChuniReader(BaseReader):
|
||||
is_enabled = True if (disableFlag is None or disableFlag.text == "false") else False
|
||||
|
||||
result = await self.data.static.put_map_icon(
|
||||
self.version, id, name, sortName, iconPath, is_enabled, defaultHave
|
||||
self.version, id, name, sortName, iconPath, is_enabled, defaultHave, opt_id
|
||||
)
|
||||
|
||||
if result is not None:
|
||||
@@ -418,7 +426,7 @@ class ChuniReader(BaseReader):
|
||||
else:
|
||||
self.logger.warning(f"Failed to map icon {id}")
|
||||
|
||||
async def read_system_voice(self, voice_dir: str) -> None:
|
||||
async def read_system_voice(self, voice_dir: str, opt_id: Optional[int] = None) -> None:
|
||||
for root, dirs, files in walk(voice_dir):
|
||||
for dir in dirs:
|
||||
if path.exists(f"{root}/{dir}/SystemVoice.xml"):
|
||||
@@ -429,7 +437,7 @@ class ChuniReader(BaseReader):
|
||||
for name in xml_root.findall("name"):
|
||||
id = name.find("id").text
|
||||
name = name.find("str").text
|
||||
sortName = xml_root.find("sortName").text
|
||||
sortName = name if xml_root.find("sortName") is None else xml_root.find("sortName").text
|
||||
for image in xml_root.findall("image"):
|
||||
imagePath = image.find("path").text
|
||||
self.copy_image(imagePath, f"{root}/{dir}", "titles/chuni/img/systemVoice/")
|
||||
@@ -438,7 +446,7 @@ class ChuniReader(BaseReader):
|
||||
is_enabled = True if (disableFlag is None or disableFlag.text == "false") else False
|
||||
|
||||
result = await self.data.static.put_system_voice(
|
||||
self.version, id, name, sortName, imagePath, is_enabled, defaultHave
|
||||
self.version, id, name, sortName, imagePath, is_enabled, defaultHave, opt_id
|
||||
)
|
||||
|
||||
if result is not None:
|
||||
@@ -446,6 +454,51 @@ class ChuniReader(BaseReader):
|
||||
else:
|
||||
self.logger.warning(f"Failed to system voice {id}")
|
||||

    async def read_opt_info(self, directory: str) -> Optional[int]:
        if not path.exists(f"{directory}/data.conf"):
            self.logger.warning(f"{directory} does not contain data.conf, opt info will not be read")
            return None

        data_config = configparser.ConfigParser()
        if not data_config.read(f"{directory}/data.conf", 'utf-8'):
            self.logger.warning(f"{directory}/data.conf failed to read or parse, opt info will not be read")
            return None

        if 'Version' not in data_config:
            self.logger.warning(f"{directory}/data.conf contains no Version section, opt info will not be read")
            return None

        if 'Name' not in data_config['Version']: # Probably not worth checking that the other sections exist
            self.logger.warning(f"{directory}/data.conf contains no Name item in the Version section, opt info will not be read")
            return None

        if 'VerMajor' not in data_config['Version']: # Probably not worth checking that the other sections exist
            self.logger.warning(f"{directory}/data.conf contains no VerMajor item in the Version section, opt info will not be read")
            return None

        if 'VerMinor' not in data_config['Version']: # Probably not worth checking that the other sections exist
            self.logger.warning(f"{directory}/data.conf contains no VerMinor item in the Version section, opt info will not be read")
            return None

        if 'VerRelease' not in data_config['Version']: # Probably not worth checking that the other sections exist
            self.logger.warning(f"{directory}/data.conf contains no VerRelease item in the Version section, opt info will not be read")
            return None

        opt_seq = data_config['Version']['VerRelease']
        opt_folder = path.basename(path.normpath(directory))
        opt_id = await self.data.static.get_opt_by_version_folder(self.version, opt_folder)

        if not opt_id:
            opt_id = await self.data.static.put_opt(self.version, opt_folder, opt_seq)
            if not opt_id:
                self.logger.error(f"Failed to put opt folder info for {opt_folder}")
                return None
        else:
            opt_id = opt_id['id']

        self.logger.info(f"Opt folder {opt_folder} (Database ID {opt_id}) contains {data_config['Version']['Name']} v{data_config['Version']['VerMajor']}.{data_config['Version']['VerMinor']}.{opt_seq}")
        return opt_id

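For reference, a minimal data.conf that satisfies the checks above could look like this (illustrative values; real option packages carry more sections and keys):

    [Version]
    Name=CHUNITHM LUMINOUS PLUS
    VerMajor=2
    VerMinor=25
    VerRelease=5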
def copy_image(self, filename: str, src_dir: str, dst_dir: str) -> None:
|
||||
# Convert the image to png so we can easily display it in the frontend
|
||||
file_src = path.join(src_dir, filename)
|
||||
|
||||
@@ -7,16 +7,29 @@ from sqlalchemy import (
    PrimaryKeyConstraint,
    and_,
)
from sqlalchemy.types import Integer, String, TIMESTAMP, Boolean, JSON, Float
from sqlalchemy.engine.base import Connection
from sqlalchemy.types import Integer, String, TIMESTAMP, Boolean, BIGINT, Float, INTEGER, VARCHAR, BOOLEAN
from sqlalchemy.engine import Row
from sqlalchemy.schema import ForeignKey
from sqlalchemy.sql import func, select
from sqlalchemy.dialects.mysql import insert
from datetime import datetime
from sqlalchemy.sql.functions import coalesce

from core.data.schema import BaseData, metadata

opts = Table(
    "chuni_static_opt",
    metadata,
    Column("id", BIGINT, primary_key=True, nullable=False),
    Column("version", INTEGER, nullable=False),
    Column("name", VARCHAR(4), nullable=False), # Axxx
    Column("sequence", INTEGER, nullable=False), # VerRelease in data.conf
    Column("whenRead", TIMESTAMP, nullable=False, server_default=func.now()),
    Column("isEnable", BOOLEAN, nullable=False, server_default="1"),
    UniqueConstraint("version", "name", name="chuni_static_opt_uk"),
    mysql_charset="utf8mb4",
)

events = Table(
|
||||
"chuni_static_events",
|
||||
metadata,
|
||||
@@ -27,6 +40,7 @@ events = Table(
|
||||
Column("name", String(255)),
|
||||
Column("startDate", TIMESTAMP, server_default=func.now()),
|
||||
Column("enabled", Boolean, server_default="1"),
|
||||
Column("opt", ForeignKey("chuni_static_opt.id", ondelete="SET NULL", onupdate="cascade")),
|
||||
UniqueConstraint("version", "eventId", name="chuni_static_events_uk"),
|
||||
mysql_charset="utf8mb4",
|
||||
)
|
||||
@@ -44,6 +58,7 @@ music = Table(
|
||||
Column("genre", String(255)),
|
||||
Column("jacketPath", String(255)),
|
||||
Column("worldsEndTag", String(7)),
|
||||
Column("opt", ForeignKey("chuni_static_opt.id", ondelete="SET NULL", onupdate="cascade")),
|
||||
UniqueConstraint("version", "songId", "chartId", name="chuni_static_music_uk"),
|
||||
mysql_charset="utf8mb4",
|
||||
)
|
||||
@@ -59,6 +74,7 @@ charge = Table(
|
||||
Column("consumeType", Integer),
|
||||
Column("sellingAppeal", Boolean),
|
||||
Column("enabled", Boolean, server_default="1"),
|
||||
Column("opt", ForeignKey("chuni_static_opt.id", ondelete="SET NULL", onupdate="cascade")),
|
||||
UniqueConstraint("version", "chargeId", name="chuni_static_charge_uk"),
|
||||
mysql_charset="utf8mb4",
|
||||
)
|
||||
@@ -76,6 +92,7 @@ avatar = Table(
|
||||
Column("isEnabled", Boolean, server_default="1"),
|
||||
Column("defaultHave", Boolean, server_default="0"),
|
||||
Column("sortName", String(255)),
|
||||
Column("opt", ForeignKey("chuni_static_opt.id", ondelete="SET NULL", onupdate="cascade")),
|
||||
UniqueConstraint("version", "avatarAccessoryId", name="chuni_static_avatar_uk"),
|
||||
mysql_charset="utf8mb4",
|
||||
)
|
||||
@@ -91,6 +108,7 @@ nameplate = Table(
|
||||
Column("isEnabled", Boolean, server_default="1"),
|
||||
Column("defaultHave", Boolean, server_default="0"),
|
||||
Column("sortName", String(255)),
|
||||
Column("opt", ForeignKey("chuni_static_opt.id", ondelete="SET NULL", onupdate="cascade")),
|
||||
UniqueConstraint("version", "nameplateId", name="chuni_static_nameplate_uk"),
|
||||
mysql_charset="utf8mb4",
|
||||
)
|
||||
@@ -110,6 +128,7 @@ character = Table(
|
||||
Column("imagePath3", String(255)),
|
||||
Column("isEnabled", Boolean, server_default="1"),
|
||||
Column("defaultHave", Boolean, server_default="0"),
|
||||
Column("opt", ForeignKey("chuni_static_opt.id", ondelete="SET NULL", onupdate="cascade")),
|
||||
UniqueConstraint("version", "characterId", name="chuni_static_character_uk"),
|
||||
mysql_charset="utf8mb4",
|
||||
)
|
||||
@@ -124,6 +143,7 @@ trophy = Table(
|
||||
Column("rareType", Integer),
|
||||
Column("isEnabled", Boolean, server_default="1"),
|
||||
Column("defaultHave", Boolean, server_default="0"),
|
||||
Column("opt", ForeignKey("chuni_static_opt.id", ondelete="SET NULL", onupdate="cascade")),
|
||||
UniqueConstraint("version", "trophyId", name="chuni_static_trophy_uk"),
|
||||
mysql_charset="utf8mb4",
|
||||
)
|
||||
@@ -139,6 +159,7 @@ map_icon = Table(
|
||||
Column("iconPath", String(255)),
|
||||
Column("isEnabled", Boolean, server_default="1"),
|
||||
Column("defaultHave", Boolean, server_default="0"),
|
||||
Column("opt", ForeignKey("chuni_static_opt.id", ondelete="SET NULL", onupdate="cascade")),
|
||||
UniqueConstraint("version", "mapIconId", name="chuni_static_mapicon_uk"),
|
||||
mysql_charset="utf8mb4",
|
||||
)
|
||||
@@ -154,6 +175,7 @@ system_voice = Table(
|
||||
Column("imagePath", String(255)),
|
||||
Column("isEnabled", Boolean, server_default="1"),
|
||||
Column("defaultHave", Boolean, server_default="0"),
|
||||
Column("opt", ForeignKey("chuni_static_opt.id", ondelete="SET NULL", onupdate="cascade")),
|
||||
UniqueConstraint("version", "voiceId", name="chuni_static_systemvoice_uk"),
|
||||
mysql_charset="utf8mb4",
|
||||
)
|
||||
@@ -175,6 +197,7 @@ gachas = Table(
|
||||
Column("endDate", TIMESTAMP, server_default="2038-01-01 00:00:00.0"),
|
||||
Column("noticeStartDate", TIMESTAMP, server_default="2018-01-01 00:00:00.0"),
|
||||
Column("noticeEndDate", TIMESTAMP, server_default="2038-01-01 00:00:00.0"),
|
||||
Column("opt", ForeignKey("cm_static_opts.id", ondelete="SET NULL", onupdate="cascade")),
|
||||
UniqueConstraint("version", "gachaId", "gachaName", name="chuni_static_gachas_uk"),
|
||||
mysql_charset="utf8mb4",
|
||||
)
|
||||
@@ -195,6 +218,7 @@ cards = Table(
|
||||
Column("combo", Integer, nullable=False),
|
||||
Column("chain", Integer, nullable=False),
|
||||
Column("skillName", String(255), nullable=False),
|
||||
Column("opt", ForeignKey("cm_static_opts.id", ondelete="SET NULL", onupdate="cascade")),
|
||||
UniqueConstraint("version", "cardId", name="chuni_static_cards_uk"),
|
||||
mysql_charset="utf8mb4",
|
||||
)
|
||||
@@ -219,6 +243,7 @@ login_bonus_preset = Table(
|
||||
Column("version", Integer, nullable=False),
|
||||
Column("presetName", String(255), nullable=False),
|
||||
Column("isEnabled", Boolean, server_default="1"),
|
||||
Column("opt", ForeignKey("chuni_static_opt.id", ondelete="SET NULL", onupdate="cascade")),
|
||||
PrimaryKeyConstraint(
|
||||
"presetId", "version", name="chuni_static_login_bonus_preset_pk"
|
||||
),
|
||||
@@ -238,6 +263,7 @@ login_bonus = Table(
|
||||
Column("itemNum", Integer, nullable=False),
|
||||
Column("needLoginDayCount", Integer, nullable=False),
|
||||
Column("loginBonusCategoryType", Integer, nullable=False),
|
||||
Column("opt", BIGINT),
|
||||
UniqueConstraint(
|
||||
"version", "presetId", "loginBonusId", name="chuni_static_login_bonus_uk"
|
||||
),
|
||||
@@ -251,10 +277,18 @@ login_bonus = Table(
|
||||
ondelete="CASCADE",
|
||||
name="chuni_static_login_bonus_ibfk_1",
|
||||
),
|
||||
ForeignKeyConstraint(
|
||||
["opt"],
|
||||
[
|
||||
"chuni_static_opt.id",
|
||||
],
|
||||
onupdate="SET NULL",
|
||||
ondelete="CASCADE",
|
||||
name="chuni_static_login_bonus_ibfk_2",
|
||||
),
|
||||
mysql_charset="utf8mb4",
|
||||
)
|
||||
|
||||
|
||||
class ChuniStaticData(BaseData):
|
||||
async def put_login_bonus(
|
||||
self,
|
||||
@@ -267,6 +301,7 @@ class ChuniStaticData(BaseData):
|
||||
item_num: int,
|
||||
need_login_day_count: int,
|
||||
login_bonus_category_type: int,
|
||||
opt_id: int = None
|
||||
) -> Optional[int]:
|
||||
sql = insert(login_bonus).values(
|
||||
version=version,
|
||||
@@ -278,6 +313,7 @@ class ChuniStaticData(BaseData):
|
||||
itemNum=item_num,
|
||||
needLoginDayCount=need_login_day_count,
|
||||
loginBonusCategoryType=login_bonus_category_type,
|
||||
opt=coalesce(login_bonus.c.opt, opt_id)
|
||||
)
|
||||
|
||||
conflict = sql.on_duplicate_key_update(
|
||||
@@ -286,6 +322,7 @@ class ChuniStaticData(BaseData):
|
||||
itemNum=item_num,
|
||||
needLoginDayCount=need_login_day_count,
|
||||
loginBonusCategoryType=login_bonus_category_type,
|
||||
opt=coalesce(login_bonus.c.opt, opt_id)
|
||||
)
|
||||
|
||||
result = await self.execute(conflict)
|
||||
@@ -327,17 +364,19 @@ class ChuniStaticData(BaseData):
|
||||
return result.fetchone()
|
||||
|
||||
async def put_login_bonus_preset(
|
||||
self, version: int, preset_id: int, preset_name: str, is_enabled: bool
|
||||
self, version: int, preset_id: int, preset_name: str, isEnabled: bool, opt_id: int = None
|
||||
) -> Optional[int]:
|
||||
sql = insert(login_bonus_preset).values(
|
||||
presetId=preset_id,
|
||||
version=version,
|
||||
presetName=preset_name,
|
||||
isEnabled=is_enabled,
|
||||
isEnabled=isEnabled,
|
||||
opt=coalesce(login_bonus_preset.c.opt, opt_id)
|
||||
)
|
||||
|
||||
conflict = sql.on_duplicate_key_update(
|
||||
presetName=preset_name, isEnabled=is_enabled
|
||||
|
||||
# Chuni has a habit of including duplicates in its opt files, so only update opt if it's null
|
||||
conflict = sql.on_duplicate_key_update(
|
||||
presetName=preset_name, isEnabled=isEnabled, opt=coalesce(login_bonus_preset.c.opt, opt_id)
|
||||
)
|
||||
|
||||
result = await self.execute(conflict)
|
||||
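The opt=coalesce(<table>.c.opt, opt_id) pattern used throughout these upserts keeps whichever opt id first claimed the row: on duplicate key the column is only written while it is still NULL. A rough equivalent in plain Python (illustrative sketch, not part of the diff):

    def resolve_opt(existing_opt, incoming_opt_id):
        # COALESCE returns the first non-NULL argument, so an opt id that is
        # already recorded for the row is never overwritten by a later re-read.
        return existing_opt if existing_opt is not None else incoming_opt_id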
@@ -346,12 +385,12 @@ class ChuniStaticData(BaseData):
|
||||
return result.lastrowid
|
||||
|
||||
async def get_login_bonus_presets(
|
||||
self, version: int, is_enabled: bool = True
|
||||
self, version: int, isEnabled: bool = True
|
||||
) -> Optional[List[Row]]:
|
||||
sql = login_bonus_preset.select(
|
||||
and_(
|
||||
login_bonus_preset.c.version == version,
|
||||
login_bonus_preset.c.isEnabled == is_enabled,
|
||||
login_bonus_preset.c.isEnabled == isEnabled,
|
||||
)
|
||||
)
|
||||
|
||||
@@ -361,13 +400,13 @@ class ChuniStaticData(BaseData):
|
||||
return result.fetchall()
|
||||
|
||||
async def put_event(
|
||||
self, version: int, event_id: int, type: int, name: str
|
||||
self, version: int, event_id: int, type: int, name: str, opt_id: int = None
|
||||
) -> Optional[int]:
|
||||
sql = insert(events).values(
|
||||
version=version, eventId=event_id, type=type, name=name
|
||||
version=version, eventId=event_id, type=type, name=name, opt=coalesce(events.c.opt, opt_id)
|
||||
)
|
||||
|
||||
conflict = sql.on_duplicate_key_update(name=name)
|
||||
conflict = sql.on_duplicate_key_update(name=name, opt=coalesce(events.c.opt, opt_id))
|
||||
|
||||
result = await self.execute(conflict)
|
||||
if result is None:
|
||||
@@ -435,6 +474,7 @@ class ChuniStaticData(BaseData):
|
||||
genre: str,
|
||||
jacketPath: str,
|
||||
we_tag: str,
|
||||
opt_id: int = None
|
||||
) -> Optional[int]:
|
||||
sql = insert(music).values(
|
||||
version=version,
|
||||
@@ -446,6 +486,7 @@ class ChuniStaticData(BaseData):
|
||||
genre=genre,
|
||||
jacketPath=jacketPath,
|
||||
worldsEndTag=we_tag,
|
||||
opt=coalesce(music.c.opt, opt_id)
|
||||
)
|
||||
|
||||
conflict = sql.on_duplicate_key_update(
|
||||
@@ -455,6 +496,7 @@ class ChuniStaticData(BaseData):
|
||||
genre=genre,
|
||||
jacketPath=jacketPath,
|
||||
worldsEndTag=we_tag,
|
||||
opt=coalesce(music.c.opt, opt_id)
|
||||
)
|
||||
|
||||
result = await self.execute(conflict)
|
||||
@@ -470,6 +512,7 @@ class ChuniStaticData(BaseData):
|
||||
expiration_days: int,
|
||||
consume_type: int,
|
||||
selling_appeal: bool,
|
||||
opt_id: int = None
|
||||
) -> Optional[int]:
|
||||
sql = insert(charge).values(
|
||||
version=version,
|
||||
@@ -478,6 +521,7 @@ class ChuniStaticData(BaseData):
|
||||
expirationDays=expiration_days,
|
||||
consumeType=consume_type,
|
||||
sellingAppeal=selling_appeal,
|
||||
opt=coalesce(charge.c.opt, opt_id)
|
||||
)
|
||||
|
||||
conflict = sql.on_duplicate_key_update(
|
||||
@@ -485,6 +529,7 @@ class ChuniStaticData(BaseData):
|
||||
expirationDays=expiration_days,
|
||||
consumeType=consume_type,
|
||||
sellingAppeal=selling_appeal,
|
||||
opt=coalesce(charge.c.opt, opt_id)
|
||||
)
|
||||
|
||||
result = await self.execute(conflict)
|
||||
@@ -542,7 +587,6 @@ class ChuniStaticData(BaseData):
|
||||
return None
|
||||
return result.fetchone()
|
||||
|
||||
|
||||
async def put_avatar(
|
||||
self,
|
||||
version: int,
|
||||
@@ -553,7 +597,8 @@ class ChuniStaticData(BaseData):
|
||||
texturePath: str,
|
||||
isEnabled: int,
|
||||
defaultHave: int,
|
||||
sortName: str
|
||||
sortName: str,
|
||||
opt_id: int = None
|
||||
) -> Optional[int]:
|
||||
sql = insert(avatar).values(
|
||||
version=version,
|
||||
@@ -564,7 +609,8 @@ class ChuniStaticData(BaseData):
|
||||
texturePath=texturePath,
|
||||
isEnabled=isEnabled,
|
||||
defaultHave=defaultHave,
|
||||
sortName=sortName
|
||||
sortName=sortName,
|
||||
opt=coalesce(avatar.c.opt, opt_id)
|
||||
)
|
||||
|
||||
conflict = sql.on_duplicate_key_update(
|
||||
@@ -574,7 +620,8 @@ class ChuniStaticData(BaseData):
|
||||
texturePath=texturePath,
|
||||
isEnabled=isEnabled,
|
||||
defaultHave=defaultHave,
|
||||
sortName=sortName
|
||||
sortName=sortName,
|
||||
opt=coalesce(avatar.c.opt, opt_id)
|
||||
)
|
||||
|
||||
result = await self.execute(conflict)
|
||||
@@ -601,7 +648,8 @@ class ChuniStaticData(BaseData):
|
||||
texturePath: str,
|
||||
isEnabled: int,
|
||||
defaultHave: int,
|
||||
sortName: str
|
||||
sortName: str,
|
||||
opt_id: int = None
|
||||
) -> Optional[int]:
|
||||
sql = insert(nameplate).values(
|
||||
version=version,
|
||||
@@ -610,7 +658,8 @@ class ChuniStaticData(BaseData):
|
||||
texturePath=texturePath,
|
||||
isEnabled=isEnabled,
|
||||
defaultHave=defaultHave,
|
||||
sortName=sortName
|
||||
sortName=sortName,
|
||||
opt=coalesce(nameplate.c.opt, opt_id)
|
||||
)
|
||||
|
||||
conflict = sql.on_duplicate_key_update(
|
||||
@@ -618,7 +667,8 @@ class ChuniStaticData(BaseData):
|
||||
texturePath=texturePath,
|
||||
isEnabled=isEnabled,
|
||||
defaultHave=defaultHave,
|
||||
sortName=sortName
|
||||
sortName=sortName,
|
||||
opt=coalesce(nameplate.c.opt, opt_id)
|
||||
)
|
||||
|
||||
result = await self.execute(conflict)
|
||||
@@ -645,6 +695,7 @@ class ChuniStaticData(BaseData):
|
||||
rareType: int,
|
||||
isEnabled: int,
|
||||
defaultHave: int,
|
||||
opt_id: int = None
|
||||
) -> Optional[int]:
|
||||
sql = insert(trophy).values(
|
||||
version=version,
|
||||
@@ -652,14 +703,16 @@ class ChuniStaticData(BaseData):
|
||||
name=name,
|
||||
rareType=rareType,
|
||||
isEnabled=isEnabled,
|
||||
defaultHave=defaultHave
|
||||
defaultHave=defaultHave,
|
||||
opt=coalesce(trophy.c.opt, opt_id)
|
||||
)
|
||||
|
||||
conflict = sql.on_duplicate_key_update(
|
||||
name=name,
|
||||
rareType=rareType,
|
||||
isEnabled=isEnabled,
|
||||
defaultHave=defaultHave
|
||||
defaultHave=defaultHave,
|
||||
opt=coalesce(trophy.c.opt, opt_id)
|
||||
)
|
||||
|
||||
result = await self.execute(conflict)
|
||||
@@ -687,6 +740,7 @@ class ChuniStaticData(BaseData):
|
||||
iconPath: str,
|
||||
isEnabled: int,
|
||||
defaultHave: int,
|
||||
opt_id: int = None
|
||||
) -> Optional[int]:
|
||||
sql = insert(map_icon).values(
|
||||
version=version,
|
||||
@@ -695,7 +749,8 @@ class ChuniStaticData(BaseData):
|
||||
sortName=sortName,
|
||||
iconPath=iconPath,
|
||||
isEnabled=isEnabled,
|
||||
defaultHave=defaultHave
|
||||
defaultHave=defaultHave,
|
||||
opt=coalesce(map_icon.c.opt, opt_id)
|
||||
)
|
||||
|
||||
conflict = sql.on_duplicate_key_update(
|
||||
@@ -703,7 +758,8 @@ class ChuniStaticData(BaseData):
|
||||
sortName=sortName,
|
||||
iconPath=iconPath,
|
||||
isEnabled=isEnabled,
|
||||
defaultHave=defaultHave
|
||||
defaultHave=defaultHave,
|
||||
opt=coalesce(map_icon.c.opt, opt_id)
|
||||
)
|
||||
|
||||
result = await self.execute(conflict)
|
||||
@@ -731,6 +787,7 @@ class ChuniStaticData(BaseData):
|
||||
imagePath: str,
|
||||
isEnabled: int,
|
||||
defaultHave: int,
|
||||
opt_id: int = None
|
||||
) -> Optional[int]:
|
||||
sql = insert(system_voice).values(
|
||||
version=version,
|
||||
@@ -739,7 +796,8 @@ class ChuniStaticData(BaseData):
|
||||
sortName=sortName,
|
||||
imagePath=imagePath,
|
||||
isEnabled=isEnabled,
|
||||
defaultHave=defaultHave
|
||||
defaultHave=defaultHave,
|
||||
opt=coalesce(system_voice.c.opt, opt_id)
|
||||
)
|
||||
|
||||
conflict = sql.on_duplicate_key_update(
|
||||
@@ -747,7 +805,8 @@ class ChuniStaticData(BaseData):
|
||||
sortName=sortName,
|
||||
imagePath=imagePath,
|
||||
isEnabled=isEnabled,
|
||||
defaultHave=defaultHave
|
||||
defaultHave=defaultHave,
|
||||
opt=coalesce(system_voice.c.opt, opt_id)
|
||||
)
|
||||
|
||||
result = await self.execute(conflict)
|
||||
@@ -778,7 +837,8 @@ class ChuniStaticData(BaseData):
|
||||
imagePath2: str,
|
||||
imagePath3: str,
|
||||
isEnabled: int,
|
||||
defaultHave: int
|
||||
defaultHave: int,
|
||||
opt_id: int = None
|
||||
) -> Optional[int]:
|
||||
sql = insert(character).values(
|
||||
version=version,
|
||||
@@ -791,7 +851,8 @@ class ChuniStaticData(BaseData):
|
||||
imagePath2=imagePath2,
|
||||
imagePath3=imagePath3,
|
||||
isEnabled=isEnabled,
|
||||
defaultHave=defaultHave
|
||||
defaultHave=defaultHave,
|
||||
opt=coalesce(character.c.opt, opt_id)
|
||||
)
|
||||
|
||||
conflict = sql.on_duplicate_key_update(
|
||||
@@ -803,7 +864,8 @@ class ChuniStaticData(BaseData):
|
||||
imagePath2=imagePath2,
|
||||
imagePath3=imagePath3,
|
||||
isEnabled=isEnabled,
|
||||
defaultHave=defaultHave
|
||||
defaultHave=defaultHave,
|
||||
opt=coalesce(character.c.opt, opt_id)
|
||||
)
|
||||
|
||||
result = await self.execute(conflict)
|
||||
@@ -827,12 +889,14 @@ class ChuniStaticData(BaseData):
|
||||
version: int,
|
||||
gacha_id: int,
|
||||
gacha_name: int,
|
||||
opt_id: int = None,
|
||||
**gacha_data,
|
||||
) -> Optional[int]:
|
||||
sql = insert(gachas).values(
|
||||
version=version,
|
||||
gachaId=gacha_id,
|
||||
gachaName=gacha_name,
|
||||
opt=coalesce(gachas.c.opt, opt_id),
|
||||
**gacha_data,
|
||||
)
|
||||
|
||||
@@ -840,6 +904,7 @@ class ChuniStaticData(BaseData):
|
||||
version=version,
|
||||
gachaId=gacha_id,
|
||||
gachaName=gacha_name,
|
||||
opt=coalesce(gachas.c.opt, opt_id),
|
||||
**gacha_data,
|
||||
)
|
||||
|
||||
@@ -909,10 +974,10 @@ class ChuniStaticData(BaseData):
|
||||
return None
|
||||
return result.fetchone()
|
||||
|
||||
async def put_card(self, version: int, card_id: int, **card_data) -> Optional[int]:
|
||||
sql = insert(cards).values(version=version, cardId=card_id, **card_data)
|
||||
async def put_card(self, version: int, card_id: int, opt_id: int = None,**card_data) -> Optional[int]:
|
||||
sql = insert(cards).values(version=version, cardId=card_id, opt=coalesce(cards.c.opt, opt_id), **card_data)
|
||||
|
||||
conflict = sql.on_duplicate_key_update(**card_data)
|
||||
conflict = sql.on_duplicate_key_update(opt=coalesce(cards.c.opt, opt_id), **card_data)
|
||||
|
||||
result = await self.execute(conflict)
|
||||
if result is None:
|
||||
@@ -926,4 +991,86 @@ class ChuniStaticData(BaseData):
|
||||
result = await self.execute(sql)
|
||||
if result is None:
|
||||
return None
|
||||
return result.fetchone()
|
||||
return result.fetchone()
|
||||

    async def put_opt(self, version: int, folder: str, sequence: int) -> Optional[int]:
        sql = insert(opts).values(version=version, name=folder, sequence=sequence)

        conflict = sql.on_duplicate_key_update(sequence=sequence, whenRead=datetime.now())

        result = await self.execute(conflict)
        if result is None:
            self.logger.warning(f"Failed to insert opt! version {version} folder {folder} sequence {sequence}")
            return None
        return result.lastrowid

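Hypothetical usage, mirroring the call made from read_opt_info (the folder name and sequence are invented):

    # register option folder A001 for the current game version; called only when
    # get_opt_by_version_folder found no existing row, so this is a fresh insert
    opt_id = await self.data.static.put_opt(self.version, "A001", 5)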
async def get_opt_by_version_folder(self, version: int, folder: str) -> Optional[Row]:
|
||||
result = await self.execute(opts.select(and_(
|
||||
opts.c.version == version,
|
||||
opts.c.name == folder,
|
||||
)))
|
||||
|
||||
if result is None:
|
||||
return None
|
||||
return result.fetchone()
|
||||
|
||||
async def get_opt_by_version_sequence(self, version: int, sequence: str) -> Optional[Row]:
|
||||
result = await self.execute(opts.select(and_(
|
||||
opts.c.version == version,
|
||||
opts.c.sequence == sequence,
|
||||
)))
|
||||
|
||||
if result is None:
|
||||
return None
|
||||
return result.fetchone()
|
||||
|
||||
async def get_opts_by_version(self, version: int) -> Optional[List[Row]]:
|
||||
result = await self.execute(opts.select(opts.c.version == version))
|
||||
|
||||
if result is None:
|
||||
return None
|
||||
return result.fetchall()
|
||||
|
||||
async def get_opts_enabled_by_version(self, version: int) -> Optional[List[Row]]:
|
||||
result = await self.execute(opts.select(and_(
|
||||
opts.c.version == version,
|
||||
opts.c.isEnable == True,
|
||||
)))
|
||||
|
||||
if result is None:
|
||||
return None
|
||||
return result.fetchall()
|
||||
|
||||
async def get_latest_enabled_opt_by_version(self, version: int) -> Optional[Row]:
|
||||
result = await self.execute(
|
||||
opts.select(and_(
|
||||
opts.c.version == version,
|
||||
opts.c.isEnable == True,
|
||||
)).order_by(opts.c.sequence.desc())
|
||||
)
|
||||
|
||||
if result is None:
|
||||
return None
|
||||
return result.fetchone()
|
||||
|
||||
async def get_opts(self) -> Optional[List[Row]]:
|
||||
result = await self.execute(opts.select())
|
||||
|
||||
if result is None:
|
||||
return None
|
||||
return result.fetchall()
|
||||
|
||||
async def set_opt_enabled(self, opt_id: int, enabled: bool) -> bool:
|
||||
result = await self.execute(opts.update(opts.c.id == opt_id).values(isEnable=enabled))
|
||||
|
||||
if result is None:
|
||||
self.logger.error(f"Failed to set opt enabled status to {enabled} for opt {opt_id}")
|
||||
return False
|
||||
return True
|
||||
|
||||
@@ -118,9 +118,9 @@ userbox_components = {
|
||||
"{{ nameplates[profile.nameplateId]["texturePath"] }}", "", "", ""],
|
||||
|
||||
"character":["{{ characters|length }}",
|
||||
"{{ profile.charaIllustId }}",
|
||||
"{{ characters[profile.charaIllustId]["name"] }}",
|
||||
"{{ characters[profile.charaIllustId]["iconPath"] }}", "", "", ""]
|
||||
"{{ profile.characterId }}",
|
||||
"{{ characters[profile.characterId]["name"] }}",
|
||||
"{{ characters[profile.characterId]["iconPath"] }}", "", "", ""]
|
||||
};
|
||||
types = Object.keys(userbox_components);
|
||||
orig_trophy = curr_trophy = "{{ profile.trophyId }}";
|
||||
|
||||
@@ -327,3 +327,39 @@ class CardMakerReader(BaseReader):
|
||||
maxSelectPoint=max_select_point,
|
||||
)
|
||||
self.logger.info(f"Added ongeki gacha {gacha_id}")
|
||||
|
||||
async def read_opt(self, base_dir: str) -> None:
|
||||
self.logger.info(f"Reading opt data from {base_dir}...")
|
||||
cm_data_cfg = None
|
||||
cm_data_cfg_file = os.path.join(base_dir, "DataConfig.xml")
|
||||
|
||||
geki_data_cfg = None
|
||||
geki_data_cfg_file = os.path.join(base_dir, "GEKI", "DataConfig.xml")
|
||||
|
||||
mai2_data_cfg = None
|
||||
mai2_data_cfg_file = os.path.join(base_dir, "MAI", "DataConfig.xml")
|
||||
|
||||
if os.path.exists(cm_data_cfg_file):
|
||||
with open(cm_data_cfg_file, "r") as f:
|
||||
cm_data_cfg = ET.fromstring(f.read())
|
||||
else:
|
||||
self.logger.info(f"No DataConfig.xml in {base_dir}, sequence will be null")
|
||||
|
||||
if os.path.exists(geki_data_cfg_file):
|
||||
with open(geki_data_cfg_file, "r") as f:
|
||||
geki_data_cfg = ET.fromstring(f.read())
|
||||
else:
|
||||
self.logger.info(f"Cannot find {geki_data_cfg_file}, gekiVersion and gekiReleaseVer will be null")
|
||||
|
||||
if os.path.exists(mai2_data_cfg_file):
|
||||
with open(mai2_data_cfg_file, "r") as f:
|
||||
mai2_data_cfg = ET.fromstring(f.read())
|
||||
else:
|
||||
self.logger.info(f"Cannot find {mai2_data_cfg_file}, mai2Version and mai2ReleaseVer will be null")
|
||||
|
||||
cm_rel_ver = int(cm_data_cfg.find("DataConfig/version/release").text)
|
||||
|
||||
geki_rel_ver = int(geki_data_cfg.find("DataConfig/version/release").text)
|
||||
|
||||
mai2_rel_ver = int(mai2_data_cfg.find("DataConfig/version/release").text)
|
||||
mai2_db_ver = Mai2Constants.int_ver_to_game_ver(mai2_data_cfg.find("DataConfig/version/major").text + mai2_data_cfg.find("DataConfig/version/minor").text)
|
||||
|
||||
@@ -1,3 +1,6 @@
|
||||
from typing import Optional
|
||||
from core.utils import floor_to_nearest_005
|
||||
|
||||
class Mai2Constants:
|
||||
GRADE = {
|
||||
"D": 0,
|
||||
@@ -86,7 +89,57 @@ class Mai2Constants:
|
||||
"maimai DX PRiSM",
|
||||
"maimai DX PRiSM PLUS"
|
||||
)
|
||||
KALEIDXSCOPE_KEY_CONDITION={
|
||||
1: [11009, 11008, 11100, 11097, 11098, 11099, 11163, 11162, 11161, 11228, 11229, 11231, 11463, 11464, 11465, 11538, 11539, 11541, 11620, 11622, 11623, 11737, 11738, 11164, 11230, 11466, 11540, 11621, 11739],
|
||||
# 青の扉 (Blue Gate): played 29 songs
|
||||
2: [11102, 11234, 11300, 11529, 11542, 11612],
|
||||
# 白の扉 (White Gate): set Frame as "Latent Kingdom" (459504), play 3 or 4 songs by the composer 大国奏音 in one credit (1 pc)
|
||||
3: [],
|
||||
# 紫の扉 (Purple Gate): need to enter redeem code 51090942171709440000
|
||||
4: [11023, 11106, 11221, 11222, 11300, 11374, 11458, 11523, 11619, 11663, 11746],
|
||||
# 青の扉 (Blue Gate): played 11 songs
|
||||
}
|
||||
MAI_VERSION_LUT = {
|
||||
"100": VER_MAIMAI,
|
||||
"110": VER_MAIMAI_PLUS,
|
||||
"120": VER_MAIMAI_GREEN,
|
||||
"130": VER_MAIMAI_GREEN_PLUS,
|
||||
"140": VER_MAIMAI_ORANGE,
|
||||
"150": VER_MAIMAI_ORANGE_PLUS,
|
||||
"160": VER_MAIMAI_PINK,
|
||||
"170": VER_MAIMAI_PINK_PLUS,
|
||||
"180": VER_MAIMAI_MURASAKI,
|
||||
"185": VER_MAIMAI_MURASAKI_PLUS,
|
||||
"190": VER_MAIMAI_MILK,
|
||||
"195": VER_MAIMAI_MILK_PLUS,
|
||||
"197": VER_MAIMAI_FINALE,
|
||||
}
|
||||
|
||||
MAI2_VERSION_LUT = {
|
||||
"100": VER_MAIMAI_DX,
|
||||
"105": VER_MAIMAI_DX_PLUS,
|
||||
"110": VER_MAIMAI_DX_SPLASH,
|
||||
"115": VER_MAIMAI_DX_SPLASH_PLUS,
|
||||
"120": VER_MAIMAI_DX_UNIVERSE,
|
||||
"125": VER_MAIMAI_DX_UNIVERSE_PLUS,
|
||||
"130": VER_MAIMAI_DX_FESTIVAL,
|
||||
"135": VER_MAIMAI_DX_FESTIVAL_PLUS,
|
||||
"140": VER_MAIMAI_DX_BUDDIES,
|
||||
"145": VER_MAIMAI_DX_BUDDIES_PLUS,
|
||||
"150": VER_MAIMAI_DX_PRISM
|
||||
}
|
||||

    @classmethod
    def game_ver_to_string(cls, ver: int):
        """ Takes an internal game version (ex 13 for maimai DX) and returns the full name of the version """
        return cls.VERSION_STRING[ver]

    @classmethod
    def int_ver_to_game_ver(cls, ver: int, is_dx = True) -> Optional[int]:
        """ Takes an int ver (ex 100 for 1.00) and returns an internal game version """
        if is_dx:
            return cls.MAI2_VERSION_LUT.get(str(floor_to_nearest_005(ver)), None)
        else:
            if ver >= 197:
                return cls.VER_MAIMAI_FINALE
            return cls.MAI_VERSION_LUT.get(str(floor_to_nearest_005(ver)), None)

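For illustration (same assumption that floor_to_nearest_005 floors to a multiple of 5), the split between the DX and classic lookup tables behaves like this:

    Mai2Constants.int_ver_to_game_ver(147)                # DX path, 147 -> 145 -> VER_MAIMAI_DX_BUDDIES_PLUS
    Mai2Constants.int_ver_to_game_ver(197, is_dx=False)   # classic path, >= 197 is always VER_MAIMAI_FINALE
    Mai2Constants.int_ver_to_game_ver(185, is_dx=False)   # classic path -> VER_MAIMAI_MURASAKI_PLUS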
@@ -32,7 +32,6 @@ from .festivalplus import Mai2FestivalPlus
|
||||
from .buddies import Mai2Buddies
|
||||
from .buddiesplus import Mai2BuddiesPlus
|
||||
from .prism import Mai2Prism
|
||||
from .prismplus import Mai2PrismPlus
|
||||
|
||||
|
||||
class Mai2Servlet(BaseServlet):
|
||||
@@ -311,7 +310,7 @@ class Mai2Servlet(BaseServlet):
|
||||
elif version >= 140 and version < 145: # BUDDiES
|
||||
internal_ver = Mai2Constants.VER_MAIMAI_DX_BUDDIES
|
||||
elif version >= 145 and version < 150: # BUDDiES PLUS
|
||||
internal_ver = Mai2Constants.VER_MAIMAI_DX_BUDDIES_PLUS,
|
||||
internal_ver = Mai2Constants.VER_MAIMAI_DX_BUDDIES_PLUS
|
||||
elif version >= 150 and version < 155:
|
||||
internal_ver = Mai2Constants.VER_MAIMAI_DX_PRISM
|
||||
elif version >= 155:
|
||||
@@ -337,7 +336,7 @@ class Mai2Servlet(BaseServlet):
|
||||
elif version >= 140 and version < 145: # BUDDiES
|
||||
internal_ver = Mai2Constants.VER_MAIMAI_DX_BUDDIES
|
||||
elif version >= 145 and version < 150: # BUDDiES PLUS
|
||||
internal_ver = Mai2Constants.VER_MAIMAI_DX_BUDDIES_PLUS,
|
||||
internal_ver = Mai2Constants.VER_MAIMAI_DX_BUDDIES_PLUS
|
||||
elif version >= 150 and version < 155:
|
||||
internal_ver = Mai2Constants.VER_MAIMAI_DX_PRISM
|
||||
elif version >= 155:
|
||||
|
||||
@@ -43,27 +43,52 @@ class Mai2Prism(Mai2BuddiesPlus):
|
||||
{"gateId": 2, "phaseId": 6},
|
||||
{"gateId": 3, "phaseId": 6},
|
||||
{"gateId": 4, "phaseId": 6},
|
||||
{"gateId": 5, "phaseId": 6},
|
||||
{"gateId": 6, "phaseId": 6}
|
||||
]
|
||||
}
|
||||
|
||||
async def handle_get_user_kaleidx_scope_api_request(self, data: Dict) -> Dict:
|
||||
# kaleidxscope keyget condition judgement
|
||||
# player may get key before GateFound
|
||||
for gate in range(1,7):
|
||||
condition_list = await self.data.static.get_kaleidxscope_condition(gate)
|
||||
if not condition_list:
|
||||
continue
|
||||
condition_satisfy = 0
|
||||
for condition in condition_list:
|
||||
score_list = await self.data.score.get_best_scores(user_id=data["userId"], song_id=condition[3])
|
||||
if score_list:
|
||||
condition_satisfy = condition_satisfy + 1
|
||||
if len(condition_list) == condition_satisfy:
|
||||
new_kaleidxscope = {'gateId': gate, "isKeyFound": True}
|
||||
await self.data.score.put_user_kaleidxscope(data["userId"], new_kaleidxscope)
|
||||
for gate in range(1,5):
|
||||
if gate == 1 or gate == 4:
|
||||
condition_satisfy = 0
|
||||
for condition in Mai2Constants.KALEIDXSCOPE_KEY_CONDITION[gate]:
|
||||
score_list = await self.data.score.get_best_scores(user_id=data["userId"], song_id=condition)
|
||||
if score_list:
|
||||
condition_satisfy = condition_satisfy + 1
|
||||
if len(Mai2Constants.KALEIDXSCOPE_KEY_CONDITION[gate]) == condition_satisfy:
|
||||
new_kaleidxscope = {'gateId': gate, "isKeyFound": True}
|
||||
await self.data.score.put_user_kaleidxscope(data["userId"], new_kaleidxscope)
|
||||
|
||||
elif gate == 2:
|
||||
user_profile = await self.data.profile.get_profile_detail(user_id=data["userId"], version=self.version)
|
||||
user_frame = user_profile["frameId"]
|
||||
if user_frame == 459504:
|
||||
playlogs = await self.data.score.get_playlogs(user_id=data["userId"], idx=0, limit=0)
|
||||
|
||||
playlog_dict = {}
|
||||
for playlog in playlogs:
|
||||
playlog_id = playlog["playlogId"]
|
||||
if playlog_id not in playlog_dict:
|
||||
playlog_dict[playlog_id] = []
|
||||
playlog_dict[playlog_id].append(playlog["musicId"])
|
||||
valid_playlogs = []
|
||||
allowed_music = set(Mai2Constants.KALEIDXSCOPE_KEY_CONDITION[2])
|
||||
for playlog_id, music_ids in playlog_dict.items():
|
||||
|
||||
if len(music_ids) != len(set(music_ids)):
|
||||
continue
|
||||
all_valid = True
|
||||
for mid in music_ids:
|
||||
if mid not in allowed_music:
|
||||
all_valid = False
|
||||
break
|
||||
if all_valid:
|
||||
valid_playlogs.append(playlog_id)
|
||||
|
||||
if valid_playlogs:
|
||||
new_kaleidxscope = {'gateId': 2, "isKeyFound": True}
|
||||
await self.data.score.put_user_kaleidxscope(data["userId"], new_kaleidxscope)
|
||||
|
||||
kaleidxscope = await self.data.score.get_user_kaleidxscope_list(data["userId"])
|
||||
|
||||
|
||||
@@ -1,20 +1,16 @@
|
||||
from decimal import Decimal
|
||||
import logging
|
||||
import os
|
||||
import re
|
||||
import xml.etree.ElementTree as ET
|
||||
from typing import Any, Dict, List, Optional
|
||||
from typing import Dict, List, Optional
|
||||
from Crypto.Cipher import AES
|
||||
import zlib
|
||||
import codecs
|
||||
|
||||
from core.config import CoreConfig
|
||||
from core.data import Data
|
||||
from read import BaseReader
|
||||
from titles.mai2.const import Mai2Constants
|
||||
from titles.mai2.database import Mai2Data
|
||||
|
||||
|
||||
class Mai2Reader(BaseReader):
|
||||
def __init__(
|
||||
self,
|
||||
@@ -46,14 +42,11 @@ class Mai2Reader(BaseReader):
|
||||
|
||||
for dir in data_dirs:
|
||||
self.logger.info(f"Read from {dir}")
|
||||
await self.get_events(f"{dir}/event")
|
||||
this_opt_id = await self.read_opt_info(dir)
|
||||
await self.get_events(f"{dir}/event", this_opt_id)
|
||||
await self.disable_events(f"{dir}/information", f"{dir}/scoreRanking")
|
||||
#await self.read_music(f"{dir}/music")
|
||||
await self.read_tickets(f"{dir}/ticket")
|
||||
|
||||
if self.version >= Mai2Constants.VER_MAIMAI_DX_PRISM:
|
||||
for dir in data_dirs:
|
||||
await self.read_kaleidxscope_condition(f"{dir}/kaleidxScopeKeyCondition")
|
||||
await self.read_music(f"{dir}/music", this_opt_id)
|
||||
await self.read_tickets(f"{dir}/ticket", this_opt_id)
|
||||
|
||||
else:
|
||||
if not os.path.exists(f"{self.bin_dir}/tables"):
|
||||
@@ -183,7 +176,7 @@ class Mai2Reader(BaseReader):
|
||||
self.logger.warning("Failed load table content, skipping")
|
||||
return
|
||||
|
||||
async def get_events(self, base_dir: str) -> None:
|
||||
async def get_events(self, base_dir: str, opt_id: int = None) -> None:
|
||||
self.logger.info(f"Reading events from {base_dir}...")
|
||||
|
||||
for root, dirs, files in os.walk(base_dir):
|
||||
@@ -197,7 +190,7 @@ class Mai2Reader(BaseReader):
|
||||
event_type = int(troot.find("infoType").text)
|
||||
|
||||
await self.data.static.put_game_event(
|
||||
self.version, event_type, id, name
|
||||
self.version, event_type, id, name, opt_id
|
||||
)
|
||||
self.logger.info(f"Added event {id}...")
|
||||
|
||||
@@ -259,7 +252,7 @@ class Mai2Reader(BaseReader):
|
||||
await self.data.static.toggle_game_event(self.version, event_id, toggle=False)
|
||||
self.logger.info(f"Disabled event {event_id}...")
|
||||
|
||||
async def read_music(self, base_dir: str) -> None:
|
||||
async def read_music(self, base_dir: str, opt_id: int = None) -> None:
|
||||
self.logger.info(f"Reading music from {base_dir}...")
|
||||
|
||||
for root, dirs, files in os.walk(base_dir):
|
||||
@@ -300,13 +293,14 @@ class Mai2Reader(BaseReader):
|
||||
added_ver,
|
||||
diff_num,
|
||||
note_designer,
|
||||
opt_id
|
||||
)
|
||||
|
||||
self.logger.info(
|
||||
f"Added music id {song_id} chart {chart_id}"
|
||||
)
|
||||
|
||||
async def read_tickets(self, base_dir: str) -> None:
|
||||
async def read_tickets(self, base_dir: str, opt_id: int = None) -> None:
|
||||
self.logger.info(f"Reading tickets from {base_dir}...")
|
||||
|
||||
for root, dirs, files in os.walk(base_dir):
|
||||
@@ -321,7 +315,7 @@ class Mai2Reader(BaseReader):
|
||||
price = int(troot.find("creditNum").text)
|
||||
|
||||
await self.data.static.put_game_ticket(
|
||||
self.version, id, ticket_type, price, name
|
||||
self.version, id, ticket_type, price, name, opt_id
|
||||
)
|
||||
self.logger.info(f"Added ticket {id}...")
|
||||
|
||||
@@ -346,30 +340,50 @@ class Mai2Reader(BaseReader):
|
||||
return
|
||||
# TODO
|
||||
|
||||
async def read_opt_info(self, directory: str) -> Optional[int]:
|
||||
datacfg_file = os.path.join(directory, "DataConfig.xml")
|
||||
if not os.path.exists(datacfg_file):
|
||||
self.logger.warning(f"{datacfg_file} does not contain DataConfig.xml, opt info will not be read")
|
||||
return None
|
||||
|
||||
with open(datacfg_file, encoding="utf-8") as f:
|
||||
troot = ET.fromstring(f.read())
|
||||
|
||||
if troot.find("version") is None:
|
||||
self.logger.warning(f"{directory}/DataConfig.xml contains no Version section, opt info will not be read")
|
||||
return None
|
||||
|
||||
ver_maj = troot.find("version/major")
|
||||
ver_min = troot.find("version/minor")
|
||||
ver_rel = troot.find("version/release")
|
||||
cm_maj = troot.find("cardMakerVersion/major")
|
||||
cm_min = troot.find("cardMakerVersion/minor")
|
||||
cm_rel = troot.find("cardMakerVersion/release")
|
||||
|
||||
if ver_maj is None: # Probably not worth checking that the other sections exist
|
||||
self.logger.warning(f"{datacfg_file} contains no major item in the Version section, opt info will not be read")
|
||||
return None
|
||||
|
||||
async def read_kaleidxscope_condition(self, base_dir: str) -> None :
|
||||
self.logger.info(f"Reading KaleidxScope Key Conditions from {base_dir}...")
|
||||
if ver_min is None: # Probably not worth checking that the other sections exist
|
||||
self.logger.warning(f"{datacfg_file} contains no minor item in the Version section, opt info will not be read")
|
||||
return None
|
||||
|
||||
for root, dirs, files in os.walk(base_dir):
|
||||
for dir in dirs:
|
||||
if os.path.exists(f"{root}/{dir}/KaleidxScopeKeyCondition.xml"):
|
||||
with open(f"{root}/{dir}/KaleidxScopeKeyCondition.xml", encoding="utf-8") as f:
|
||||
troot = ET.fromstring(f.read())
|
||||
if ver_rel is None: # Probably not worth checking that the other sections exist
|
||||
self.logger.warning(f"{datacfg_file} contains no release item in the Version section, opt info will not be read")
|
||||
return None
|
||||
|
||||
opt_folder = os.path.basename(os.path.normpath(directory))
|
||||
opt_id = await self.data.static.get_opt_by_version_folder(self.version, opt_folder)
|
||||
|
||||
if not opt_id:
|
||||
opt_id = await self.data.static.put_opt(self.version, opt_folder, int(ver_rel.text), int(cm_rel.text) if cm_rel is not None else None)
|
||||
if not opt_id:
|
||||
self.logger.error(f"Failed to put opt folder info for {opt_folder}")
|
||||
return None
|
||||
else:
|
||||
opt_id = opt_id['id']
|
||||
|
||||
condition_id = int(troot.find("name").find("id").text)
|
||||
condition_name = troot.find("name").find("str").text
|
||||
|
||||
music_list = troot.find("musicIds").find("list")
|
||||
for music in music_list.findall("StringID"):
|
||||
music_id = int(music.find("id").text)
|
||||
music_name = music.find("str").text
|
||||
|
||||
await self.data.static.put_kaleidxscope_condition(
|
||||
condition_id,
|
||||
condition_name,
|
||||
music_id,
|
||||
music_name
|
||||
)
|
||||
self.logger.info(
|
||||
f"Add music {music_id} for condition {condition_id}"
|
||||
)
|
||||
self.logger.info(
|
||||
f"Opt folder {opt_folder} (Database ID {opt_id}) contains v{ver_maj.text}.{ver_min.text}.{ver_rel.text} (cm v{cm_maj.text if cm_maj is not None else 'None'}.{cm_min.text if cm_min is not None else 'None'}.{cm_rel.text if cm_rel is not None else 'None'})"
|
||||
)
|
||||
return opt_id
|
||||
|
||||
@@ -728,10 +728,11 @@ class Mai2ItemData(BaseData):
        # Do an anti-join with the mai2_item_item table to exclude any
        # items the users have already owned.
        if exclude_owned:
            sql = sql.join(
            sql = sql.outerjoin(
                item,
                (present.c.itemKind == item.c.itemKind)
                & (present.c.itemId == item.c.itemId)
                & (item.c.user == user_id)
            )
            condition &= (item.c.itemKind.is_(None) & item.c.itemId.is_(None))

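The join-to-outerjoin change is what makes the anti-join work: a LEFT OUTER JOIN keeps present rows that have no matching mai2_item_item row, and the IS NULL condition then selects exactly those unmatched rows, i.e. presents whose item the user does not own yet. A generic sketch of the pattern with SQLAlchemy Core (illustrative, reusing the present/item tables and user_id from the hunk above):

    stmt = (
        select(present)
        .outerjoin(
            item,
            (present.c.itemKind == item.c.itemKind)
            & (present.c.itemId == item.c.itemId)
            & (item.c.user == user_id)
        )
        .where(item.c.itemKind.is_(None) & item.c.itemId.is_(None))
    )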
@@ -2,13 +2,28 @@ from core.data.schema.base import BaseData, metadata

from typing import Optional, Dict, List
from sqlalchemy import Table, Column, UniqueConstraint, PrimaryKeyConstraint, and_
from sqlalchemy.types import Integer, String, TIMESTAMP, Boolean, JSON, Float
from sqlalchemy.types import Integer, String, TIMESTAMP, Boolean, BIGINT, Float, INTEGER, BOOLEAN, VARCHAR
from sqlalchemy.schema import ForeignKey
from sqlalchemy.sql import func, select
from sqlalchemy.engine import Row
from sqlalchemy.dialects.mysql import insert
from sqlalchemy.sql.functions import coalesce
from datetime import datetime

opts = Table(
    "mai2_static_opt",
    metadata,
    Column("id", BIGINT, primary_key=True, nullable=False),
    Column("version", INTEGER, nullable=False),
    Column("name", VARCHAR(4), nullable=False), # Axxx
    Column("sequence", INTEGER, nullable=False), # release in DataConfig.xml
    Column("cmReleaseVer", INTEGER, nullable=False),
    Column("whenRead", TIMESTAMP, nullable=False, server_default=func.now()),
    Column("isEnable", BOOLEAN, nullable=False, server_default="1"),
    UniqueConstraint("version", "name", name="mai2_static_opt_uk"),
    mysql_charset="utf8mb4",
)

event = Table(
"mai2_static_event",
metadata,
@@ -19,6 +34,7 @@ event = Table(
Column("name", String(255)),
Column("startDate", TIMESTAMP, server_default=func.now()),
Column("enabled", Boolean, server_default="1"),
Column("opt", ForeignKey("mai2_static_opt.id", ondelete="SET NULL", onupdate="cascade")),
UniqueConstraint("version", "eventId", "type", name="mai2_static_event_uk"),
mysql_charset="utf8mb4",
)
@@ -37,6 +53,7 @@ music = Table(
Column("addedVersion", String(255)),
Column("difficulty", Float),
Column("noteDesigner", String(255)),
Column("opt", ForeignKey("mai2_static_opt.id", ondelete="SET NULL", onupdate="cascade")),
UniqueConstraint("songId", "chartId", "version", name="mai2_static_music_uk"),
mysql_charset="utf8mb4",
)
@@ -51,6 +68,7 @@ ticket = Table(
Column("name", String(255)),
Column("price", Integer, server_default="1"),
Column("enabled", Boolean, server_default="1"),
Column("opt", ForeignKey("mai2_static_opt.id", ondelete="SET NULL", onupdate="cascade")),
UniqueConstraint("version", "ticketId", name="mai2_static_ticket_uk"),
mysql_charset="utf8mb4",
)
@@ -67,34 +85,25 @@ cards = Table(
Column("noticeStartDate", TIMESTAMP, server_default="2018-01-01 00:00:00.0"),
Column("noticeEndDate", TIMESTAMP, server_default="2038-01-01 00:00:00.0"),
Column("enabled", Boolean, server_default="1"),
Column("opt", ForeignKey("cm_static_opts.id", ondelete="SET NULL", onupdate="cascade")),
UniqueConstraint("version", "cardId", "cardName", name="mai2_static_cards_uk"),
mysql_charset="utf8mb4",
)

kaleidxscope_condition = Table(
"mai2_static_kaleidxscope_condition",
metadata,
Column("id", Integer, primary_key=True, nullable=False),
Column("conditionId", Integer),
Column("conditionName", String(255)),
Column("songId", Integer),
Column("songName", String(255)),
UniqueConstraint("conditionId", "conditionName", "songId", "songName", name="mai2_static_kaleidxscope_uk"),
mysql_charset="utf8mb4",
)

class Mai2StaticData(BaseData):
async def put_game_event(
self, version: int, type: int, event_id: int, name: str
self, version: int, type: int, event_id: int, name: str, opt_id: int = None
) -> Optional[int]:
sql = insert(event).values(
version=version,
type=type,
eventId=event_id,
name=name,
opt=coalesce(event.c.opt, opt_id)
)

conflict = sql.on_duplicate_key_update(eventId=event_id)
conflict = sql.on_duplicate_key_update(eventId=event_id, opt=coalesce(event.c.opt, opt_id))

result = await self.execute(conflict)
if result is None:
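
The opt=coalesce(<table>.c.opt, opt_id) expression used throughout these upserts is a "first writer wins" idiom: once a row has a non-NULL opt, later re-reads of the same data never overwrite it, and opt_id only fills in a NULL. A standalone sketch of the idiom against a hypothetical table (not the project's schema):

# "First writer wins" upsert sketch, mirroring the MySQL ON DUPLICATE KEY UPDATE form above.
from sqlalchemy import Column, Integer, MetaData, Table
from sqlalchemy.dialects.mysql import insert
from sqlalchemy.sql.functions import coalesce

metadata = MetaData()
demo = Table(
    "demo",
    metadata,
    Column("id", Integer, primary_key=True),
    Column("opt", Integer, nullable=True),
)

opt_id = 42
stmt = insert(demo).values(id=1, opt=coalesce(demo.c.opt, opt_id))
stmt = stmt.on_duplicate_key_update(opt=coalesce(demo.c.opt, opt_id))
# Renders roughly as:
#   INSERT INTO demo (id, opt) VALUES (1, COALESCE(demo.opt, 42))
#   ON DUPLICATE KEY UPDATE opt = COALESCE(demo.opt, 42)
# so an opt recorded by an earlier read is preserved on later conflicts.
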
@@ -147,6 +156,7 @@ class Mai2StaticData(BaseData):
added_version: str,
difficulty: float,
note_designer: str,
opt_id: int = None
) -> None:
sql = insert(music).values(
version=version,
@@ -159,6 +169,7 @@ class Mai2StaticData(BaseData):
addedVersion=added_version,
difficulty=difficulty,
noteDesigner=note_designer,
opt=coalesce(music.c.opt, opt_id)
)

conflict = sql.on_duplicate_key_update(
@@ -169,6 +180,7 @@ class Mai2StaticData(BaseData):
addedVersion=added_version,
difficulty=difficulty,
noteDesigner=note_designer,
opt=coalesce(music.c.opt, opt_id)
)

result = await self.execute(conflict)
@@ -184,6 +196,7 @@ class Mai2StaticData(BaseData):
ticket_type: int,
ticket_price: int,
name: str,
opt_id: int = None
) -> Optional[int]:
sql = insert(ticket).values(
version=version,
@@ -191,11 +204,10 @@ class Mai2StaticData(BaseData):
kind=ticket_type,
price=ticket_price,
name=name,
opt=coalesce(ticket.c.opt, opt_id)
)

conflict = sql.on_duplicate_key_update(price=ticket_price)

conflict = sql.on_duplicate_key_update(price=ticket_price)
conflict = sql.on_duplicate_key_update(price=ticket_price, opt=coalesce(ticket.c.opt, opt_id))

result = await self.execute(conflict)
if result is None:
@@ -240,12 +252,12 @@ class Mai2StaticData(BaseData):
return None
return result.fetchone()

async def put_card(self, version: int, card_id: int, card_name: str, **card_data) -> int:
async def put_card(self, version: int, card_id: int, card_name: str, opt_id: int = None, **card_data) -> int:
sql = insert(cards).values(
version=version, cardId=card_id, cardName=card_name, **card_data
version=version, cardId=card_id, cardName=card_name, opt=coalesce(cards.c.opt, opt_id), **card_data
)

conflict = sql.on_duplicate_key_update(**card_data)
conflict = sql.on_duplicate_key_update(opt=coalesce(cards.c.opt, opt_id), **card_data)

result = await self.execute(conflict)
if result is None:
@@ -276,34 +288,84 @@ class Mai2StaticData(BaseData):
if not result:
self.logger.error(f"Failed to update event {table_id} - {is_enable} {start_date}")

# new in prism
async def put_kaleidxscope_condition(
self,
condition_id: int,
condition_name: str,
music_id: int,
music_name: str
) -> Optional[int]:
sql = insert(kaleidxscope_condition).values(
conditionId = condition_id,
conditionName = condition_name,
songId = music_id,
songName = music_name,
)

conflict = sql.on_duplicate_key_update(conditionName=condition_name, songName=music_name)
async def put_opt(self, version: int, folder: str, sequence: int, cm_seq: int = None) -> Optional[int]:
sql = insert(opts).values(version=version, name=folder, sequence=sequence, cmReleaseVer=cm_seq)

conflict = sql.on_duplicate_key_update(sequence=sequence, whenRead=datetime.now())

result = await self.execute(conflict)
if result is None:
self.logger.warning(
f"put_kaleidxscope_condition: Failed to insert kaleidxScope Key Condition! conditionID {condition_id} songId {music_id}"
)
self.logger.warning(f"Failed to insert opt! version {version} folder {folder} sequence {sequence}")
return None
return result.lastrowid

async def get_kaleidxscope_condition(self, condition_id: int) -> None:
sql = kaleidxscope_condition.select(kaleidxscope_condition.c.conditionId == condition_id)
result = await self.execute(sql)
async def get_opt_by_version_folder(self, version: int, folder: str) -> Optional[Row]:
result = await self.execute(opts.select(and_(
opts.c.version == version,
opts.c.name == folder,
)))

if result is None:
return None
return result.fetchall()
return result.fetchone()

async def get_opt_by_version_sequence(self, version: int, sequence: int) -> Optional[Row]:
result = await self.execute(opts.select(and_(
opts.c.version == version,
opts.c.sequence == sequence,
)))

if result is None:
return None
return result.fetchone()

async def get_opts_by_version(self, version: int) -> Optional[List[Row]]:
result = await self.execute(opts.select(opts.c.version == version))

if result is None:
return None
return result.fetchall()

async def get_opts_enabled_by_version(self, version: int) -> Optional[List[Row]]:
result = await self.execute(opts.select(and_(
opts.c.version == version,
opts.c.isEnable == True,
)))

if result is None:
return None
return result.fetchall()

async def get_latest_enabled_opt_by_version(self, version: int) -> Optional[Row]:
result = await self.execute(
opts.select(and_(
opts.c.version == version,
opts.c.isEnable == True,
)).order_by(opts.c.sequence.desc())
)

if result is None:
return None
return result.fetchone()

async def get_opts(self) -> Optional[List[Row]]:
result = await self.execute(opts.select())

if result is None:
return None
return result.fetchall()

async def set_opt_enabled(self, opt_id: int, enabled: bool) -> bool:
result = await self.execute(opts.update(opts.c.id == opt_id).values(isEnable=enabled))

if result is None:
self.logger.error(f"Failed to set opt enabled status to {enabled} for opt {opt_id}")
return False
return True

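With the new opt foreign keys in place, the origin of any static row can be traced back to the opt folder that introduced it. An illustrative query against the mai2 tables defined above (a sketch only; the project may expose this through a dedicated helper instead):

# Illustrative: which opt folder (A000, A001, ...) each enabled chart came from.
from sqlalchemy import select

stmt = (
    select(music.c.songId, music.c.chartId, opts.c.name, opts.c.sequence)
    .join(opts, music.c.opt == opts.c.id)
    .where(opts.c.isEnable == True)
    .order_by(opts.c.sequence.desc())
)
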
@@ -1,6 +1,6 @@
from typing import Final, Dict
from typing import Optional
from enum import Enum

from core.utils import floor_to_nearest_005

class OngekiConstants:
GAME_CODE = "SDDT"
@@ -106,6 +106,24 @@ class OngekiConstants:
"O.N.G.E.K.I. bright MEMORY Act.3",
)

VERSION_LUT = {
"100": VER_ONGEKI,
"105": VER_ONGEKI_PLUS,
"110": VER_ONGEKI_SUMMER,
"115": VER_ONGEKI_SUMMER_PLUS,
"120": VER_ONGEKI_RED,
"125": VER_ONGEKI_RED_PLUS,
"130": VER_ONGEKI_BRIGHT,
"135": VER_ONGEKI_BRIGHT_MEMORY,
"140": VER_ONGEKI_BRIGHT_MEMORY,
"145": VER_ONGEKI_BRIGHT_MEMORY_ACT3,
}

@classmethod
def game_ver_to_string(cls, ver: int):
return cls.VERSION_NAMES[ver]

@classmethod
def int_ver_to_game_ver(cls, ver: int) -> Optional[int]:
""" Takes an int ver (ex 100 for 1.00) and returns an internal game version """
return cls.VERSION_LUT.get(str(floor_to_nearest_005(ver)), None)

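For illustration, a client on game version 1.42 maps to bright MEMORY: 1.42 becomes the int ver 142, floor_to_nearest_005 (assumed here to floor to the nearest multiple of 5) turns it into 140, and the LUT lookup resolves that key. A hedged usage sketch:

# Hypothetical usage; assumes floor_to_nearest_005(142) == 140.
int_ver = 142  # e.g. derived from a reported "1.42.xx" version string
game_ver = OngekiConstants.int_ver_to_game_ver(int_ver)
assert game_ver == OngekiConstants.VER_ONGEKI_BRIGHT_MEMORY
# Unknown or future versions fall through to None rather than raising.
assert OngekiConstants.int_ver_to_game_ver(999) is None
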
@@ -58,12 +58,13 @@ class OngekiReader(BaseReader):
data_dirs += self.get_data_directories(self.opt_dir)

for dir in data_dirs:
await self.read_events(f"{dir}/event")
await self.read_music(f"{dir}/music")
await self.read_card(f"{dir}/card")
await self.read_reward(f"{dir}/reward")
this_opt_id = await self.read_opt_info(dir)
await self.read_events(f"{dir}/event", this_opt_id)
await self.read_music(f"{dir}/music", this_opt_id)
await self.read_card(f"{dir}/card", this_opt_id)
await self.read_reward(f"{dir}/reward", this_opt_id)

async def read_card(self, base_dir: str) -> None:
async def read_card(self, base_dir: str, opt_id: int = None) -> None:
self.logger.info(f"Reading cards from {base_dir}...")

for root, dirs, files in os.walk(base_dir):
@@ -73,17 +74,6 @@ class OngekiReader(BaseReader):
troot = ET.fromstring(f.read())

card_id = int(troot.find("Name").find("id").text)

# skip already existing cards
if (
await self.data.static.get_card(
OngekiConstants.VER_ONGEKI_BRIGHT_MEMORY, card_id
)
is not None
):
self.logger.info(f"Card {card_id} already added, skipping")
continue

name = troot.find("Name").find("str").text
chara_id = int(troot.find("CharaID").find("id").text)
nick_name = troot.find("NickName").text
@@ -108,6 +98,7 @@ class OngekiReader(BaseReader):
await self.data.static.put_card(
self.parse_version(troot),
card_id,
opt_id,
name=name,
charaId=chara_id,
nickName=nick_name,
@@ -122,7 +113,7 @@ class OngekiReader(BaseReader):
)
self.logger.info(f"Added card {card_id}")

async def read_events(self, base_dir: str) -> None:
async def read_events(self, base_dir: str, opt_id: int = None) -> None:
self.logger.info(f"Reading events from {base_dir}...")

for root, dirs, files in os.walk(base_dir):
@@ -140,10 +131,10 @@ class OngekiReader(BaseReader):
if troot.find("EventType").text == "MissionEvent":
name = (troot.find("Event").find("MissionName").find("str").text)

await self.data.static.put_event(self.version, id, event_type, name)
await self.data.static.put_event(self.version, id, event_type, name, opt_id)
self.logger.info(f"Added event {id}")

async def read_music(self, base_dir: str) -> None:
async def read_music(self, base_dir: str, opt_id: int = None) -> None:
self.logger.info(f"Reading music from {base_dir}...")

for root, dirs, files in os.walk(base_dir):
@@ -178,11 +169,11 @@ class OngekiReader(BaseReader):
)

await self.data.static.put_chart(
version, song_id, chart_id, title, artist, genre, level
version, song_id, chart_id, title, artist, genre, level, opt_id
)
self.logger.info(f"Added song {song_id} chart {chart_id}")

async def read_reward(self, base_dir: str) -> None:
async def read_reward(self, base_dir: str, opt_id: int = None) -> None:
self.logger.info(f"Reading rewards from {base_dir}...")

for root, dirs, files in os.walk(base_dir):
@@ -204,5 +195,53 @@ class OngekiReader(BaseReader):
itemKind = OngekiConstants.REWARD_TYPES[troot.find("ItemType").text].value
itemId = troot.find("RewardItem").find("ItemName").find("id").text

await self.data.static.put_reward(self.version, rewardId, rewardname, itemKind, itemId)
await self.data.static.put_reward(self.version, rewardId, rewardname, itemKind, itemId, opt_id)
self.logger.info(f"Added reward {rewardId}")

async def read_opt_info(self, directory: str) -> Optional[int]:
datacfg_file = os.path.join(directory, "DataConfig.xml")
if not os.path.exists(datacfg_file):
self.logger.warning(f"{directory} does not contain DataConfig.xml, opt info will not be read")
return None

with open(datacfg_file, encoding="utf-8") as f:
troot = ET.fromstring(f.read())

if troot.find("version") is None:
self.logger.warning(f"{directory}/DataConfig.xml contains no Version section, opt info will not be read")
return None

ver_maj = troot.find("version/major")
ver_min = troot.find("version/minor")
ver_rel = troot.find("version/release")
cm_maj = troot.find("cardMakerVersion/major")
cm_min = troot.find("cardMakerVersion/minor")
cm_rel = troot.find("cardMakerVersion/release")

if ver_maj is None: # Probably not worth checking that the other sections exist
self.logger.warning(f"{datacfg_file} contains no major item in the Version section, opt info will not be read")
return None

if ver_min is None: # Probably not worth checking that the other sections exist
self.logger.warning(f"{datacfg_file} contains no minor item in the Version section, opt info will not be read")
return None

if ver_rel is None: # Probably not worth checking that the other sections exist
self.logger.warning(f"{datacfg_file} contains no release item in the Version section, opt info will not be read")
return None

opt_folder = os.path.basename(os.path.normpath(directory))
opt_id = await self.data.static.get_opt_by_version_folder(self.version, opt_folder)

if not opt_id:
opt_id = await self.data.static.put_opt(self.version, opt_folder, int(ver_rel.text), int(cm_rel.text) if cm_rel is not None else None)
if not opt_id:
self.logger.error(f"Failed to put opt folder info for {opt_folder}")
return None
else:
opt_id = opt_id['id']

self.logger.info(
f"Opt folder {opt_folder} (Database ID {opt_id}) contains v{ver_maj.text}.{ver_min.text}.{ver_rel.text} (cm v{cm_maj.text if cm_maj is not None else 'None'}.{cm_min.text if cm_min is not None else 'None'}.{cm_rel.text if cm_rel is not None else 'None'})"
)
return opt_id

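For reference, read_opt_info only needs the version and cardMakerVersion blocks of an opt's DataConfig.xml. A minimal sketch of the layout the parser above expects (element names inferred from the find() calls; real files carry more fields):

# Illustrative parse of a stripped-down DataConfig.xml.
import xml.etree.ElementTree as ET

sample = """
<DataConfig>
  <version><major>1</major><minor>45</minor><release>12</release></version>
  <cardMakerVersion><major>1</major><minor>35</minor><release>7</release></cardMakerVersion>
</DataConfig>
"""
troot = ET.fromstring(sample)
print(troot.find("version/release").text)          # "12" -> stored as the opt's sequence
print(troot.find("cardMakerVersion/release").text) # "7"  -> stored as cmReleaseVer
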
@@ -1,14 +1,47 @@
from typing import Dict, List, Optional
from sqlalchemy import Table, Column, UniqueConstraint, PrimaryKeyConstraint, and_
from sqlalchemy.types import Integer, String, TIMESTAMP, Boolean, JSON, Float
from sqlalchemy.types import Integer, String, TIMESTAMP, Boolean, BIGINT, Float, INTEGER, VARCHAR, BOOLEAN
from sqlalchemy.schema import ForeignKey
from sqlalchemy.sql import func, select
from sqlalchemy.engine import Row
from sqlalchemy.dialects.mysql import insert
from sqlalchemy.sql.functions import coalesce
from datetime import datetime

from core.data.schema import BaseData, metadata
from core.data.schema.arcade import machine

opts = Table(
"ongeki_static_opt",
metadata,
Column("id", BIGINT, primary_key=True, nullable=False),
Column("version", INTEGER, nullable=False),
Column("name", VARCHAR(4), nullable=False), # Axxx
Column("sequence", INTEGER, nullable=False), # release in DataConfig.xml
Column("cmReleaseVer", INTEGER, nullable=False),
Column("whenRead", TIMESTAMP, nullable=False, server_default=func.now()),
Column("isEnable", BOOLEAN, nullable=False, server_default="1"),
UniqueConstraint("version", "name", name="ongeki_static_opt_uk"),
mysql_charset="utf8mb4",
)

cm_opts = Table(
"cm_static_opts",
metadata,
Column("id", BIGINT, primary_key=True, nullable=False),
Column("version", INTEGER, nullable=False),
Column("name", VARCHAR(4), nullable=False), # Axxx
Column("sequence", INTEGER), # Not all opts have a DataConfig.xml
Column("gekiVersion", INTEGER), # GEKI/DataConfig.xml
Column("gekiReleaseVer", INTEGER), # GEKI/DataConfig.xml
Column("maiVersion", INTEGER), # MAI/DataConfig.xml
Column("maiReleaseVer", INTEGER), # MAI/DataConfig.xml
Column("whenRead", TIMESTAMP, nullable=False, server_default=func.now()),
Column("isEnable", BOOLEAN, nullable=False, server_default="1"),
UniqueConstraint("version", "name", name="cm_static_opts_uk"),
mysql_charset="utf8mb4",
)

events = Table(
"ongeki_static_events",
metadata,

@@ -20,11 +53,11 @@ events = Table(
Column("startDate", TIMESTAMP, server_default=func.now()),
Column("endDate", TIMESTAMP, server_default=func.now()),
Column("enabled", Boolean, server_default="1"),
Column("opt", ForeignKey("ongeki_static_opt.id", ondelete="SET NULL", onupdate="cascade")),
UniqueConstraint("version", "eventId", "type", name="ongeki_static_events_uk"),
mysql_charset="utf8mb4",
)

music = Table(
"ongeki_static_music",
metadata,
@@ -36,6 +69,7 @@ music = Table(
Column("artist", String(255)),
Column("genre", String(255)),
Column("level", Float),
Column("opt", ForeignKey("ongeki_static_opt.id", ondelete="SET NULL", onupdate="cascade")),
UniqueConstraint("version", "songId", "chartId", name="ongeki_static_music_uk"),
mysql_charset="utf8mb4",
)
@@ -59,6 +93,7 @@ gachas = Table(
Column("noticeStartDate", TIMESTAMP, server_default="2018-01-01 00:00:00.0"),
Column("noticeEndDate", TIMESTAMP, server_default="2038-01-01 00:00:00.0"),
Column("convertEndDate", TIMESTAMP, server_default="2038-01-01 00:00:00.0"),
Column("opt", ForeignKey("cm_static_opts.id", ondelete="SET NULL", onupdate="cascade")),
UniqueConstraint("version", "gachaId", "gachaName", name="ongeki_static_gachas_uk"),
mysql_charset="utf8mb4",
)
@@ -94,6 +129,7 @@ cards = Table(
Column("skillId", Integer, nullable=False),
Column("choKaikaSkillId", Integer, nullable=False),
Column("cardNumber", String(255)),
Column("opt", ForeignKey("ongeki_static_opt.id", ondelete="SET NULL", onupdate="cascade")),
UniqueConstraint("version", "cardId", name="ongeki_static_cards_uk"),
mysql_charset="utf8mb4",
)
@@ -107,6 +143,7 @@ rewards = Table(
Column("rewardname", String(255), nullable=False),
Column("itemKind", Integer, nullable=False),
Column("itemId", Integer, nullable=False),
Column("opt", ForeignKey("ongeki_static_opt.id", ondelete="SET NULL", onupdate="cascade")),
UniqueConstraint("version", "rewardId", name="ongeki_static_rewards_uk"),
mysql_charset="utf8mb4",
)
@@ -176,10 +213,10 @@ game_point = Table(
)

class OngekiStaticData(BaseData):
async def put_card(self, version: int, card_id: int, **card_data) -> Optional[int]:
sql = insert(cards).values(version=version, cardId=card_id, **card_data)
async def put_card(self, version: int, card_id: int, opt_id: int = None, **card_data) -> Optional[int]:
sql = insert(cards).values(version=version, cardId=card_id, opt=coalesce(cards.c.opt, opt_id), **card_data)

conflict = sql.on_duplicate_key_update(**card_data)
conflict = sql.on_duplicate_key_update(opt=coalesce(cards.c.opt, opt_id), **card_data)

result = await self.execute(conflict)
if result is None:
@@ -306,7 +343,7 @@ class OngekiStaticData(BaseData):
return result.fetchall()

async def put_event(
self, version: int, event_id: int, event_type: int, event_name: str
self, version: int, event_id: int, event_type: int, event_name: str, opt_id: int = None
) -> Optional[int]:
sql = insert(events).values(
version=version,
@@ -314,10 +351,11 @@ class OngekiStaticData(BaseData):
type=event_type,
name=event_name,
endDate=f"2038-01-01 00:00:00",
opt=coalesce(events.c.opt, opt_id)
)

conflict = sql.on_duplicate_key_update(
name=event_name,
name=event_name, opt=coalesce(events.c.opt, opt_id)
)

result = await self.execute(conflict)
@@ -363,6 +401,7 @@ class OngekiStaticData(BaseData):
artist: str,
genre: str,
level: float,
opt_id: int = None
) -> Optional[int]:
sql = insert(music).values(
version=version,
@@ -372,6 +411,7 @@ class OngekiStaticData(BaseData):
artist=artist,
genre=genre,
level=level,
opt=coalesce(music.c.opt, opt_id)
)

conflict = sql.on_duplicate_key_update(
@@ -379,6 +419,7 @@ class OngekiStaticData(BaseData):
artist=artist,
genre=genre,
level=level,
opt=coalesce(music.c.opt, opt_id)
)

result = await self.execute(conflict)
@@ -413,17 +454,21 @@ class OngekiStaticData(BaseData):
return None
return result.fetchone()

async def put_reward(self, version: int, rewardId: int, rewardname: str, itemKind: int, itemId: int) -> Optional[int]:
async def put_reward(self, version: int, rewardId: int, rewardname: str, itemKind: int, itemId: int, opt_id: int = None) -> Optional[int]:
sql = insert(rewards).values(
version=version,
rewardId=rewardId,
rewardname=rewardname,
itemKind=itemKind,
itemId=itemId,
)
version=version,
rewardId=rewardId,
rewardname=rewardname,
itemKind=itemKind,
itemId=itemId,
opt=coalesce(rewards.c.opt, opt_id)
)

conflict = sql.on_duplicate_key_update(
rewardname=rewardname,
)
rewardname=rewardname,
opt=coalesce(rewards.c.opt, opt_id)
)

result = await self.execute(conflict)
if result is None:
self.logger.warning(f"Failed to insert reward! reward_id: {rewardId}")
@@ -491,3 +536,121 @@ class OngekiStaticData(BaseData):
if result is None:
return None
return result.fetchall()

async def put_opt(self, version: int, folder: str, sequence: int, cm_seq: int = None) -> Optional[int]:
sql = insert(opts).values(version=version, name=folder, sequence=sequence, cmReleaseVer=cm_seq)

conflict = sql.on_duplicate_key_update(sequence=sequence, whenRead=datetime.now())

result = await self.execute(conflict)
if result is None:
self.logger.warning(f"Failed to insert opt! version {version} folder {folder} sequence {sequence}")
return None
return result.lastrowid

async def get_opt_by_version_folder(self, version: int, folder: str) -> Optional[Row]:
result = await self.execute(opts.select(and_(
opts.c.version == version,
opts.c.name == folder,
)))

if result is None:
return None
return result.fetchone()

async def get_opt_by_version_sequence(self, version: int, sequence: int) -> Optional[Row]:
result = await self.execute(opts.select(and_(
opts.c.version == version,
opts.c.sequence == sequence,
)))

if result is None:
return None
return result.fetchone()

async def get_opts_by_version(self, version: int) -> Optional[List[Row]]:
result = await self.execute(opts.select(opts.c.version == version))

if result is None:
return None
return result.fetchall()

async def get_opts_enabled_by_version(self, version: int) -> Optional[List[Row]]:
result = await self.execute(opts.select(and_(
opts.c.version == version,
opts.c.isEnable == True,
)))

if result is None:
return None
return result.fetchall()

async def get_latest_enabled_opt_by_version(self, version: int) -> Optional[Row]:
result = await self.execute(
opts.select(and_(
opts.c.version == version,
opts.c.isEnable == True,
)).order_by(opts.c.sequence.desc())
)

if result is None:
return None
return result.fetchone()

async def get_opts(self) -> Optional[List[Row]]:
result = await self.execute(opts.select())

if result is None:
return None
return result.fetchall()

async def set_opt_enabled(self, opt_id: int, enabled: bool) -> bool:
result = await self.execute(opts.update(opts.c.id == opt_id).values(isEnable=enabled))

if result is None:
self.logger.error(f"Failed to set opt enabled status to {enabled} for opt {opt_id}")
return False
return True

async def cm_put_opt(self, version: int, folder: str, sequence: int, geki_ver: int, geki_seq: int, mai_ver: int, mai_seq: int) -> Optional[int]:
sql = insert(cm_opts).values(
version=version,
name=folder,
sequence=sequence,
gekiVersion=geki_ver,
gekiReleaseVer=geki_seq,
maiVersion=mai_ver,
maiReleaseVer=mai_seq,
)

conflict = sql.on_duplicate_key_update(
sequence=sequence,
gekiVersion=geki_ver,
gekiReleaseVer=geki_seq,
maiVersion=mai_ver,
maiReleaseVer=mai_seq,
whenRead=datetime.now()
)

result = await self.execute(conflict)
if result is None:
self.logger.warning(f"Failed to insert opt! version {version} folder {folder} sequence {sequence}")
return None
return result.lastrowid

async def cm_get_opt_by_version_folder(self, version: int, folder: str) -> Optional[Row]:
result = await self.execute(cm_opts.select(and_(
cm_opts.c.version == version,
cm_opts.c.name == folder,
)))

if result is None:
return None
return result.fetchone()
