mirror of
https://gitea.tendokyu.moe/Hay1tsme/artemis.git
synced 2025-12-14 08:06:25 +08:00
Merge branch 'refs/heads/develop' into prism_plus_support
# Conflicts:
#	core/data/alembic/versions/16f34bf7b968_mai2_kaleidx_scope_support.py
#	core/data/alembic/versions/5cf98cfe52ad_mai2_prism_support.py
#	core/data/alembic/versions/5d7b38996e67_mai2_prism_support.py
#	core/data/alembic/versions/bdf710616ba4_mai2_add_prism_playlog_support.py
#	titles/mai2/index.py
#	titles/mai2/prism.py
#	titles/mai2/read.py
#	titles/mai2/schema/static.py
165
core/allnet.py
@@ -586,39 +586,19 @@ class BillingServlet:
|
||||
rsa = RSA.import_key(open(self.config.billing.signing_key, "rb").read())
|
||||
signer = PKCS1_v1_5.new(rsa)
|
||||
digest = SHA.new()
|
||||
traces: List[TraceData] = []
|
||||
try:
|
||||
req = BillingInfo(req_dict[0])
|
||||
except KeyError as e:
|
||||
self.logger.error(f"Billing request failed to parse: {e}")
|
||||
return PlainTextResponse("result=5&linelimit=&message=field is missing or formatting is incorrect\r\n")
|
||||
|
||||
for x in range(1, len(req_dict)):
|
||||
if not req_dict[x]:
|
||||
continue
|
||||
|
||||
try:
|
||||
tmp = TraceData(req_dict[x])
|
||||
if tmp.trace_type == TraceDataType.CHARGE:
|
||||
tmp = TraceDataCharge(req_dict[x])
|
||||
elif tmp.trace_type == TraceDataType.EVENT:
|
||||
tmp = TraceDataEvent(req_dict[x])
|
||||
elif tmp.trace_type == TraceDataType.CREDIT:
|
||||
tmp = TraceDataCredit(req_dict[x])
|
||||
|
||||
traces.append(tmp)
|
||||
|
||||
except KeyError as e:
|
||||
self.logger.warning(f"Tracelog failed to parse: {e}")
|
||||
|
||||
kc_serial_bytes = req.keychipid.encode()
|
||||
|
||||
|
||||
machine = await self.data.arcade.get_machine(req.keychipid)
|
||||
if machine is None and not self.config.server.allow_unregistered_serials:
|
||||
msg = f"Unrecognised serial {req.keychipid} attempted billing checkin from {request_ip} for {req.gameid} v{req.gamever}."
|
||||
await self.data.base.log_event(
|
||||
"allnet", "BILLING_CHECKIN_NG_SERIAL", logging.WARN, msg, ip=request_ip, game=req.gameid, version=req.gamever
|
||||
"allnet", "BILLING_CHECKIN_NG_SERIAL", logging.WARN, msg, ip=request_ip, game=req.gameid, version=str(req.gamever)
|
||||
)
|
||||
self.logger.warning(msg)
|
||||
|
||||
@@ -629,18 +609,101 @@ class BillingServlet:
|
||||
"billing_type": req.billingtype.name,
|
||||
"nearfull": req.nearfull,
|
||||
"playlimit": req.playlimit,
|
||||
"messages": []
|
||||
}
|
||||
playhist = "000000/0:000000/0:000000/0"
|
||||
|
||||
if machine is not None:
|
||||
await self.data.base.log_event("billing", "BILLING_CHECKIN_OK", logging.INFO, "", log_details, None, machine['arcade'], machine['id'], request_ip, req.gameid, req.gamever)
|
||||
if self.config.allnet.save_billing:
|
||||
lastcredit = await self.data.arcade.billing_get_last_playcount(machine['id'], req.gameid)
|
||||
if lastcredit is not None:
|
||||
last_playct = lastcredit['playct']
|
||||
else:
|
||||
last_playct = 0
|
||||
|
||||
# Technically, if a cab resets its playcount and then does more plays than the previous
# playcount before a billing checkin occurs, we will lose plays equal to the current playcount.
|
||||
if req.playcnt < last_playct: await self.data.arcade.billing_add_playcount(machine['id'], req.gameid, req.playcnt)
|
||||
elif req.playcnt == last_playct: pass # No plays since last checkin, skip update
|
||||
else: await self.data.arcade.billing_add_playcount(machine['id'], req.gameid, req.playcnt - last_playct)
|
||||
|
||||
plays = await self.data.arcade.billing_get_playcount_3mo(machine['id'], req.gameid)
|
||||
if plays is not None and len(plays) > 0:
|
||||
playhist = ""
|
||||
|
||||
for x in range(len(plays) - 1, -1, -1): playhist += f"{plays[x]['year']:04d}{plays[x]['month']:02d}/{plays[x]['playct']}:"
|
||||
playhist = playhist[:-1]
|
||||
|
||||
for x in range(1, len(req_dict)):
|
||||
if not req_dict[x]:
|
||||
continue
|
||||
|
||||
try:
|
||||
tmp = TraceData(req_dict[x])
|
||||
if tmp.trace_type == TraceDataType.CHARGE:
|
||||
tmp = TraceDataCharge(req_dict[x])
|
||||
if self.config.allnet.save_billing:
|
||||
await self.data.arcade.billing_add_charge(
|
||||
machine['id'],
|
||||
tmp.game_id,
|
||||
float(tmp.game_version),
|
||||
tmp.play_count,
|
||||
tmp.play_limit,
|
||||
tmp.product_code,
|
||||
tmp.product_count,
|
||||
tmp.func_type,
|
||||
tmp.player_number
|
||||
)
|
||||
|
||||
self.logger.info(
|
||||
f"Charge Trace from {req.keychipid}: {tmp.game_id} v{tmp.game_version} - player {tmp.player_number} got {tmp.product_count} of {tmp.product_code} func {tmp.func_type}"
|
||||
)
|
||||
|
||||
elif tmp.trace_type == TraceDataType.EVENT:
|
||||
tmp = TraceDataEvent(req_dict[x])
|
||||
log_details['messages'].append(tmp.message)
|
||||
self.logger.info(f"Event Trace from {req.keychipid}: {tmp.message}")
|
||||
|
||||
elif tmp.trace_type == TraceDataType.CREDIT:
|
||||
tmp = TraceDataCredit(req_dict[x])
|
||||
if self.config.allnet.save_billing:
|
||||
await self.data.arcade.billing_set_credit(
|
||||
machine['id'],
|
||||
req.gameid,
|
||||
tmp.chute_type.value,
|
||||
tmp.service_type.value,
|
||||
tmp.operation_type.value,
|
||||
tmp.coin_rate0,
|
||||
tmp.coin_rate1,
|
||||
tmp.bonus_addition,
|
||||
tmp.credit_rate,
|
||||
tmp.credit0,
|
||||
tmp.credit1,
|
||||
tmp.credit2,
|
||||
tmp.credit3,
|
||||
tmp.credit4,
|
||||
tmp.credit5,
|
||||
tmp.credit6,
|
||||
tmp.credit7
|
||||
)
|
||||
|
||||
self.logger.info(
|
||||
f"Credit Trace from {req.keychipid}: {tmp.operation_type} mode, {tmp.credit_rate} coins per credit, breakdown: {tmp.credit0} | {tmp.credit1} | {tmp.credit2} | {tmp.credit3} | {tmp.credit4} | {tmp.credit5} | {tmp.credit6} | {tmp.credit7} | "
|
||||
)
|
||||
|
||||
except KeyError as e:
|
||||
self.logger.warning(f"Tracelog failed to parse: {e}")
|
||||
|
||||
await self.data.base.log_event("billing", "BILLING_CHECKIN_OK", logging.INFO, "", log_details, None, machine['arcade'], machine['id'], request_ip, req.gameid, str(req.gamever))
|
||||
|
||||
self.logger.info(
|
||||
f"Billing checkin from {request_ip}: game {req.gameid} ver {req.gamever} keychip {req.keychipid} playcount "
|
||||
f"{req.playcnt} billing_type {req.billingtype.name} nearfull {req.nearfull} playlimit {req.playlimit}"
|
||||
)
|
||||
|
||||
else:
|
||||
log_details['serial'] = req.keychipid
|
||||
await self.data.base.log_event("billing", "BILLING_CHECKIN_OK_UNREG", logging.INFO, "", log_details, None, None, None, request_ip, req.gameid, req.gamever)
|
||||
await self.data.base.log_event("billing", "BILLING_CHECKIN_OK_UNREG", logging.INFO, "", log_details, None, None, None, request_ip, req.gameid, str(req.gamever))
|
||||
|
||||
self.logger.info(
|
||||
f"Unregistered Billing checkin from {request_ip}: game {req.gameid} ver {req.gamever} keychip {req.keychipid} playcount "
|
||||
@@ -649,15 +712,12 @@ class BillingServlet:
|
||||
|
||||
if req.traceleft > 0:
|
||||
self.logger.warning(f"{req.traceleft} unsent tracelogs")
|
||||
kc_playlimit = req.playlimit
|
||||
kc_nearfull = req.nearfull
|
||||
|
||||
while req.playcnt > req.playlimit:
|
||||
kc_playlimit += 1024
|
||||
kc_nearfull += 1024
|
||||
playlimit = req.playlimit
|
||||
while req.playcnt > playlimit:
|
||||
playlimit += 1024
|
||||
|
||||
playlimit = kc_playlimit
|
||||
nearfull = kc_nearfull + (req.billingtype.value * 0x00010000)
|
||||
nearfull = req.nearfull + (req.billingtype.value * 0x00010000)
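# Clarifying note (added for readability, not part of the original commit): billingtype.value
# is 0 or 1 (BillingType.B / BillingType.A), so multiplying by 0x00010000 shifts it into the
# upper half of the 32-bit value; BillingType.A adds 0x10000 while BillingType.B leaves
# nearfull unchanged.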
|
||||
|
||||
digest.update(playlimit.to_bytes(4, "little") + kc_serial_bytes)
|
||||
playlimit_sig = signer.sign(digest).hex()
|
||||
@@ -666,14 +726,12 @@ class BillingServlet:
|
||||
digest.update(nearfull.to_bytes(4, "little") + kc_serial_bytes)
|
||||
nearfull_sig = signer.sign(digest).hex()
|
||||
|
||||
# TODO: playhistory
|
||||
|
||||
resp = BillingResponse(playlimit, playlimit_sig, nearfull, nearfull_sig, req.requestno, req.protocolver)
|
||||
resp = BillingResponse(playlimit, playlimit_sig, nearfull, nearfull_sig, req.requestno, req.protocolver, playhist)
|
||||
|
||||
resp_str = urllib.parse.unquote(urllib.parse.urlencode(vars(resp))) + "\r\n"
|
||||
|
||||
self.logger.debug(f"response {vars(resp)}")
|
||||
if req.traceleft > 0:
|
||||
if req.traceleft > 0: # TODO: should probably move this up so we don't do a ton of work that doesn't get used
|
||||
self.logger.info(f"Requesting 20 more of {req.traceleft} unsent tracelogs")
|
||||
return PlainTextResponse("result=6&waittime=0&linelimit=20\r\n")
|
||||
|
||||
@@ -768,14 +826,27 @@ class BillingType(Enum):
|
||||
A = 1
|
||||
B = 0
|
||||
|
||||
class TraceDataCreditChuteType(Enum):
|
||||
COMMON = 0
|
||||
INDIVIDUAL = 1
|
||||
|
||||
class TraceDataCreditOperationType(Enum):
|
||||
COIN = 0
|
||||
FREEPLAY = 1
|
||||
|
||||
class float5:
|
||||
def __init__(self, n: str = "0") -> None:
|
||||
def __init__(self, n: str = "0"):
|
||||
nf = float(n)
|
||||
if nf > 999.9 or nf < 0:
|
||||
raise ValueError('float5 must be between 0.000 and 999.9 inclusive')
|
||||
|
||||
return nf
|
||||
self.val = nf
|
||||
|
||||
def __float__(self) -> float:
|
||||
return self.val
|
||||
|
||||
def __str__(self) -> str:
|
||||
return f"%.{2 - int(math.log10(self.val))+1}f" % self.val
|
||||
|
||||
@classmethod
|
||||
def to_str(cls, f: float):
|
||||
return f"%.{2 - int(math.log10(f))+1}f" % f
|
||||
@@ -786,13 +857,13 @@ class BillingInfo:
|
||||
self.keychipid = str(data.get("keychipid", None))
|
||||
self.functype = int(data.get("functype", None))
|
||||
self.gameid = str(data.get("gameid", None))
|
||||
self.gamever = float(data.get("gamever", None))
|
||||
self.gamever = float5(data.get("gamever", None))
|
||||
self.boardid = str(data.get("boardid", None))
|
||||
self.tenpoip = str(data.get("tenpoip", None))
|
||||
self.libalibver = float(data.get("libalibver", None))
|
||||
self.libalibver = float5(data.get("libalibver", None))
|
||||
self.datamax = int(data.get("datamax", None))
|
||||
self.billingtype = BillingType(int(data.get("billingtype", None)))
|
||||
self.protocolver = float(data.get("protocolver", None))
|
||||
self.protocolver = float5(data.get("protocolver", None))
|
||||
self.operatingfix = bool(data.get("operatingfix", None))
|
||||
self.traceleft = int(data.get("traceleft", None))
|
||||
self.requestno = int(data.get("requestno", None))
|
||||
@@ -825,7 +896,7 @@ class TraceData:
|
||||
self.date = datetime.strptime(data.get("dt", None), BILLING_DT_FORMAT)
|
||||
|
||||
self.keychip = str(data.get("kn", None))
|
||||
self.lib_ver = float(data.get("alib", 0))
|
||||
self.lib_ver = float5(data.get("alib", 0))
|
||||
except Exception as e:
|
||||
raise KeyError(e)
|
||||
|
||||
@@ -834,7 +905,7 @@ class TraceDataCharge(TraceData):
|
||||
super().__init__(data)
|
||||
try:
|
||||
self.game_id = str(data.get("gi", None)) # these seem optional...?
|
||||
self.game_version = float(data.get("gv", 0))
|
||||
self.game_version = float5(data.get("gv", 0))
|
||||
self.board_serial = str(data.get("bn", None))
|
||||
self.shop_ip = str(data.get("ti", None))
|
||||
self.play_count = int(data.get("pc", None))
|
||||
@@ -858,9 +929,9 @@ class TraceDataCredit(TraceData):
|
||||
def __init__(self, data: Dict) -> None:
|
||||
super().__init__(data)
|
||||
try:
|
||||
self.chute_type = int(data.get("cct", None))
|
||||
self.service_type = int(data.get("cst", None))
|
||||
self.operation_type = int(data.get("cop", None))
|
||||
self.chute_type = TraceDataCreditChuteType(int(data.get("cct", None)))
|
||||
self.service_type = TraceDataCreditChuteType(int(data.get("cst", None)))
|
||||
self.operation_type = TraceDataCreditOperationType(int(data.get("cop", None)))
|
||||
self.coin_rate0 = int(data.get("cr0", None))
|
||||
self.coin_rate1 = int(data.get("cr1", None))
|
||||
self.bonus_addition = int(data.get("cba", None))
|
||||
@@ -884,7 +955,7 @@ class BillingResponse:
|
||||
nearfull: str = "",
|
||||
nearfull_sig: str = "",
|
||||
request_num: int = 1,
|
||||
protocol_ver: float = 1.000,
|
||||
protocol_ver: float5 = float5("1.000"),
|
||||
playhistory: str = "000000/0:000000/0:000000/0",
|
||||
) -> None:
|
||||
self.result = 0
|
||||
@@ -898,7 +969,7 @@ class BillingResponse:
|
||||
self.nearfull = nearfull
|
||||
self.nearfullsig = nearfull_sig
|
||||
self.linelimit = 100
|
||||
self.protocolver = float5.to_str(protocol_ver)
|
||||
self.protocolver = str(protocol_ver)
|
||||
# playhistory -> YYYYMM/C:...
|
||||
# YYYY -> 4 digit year, MM -> 2 digit month, C -> Playcount during that period
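# Example (illustrative, not part of the original commit): three months of history,
# oldest first, would be encoded as "202502/12:202503/30:202504/25".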
|
||||
|
||||
|
||||
@@ -362,7 +362,7 @@ class AllnetConfig:
|
||||
)
|
||||
|
||||
@property
|
||||
def allow_online_updates(self) -> int:
|
||||
def allow_online_updates(self) -> bool:
|
||||
return CoreConfig.get_config_field(
|
||||
self.__config, "core", "allnet", "allow_online_updates", default=False
|
||||
)
|
||||
@@ -373,6 +373,12 @@ class AllnetConfig:
|
||||
self.__config, "core", "allnet", "update_cfg_folder", default=""
|
||||
)
|
||||
|
||||
@property
|
||||
def save_billing(self) -> bool:
|
||||
return CoreConfig.get_config_field(
|
||||
self.__config, "core", "allnet", "save_billing", default=False
|
||||
)
|
||||
|
||||
class BillingConfig:
|
||||
def __init__(self, parent_config: "CoreConfig") -> None:
|
||||
self.__config = parent_config
|
||||
|
||||
164
core/data/alembic/versions/263884e774cc_acc_opt_tables.py
Normal file
@@ -0,0 +1,164 @@
|
||||
"""acc_opt_tables
|
||||
|
||||
Revision ID: 263884e774cc
|
||||
Revises: 1d0014d35220
|
||||
Create Date: 2025-04-07 18:05:53.349320
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects import mysql
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '263884e774cc'
|
||||
down_revision = '1d0014d35220'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.create_table('chuni_static_opt',
|
||||
sa.Column('id', sa.BIGINT(), nullable=False),
|
||||
sa.Column('version', sa.INTEGER(), nullable=False),
|
||||
sa.Column('name', sa.VARCHAR(length=4), nullable=False),
|
||||
sa.Column('sequence', sa.INTEGER(), nullable=False),
|
||||
sa.Column('whenRead', sa.TIMESTAMP(), server_default=sa.text('now()'), nullable=False),
|
||||
sa.Column('isEnable', sa.BOOLEAN(), server_default='1', nullable=False),
|
||||
sa.PrimaryKeyConstraint('id'),
|
||||
sa.UniqueConstraint('version', 'name', name='chuni_static_opt_uk'),
|
||||
mysql_charset='utf8mb4'
|
||||
)
|
||||
op.create_table('cm_static_opts',
|
||||
sa.Column('id', sa.BIGINT(), nullable=False),
|
||||
sa.Column('version', sa.INTEGER(), nullable=False),
|
||||
sa.Column('name', sa.VARCHAR(length=4), nullable=False),
|
||||
sa.Column('sequence', sa.INTEGER(), nullable=True),
|
||||
sa.Column('gekiVersion', sa.INTEGER(), nullable=True),
|
||||
sa.Column('gekiReleaseVer', sa.INTEGER(), nullable=True),
|
||||
sa.Column('maiVersion', sa.INTEGER(), nullable=True),
|
||||
sa.Column('maiReleaseVer', sa.INTEGER(), nullable=True),
|
||||
sa.Column('whenRead', sa.TIMESTAMP(), server_default=sa.text('now()'), nullable=False),
|
||||
sa.Column('isEnable', sa.BOOLEAN(), server_default='1', nullable=False),
|
||||
sa.PrimaryKeyConstraint('id'),
|
||||
sa.UniqueConstraint('version', 'name', name='cm_static_opts_uk'),
|
||||
mysql_charset='utf8mb4'
|
||||
)
|
||||
op.create_table('mai2_static_opt',
|
||||
sa.Column('id', sa.BIGINT(), nullable=False),
|
||||
sa.Column('version', sa.INTEGER(), nullable=False),
|
||||
sa.Column('name', sa.VARCHAR(length=4), nullable=False),
|
||||
sa.Column('sequence', sa.INTEGER(), nullable=False),
|
||||
sa.Column('cmReleaseVer', sa.INTEGER(), nullable=False),
|
||||
sa.Column('whenRead', sa.TIMESTAMP(), server_default=sa.text('now()'), nullable=False),
|
||||
sa.Column('isEnable', sa.BOOLEAN(), server_default='1', nullable=False),
|
||||
sa.PrimaryKeyConstraint('id'),
|
||||
sa.UniqueConstraint('version', 'name', name='mai2_static_opt_uk'),
|
||||
mysql_charset='utf8mb4'
|
||||
)
|
||||
op.create_table('ongeki_static_opt',
|
||||
sa.Column('id', sa.BIGINT(), nullable=False),
|
||||
sa.Column('version', sa.INTEGER(), nullable=False),
|
||||
sa.Column('name', sa.VARCHAR(length=4), nullable=False),
|
||||
sa.Column('sequence', sa.INTEGER(), nullable=False),
|
||||
sa.Column('cmReleaseVer', sa.INTEGER(), nullable=False),
|
||||
sa.Column('whenRead', sa.TIMESTAMP(), server_default=sa.text('now()'), nullable=False),
|
||||
sa.Column('isEnable', sa.BOOLEAN(), server_default='1', nullable=False),
|
||||
sa.PrimaryKeyConstraint('id'),
|
||||
sa.UniqueConstraint('version', 'name', name='ongeki_static_opt_uk'),
|
||||
mysql_charset='utf8mb4'
|
||||
)
|
||||
op.add_column('chuni_static_avatar', sa.Column('opt', sa.BIGINT(), nullable=True))
|
||||
op.create_foreign_key(None, 'chuni_static_avatar', 'chuni_static_opt', ['opt'], ['id'], onupdate='cascade', ondelete='SET NULL')
|
||||
op.add_column('chuni_static_cards', sa.Column('opt', sa.BIGINT(), nullable=True))
|
||||
op.create_foreign_key(None, 'chuni_static_cards', 'cm_static_opts', ['opt'], ['id'], onupdate='cascade', ondelete='SET NULL')
|
||||
op.add_column('chuni_static_character', sa.Column('opt', sa.BIGINT(), nullable=True))
|
||||
op.create_foreign_key(None, 'chuni_static_character', 'chuni_static_opt', ['opt'], ['id'], onupdate='cascade', ondelete='SET NULL')
|
||||
op.add_column('chuni_static_charge', sa.Column('opt', sa.BIGINT(), nullable=True))
|
||||
op.create_foreign_key(None, 'chuni_static_charge', 'chuni_static_opt', ['opt'], ['id'], onupdate='cascade', ondelete='SET NULL')
|
||||
op.add_column('chuni_static_events', sa.Column('opt', sa.BIGINT(), nullable=True))
|
||||
op.create_foreign_key(None, 'chuni_static_events', 'chuni_static_opt', ['opt'], ['id'], onupdate='cascade', ondelete='SET NULL')
|
||||
op.add_column('chuni_static_gachas', sa.Column('opt', sa.BIGINT(), nullable=True))
|
||||
op.create_foreign_key(None, 'chuni_static_gachas', 'cm_static_opts', ['opt'], ['id'], onupdate='cascade', ondelete='SET NULL')
|
||||
op.add_column('chuni_static_login_bonus', sa.Column('opt', sa.BIGINT(), nullable=True))
|
||||
op.create_foreign_key(None, 'chuni_static_login_bonus', 'chuni_static_opt', ['opt'], ['id'], onupdate='cascade', ondelete='SET NULL')
|
||||
op.add_column('chuni_static_login_bonus_preset', sa.Column('opt', sa.BIGINT(), nullable=True))
|
||||
op.create_foreign_key(None, 'chuni_static_login_bonus_preset', 'chuni_static_opt', ['opt'], ['id'], onupdate='cascade', ondelete='SET NULL')
|
||||
op.add_column('chuni_static_map_icon', sa.Column('opt', sa.BIGINT(), nullable=True))
|
||||
op.create_foreign_key(None, 'chuni_static_map_icon', 'chuni_static_opt', ['opt'], ['id'], onupdate='cascade', ondelete='SET NULL')
|
||||
op.add_column('chuni_static_music', sa.Column('opt', sa.BIGINT(), nullable=True))
|
||||
op.create_foreign_key(None, 'chuni_static_music', 'chuni_static_opt', ['opt'], ['id'], onupdate='cascade', ondelete='SET NULL')
|
||||
op.add_column('chuni_static_system_voice', sa.Column('opt', sa.BIGINT(), nullable=True))
|
||||
op.create_foreign_key(None, 'chuni_static_system_voice', 'chuni_static_opt', ['opt'], ['id'], onupdate='cascade', ondelete='SET NULL')
|
||||
op.add_column('chuni_static_trophy', sa.Column('opt', sa.BIGINT(), nullable=True))
|
||||
op.create_foreign_key(None, 'chuni_static_trophy', 'chuni_static_opt', ['opt'], ['id'], onupdate='cascade', ondelete='SET NULL')
|
||||
op.add_column('mai2_static_cards', sa.Column('opt', sa.BIGINT(), nullable=True))
|
||||
op.create_foreign_key(None, 'mai2_static_cards', 'cm_static_opts', ['opt'], ['id'], onupdate='cascade', ondelete='SET NULL')
|
||||
op.add_column('mai2_static_event', sa.Column('opt', sa.BIGINT(), nullable=True))
|
||||
op.create_foreign_key(None, 'mai2_static_event', 'mai2_static_opt', ['opt'], ['id'], onupdate='cascade', ondelete='SET NULL')
|
||||
op.add_column('mai2_static_music', sa.Column('opt', sa.BIGINT(), nullable=True))
|
||||
op.create_foreign_key(None, 'mai2_static_music', 'mai2_static_opt', ['opt'], ['id'], onupdate='cascade', ondelete='SET NULL')
|
||||
op.add_column('mai2_static_ticket', sa.Column('opt', sa.BIGINT(), nullable=True))
|
||||
op.create_foreign_key(None, 'mai2_static_ticket', 'mai2_static_opt', ['opt'], ['id'], onupdate='cascade', ondelete='SET NULL')
|
||||
op.add_column('ongeki_static_cards', sa.Column('opt', sa.BIGINT(), nullable=True))
|
||||
op.create_foreign_key(None, 'ongeki_static_cards', 'ongeki_static_opt', ['opt'], ['id'], onupdate='cascade', ondelete='SET NULL')
|
||||
op.add_column('ongeki_static_events', sa.Column('opt', sa.BIGINT(), nullable=True))
|
||||
op.create_foreign_key(None, 'ongeki_static_events', 'ongeki_static_opt', ['opt'], ['id'], onupdate='cascade', ondelete='SET NULL')
|
||||
op.add_column('ongeki_static_gachas', sa.Column('opt', sa.BIGINT(), nullable=True))
|
||||
op.create_foreign_key(None, 'ongeki_static_gachas', 'cm_static_opts', ['opt'], ['id'], onupdate='cascade', ondelete='SET NULL')
|
||||
op.add_column('ongeki_static_music', sa.Column('opt', sa.BIGINT(), nullable=True))
|
||||
op.create_foreign_key(None, 'ongeki_static_music', 'ongeki_static_opt', ['opt'], ['id'], onupdate='cascade', ondelete='SET NULL')
|
||||
op.add_column('ongeki_static_rewards', sa.Column('opt', sa.BIGINT(), nullable=True))
|
||||
op.create_foreign_key(None, 'ongeki_static_rewards', 'ongeki_static_opt', ['opt'], ['id'], onupdate='cascade', ondelete='SET NULL')
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.drop_constraint("ongeki_static_rewards_ibfk_1", 'ongeki_static_rewards', type_='foreignkey')
|
||||
op.drop_column('ongeki_static_rewards', 'opt')
|
||||
op.drop_constraint("ongeki_static_music_ibfk_1", 'ongeki_static_music', type_='foreignkey')
|
||||
op.drop_column('ongeki_static_music', 'opt')
|
||||
op.drop_constraint("ongeki_static_gachas_ibfk_1", 'ongeki_static_gachas', type_='foreignkey')
|
||||
op.drop_column('ongeki_static_gachas', 'opt')
|
||||
op.drop_constraint("ongeki_static_events_ibfk_1", "ongeki_static_events", type_='foreignkey')
|
||||
op.drop_column('ongeki_static_events', 'opt')
|
||||
op.drop_constraint("ongeki_static_cards_ibfk_1", "ongeki_static_cards", type_='foreignkey')
|
||||
op.drop_column('ongeki_static_cards', 'opt')
|
||||
op.drop_constraint("mai2_static_ticket_ibfk_1", "mai2_static_ticket", type_='foreignkey')
|
||||
op.drop_column('mai2_static_ticket', 'opt')
|
||||
op.drop_constraint("mai2_static_music_ibfk_1", "mai2_static_music", type_='foreignkey')
|
||||
op.drop_column('mai2_static_music', 'opt')
|
||||
op.drop_constraint("mai2_static_event_ibfk_1", "mai2_static_event", type_='foreignkey')
|
||||
op.drop_column('mai2_static_event', 'opt')
|
||||
op.drop_constraint("mai2_static_cards_ibfk_1", "mai2_static_cards", type_='foreignkey')
|
||||
op.drop_column('mai2_static_cards', 'opt')
|
||||
op.drop_constraint("chuni_static_trophy_ibfk_1", "chuni_static_trophy", type_='foreignkey')
|
||||
op.drop_column('chuni_static_trophy', 'opt')
|
||||
op.drop_constraint("chuni_static_system_voice_ibfk_1", "chuni_static_system_voice", type_='foreignkey')
|
||||
op.drop_column('chuni_static_system_voice', 'opt')
|
||||
op.drop_constraint("chuni_static_music_ibfk_1", "chuni_static_music", type_='foreignkey')
|
||||
op.drop_column('chuni_static_music', 'opt')
|
||||
op.drop_constraint("chuni_static_map_icon_ibfk_1", "chuni_static_map_icon", type_='foreignkey')
|
||||
op.drop_column('chuni_static_map_icon', 'opt')
|
||||
op.drop_constraint("chuni_static_login_bonus_preset_ibfk_1", "chuni_static_login_bonus_preset", type_='foreignkey')
|
||||
op.drop_column('chuni_static_login_bonus_preset', 'opt')
|
||||
op.drop_constraint("chuni_static_login_bonus_ibfk_2", "chuni_static_login_bonus", type_='foreignkey')
|
||||
op.drop_column('chuni_static_login_bonus', 'opt')
|
||||
op.drop_constraint("chuni_static_gachas_ibfk_1", "chuni_static_gachas", type_='foreignkey')
|
||||
op.drop_column('chuni_static_gachas', 'opt')
|
||||
op.drop_constraint("chuni_static_events_ibfk_1", "chuni_static_events", type_='foreignkey')
|
||||
op.drop_column('chuni_static_events', 'opt')
|
||||
op.drop_constraint("chuni_static_charge_ibfk_1", "chuni_static_charge", type_='foreignkey')
|
||||
op.drop_column('chuni_static_charge', 'opt')
|
||||
op.drop_constraint("chuni_static_character_ibfk_1", "chuni_static_character", type_='foreignkey')
|
||||
op.drop_column('chuni_static_character', 'opt')
|
||||
op.drop_constraint("chuni_static_cards_ibfk_1", "chuni_static_cards", type_='foreignkey')
|
||||
op.drop_column('chuni_static_cards', 'opt')
|
||||
op.drop_constraint("chuni_static_avatar_ibfk_1", "chuni_static_avatar", type_='foreignkey')
|
||||
op.drop_column('chuni_static_avatar', 'opt')
|
||||
op.drop_table('ongeki_static_opt')
|
||||
op.drop_table('mai2_static_opt')
|
||||
op.drop_table('cm_static_opts')
|
||||
op.drop_table('chuni_static_opt')
|
||||
# ### end Alembic commands ###
|
||||
@@ -0,0 +1,66 @@
|
||||
"""add_billing_tables
|
||||
|
||||
Revision ID: 27e3434740df
|
||||
Revises: ae364c078429
|
||||
Create Date: 2025-04-17 18:32:06.008601
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects import mysql
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '27e3434740df'
|
||||
down_revision = 'ae364c078429'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.create_table('machine_billing_charge',
|
||||
sa.Column('id', sa.BIGINT(), nullable=False),
|
||||
sa.Column('machine', sa.Integer(), nullable=False),
|
||||
sa.Column('game_id', sa.CHAR(length=5), nullable=False),
|
||||
sa.Column('game_ver', sa.FLOAT(), nullable=False),
|
||||
sa.Column('play_count', sa.INTEGER(), nullable=False),
|
||||
sa.Column('play_limit', sa.INTEGER(), nullable=False),
|
||||
sa.Column('product_code', sa.INTEGER(), nullable=False),
|
||||
sa.Column('product_count', sa.INTEGER(), nullable=False),
|
||||
sa.Column('func_type', sa.INTEGER(), nullable=False),
|
||||
sa.Column('player_number', sa.INTEGER(), nullable=False),
|
||||
sa.ForeignKeyConstraint(['machine'], ['machine.id'], onupdate='cascade', ondelete='cascade'),
|
||||
sa.PrimaryKeyConstraint('id'),
|
||||
mysql_charset='utf8mb4'
|
||||
)
|
||||
op.create_table('machine_billing_credit',
|
||||
sa.Column('id', sa.BIGINT(), nullable=False),
|
||||
sa.Column('machine', sa.Integer(), nullable=False),
|
||||
sa.Column('chute_type', sa.INTEGER(), nullable=False),
|
||||
sa.Column('service_type', sa.INTEGER(), nullable=False),
|
||||
sa.Column('operation_type', sa.INTEGER(), nullable=False),
|
||||
sa.Column('coin_rate0', sa.INTEGER(), nullable=False),
|
||||
sa.Column('coin_rate1', sa.INTEGER(), nullable=False),
|
||||
sa.Column('coin_bonus', sa.INTEGER(), nullable=False),
|
||||
sa.Column('credit_rate', sa.INTEGER(), nullable=False),
|
||||
sa.Column('coin_count_slot0', sa.INTEGER(), nullable=False),
|
||||
sa.Column('coin_count_slot1', sa.INTEGER(), nullable=False),
|
||||
sa.Column('coin_count_slot2', sa.INTEGER(), nullable=False),
|
||||
sa.Column('coin_count_slot3', sa.INTEGER(), nullable=False),
|
||||
sa.Column('coin_count_slot4', sa.INTEGER(), nullable=False),
|
||||
sa.Column('coin_count_slot5', sa.INTEGER(), nullable=False),
|
||||
sa.Column('coin_count_slot6', sa.INTEGER(), nullable=False),
|
||||
sa.Column('coin_count_slot7', sa.INTEGER(), nullable=False),
|
||||
sa.ForeignKeyConstraint(['machine'], ['machine.id'], onupdate='cascade', ondelete='cascade'),
|
||||
sa.PrimaryKeyConstraint('id'),
|
||||
sa.UniqueConstraint('machine'),
|
||||
mysql_charset='utf8mb4'
|
||||
)
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.drop_table('machine_billing_credit')
|
||||
op.drop_table('machine_billing_charge')
|
||||
# ### end Alembic commands ###
|
||||
@@ -1,8 +1,14 @@
|
||||
"""Mai2 PRiSM support
|
||||
|
||||
<<<<<<<< HEAD:core/data/alembic/versions/5d7b38996e67_mai2_prism_support.py
|
||||
Revision ID: 5d7b38996e67
|
||||
Revises: 1d0014d35220
|
||||
Create Date: 2025-04-04 06:28:24.898912
|
||||
========
|
||||
Revision ID: 5cf98cfe52ad
|
||||
Revises: 263884e774cc
|
||||
Create Date: 2025-04-08 08:00:51.243089
|
||||
>>>>>>>> refs/heads/develop:core/data/alembic/versions/5cf98cfe52ad_mai2_prism_support.py
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
@@ -10,14 +16,20 @@ import sqlalchemy as sa
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
<<<<<<<< HEAD:core/data/alembic/versions/5d7b38996e67_mai2_prism_support.py
|
||||
revision = '5d7b38996e67'
|
||||
down_revision = '1d0014d35220'
|
||||
========
|
||||
revision = '5cf98cfe52ad'
|
||||
down_revision = '263884e774cc'
|
||||
>>>>>>>> refs/heads/develop:core/data/alembic/versions/5cf98cfe52ad_mai2_prism_support.py
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
<<<<<<<< HEAD:core/data/alembic/versions/5d7b38996e67_mai2_prism_support.py
|
||||
op.create_table('mai2_static_kaleidxscope_condition',
|
||||
sa.Column('id', sa.Integer(), nullable=False),
|
||||
sa.Column('conditionId', sa.Integer(), nullable=True),
|
||||
@@ -28,6 +40,8 @@ def upgrade():
|
||||
sa.UniqueConstraint('conditionId', 'conditionName', 'songId', 'songName', name='mai2_static_kaleidxscope_uk'),
|
||||
mysql_charset='utf8mb4'
|
||||
)
|
||||
========
|
||||
>>>>>>>> refs/heads/develop:core/data/alembic/versions/5cf98cfe52ad_mai2_prism_support.py
|
||||
op.create_table('mai2_score_kaleidxscope',
|
||||
sa.Column('id', sa.Integer(), nullable=False),
|
||||
sa.Column('user', sa.Integer(), nullable=False),
|
||||
@@ -59,5 +73,8 @@ def downgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.drop_column('mai2_playlog', 'extBool2')
|
||||
op.drop_table('mai2_score_kaleidxscope')
|
||||
<<<<<<<< HEAD:core/data/alembic/versions/5d7b38996e67_mai2_prism_support.py
|
||||
op.drop_table('mai2_static_kaleidxscope_condition')
|
||||
========
|
||||
>>>>>>>> refs/heads/develop:core/data/alembic/versions/5cf98cfe52ad_mai2_prism_support.py
|
||||
# ### end Alembic commands ###
|
||||
@@ -0,0 +1,30 @@
|
||||
"""chuni_nameplate_add_opt
|
||||
|
||||
Revision ID: ae364c078429
|
||||
Revises: 5cf98cfe52ad
|
||||
Create Date: 2025-04-08 00:22:22.370660
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects import mysql
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = 'ae364c078429'
|
||||
down_revision = '5cf98cfe52ad'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.add_column('chuni_static_nameplate', sa.Column('opt', sa.BIGINT(), nullable=True))
|
||||
op.create_foreign_key(None, 'chuni_static_nameplate', 'chuni_static_opt', ['opt'], ['id'], onupdate='cascade', ondelete='SET NULL')
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.drop_constraint("chuni_static_nameplate_ibfk_1", 'chuni_static_nameplate', type_='foreignkey')
|
||||
op.drop_column('chuni_static_nameplate', 'opt')
|
||||
# ### end Alembic commands ###
|
||||
@@ -1,28 +0,0 @@
|
||||
"""Mai2 add PRiSM+ playlog support
|
||||
|
||||
Revision ID: bdf710616ba4
|
||||
Revises: 16f34bf7b968
|
||||
Create Date: 2025-04-02 12:42:08.981516
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = 'bdf710616ba4'
|
||||
down_revision = '16f34bf7b968'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.add_column('mai2_playlog', sa.Column('extBool3', sa.Boolean(), nullable=True,server_default=sa.text("NULL")))
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.drop_column('mai2_playlog', 'extBool3')
|
||||
# ### end Alembic commands ###
|
||||
@@ -0,0 +1,50 @@
|
||||
"""add_billing_playcount
|
||||
|
||||
Revision ID: f6007bbf057d
|
||||
Revises: 27e3434740df
|
||||
Create Date: 2025-04-19 18:20:35.554137
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects import mysql
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = 'f6007bbf057d'
|
||||
down_revision = '27e3434740df'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.create_table('machine_billing_playcount',
|
||||
sa.Column('id', sa.BIGINT(), nullable=False),
|
||||
sa.Column('machine', sa.Integer(), nullable=False),
|
||||
sa.Column('game_id', sa.CHAR(length=5), nullable=False),
|
||||
sa.Column('year', sa.INTEGER(), nullable=False),
|
||||
sa.Column('month', sa.INTEGER(), nullable=False),
|
||||
sa.Column('playct', sa.BIGINT(), server_default='1', nullable=False),
|
||||
sa.ForeignKeyConstraint(['machine'], ['machine.id'], onupdate='cascade', ondelete='cascade'),
|
||||
sa.PrimaryKeyConstraint('id'),
|
||||
sa.UniqueConstraint('machine'),
|
||||
sa.UniqueConstraint('machine', 'game_id', 'year', 'month', name='machine_billing_playcount_uk'),
|
||||
mysql_charset='utf8mb4'
|
||||
)
|
||||
op.add_column('machine_billing_credit', sa.Column('game_id', sa.CHAR(length=5), nullable=False))
|
||||
op.drop_constraint("machine_billing_credit_ibfk_1", "machine_billing_credit", "foreignkey")
|
||||
op.drop_index('machine', table_name='machine_billing_credit')
|
||||
op.create_unique_constraint('machine_billing_credit_uk', 'machine_billing_credit', ['machine', 'game_id'])
|
||||
op.create_foreign_key("machine_billing_credit_ibfk_1", "machine_billing_credit", "machine", ["machine"], ["id"], onupdate='cascade', ondelete='cascade')
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.drop_constraint("machine_billing_credit_ibfk_1", "machine_billing_credit", "foreignkey")
|
||||
op.drop_constraint('machine_billing_credit_uk', 'machine_billing_credit', type_='unique')
|
||||
op.create_index('machine', 'machine_billing_credit', ['machine'], unique=True)
|
||||
op.create_foreign_key("machine_billing_credit_ibfk_1", "machine_billing_credit", "machine", ["machine"], ["id"], onupdate='cascade', ondelete='cascade')
|
||||
op.drop_column('machine_billing_credit', 'game_id')
|
||||
op.drop_table('machine_billing_playcount')
|
||||
# ### end Alembic commands ###
|
||||
@@ -1,12 +1,13 @@
|
||||
import re
|
||||
from typing import List, Optional
|
||||
from datetime import datetime
|
||||
|
||||
from sqlalchemy import Column, Table, and_, or_
|
||||
from sqlalchemy import Column, Table, and_, or_, UniqueConstraint
|
||||
from sqlalchemy.dialects.mysql import insert
|
||||
from sqlalchemy.engine import Row
|
||||
from sqlalchemy.sql import func, select
|
||||
from sqlalchemy.sql.schema import ForeignKey, PrimaryKeyConstraint
|
||||
from sqlalchemy.types import JSON, Boolean, Integer, String
|
||||
from sqlalchemy.types import JSON, Boolean, Integer, String, BIGINT, INTEGER, CHAR, FLOAT
|
||||
|
||||
from core.data.schema.base import BaseData, metadata
|
||||
|
||||
@@ -67,6 +68,76 @@ arcade_owner: Table = Table(
|
||||
mysql_charset="utf8mb4",
|
||||
)
|
||||
|
||||
billing_charge: Table = Table(
|
||||
"machine_billing_charge",
|
||||
metadata,
|
||||
Column("id", BIGINT, primary_key=True, nullable=False),
|
||||
Column(
|
||||
"machine",
|
||||
Integer,
|
||||
ForeignKey("machine.id", ondelete="cascade", onupdate="cascade"),
|
||||
nullable=False,
|
||||
),
|
||||
Column("game_id", CHAR(5), nullable=False),
|
||||
Column("game_ver", FLOAT, nullable=False),
|
||||
Column("play_count", INTEGER, nullable=False),
|
||||
Column("play_limit", INTEGER, nullable=False),
|
||||
Column("product_code", INTEGER, nullable=False),
|
||||
Column("product_count", INTEGER, nullable=False),
|
||||
Column("func_type", INTEGER, nullable=False),
|
||||
Column("player_number", INTEGER, nullable=False),
|
||||
mysql_charset="utf8mb4",
|
||||
)
|
||||
|
||||
# These settings are only really of interest
|
||||
# for real cabinets operating as pay-to-play
|
||||
billing_credit: Table = Table(
|
||||
"machine_billing_credit",
|
||||
metadata,
|
||||
Column("id", BIGINT, primary_key=True, nullable=False),
|
||||
Column(
|
||||
"machine",
|
||||
Integer,
|
||||
ForeignKey("machine.id", ondelete="cascade", onupdate="cascade"),
|
||||
nullable=False
|
||||
),
|
||||
Column("game_id", CHAR(5), nullable=False),
|
||||
Column("chute_type", INTEGER, nullable=False),
|
||||
Column("service_type", INTEGER, nullable=False),
|
||||
Column("operation_type", INTEGER, nullable=False),
|
||||
Column("coin_rate0", INTEGER, nullable=False),
|
||||
Column("coin_rate1", INTEGER, nullable=False),
|
||||
Column("coin_bonus", INTEGER, nullable=False),
|
||||
Column("credit_rate", INTEGER, nullable=False),
|
||||
Column("coin_count_slot0", INTEGER, nullable=False),
|
||||
Column("coin_count_slot1", INTEGER, nullable=False),
|
||||
Column("coin_count_slot2", INTEGER, nullable=False),
|
||||
Column("coin_count_slot3", INTEGER, nullable=False),
|
||||
Column("coin_count_slot4", INTEGER, nullable=False),
|
||||
Column("coin_count_slot5", INTEGER, nullable=False),
|
||||
Column("coin_count_slot6", INTEGER, nullable=False),
|
||||
Column("coin_count_slot7", INTEGER, nullable=False),
|
||||
UniqueConstraint("machine", "game_id", name="machine_billing_credit_uk"),
|
||||
mysql_charset="utf8mb4",
|
||||
)
|
||||
|
||||
billing_playct: Table = Table(
|
||||
"machine_billing_playcount",
|
||||
metadata,
|
||||
Column("id", BIGINT, primary_key=True, nullable=False),
|
||||
Column(
|
||||
"machine",
|
||||
Integer,
|
||||
ForeignKey("machine.id", ondelete="cascade", onupdate="cascade"),
|
||||
nullable=False, unique=True
|
||||
),
|
||||
Column("game_id", CHAR(5), nullable=False),
|
||||
Column("year", INTEGER, nullable=False),
|
||||
Column("month", INTEGER, nullable=False),
|
||||
Column("playct", BIGINT, nullable=False, server_default="1"),
|
||||
UniqueConstraint("machine", "game_id", "year", "month", name="machine_billing_playcount_uk"),
|
||||
mysql_charset="utf8mb4",
|
||||
)
|
||||
|
||||
class ArcadeData(BaseData):
|
||||
async def get_machine(self, serial: Optional[str] = None, id: Optional[int] = None) -> Optional[Row]:
|
||||
@@ -345,6 +416,120 @@ class ArcadeData(BaseData):
|
||||
return result.fetchone()['count_1']
|
||||
self.logger.error("Failed to count machine serials that start with A69A!")
|
||||
|
||||
async def billing_add_charge(self, machine_id: int, game_id: str, game_ver: float, playcount: int, playlimit, product_code: int, product_count: int, func_type: int, player_num: int) -> Optional[int]:
|
||||
result = await self.execute(billing_charge.insert().values(
|
||||
machine=machine_id,
|
||||
game_id=game_id,
|
||||
game_ver=game_ver,
|
||||
play_count=playcount,
|
||||
play_limit=playlimit,
|
||||
product_code=product_code,
|
||||
product_count=product_count,
|
||||
func_type=func_type,
|
||||
player_number=player_num
|
||||
))
|
||||
|
||||
if result is None:
|
||||
self.logger.error(f"Failed to add billing charge for machine {machine_id}!")
|
||||
return None
|
||||
return result.lastrowid
|
||||
|
||||
async def billing_get_last_charge(self, machine_id: int, game_id: str) -> Optional[Row]:
|
||||
result = await self.execute(billing_charge.select(
|
||||
and_(billing_charge.c.machine == machine_id, billing_charge.c.game_id == game_id)
|
||||
).order_by(billing_charge.c.id.desc()).limit(3))
|
||||
if result:
|
||||
return result.fetchone()
|
||||
|
||||
async def billing_set_credit(self, machine_id: int, game_id: str, chute_type: int, service_type: int, op_mode: int, coin_rate0: int, coin_rate1: int,
|
||||
bonus_adder: int, coin_to_credit_rate: int, coin_count_slot0: int, coin_count_slot1: int, coin_count_slot2: int, coin_count_slot3: int,
|
||||
coin_count_slot4: int, coin_count_slot5: int, coin_count_slot6: int, coin_count_slot7: int) -> Optional[int]:
|
||||
|
||||
sql = insert(billing_credit).values(
|
||||
machine=machine_id,
|
||||
game_id=game_id,
|
||||
chute_type=chute_type,
|
||||
service_type=service_type,
|
||||
operation_type=op_mode,
|
||||
coin_rate0=coin_rate0,
|
||||
coin_rate1=coin_rate1,
|
||||
coin_bonus=bonus_adder,
|
||||
credit_rate=coin_to_credit_rate,
|
||||
coin_count_slot0=coin_count_slot0,
|
||||
coin_count_slot1=coin_count_slot1,
|
||||
coin_count_slot2=coin_count_slot2,
|
||||
coin_count_slot3=coin_count_slot3,
|
||||
coin_count_slot4=coin_count_slot4,
|
||||
coin_count_slot5=coin_count_slot5,
|
||||
coin_count_slot6=coin_count_slot6,
|
||||
coin_count_slot7=coin_count_slot7,
|
||||
)
|
||||
|
||||
conflict = sql.on_duplicate_key_update(
|
||||
chute_type=chute_type,
|
||||
service_type=service_type,
|
||||
operation_type=op_mode,
|
||||
coin_rate0=coin_rate0,
|
||||
coin_rate1=coin_rate1,
|
||||
coin_bonus=bonus_adder,
|
||||
credit_rate=coin_to_credit_rate,
|
||||
coin_count_slot0=coin_count_slot0,
|
||||
coin_count_slot1=coin_count_slot1,
|
||||
coin_count_slot2=coin_count_slot2,
|
||||
coin_count_slot3=coin_count_slot3,
|
||||
coin_count_slot4=coin_count_slot4,
|
||||
coin_count_slot5=coin_count_slot5,
|
||||
coin_count_slot6=coin_count_slot6,
|
||||
coin_count_slot7=coin_count_slot7,
|
||||
)
|
||||
|
||||
result = await self.execute(conflict)
|
||||
if result is None:
|
||||
self.logger.error(f"Failed to set billing credit settings for machine {machine_id}!")
|
||||
return None
|
||||
return result.lastrowid
|
||||
|
||||
async def billing_get_credit(self, machine_id: int, game_id: str) -> Optional[Row]:
|
||||
result = await self.execute(billing_credit.select(
|
||||
and_(billing_credit.c.machine == machine_id, billing_credit.c.game_id == game_id)
|
||||
))
|
||||
if result:
|
||||
return result.fetchone()
|
||||
|
||||
async def billing_add_playcount(self, machine_id: int, game_id: str, playct: int = 1) -> None:
|
||||
now = datetime.now()
|
||||
sql = insert(billing_playct).values(
|
||||
machine=machine_id,
|
||||
game_id=game_id,
|
||||
year=now.year,
|
||||
month=now.month,
|
||||
playct=playct
|
||||
)
|
||||
|
||||
conflict = sql.on_duplicate_key_update(playct=billing_playct.c.playct + playct)
|
||||
result = await self.execute(conflict)
|
||||
|
||||
if result is None:
|
||||
self.logger.error(f"Failed to add playcount for machine {machine_id} running {game_id}")
|
||||
|
||||
async def billing_get_playcount_3mo(self, machine_id: int, game_id: str) -> Optional[List[Row]]:
|
||||
result = await self.execute(billing_playct.select(and_(
|
||||
billing_playct.c.machine == machine_id,
|
||||
billing_playct.c.game_id == game_id
|
||||
)).order_by(billing_playct.c.year.desc(), billing_playct.c.month.desc()).limit(3))
|
||||
|
||||
if result is not None:
|
||||
return result.fetchall()
|
||||
|
||||
async def billing_get_last_playcount(self, machine_id: int, game_id: str) -> Optional[Row]:
|
||||
result = await self.execute(billing_playct.select(and_(
|
||||
billing_playct.c.machine == machine_id,
|
||||
billing_playct.c.game_id == game_id
|
||||
)).order_by(billing_playct.c.year.desc(), billing_playct.c.month.desc()).limit(1))
|
||||
|
||||
if result is not None:
|
||||
return result.fetchone()
|
||||
|
||||
def format_serial(
|
||||
self, platform_code: str, platform_rev: int, serial_letter: str, serial_num: int, append: int, dash: bool = False
|
||||
) -> str:
|
||||
@@ -371,7 +556,6 @@ class ArcadeData(BaseData):
|
||||
month = ((month - 1) + 9) % 12 # Offset so April=0
|
||||
return f"{year:02}{month // 6:01}{month % 6 + 1:01}"
|
||||
|
||||
|
||||
def parse_keychip_suffix(self, suffix: str) -> tuple[int, int]:
|
||||
year = int(suffix[0:2])
|
||||
half = int(suffix[2])
|
||||
|
||||
@@ -5,6 +5,7 @@ from datetime import datetime, timezone
|
||||
from os import walk
|
||||
from types import ModuleType
|
||||
from typing import Any, Dict, Optional
|
||||
import math
|
||||
|
||||
import jwt
|
||||
from starlette.requests import Request
|
||||
@@ -92,6 +93,8 @@ class Utils:
|
||||
|
||||
return cls.real_title_port_ssl
|
||||
|
||||
def floor_to_nearest_005(version: int) -> int:
|
||||
return (version // 5) * 5
|
||||
|
||||
def create_sega_auth_key(
|
||||
aime_id: int,
|
||||
|
||||
@@ -195,10 +195,10 @@ Config file is located in `config/cxb.yaml`.
|
||||
### Presents
|
||||
Presents are items given to the user when they log in, with a little animation (for example, the KOP song was given to the finalists as a present). To add a present, you must insert it into the `mai2_item_present` table. In that table, a NULL version means any version, a NULL user means any user, a NULL start date means always open, and a NULL end date means it never expires. Below is a list of presents one might wish to add (see the example insert after the table):
|
||||
|
||||
| Game Version | Item ID | Item Kind | Item Description | Present Description |
|
||||
|--------------|---------|-----------|-------------------------------------------------|------------------------------------------------|
|
||||
| BUDDiES (21) | 409505 | Icon (3) | 旅行スタンプ(月面基地) (Travel Stamp - Moon Base) | Officially obtained on the webui with a serial |
|
||||
| | | | | number, for project raputa |
|
||||
| Game Version | Item ID | Item Kind | Item Description | Present Description |
|
||||
|--------------|---------|----------------------|--------------------------------------------|----------------------------------------------------------------------------|
|
||||
| BUDDiES (21) | 409505 | Icon (3) | 旅行スタンプ(月面基地) (Travel Stamp - Moon Base) | Officially obtained on the webui with a serial number, for project raputa |
|
||||
| PRiSM (23) | 3 | KaleidxScopeKey (15) | 紫の鍵 (Purple Key) | Officially obtained on the webui with a serial number, for KaleidxScope |
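
As a concrete illustration of the NULL semantics described above, here is a minimal sketch of inserting the PRiSM Purple Key present directly into `mai2_item_present`. This is a hedged example only: the column names (`version`, `user`, `itemKind`, `itemId`, `startDate`, `endDate`) and the connection string are assumptions for illustration and should be checked against the actual schema and your database config before use.

```python
# Hypothetical sketch only: column names and DSN are assumed, verify against the real
# mai2_item_present schema before running.
from sqlalchemy import create_engine, text

engine = create_engine("mysql+pymysql://user:pass@localhost/aime")  # placeholder DSN

stmt = text(
    "INSERT INTO mai2_item_present (version, user, itemKind, itemId, startDate, endDate) "
    "VALUES (:version, :user, :itemKind, :itemId, :startDate, :endDate)"
)

with engine.begin() as conn:
    # NULL version/user/startDate/endDate = any version, any user, always open, never expires
    conn.execute(stmt, {
        "version": None,        # any version
        "user": None,           # any user
        "itemKind": 15,         # KaleidxScopeKey
        "itemId": 3,            # 紫の鍵 (Purple Key)
        "startDate": None,      # always open
        "endDate": None,        # never expires
    })
```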
|
||||
|
||||
### Versions
|
||||
|
||||
|
||||
@@ -45,6 +45,7 @@ allnet:
|
||||
loglevel: "info"
|
||||
allow_online_updates: False
|
||||
update_cfg_folder: ""
|
||||
save_billing: True
|
||||
|
||||
billing:
|
||||
standalone: True
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
from enum import Enum, IntEnum
|
||||
|
||||
from typing import Optional
|
||||
from core.utils import floor_to_nearest_005
|
||||
|
||||
class ChuniConstants:
|
||||
GAME_CODE = "SDBT"
|
||||
@@ -78,10 +79,34 @@ class ChuniConstants:
|
||||
( 0, "D"),
|
||||
]
|
||||
|
||||
VERSION_LUT = {
|
||||
"100": VER_CHUNITHM,
|
||||
"105": VER_CHUNITHM_PLUS,
|
||||
"110": VER_CHUNITHM_AIR,
|
||||
"115": VER_CHUNITHM_AIR_PLUS,
|
||||
"120": VER_CHUNITHM_STAR,
|
||||
"125": VER_CHUNITHM_STAR_PLUS,
|
||||
"130": VER_CHUNITHM_AMAZON,
|
||||
"135": VER_CHUNITHM_AMAZON_PLUS,
|
||||
"140": VER_CHUNITHM_CRYSTAL,
|
||||
"145": VER_CHUNITHM_CRYSTAL_PLUS,
|
||||
"150": VER_CHUNITHM_PARADISE,
|
||||
"200": VER_CHUNITHM_NEW,
|
||||
"205": VER_CHUNITHM_NEW_PLUS,
|
||||
"210": VER_CHUNITHM_SUN,
|
||||
"215": VER_CHUNITHM_SUN_PLUS,
|
||||
"220": VER_CHUNITHM_LUMINOUS,
|
||||
"225": VER_CHUNITHM_LUMINOUS_PLUS,
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def game_ver_to_string(cls, ver: int):
|
||||
return cls.VERSION_NAMES[ver]
|
||||
|
||||
@classmethod
|
||||
def int_ver_to_game_ver(cls, ver: int) -> Optional[int]:
|
||||
""" Takes an int ver (ex 100 for 1.00) and returns an internal game version """
|
||||
return cls.VERSION_LUT.get(str(floor_to_nearest_005(ver)), None)
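# Illustrative usage (added for clarity, not part of the original commit), assuming the
# LUT above: the raw data version is floored to the nearest 5 before lookup, so e.g.
#     ChuniConstants.int_ver_to_game_ver(104) -> VER_CHUNITHM               (key "100")
#     ChuniConstants.int_ver_to_game_ver(226) -> VER_CHUNITHM_LUMINOUS_PLUS (key "225")
#     ChuniConstants.int_ver_to_game_ver(300) -> None (not in the LUT)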
|
||||
|
||||
class MapAreaConditionType(IntEnum):
|
||||
"""Condition types for the GetGameMapAreaConditionApi endpoint. Incomplete.
|
||||
|
||||
@@ -457,7 +457,7 @@ class ChuniFrontend(FE_Base):
|
||||
user_characters = []
|
||||
if not force_unlocked:
|
||||
user_characters = await self.data.item.get_characters(profile.user)
|
||||
user_characters = [chara["characterId"] for chara in user_characters] + [profile.characterId, profile.charaIllustId]
|
||||
user_characters = [chara["characterId"] for chara in user_characters] + [profile.characterId]
|
||||
|
||||
for row in rows:
|
||||
if force_unlocked or row["defaultHave"] or row["characterId"] in user_characters:
|
||||
|
||||
@@ -3,6 +3,7 @@ from os import walk, path
|
||||
import xml.etree.ElementTree as ET
|
||||
from read import BaseReader
|
||||
from PIL import Image
|
||||
import configparser
|
||||
|
||||
from core.config import CoreConfig
|
||||
from titles.chuni.database import ChuniData
|
||||
@@ -50,18 +51,19 @@ class ChuniReader(BaseReader):
|
||||
|
||||
for dir in data_dirs:
|
||||
self.logger.info(f"Read from {dir}")
|
||||
await self.read_events(f"{dir}/event")
|
||||
await self.read_music(f"{dir}/music", we_diff)
|
||||
await self.read_charges(f"{dir}/chargeItem")
|
||||
await self.read_avatar(f"{dir}/avatarAccessory")
|
||||
await self.read_login_bonus(f"{dir}/")
|
||||
await self.read_nameplate(f"{dir}/namePlate")
|
||||
await self.read_trophy(f"{dir}/trophy")
|
||||
await self.read_character(f"{dir}/chara", dds_images)
|
||||
await self.read_map_icon(f"{dir}/mapIcon")
|
||||
await self.read_system_voice(f"{dir}/systemVoice")
|
||||
this_opt_id = await self.read_opt_info(dir) # this also treats A000 as an opt, which is intended
|
||||
await self.read_events(f"{dir}/event", this_opt_id)
|
||||
await self.read_music(f"{dir}/music", we_diff, this_opt_id)
|
||||
await self.read_charges(f"{dir}/chargeItem", this_opt_id)
|
||||
await self.read_avatar(f"{dir}/avatarAccessory", this_opt_id)
|
||||
await self.read_login_bonus(f"{dir}/", this_opt_id)
|
||||
await self.read_nameplate(f"{dir}/namePlate", this_opt_id)
|
||||
await self.read_trophy(f"{dir}/trophy", this_opt_id)
|
||||
await self.read_character(f"{dir}/chara", dds_images, this_opt_id)
|
||||
await self.read_map_icon(f"{dir}/mapIcon", this_opt_id)
|
||||
await self.read_system_voice(f"{dir}/systemVoice", this_opt_id)
|
||||
|
||||
async def read_login_bonus(self, root_dir: str) -> None:
|
||||
async def read_login_bonus(self, root_dir: str, opt_id: Optional[int] = None) -> None:
|
||||
for root, dirs, files in walk(f"{root_dir}loginBonusPreset"):
|
||||
for dir in dirs:
|
||||
if path.exists(f"{root}/{dir}/LoginBonusPreset.xml"):
|
||||
@@ -76,7 +78,7 @@ class ChuniReader(BaseReader):
|
||||
is_enabled = True if (disableFlag is None or disableFlag.text == "false") else False
|
||||
|
||||
result = await self.data.static.put_login_bonus_preset(
|
||||
self.version, id, name, is_enabled
|
||||
self.version, id, name, is_enabled, opt_id
|
||||
)
|
||||
|
||||
if result is not None:
|
||||
@@ -123,6 +125,7 @@ class ChuniReader(BaseReader):
|
||||
item_num,
|
||||
need_login_day_count,
|
||||
login_bonus_category_type,
|
||||
opt_id
|
||||
)
|
||||
|
||||
if result is not None:
|
||||
@@ -132,7 +135,7 @@ class ChuniReader(BaseReader):
|
||||
f"Failed to insert login bonus {bonus_id}"
|
||||
)
|
||||
|
||||
async def read_events(self, evt_dir: str) -> None:
|
||||
async def read_events(self, evt_dir: str, opt_id: Optional[int] = None) -> None:
|
||||
for root, dirs, files in walk(evt_dir):
|
||||
for dir in dirs:
|
||||
if path.exists(f"{root}/{dir}/Event.xml"):
|
||||
@@ -147,14 +150,14 @@ class ChuniReader(BaseReader):
|
||||
event_type = substances.find("type").text
|
||||
|
||||
result = await self.data.static.put_event(
|
||||
self.version, id, event_type, name
|
||||
self.version, id, event_type, name, opt_id
|
||||
)
|
||||
if result is not None:
|
||||
self.logger.info(f"Inserted event {id}")
|
||||
else:
|
||||
self.logger.warning(f"Failed to insert event {id}")
|
||||
|
||||
async def read_music(self, music_dir: str, we_diff: str = "4") -> None:
|
||||
async def read_music(self, music_dir: str, we_diff: str = "4", opt_id: Optional[int] = None) -> None:
|
||||
max_title_len = MusicTable.columns["title"].type.length
|
||||
max_artist_len = MusicTable.columns["artist"].type.length
|
||||
|
||||
@@ -219,6 +222,7 @@ class ChuniReader(BaseReader):
|
||||
genre,
|
||||
jacket_path,
|
||||
we_chara,
|
||||
opt_id
|
||||
)
|
||||
|
||||
if result is not None:
|
||||
@@ -230,7 +234,7 @@ class ChuniReader(BaseReader):
|
||||
f"Failed to insert music {song_id} chart {chart_id}"
|
||||
)
|
||||
|
||||
async def read_charges(self, charge_dir: str) -> None:
|
||||
async def read_charges(self, charge_dir: str, opt_id: Optional[int] = None) -> None:
|
||||
for root, dirs, files in walk(charge_dir):
|
||||
for dir in dirs:
|
||||
if path.exists(f"{root}/{dir}/ChargeItem.xml"):
|
||||
@@ -252,6 +256,7 @@ class ChuniReader(BaseReader):
|
||||
expirationDays,
|
||||
consumeType,
|
||||
sellingAppeal,
|
||||
opt_id
|
||||
)
|
||||
|
||||
if result is not None:
|
||||
@@ -259,7 +264,7 @@ class ChuniReader(BaseReader):
|
||||
else:
|
||||
self.logger.warning(f"Failed to insert charge {id}")
|
||||
|
||||
async def read_avatar(self, avatar_dir: str) -> None:
|
||||
async def read_avatar(self, avatar_dir: str, opt_id: Optional[int] = None) -> None:
|
||||
for root, dirs, files in walk(avatar_dir):
|
||||
for dir in dirs:
|
||||
if path.exists(f"{root}/{dir}/AvatarAccessory.xml"):
|
||||
@@ -284,7 +289,7 @@ class ChuniReader(BaseReader):
|
||||
self.copy_image(texturePath, f"{root}/{dir}", "titles/chuni/img/avatar/")
|
||||
|
||||
result = await self.data.static.put_avatar(
|
||||
self.version, id, name, category, iconPath, texturePath, is_enabled, defaultHave, sortName
|
||||
self.version, id, name, category, iconPath, texturePath, is_enabled, defaultHave, sortName, opt_id
|
||||
)
|
||||
|
||||
if result is not None:
|
||||
@@ -292,7 +297,7 @@ class ChuniReader(BaseReader):
|
||||
else:
|
||||
self.logger.warning(f"Failed to insert avatarAccessory {id}")
|
||||
|
||||
async def read_nameplate(self, nameplate_dir: str) -> None:
|
||||
async def read_nameplate(self, nameplate_dir: str, opt_id: Optional[int] = None) -> None:
|
||||
for root, dirs, files in walk(nameplate_dir):
|
||||
for dir in dirs:
|
||||
if path.exists(f"{root}/{dir}/NamePlate.xml"):
|
||||
@@ -303,7 +308,7 @@ class ChuniReader(BaseReader):
|
||||
for name in xml_root.findall("name"):
|
||||
id = name.find("id").text
|
||||
name = name.find("str").text
|
||||
sortName = xml_root.find("sortName").text
|
||||
sortName = name if xml_root.find("sortName") is None else xml_root.find("sortName").text
|
||||
defaultHave = xml_root.find("defaultHave").text == 'true'
|
||||
disableFlag = xml_root.find("disableFlag") # may not exist in older data
|
||||
is_enabled = True if (disableFlag is None or disableFlag.text == "false") else False
|
||||
@@ -313,7 +318,7 @@ class ChuniReader(BaseReader):
|
||||
self.copy_image(texturePath, f"{root}/{dir}", "titles/chuni/img/nameplate/")
|
||||
|
||||
result = await self.data.static.put_nameplate(
|
||||
self.version, id, name, texturePath, is_enabled, defaultHave, sortName
|
||||
self.version, id, name, texturePath, is_enabled, defaultHave, sortName, opt_id
|
||||
)
|
||||
|
||||
if result is not None:
|
||||
@@ -321,7 +326,7 @@ class ChuniReader(BaseReader):
|
||||
else:
|
||||
self.logger.warning(f"Failed to insert nameplate {id}")
|
||||
|
||||
async def read_trophy(self, trophy_dir: str) -> None:
|
||||
async def read_trophy(self, trophy_dir: str, opt_id: Optional[int] = None) -> None:
|
||||
for root, dirs, files in walk(trophy_dir):
|
||||
for dir in dirs:
|
||||
if path.exists(f"{root}/{dir}/Trophy.xml"):
|
||||
@@ -338,7 +343,7 @@ class ChuniReader(BaseReader):
|
||||
defaultHave = xml_root.find("defaultHave").text == 'true'
|
||||
|
||||
result = await self.data.static.put_trophy(
|
||||
self.version, id, name, rareType, is_enabled, defaultHave
|
||||
self.version, id, name, rareType, is_enabled, defaultHave, opt_id
|
||||
)
|
||||
|
||||
if result is not None:
|
||||
@@ -346,18 +351,21 @@ class ChuniReader(BaseReader):
|
||||
else:
|
||||
self.logger.warning(f"Failed to insert trophy {id}")
|
||||
|
||||
async def read_character(self, chara_dir: str, dds_images: dict) -> None:
|
||||
async def read_character(self, chara_dir: str, dds_images: dict, opt_id: Optional[int] = None) -> None:
|
||||
for root, dirs, files in walk(chara_dir):
|
||||
for dir in dirs:
|
||||
if path.exists(f"{root}/{dir}/Chara.xml"):
|
||||
with open(f"{root}/{dir}/Chara.xml", "r", encoding='utf-8') as fp:
|
||||
strdata = fp.read()
|
||||
# ET may choke if there is a & symbol (which is present in some character xml)
|
||||
if "&" in strdata:
|
||||
strdata = strdata.replace("&", "&")
|
||||
|
||||
xml_root = ET.fromstring(strdata)
|
||||
for name in xml_root.findall("name"):
|
||||
id = name.find("id").text
|
||||
name = name.find("str").text
|
||||
sortName = xml_root.find("sortName").text
|
||||
sortName = name if xml_root.find("sortName") is None else xml_root.find("sortName").text
|
||||
for work in xml_root.findall("works"):
|
||||
worksName = work.find("str").text
|
||||
rareType = xml_root.find("rareType").text
|
||||
@@ -382,7 +390,7 @@ class ChuniReader(BaseReader):
|
||||
self.logger.warning(f"Unable to location character {id} images")
|
||||
|
||||
result = await self.data.static.put_character(
|
||||
self.version, id, name, sortName, worksName, rareType, imagePath1, imagePath2, imagePath3, is_enabled, defaultHave
|
||||
self.version, id, name, sortName, worksName, rareType, imagePath1, imagePath2, imagePath3, is_enabled, defaultHave, opt_id
|
||||
)
|
||||
|
||||
if result is not None:
|
||||
@@ -390,7 +398,7 @@ class ChuniReader(BaseReader):
|
||||
else:
|
||||
self.logger.warning(f"Failed to insert character {id}")
|
||||
|
||||
async def read_map_icon(self, mapicon_dir: str) -> None:
|
||||
async def read_map_icon(self, mapicon_dir: str, opt_id: Optional[int] = None) -> None:
|
||||
for root, dirs, files in walk(mapicon_dir):
|
||||
for dir in dirs:
|
||||
if path.exists(f"{root}/{dir}/MapIcon.xml"):
|
||||
@@ -401,7 +409,7 @@ class ChuniReader(BaseReader):
|
||||
for name in xml_root.findall("name"):
|
||||
id = name.find("id").text
|
||||
name = name.find("str").text
|
||||
sortName = xml_root.find("sortName").text
|
||||
sortName = name if xml_root.find("sortName") is None else xml_root.find("sortName").text
|
||||
for image in xml_root.findall("image"):
|
||||
iconPath = image.find("path").text
|
||||
self.copy_image(iconPath, f"{root}/{dir}", "titles/chuni/img/mapIcon/")
|
||||
@@ -410,7 +418,7 @@ class ChuniReader(BaseReader):
|
||||
is_enabled = True if (disableFlag is None or disableFlag.text == "false") else False
|
||||
|
||||
result = await self.data.static.put_map_icon(
|
||||
self.version, id, name, sortName, iconPath, is_enabled, defaultHave
|
||||
self.version, id, name, sortName, iconPath, is_enabled, defaultHave, opt_id
|
||||
)
|
||||
|
||||
if result is not None:
|
||||
@@ -418,7 +426,7 @@ class ChuniReader(BaseReader):
|
||||
else:
|
||||
self.logger.warning(f"Failed to map icon {id}")
|
||||
|
||||
async def read_system_voice(self, voice_dir: str) -> None:
|
||||
async def read_system_voice(self, voice_dir: str, opt_id: Optional[int] = None) -> None:
|
||||
for root, dirs, files in walk(voice_dir):
|
||||
for dir in dirs:
|
||||
if path.exists(f"{root}/{dir}/SystemVoice.xml"):
|
||||
@@ -429,7 +437,7 @@ class ChuniReader(BaseReader):
|
||||
for name in xml_root.findall("name"):
|
||||
id = name.find("id").text
|
||||
name = name.find("str").text
|
||||
sortName = xml_root.find("sortName").text
|
||||
sortName = name if xml_root.find("sortName") is None else xml_root.find("sortName").text
|
||||
for image in xml_root.findall("image"):
|
||||
imagePath = image.find("path").text
|
||||
self.copy_image(imagePath, f"{root}/{dir}", "titles/chuni/img/systemVoice/")
|
||||
@@ -438,7 +446,7 @@ class ChuniReader(BaseReader):
|
||||
is_enabled = True if (disableFlag is None or disableFlag.text == "false") else False
|
||||
|
||||
result = await self.data.static.put_system_voice(
|
||||
self.version, id, name, sortName, imagePath, is_enabled, defaultHave
|
||||
self.version, id, name, sortName, imagePath, is_enabled, defaultHave, opt_id
|
||||
)
|
||||
|
||||
if result is not None:
|
||||
@@ -446,6 +454,51 @@ class ChuniReader(BaseReader):
|
||||
else:
|
||||
self.logger.warning(f"Failed to system voice {id}")
|
||||
|
||||
async def read_opt_info(self, directory: str) -> Optional[int]:
|
||||
if not path.exists(f"{directory}/data.conf"):
|
||||
self.logger.warning(f"{directory} does not contain data.conf, opt info will not be read")
|
||||
return None
|
||||
|
||||
data_config = configparser.ConfigParser()
|
||||
if not data_config.read(f"{directory}/data.conf", 'utf-8'):
|
||||
self.logger.warning(f"{directory}/data.conf failed to read or parse, opt info will not be read")
|
||||
return None
|
||||
|
||||
if 'Version' not in data_config:
|
||||
self.logger.warning(f"{directory}/data.conf contains no Version section, opt info will not be read")
|
||||
return None
|
||||
|
||||
if 'Name' not in data_config['Version']: # Probably not worth checking that the other sections exist
|
||||
self.logger.warning(f"{directory}/data.conf contains no Name item in the Version section, opt info will not be read")
|
||||
return None
|
||||
|
||||
if 'VerMajor' not in data_config['Version']: # Probably not worth checking that the other sections exist
|
||||
self.logger.warning(f"{directory}/data.conf contains no VerMajor item in the Version section, opt info will not be read")
|
||||
return None
|
||||
|
||||
if 'VerMinor' not in data_config['Version']: # Probably not worth checking that the other sections exist
|
||||
self.logger.warning(f"{directory}/data.conf contains no VerMinor item in the Version section, opt info will not be read")
|
||||
return None
|
||||
|
||||
if 'VerRelease' not in data_config['Version']: # Probably not worth checking that the other sections exist
|
||||
self.logger.warning(f"{directory}/data.conf contains no VerRelease item in the Version section, opt info will not be read")
|
||||
return None
|
||||
|
||||
opt_seq = data_config['Version']['VerRelease']
|
||||
opt_folder = path.basename(path.normpath(directory))
|
||||
opt_id = await self.data.static.get_opt_by_version_folder(self.version, opt_folder)
|
||||
|
||||
if not opt_id:
|
||||
opt_id = await self.data.static.put_opt(self.version, opt_folder, opt_seq)
|
||||
if not opt_id:
|
||||
self.logger.error(f"Failed to put opt folder info for {opt_folder}")
|
||||
return None
|
||||
else:
|
||||
opt_id = opt_id['id']
|
||||
|
||||
self.logger.info(f"Opt folder {opt_folder} (Database ID {opt_id}) contains {data_config['Version']['Name']} v{data_config['Version']['VerMajor']}.{data_config['Version']['VerMinor']}.{opt_seq}")
|
||||
return opt_id
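A minimal, self-contained sketch of the data.conf layout this parser expects; the folder name and numbers are invented for illustration, and configparser.read_string stands in for the cfg.read(path, 'utf-8') call above:

import configparser

# Illustrative contents of <opt>/A001/data.conf (values are made up)
sample = (
    "[Version]\n"
    "Name = CHUNITHM\n"
    "VerMajor = 2\n"
    "VerMinor = 20\n"
    "VerRelease = 5\n"
)

cfg = configparser.ConfigParser()
cfg.read_string(sample)
assert 'Version' in cfg and 'VerRelease' in cfg['Version']
opt_seq = cfg['Version']['VerRelease']  # stored as the opt row's sequence ("5")
print(cfg['Version']['Name'], opt_seq)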
|
||||
|
||||
def copy_image(self, filename: str, src_dir: str, dst_dir: str) -> None:
|
||||
# Convert the image to png so we can easily display it in the frontend
|
||||
file_src = path.join(src_dir, filename)
|
||||
|
||||
@@ -7,16 +7,29 @@ from sqlalchemy import (
|
||||
PrimaryKeyConstraint,
|
||||
and_,
|
||||
)
|
||||
from sqlalchemy.types import Integer, String, TIMESTAMP, Boolean, JSON, Float
|
||||
from sqlalchemy.engine.base import Connection
|
||||
from sqlalchemy.types import Integer, String, TIMESTAMP, Boolean, BIGINT, Float, INTEGER, VARCHAR, BOOLEAN
|
||||
from sqlalchemy.engine import Row
|
||||
from sqlalchemy.schema import ForeignKey
|
||||
from sqlalchemy.sql import func, select
|
||||
from sqlalchemy.dialects.mysql import insert
|
||||
from datetime import datetime
|
||||
from sqlalchemy.sql.functions import coalesce
|
||||
|
||||
from core.data.schema import BaseData, metadata
|
||||
|
||||
opts = Table(
|
||||
"chuni_static_opt",
|
||||
metadata,
|
||||
Column("id", BIGINT, primary_key=True, nullable=False),
|
||||
Column("version", INTEGER, nullable=False),
|
||||
Column("name", VARCHAR(4), nullable=False), # Axxx
|
||||
Column("sequence", INTEGER, nullable=False), # VerRelease in data.conf
|
||||
Column("whenRead", TIMESTAMP, nullable=False, server_default=func.now()),
|
||||
Column("isEnable", BOOLEAN, nullable=False, server_default="1"),
|
||||
UniqueConstraint("version", "name", name="chuni_static_opt_uk"),
|
||||
mysql_charset="utf8mb4",
|
||||
)
|
||||
|
||||
events = Table(
|
||||
"chuni_static_events",
|
||||
metadata,
|
||||
@@ -27,6 +40,7 @@ events = Table(
|
||||
Column("name", String(255)),
|
||||
Column("startDate", TIMESTAMP, server_default=func.now()),
|
||||
Column("enabled", Boolean, server_default="1"),
|
||||
Column("opt", ForeignKey("chuni_static_opt.id", ondelete="SET NULL", onupdate="cascade")),
|
||||
UniqueConstraint("version", "eventId", name="chuni_static_events_uk"),
|
||||
mysql_charset="utf8mb4",
|
||||
)
|
||||
@@ -44,6 +58,7 @@ music = Table(
|
||||
Column("genre", String(255)),
|
||||
Column("jacketPath", String(255)),
|
||||
Column("worldsEndTag", String(7)),
|
||||
Column("opt", ForeignKey("chuni_static_opt.id", ondelete="SET NULL", onupdate="cascade")),
|
||||
UniqueConstraint("version", "songId", "chartId", name="chuni_static_music_uk"),
|
||||
mysql_charset="utf8mb4",
|
||||
)
|
||||
@@ -59,6 +74,7 @@ charge = Table(
|
||||
Column("consumeType", Integer),
|
||||
Column("sellingAppeal", Boolean),
|
||||
Column("enabled", Boolean, server_default="1"),
|
||||
Column("opt", ForeignKey("chuni_static_opt.id", ondelete="SET NULL", onupdate="cascade")),
|
||||
UniqueConstraint("version", "chargeId", name="chuni_static_charge_uk"),
|
||||
mysql_charset="utf8mb4",
|
||||
)
|
||||
@@ -76,6 +92,7 @@ avatar = Table(
|
||||
Column("isEnabled", Boolean, server_default="1"),
|
||||
Column("defaultHave", Boolean, server_default="0"),
|
||||
Column("sortName", String(255)),
|
||||
Column("opt", ForeignKey("chuni_static_opt.id", ondelete="SET NULL", onupdate="cascade")),
|
||||
UniqueConstraint("version", "avatarAccessoryId", name="chuni_static_avatar_uk"),
|
||||
mysql_charset="utf8mb4",
|
||||
)
|
||||
@@ -91,6 +108,7 @@ nameplate = Table(
|
||||
Column("isEnabled", Boolean, server_default="1"),
|
||||
Column("defaultHave", Boolean, server_default="0"),
|
||||
Column("sortName", String(255)),
|
||||
Column("opt", ForeignKey("chuni_static_opt.id", ondelete="SET NULL", onupdate="cascade")),
|
||||
UniqueConstraint("version", "nameplateId", name="chuni_static_nameplate_uk"),
|
||||
mysql_charset="utf8mb4",
|
||||
)
|
||||
@@ -110,6 +128,7 @@ character = Table(
|
||||
Column("imagePath3", String(255)),
|
||||
Column("isEnabled", Boolean, server_default="1"),
|
||||
Column("defaultHave", Boolean, server_default="0"),
|
||||
Column("opt", ForeignKey("chuni_static_opt.id", ondelete="SET NULL", onupdate="cascade")),
|
||||
UniqueConstraint("version", "characterId", name="chuni_static_character_uk"),
|
||||
mysql_charset="utf8mb4",
|
||||
)
|
||||
@@ -124,6 +143,7 @@ trophy = Table(
|
||||
Column("rareType", Integer),
|
||||
Column("isEnabled", Boolean, server_default="1"),
|
||||
Column("defaultHave", Boolean, server_default="0"),
|
||||
Column("opt", ForeignKey("chuni_static_opt.id", ondelete="SET NULL", onupdate="cascade")),
|
||||
UniqueConstraint("version", "trophyId", name="chuni_static_trophy_uk"),
|
||||
mysql_charset="utf8mb4",
|
||||
)
|
||||
@@ -139,6 +159,7 @@ map_icon = Table(
|
||||
Column("iconPath", String(255)),
|
||||
Column("isEnabled", Boolean, server_default="1"),
|
||||
Column("defaultHave", Boolean, server_default="0"),
|
||||
Column("opt", ForeignKey("chuni_static_opt.id", ondelete="SET NULL", onupdate="cascade")),
|
||||
UniqueConstraint("version", "mapIconId", name="chuni_static_mapicon_uk"),
|
||||
mysql_charset="utf8mb4",
|
||||
)
|
||||
@@ -154,6 +175,7 @@ system_voice = Table(
|
||||
Column("imagePath", String(255)),
|
||||
Column("isEnabled", Boolean, server_default="1"),
|
||||
Column("defaultHave", Boolean, server_default="0"),
|
||||
Column("opt", ForeignKey("chuni_static_opt.id", ondelete="SET NULL", onupdate="cascade")),
|
||||
UniqueConstraint("version", "voiceId", name="chuni_static_systemvoice_uk"),
|
||||
mysql_charset="utf8mb4",
|
||||
)
|
||||
@@ -175,6 +197,7 @@ gachas = Table(
|
||||
Column("endDate", TIMESTAMP, server_default="2038-01-01 00:00:00.0"),
|
||||
Column("noticeStartDate", TIMESTAMP, server_default="2018-01-01 00:00:00.0"),
|
||||
Column("noticeEndDate", TIMESTAMP, server_default="2038-01-01 00:00:00.0"),
|
||||
Column("opt", ForeignKey("cm_static_opts.id", ondelete="SET NULL", onupdate="cascade")),
|
||||
UniqueConstraint("version", "gachaId", "gachaName", name="chuni_static_gachas_uk"),
|
||||
mysql_charset="utf8mb4",
|
||||
)
|
||||
@@ -195,6 +218,7 @@ cards = Table(
|
||||
Column("combo", Integer, nullable=False),
|
||||
Column("chain", Integer, nullable=False),
|
||||
Column("skillName", String(255), nullable=False),
|
||||
Column("opt", ForeignKey("cm_static_opts.id", ondelete="SET NULL", onupdate="cascade")),
|
||||
UniqueConstraint("version", "cardId", name="chuni_static_cards_uk"),
|
||||
mysql_charset="utf8mb4",
|
||||
)
|
||||
@@ -219,6 +243,7 @@ login_bonus_preset = Table(
|
||||
Column("version", Integer, nullable=False),
|
||||
Column("presetName", String(255), nullable=False),
|
||||
Column("isEnabled", Boolean, server_default="1"),
|
||||
Column("opt", ForeignKey("chuni_static_opt.id", ondelete="SET NULL", onupdate="cascade")),
|
||||
PrimaryKeyConstraint(
|
||||
"presetId", "version", name="chuni_static_login_bonus_preset_pk"
|
||||
),
|
||||
@@ -238,6 +263,7 @@ login_bonus = Table(
|
||||
Column("itemNum", Integer, nullable=False),
|
||||
Column("needLoginDayCount", Integer, nullable=False),
|
||||
Column("loginBonusCategoryType", Integer, nullable=False),
|
||||
Column("opt", BIGINT),
|
||||
UniqueConstraint(
|
||||
"version", "presetId", "loginBonusId", name="chuni_static_login_bonus_uk"
|
||||
),
|
||||
@@ -251,10 +277,18 @@ login_bonus = Table(
|
||||
ondelete="CASCADE",
|
||||
name="chuni_static_login_bonus_ibfk_1",
|
||||
),
|
||||
ForeignKeyConstraint(
|
||||
["opt"],
|
||||
[
|
||||
"chuni_static_opt.id",
|
||||
],
|
||||
onupdate="SET NULL",
|
||||
ondelete="CASCADE",
|
||||
name="chuni_static_login_bonus_ibfk_2",
|
||||
),
|
||||
mysql_charset="utf8mb4",
|
||||
)
|
||||
|
||||
|
||||
class ChuniStaticData(BaseData):
|
||||
async def put_login_bonus(
|
||||
self,
|
||||
@@ -267,6 +301,7 @@ class ChuniStaticData(BaseData):
|
||||
item_num: int,
|
||||
need_login_day_count: int,
|
||||
login_bonus_category_type: int,
|
||||
opt_id: int = None
|
||||
) -> Optional[int]:
|
||||
sql = insert(login_bonus).values(
|
||||
version=version,
|
||||
@@ -278,6 +313,7 @@ class ChuniStaticData(BaseData):
|
||||
itemNum=item_num,
|
||||
needLoginDayCount=need_login_day_count,
|
||||
loginBonusCategoryType=login_bonus_category_type,
|
||||
opt=coalesce(login_bonus.c.opt, opt_id)
|
||||
)
|
||||
|
||||
conflict = sql.on_duplicate_key_update(
|
||||
@@ -286,6 +322,7 @@ class ChuniStaticData(BaseData):
|
||||
itemNum=item_num,
|
||||
needLoginDayCount=need_login_day_count,
|
||||
loginBonusCategoryType=login_bonus_category_type,
|
||||
opt=coalesce(login_bonus.c.opt, opt_id)
|
||||
)
|
||||
|
||||
result = await self.execute(conflict)
|
||||
@@ -327,17 +364,19 @@ class ChuniStaticData(BaseData):
|
||||
return result.fetchone()
|
||||
|
||||
async def put_login_bonus_preset(
|
||||
self, version: int, preset_id: int, preset_name: str, is_enabled: bool
|
||||
self, version: int, preset_id: int, preset_name: str, isEnabled: bool, opt_id: int = None
|
||||
) -> Optional[int]:
|
||||
sql = insert(login_bonus_preset).values(
|
||||
presetId=preset_id,
|
||||
version=version,
|
||||
presetName=preset_name,
|
||||
isEnabled=is_enabled,
|
||||
isEnabled=isEnabled,
|
||||
opt=coalesce(login_bonus_preset.c.opt, opt_id)
|
||||
)
|
||||
|
||||
conflict = sql.on_duplicate_key_update(
|
||||
presetName=preset_name, isEnabled=is_enabled
|
||||
|
||||
# Chuni has a habit of including duplicates in its opt files, so only update opt if it's null
|
||||
conflict = sql.on_duplicate_key_update(
|
||||
presetName=preset_name, isEnabled=isEnabled, opt=coalesce(login_bonus_preset.c.opt, opt_id)
|
||||
)
|
||||
|
||||
result = await self.execute(conflict)
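The coalesce(column, opt_id) pattern used here and in the other upserts below keeps whichever opt first claimed the row. A standalone sketch of the SQL it produces, using a throwaway table rather than the real schema:

from sqlalchemy import Column, Integer, MetaData, Table
from sqlalchemy.dialects import mysql
from sqlalchemy.dialects.mysql import insert
from sqlalchemy.sql.functions import coalesce

md = MetaData()
demo = Table("demo", md,
             Column("id", Integer, primary_key=True),
             Column("opt", Integer))

stmt = insert(demo).values(id=1, opt=coalesce(demo.c.opt, 42))
stmt = stmt.on_duplicate_key_update(opt=coalesce(demo.c.opt, 42))

# First insert: demo.opt is NULL, so COALESCE(demo.opt, 42) resolves to 42.
# Duplicate key: the existing non-NULL opt wins and 42 is ignored, which is
# why re-reading a later opt folder never steals ownership of a row.
print(stmt.compile(dialect=mysql.dialect(), compile_kwargs={"literal_binds": True}))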
|
||||
@@ -346,12 +385,12 @@ class ChuniStaticData(BaseData):
|
||||
return result.lastrowid
|
||||
|
||||
async def get_login_bonus_presets(
|
||||
self, version: int, is_enabled: bool = True
|
||||
self, version: int, isEnabled: bool = True
|
||||
) -> Optional[List[Row]]:
|
||||
sql = login_bonus_preset.select(
|
||||
and_(
|
||||
login_bonus_preset.c.version == version,
|
||||
login_bonus_preset.c.isEnabled == is_enabled,
|
||||
login_bonus_preset.c.isEnabled == isEnabled,
|
||||
)
|
||||
)
|
||||
|
||||
@@ -361,13 +400,13 @@ class ChuniStaticData(BaseData):
|
||||
return result.fetchall()
|
||||
|
||||
async def put_event(
|
||||
self, version: int, event_id: int, type: int, name: str
|
||||
self, version: int, event_id: int, type: int, name: str, opt_id: int = None
|
||||
) -> Optional[int]:
|
||||
sql = insert(events).values(
|
||||
version=version, eventId=event_id, type=type, name=name
|
||||
version=version, eventId=event_id, type=type, name=name, opt=coalesce(events.c.opt, opt_id)
|
||||
)
|
||||
|
||||
conflict = sql.on_duplicate_key_update(name=name)
|
||||
conflict = sql.on_duplicate_key_update(name=name, opt=coalesce(events.c.opt, opt_id))
|
||||
|
||||
result = await self.execute(conflict)
|
||||
if result is None:
|
||||
@@ -435,6 +474,7 @@ class ChuniStaticData(BaseData):
|
||||
genre: str,
|
||||
jacketPath: str,
|
||||
we_tag: str,
|
||||
opt_id: int = None
|
||||
) -> Optional[int]:
|
||||
sql = insert(music).values(
|
||||
version=version,
|
||||
@@ -446,6 +486,7 @@ class ChuniStaticData(BaseData):
|
||||
genre=genre,
|
||||
jacketPath=jacketPath,
|
||||
worldsEndTag=we_tag,
|
||||
opt=coalesce(music.c.opt, opt_id)
|
||||
)
|
||||
|
||||
conflict = sql.on_duplicate_key_update(
|
||||
@@ -455,6 +496,7 @@ class ChuniStaticData(BaseData):
|
||||
genre=genre,
|
||||
jacketPath=jacketPath,
|
||||
worldsEndTag=we_tag,
|
||||
opt=coalesce(music.c.opt, opt_id)
|
||||
)
|
||||
|
||||
result = await self.execute(conflict)
|
||||
@@ -470,6 +512,7 @@ class ChuniStaticData(BaseData):
|
||||
expiration_days: int,
|
||||
consume_type: int,
|
||||
selling_appeal: bool,
|
||||
opt_id: int = None
|
||||
) -> Optional[int]:
|
||||
sql = insert(charge).values(
|
||||
version=version,
|
||||
@@ -478,6 +521,7 @@ class ChuniStaticData(BaseData):
|
||||
expirationDays=expiration_days,
|
||||
consumeType=consume_type,
|
||||
sellingAppeal=selling_appeal,
|
||||
opt=coalesce(charge.c.opt, opt_id)
|
||||
)
|
||||
|
||||
conflict = sql.on_duplicate_key_update(
|
||||
@@ -485,6 +529,7 @@ class ChuniStaticData(BaseData):
|
||||
expirationDays=expiration_days,
|
||||
consumeType=consume_type,
|
||||
sellingAppeal=selling_appeal,
|
||||
opt=coalesce(charge.c.opt, opt_id)
|
||||
)
|
||||
|
||||
result = await self.execute(conflict)
|
||||
@@ -542,7 +587,6 @@ class ChuniStaticData(BaseData):
|
||||
return None
|
||||
return result.fetchone()
|
||||
|
||||
|
||||
async def put_avatar(
|
||||
self,
|
||||
version: int,
|
||||
@@ -553,7 +597,8 @@ class ChuniStaticData(BaseData):
|
||||
texturePath: str,
|
||||
isEnabled: int,
|
||||
defaultHave: int,
|
||||
sortName: str
|
||||
sortName: str,
|
||||
opt_id: int = None
|
||||
) -> Optional[int]:
|
||||
sql = insert(avatar).values(
|
||||
version=version,
|
||||
@@ -564,7 +609,8 @@ class ChuniStaticData(BaseData):
|
||||
texturePath=texturePath,
|
||||
isEnabled=isEnabled,
|
||||
defaultHave=defaultHave,
|
||||
sortName=sortName
|
||||
sortName=sortName,
|
||||
opt=coalesce(avatar.c.opt, opt_id)
|
||||
)
|
||||
|
||||
conflict = sql.on_duplicate_key_update(
|
||||
@@ -574,7 +620,8 @@ class ChuniStaticData(BaseData):
|
||||
texturePath=texturePath,
|
||||
isEnabled=isEnabled,
|
||||
defaultHave=defaultHave,
|
||||
sortName=sortName
|
||||
sortName=sortName,
|
||||
opt=coalesce(avatar.c.opt, opt_id)
|
||||
)
|
||||
|
||||
result = await self.execute(conflict)
|
||||
@@ -601,7 +648,8 @@ class ChuniStaticData(BaseData):
|
||||
texturePath: str,
|
||||
isEnabled: int,
|
||||
defaultHave: int,
|
||||
sortName: str
|
||||
sortName: str,
|
||||
opt_id: int = None
|
||||
) -> Optional[int]:
|
||||
sql = insert(nameplate).values(
|
||||
version=version,
|
||||
@@ -610,7 +658,8 @@ class ChuniStaticData(BaseData):
|
||||
texturePath=texturePath,
|
||||
isEnabled=isEnabled,
|
||||
defaultHave=defaultHave,
|
||||
sortName=sortName
|
||||
sortName=sortName,
|
||||
opt=coalesce(nameplate.c.opt, opt_id)
|
||||
)
|
||||
|
||||
conflict = sql.on_duplicate_key_update(
|
||||
@@ -618,7 +667,8 @@ class ChuniStaticData(BaseData):
|
||||
texturePath=texturePath,
|
||||
isEnabled=isEnabled,
|
||||
defaultHave=defaultHave,
|
||||
sortName=sortName
|
||||
sortName=sortName,
|
||||
opt=coalesce(nameplate.c.opt, opt_id)
|
||||
)
|
||||
|
||||
result = await self.execute(conflict)
|
||||
@@ -645,6 +695,7 @@ class ChuniStaticData(BaseData):
|
||||
rareType: int,
|
||||
isEnabled: int,
|
||||
defaultHave: int,
|
||||
opt_id: int = None
|
||||
) -> Optional[int]:
|
||||
sql = insert(trophy).values(
|
||||
version=version,
|
||||
@@ -652,14 +703,16 @@ class ChuniStaticData(BaseData):
|
||||
name=name,
|
||||
rareType=rareType,
|
||||
isEnabled=isEnabled,
|
||||
defaultHave=defaultHave
|
||||
defaultHave=defaultHave,
|
||||
opt=coalesce(trophy.c.opt, opt_id)
|
||||
)
|
||||
|
||||
conflict = sql.on_duplicate_key_update(
|
||||
name=name,
|
||||
rareType=rareType,
|
||||
isEnabled=isEnabled,
|
||||
defaultHave=defaultHave
|
||||
defaultHave=defaultHave,
|
||||
opt=coalesce(trophy.c.opt, opt_id)
|
||||
)
|
||||
|
||||
result = await self.execute(conflict)
|
||||
@@ -687,6 +740,7 @@ class ChuniStaticData(BaseData):
|
||||
iconPath: str,
|
||||
isEnabled: int,
|
||||
defaultHave: int,
|
||||
opt_id: int = None
|
||||
) -> Optional[int]:
|
||||
sql = insert(map_icon).values(
|
||||
version=version,
|
||||
@@ -695,7 +749,8 @@ class ChuniStaticData(BaseData):
|
||||
sortName=sortName,
|
||||
iconPath=iconPath,
|
||||
isEnabled=isEnabled,
|
||||
defaultHave=defaultHave
|
||||
defaultHave=defaultHave,
|
||||
opt=coalesce(map_icon.c.opt, opt_id)
|
||||
)
|
||||
|
||||
conflict = sql.on_duplicate_key_update(
|
||||
@@ -703,7 +758,8 @@ class ChuniStaticData(BaseData):
|
||||
sortName=sortName,
|
||||
iconPath=iconPath,
|
||||
isEnabled=isEnabled,
|
||||
defaultHave=defaultHave
|
||||
defaultHave=defaultHave,
|
||||
opt=coalesce(map_icon.c.opt, opt_id)
|
||||
)
|
||||
|
||||
result = await self.execute(conflict)
|
||||
@@ -731,6 +787,7 @@ class ChuniStaticData(BaseData):
|
||||
imagePath: str,
|
||||
isEnabled: int,
|
||||
defaultHave: int,
|
||||
opt_id: int = None
|
||||
) -> Optional[int]:
|
||||
sql = insert(system_voice).values(
|
||||
version=version,
|
||||
@@ -739,7 +796,8 @@ class ChuniStaticData(BaseData):
|
||||
sortName=sortName,
|
||||
imagePath=imagePath,
|
||||
isEnabled=isEnabled,
|
||||
defaultHave=defaultHave
|
||||
defaultHave=defaultHave,
|
||||
opt=coalesce(system_voice.c.opt, opt_id)
|
||||
)
|
||||
|
||||
conflict = sql.on_duplicate_key_update(
|
||||
@@ -747,7 +805,8 @@ class ChuniStaticData(BaseData):
|
||||
sortName=sortName,
|
||||
imagePath=imagePath,
|
||||
isEnabled=isEnabled,
|
||||
defaultHave=defaultHave
|
||||
defaultHave=defaultHave,
|
||||
opt=coalesce(system_voice.c.opt, opt_id)
|
||||
)
|
||||
|
||||
result = await self.execute(conflict)
|
||||
@@ -778,7 +837,8 @@ class ChuniStaticData(BaseData):
|
||||
imagePath2: str,
|
||||
imagePath3: str,
|
||||
isEnabled: int,
|
||||
defaultHave: int
|
||||
defaultHave: int,
|
||||
opt_id: int = None
|
||||
) -> Optional[int]:
|
||||
sql = insert(character).values(
|
||||
version=version,
|
||||
@@ -791,7 +851,8 @@ class ChuniStaticData(BaseData):
|
||||
imagePath2=imagePath2,
|
||||
imagePath3=imagePath3,
|
||||
isEnabled=isEnabled,
|
||||
defaultHave=defaultHave
|
||||
defaultHave=defaultHave,
|
||||
opt=coalesce(character.c.opt, opt_id)
|
||||
)
|
||||
|
||||
conflict = sql.on_duplicate_key_update(
|
||||
@@ -803,7 +864,8 @@ class ChuniStaticData(BaseData):
|
||||
imagePath2=imagePath2,
|
||||
imagePath3=imagePath3,
|
||||
isEnabled=isEnabled,
|
||||
defaultHave=defaultHave
|
||||
defaultHave=defaultHave,
|
||||
opt=coalesce(character.c.opt, opt_id)
|
||||
)
|
||||
|
||||
result = await self.execute(conflict)
|
||||
@@ -827,12 +889,14 @@ class ChuniStaticData(BaseData):
|
||||
version: int,
|
||||
gacha_id: int,
|
||||
gacha_name: int,
|
||||
opt_id: int = None,
|
||||
**gacha_data,
|
||||
) -> Optional[int]:
|
||||
sql = insert(gachas).values(
|
||||
version=version,
|
||||
gachaId=gacha_id,
|
||||
gachaName=gacha_name,
|
||||
opt=coalesce(gachas.c.opt, opt_id),
|
||||
**gacha_data,
|
||||
)
|
||||
|
||||
@@ -840,6 +904,7 @@ class ChuniStaticData(BaseData):
|
||||
version=version,
|
||||
gachaId=gacha_id,
|
||||
gachaName=gacha_name,
|
||||
opt=coalesce(gachas.c.opt, opt_id),
|
||||
**gacha_data,
|
||||
)
|
||||
|
||||
@@ -909,10 +974,10 @@ class ChuniStaticData(BaseData):
|
||||
return None
|
||||
return result.fetchone()
|
||||
|
||||
async def put_card(self, version: int, card_id: int, **card_data) -> Optional[int]:
|
||||
sql = insert(cards).values(version=version, cardId=card_id, **card_data)
|
||||
async def put_card(self, version: int, card_id: int, opt_id: int = None, **card_data) -> Optional[int]:
|
||||
sql = insert(cards).values(version=version, cardId=card_id, opt=coalesce(cards.c.opt, opt_id), **card_data)
|
||||
|
||||
conflict = sql.on_duplicate_key_update(**card_data)
|
||||
conflict = sql.on_duplicate_key_update(opt=coalesce(cards.c.opt, opt_id), **card_data)
|
||||
|
||||
result = await self.execute(conflict)
|
||||
if result is None:
|
||||
@@ -926,4 +991,86 @@ class ChuniStaticData(BaseData):
|
||||
result = await self.execute(sql)
|
||||
if result is None:
|
||||
return None
|
||||
return result.fetchone()
|
||||
return result.fetchone()
|
||||
|
||||
async def put_opt(self, version: int, folder: str, sequence: int) -> Optional[int]:
|
||||
sql = insert(opts).values(version=version, name=folder, sequence=sequence)
|
||||
|
||||
conflict = sql.on_duplicate_key_update(sequence=sequence, whenRead=datetime.now())
|
||||
|
||||
result = await self.execute(conflict)
|
||||
if result is None:
|
||||
self.logger.warning(f"Failed to insert opt! version {version} folder {folder} sequence {sequence}")
|
||||
return None
|
||||
return result.lastrowid
|
||||
|
||||
async def get_opt_by_version_folder(self, version: int, folder: str) -> Optional[Row]:
|
||||
result = await self.execute(opts.select(and_(
|
||||
opts.c.version == version,
|
||||
opts.c.name == folder,
|
||||
)))
|
||||
|
||||
if result is None:
|
||||
return None
|
||||
return result.fetchone()
|
||||
|
||||
async def get_opt_by_version_sequence(self, version: int, sequence: str) -> Optional[Row]:
|
||||
result = await self.execute(opts.select(and_(
|
||||
opts.c.version == version,
|
||||
opts.c.sequence == sequence,
|
||||
)))
|
||||
|
||||
if result is None:
|
||||
return None
|
||||
return result.fetchone()
|
||||
|
||||
async def get_opts_by_version(self, version: int) -> Optional[List[Row]]:
|
||||
result = await self.execute(opts.select(opts.c.version == version))
|
||||
|
||||
if result is None:
|
||||
return None
|
||||
return result.fetchall()
|
||||
|
||||
async def get_opts_enabled_by_version(self, version: int) -> Optional[List[Row]]:
|
||||
result = await self.execute(opts.select(and_(
|
||||
opts.c.version == version,
|
||||
opts.c.isEnable == True,
|
||||
)))
|
||||
|
||||
if result is None:
|
||||
return None
|
||||
return result.fetchall()
|
||||
|
||||
async def get_latest_enabled_opt_by_version(self, version: int) -> Optional[Row]:
|
||||
result = await self.execute(
|
||||
opts.select(and_(
|
||||
opts.c.version == version,
|
||||
opts.c.isEnable == True,
|
||||
)).order_by(opts.c.sequence.desc())
|
||||
)
|
||||
|
||||
if result is None:
|
||||
return None
|
||||
return result.fetchone()
|
||||
|
||||
async def get_opts(self) -> Optional[List[Row]]:
|
||||
result = await self.execute(opts.select())
|
||||
|
||||
if result is None:
|
||||
return None
|
||||
return result.fetchall()
|
||||
|
||||
|
||||
async def set_opt_enabled(self, opt_id: int, enabled: bool) -> bool:
|
||||
result = await self.execute(opts.update(opts.c.id == opt_id).values(isEnable=enabled))
|
||||
|
||||
if result is None:
|
||||
self.logger.error(f"Failed to set opt enabled status to {enabled} for opt {opt_id}")
|
||||
return False
|
||||
return True
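A hedged usage sketch of the helpers above from an async caller; the ChuniStaticData instance, version number and opt id are placeholders for illustration only:

async def list_and_toggle_opts(static) -> None:
    # `static` is assumed to be a ChuniStaticData instance; version 15 and
    # opt id 3 are made-up values.
    for row in await static.get_opts_enabled_by_version(15) or []:
        print(row["name"], row["sequence"])
    if not await static.set_opt_enabled(3, False):
        print("could not disable opt 3")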
|
||||
|
||||
@@ -118,9 +118,9 @@ userbox_components = {
|
||||
"{{ nameplates[profile.nameplateId]["texturePath"] }}", "", "", ""],
|
||||
|
||||
"character":["{{ characters|length }}",
|
||||
"{{ profile.charaIllustId }}",
|
||||
"{{ characters[profile.charaIllustId]["name"] }}",
|
||||
"{{ characters[profile.charaIllustId]["iconPath"] }}", "", "", ""]
|
||||
"{{ profile.characterId }}",
|
||||
"{{ characters[profile.characterId]["name"] }}",
|
||||
"{{ characters[profile.characterId]["iconPath"] }}", "", "", ""]
|
||||
};
|
||||
types = Object.keys(userbox_components);
|
||||
orig_trophy = curr_trophy = "{{ profile.trophyId }}";
|
||||
|
||||
@@ -327,3 +327,39 @@ class CardMakerReader(BaseReader):
|
||||
maxSelectPoint=max_select_point,
|
||||
)
|
||||
self.logger.info(f"Added ongeki gacha {gacha_id}")
|
||||
|
||||
async def read_opt(self, base_dir: str) -> None:
|
||||
self.logger.info(f"Reading opt data from {base_dir}...")
|
||||
cm_data_cfg = None
|
||||
cm_data_cfg_file = os.path.join(base_dir, "DataConfig.xml")
|
||||
|
||||
geki_data_cfg = None
|
||||
geki_data_cfg_file = os.path.join(base_dir, "GEKI", "DataConfig.xml")
|
||||
|
||||
mai2_data_cfg = None
|
||||
mai2_data_cfg_file = os.path.join(base_dir, "MAI", "DataConfig.xml")
|
||||
|
||||
if os.path.exists(cm_data_cfg_file):
|
||||
with open(cm_data_cfg_file, "r") as f:
|
||||
cm_data_cfg = ET.fromstring(f.read())
|
||||
else:
|
||||
self.logger.info(f"No DataConfig.xml in {base_dir}, sequence will be null")
|
||||
|
||||
if os.path.exists(geki_data_cfg_file):
|
||||
with open(geki_data_cfg_file, "r") as f:
|
||||
geki_data_cfg = ET.fromstring(f.read())
|
||||
else:
|
||||
self.logger.info(f"Cannot find {geki_data_cfg_file}, gekiVersion and gekiReleaseVer will be null")
|
||||
|
||||
if os.path.exists(mai2_data_cfg_file):
|
||||
with open(mai2_data_cfg_file, "r") as f:
|
||||
mai2_data_cfg = ET.fromstring(f.read())
|
||||
else:
|
||||
self.logger.info(f"Cannot find {mai2_data_cfg_file}, mai2Version and mai2ReleaseVer will be null")
|
||||
|
||||
cm_rel_ver = int(cm_data_cfg.find("DataConfig/version/release").text)
|
||||
|
||||
geki_rel_ver = int(geki_data_cfg.find("DataConfig/version/release").text)
|
||||
|
||||
mai2_rel_ver = int(mai2_data_cfg.find("DataConfig/version/release").text)
|
||||
mai2_db_ver = Mai2Constants.int_ver_to_game_ver(mai2_data_cfg.find("DataConfig/version/major").text + mai2_data_cfg.find("DataConfig/version/minor").text)
|
||||
|
||||
@@ -1,3 +1,6 @@
|
||||
from typing import Optional
|
||||
from core.utils import floor_to_nearest_005
|
||||
|
||||
class Mai2Constants:
|
||||
GRADE = {
|
||||
"D": 0,
|
||||
@@ -86,7 +89,57 @@ class Mai2Constants:
|
||||
"maimai DX PRiSM",
|
||||
"maimai DX PRiSM PLUS"
|
||||
)
|
||||
KALEIDXSCOPE_KEY_CONDITION={
|
||||
1: [11009, 11008, 11100, 11097, 11098, 11099, 11163, 11162, 11161, 11228, 11229, 11231, 11463, 11464, 11465, 11538, 11539, 11541, 11620, 11622, 11623, 11737, 11738, 11164, 11230, 11466, 11540, 11621, 11739],
|
||||
#青の扉: Played 29 songs
|
||||
2: [11102, 11234, 11300, 11529, 11542, 11612],
|
||||
#白の扉: set Frame to "Latent Kingdom" (459504), play 3 or 4 songs by the composer 大国奏音 in one credit
|
||||
3: [],
|
||||
#紫の扉: need to enter redeem code 51090942171709440000
|
||||
4: [11023, 11106, 11221, 11222, 11300, 11374, 11458, 11523, 11619, 11663, 11746],
|
||||
#青の扉: Played 11 songs
|
||||
}
|
||||
MAI_VERSION_LUT = {
|
||||
"100": VER_MAIMAI,
|
||||
"110": VER_MAIMAI_PLUS,
|
||||
"120": VER_MAIMAI_GREEN,
|
||||
"130": VER_MAIMAI_GREEN_PLUS,
|
||||
"140": VER_MAIMAI_ORANGE,
|
||||
"150": VER_MAIMAI_ORANGE_PLUS,
|
||||
"160": VER_MAIMAI_PINK,
|
||||
"170": VER_MAIMAI_PINK_PLUS,
|
||||
"180": VER_MAIMAI_MURASAKI,
|
||||
"185": VER_MAIMAI_MURASAKI_PLUS,
|
||||
"190": VER_MAIMAI_MILK,
|
||||
"195": VER_MAIMAI_MILK_PLUS,
|
||||
"197": VER_MAIMAI_FINALE,
|
||||
}
|
||||
|
||||
MAI2_VERSION_LUT = {
|
||||
"100": VER_MAIMAI_DX,
|
||||
"105": VER_MAIMAI_DX_PLUS,
|
||||
"110": VER_MAIMAI_DX_SPLASH,
|
||||
"115": VER_MAIMAI_DX_SPLASH_PLUS,
|
||||
"120": VER_MAIMAI_DX_UNIVERSE,
|
||||
"125": VER_MAIMAI_DX_UNIVERSE_PLUS,
|
||||
"130": VER_MAIMAI_DX_FESTIVAL,
|
||||
"135": VER_MAIMAI_DX_FESTIVAL_PLUS,
|
||||
"140": VER_MAIMAI_DX_BUDDIES,
|
||||
"145": VER_MAIMAI_DX_BUDDIES_PLUS,
|
||||
"150": VER_MAIMAI_DX_PRISM
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def game_ver_to_string(cls, ver: int):
|
||||
""" Takes an internal game version (ex 13 for maimai DX) and returns a the full name of the version """
|
||||
return cls.VERSION_STRING[ver]
|
||||
|
||||
@classmethod
|
||||
def int_ver_to_game_ver(cls, ver: int, is_dx = True) -> Optional[int]:
|
||||
""" Takes an int ver (ex 100 for 1.00) and returns an internal game version """
|
||||
if is_dx:
|
||||
return cls.MAI2_VERSION_LUT.get(str(floor_to_nearest_005(ver)), None)
|
||||
else:
|
||||
if ver >= 197:
|
||||
return cls.VER_MAIMAI_FINALE
|
||||
return cls.MAI_VERSION_LUT.get(str(floor_to_nearest_005(ver)), None)
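Expected behaviour of the lookup above, assuming floor_to_nearest_005 in core.utils rounds an integer version such as 147 down to 145 (which is what the LUT keys imply); a sketch, not part of the shipped code:

from titles.mai2.const import Mai2Constants

assert Mai2Constants.int_ver_to_game_ver(150) == Mai2Constants.VER_MAIMAI_DX_PRISM
assert Mai2Constants.int_ver_to_game_ver(147) == Mai2Constants.VER_MAIMAI_DX_BUDDIES_PLUS  # 147 floors to 145
assert Mai2Constants.int_ver_to_game_ver(199, is_dx=False) == Mai2Constants.VER_MAIMAI_FINALE  # >= 197 short-circuits
assert Mai2Constants.int_ver_to_game_ver(999) is None  # unknown versions fall through to None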
|
||||
|
||||
@@ -32,7 +32,6 @@ from .festivalplus import Mai2FestivalPlus
|
||||
from .buddies import Mai2Buddies
|
||||
from .buddiesplus import Mai2BuddiesPlus
|
||||
from .prism import Mai2Prism
|
||||
from .prismplus import Mai2PrismPlus
|
||||
|
||||
|
||||
class Mai2Servlet(BaseServlet):
|
||||
@@ -311,7 +310,7 @@ class Mai2Servlet(BaseServlet):
|
||||
elif version >= 140 and version < 145: # BUDDiES
|
||||
internal_ver = Mai2Constants.VER_MAIMAI_DX_BUDDIES
|
||||
elif version >= 145 and version < 150: # BUDDiES PLUS
|
||||
internal_ver = Mai2Constants.VER_MAIMAI_DX_BUDDIES_PLUS,
|
||||
internal_ver = Mai2Constants.VER_MAIMAI_DX_BUDDIES_PLUS
|
||||
elif version >= 150 and version < 155:
|
||||
internal_ver = Mai2Constants.VER_MAIMAI_DX_PRISM
|
||||
elif version >= 155:
|
||||
@@ -337,7 +336,7 @@ class Mai2Servlet(BaseServlet):
|
||||
elif version >= 140 and version < 145: # BUDDiES
|
||||
internal_ver = Mai2Constants.VER_MAIMAI_DX_BUDDIES
|
||||
elif version >= 145 and version < 150: # BUDDiES PLUS
|
||||
internal_ver = Mai2Constants.VER_MAIMAI_DX_BUDDIES_PLUS,
|
||||
internal_ver = Mai2Constants.VER_MAIMAI_DX_BUDDIES_PLUS
|
||||
elif version >= 150 and version < 155:
|
||||
internal_ver = Mai2Constants.VER_MAIMAI_DX_PRISM
|
||||
elif version >= 155:
|
||||
|
||||
@@ -43,27 +43,52 @@ class Mai2Prism(Mai2BuddiesPlus):
|
||||
{"gateId": 2, "phaseId": 6},
|
||||
{"gateId": 3, "phaseId": 6},
|
||||
{"gateId": 4, "phaseId": 6},
|
||||
{"gateId": 5, "phaseId": 6},
|
||||
{"gateId": 6, "phaseId": 6}
|
||||
]
|
||||
}
|
||||
|
||||
async def handle_get_user_kaleidx_scope_api_request(self, data: Dict) -> Dict:
|
||||
# kaleidxscope keyget condition judgement
|
||||
# player may get key before GateFound
|
||||
for gate in range(1,7):
|
||||
condition_list = await self.data.static.get_kaleidxscope_condition(gate)
|
||||
if not condition_list:
|
||||
continue
|
||||
condition_satisfy = 0
|
||||
for condition in condition_list:
|
||||
score_list = await self.data.score.get_best_scores(user_id=data["userId"], song_id=condition[3])
|
||||
if score_list:
|
||||
condition_satisfy = condition_satisfy + 1
|
||||
if len(condition_list) == condition_satisfy:
|
||||
new_kaleidxscope = {'gateId': gate, "isKeyFound": True}
|
||||
await self.data.score.put_user_kaleidxscope(data["userId"], new_kaleidxscope)
|
||||
for gate in range(1,5):
|
||||
if gate == 1 or gate == 4:
|
||||
condition_satisfy = 0
|
||||
for condition in Mai2Constants.KALEIDXSCOPE_KEY_CONDITION[gate]:
|
||||
score_list = await self.data.score.get_best_scores(user_id=data["userId"], song_id=condition)
|
||||
if score_list:
|
||||
condition_satisfy = condition_satisfy + 1
|
||||
if len(Mai2Constants.KALEIDXSCOPE_KEY_CONDITION[gate]) == condition_satisfy:
|
||||
new_kaleidxscope = {'gateId': gate, "isKeyFound": True}
|
||||
await self.data.score.put_user_kaleidxscope(data["userId"], new_kaleidxscope)
|
||||
|
||||
elif gate == 2:
|
||||
user_profile = await self.data.profile.get_profile_detail(user_id=data["userId"], version=self.version)
|
||||
user_frame = user_profile["frameId"]
|
||||
if user_frame == 459504:
|
||||
playlogs = await self.data.score.get_playlogs(user_id=data["userId"], idx=0, limit=0)
|
||||
|
||||
playlog_dict = {}
|
||||
for playlog in playlogs:
|
||||
playlog_id = playlog["playlogId"]
|
||||
if playlog_id not in playlog_dict:
|
||||
playlog_dict[playlog_id] = []
|
||||
playlog_dict[playlog_id].append(playlog["musicId"])
|
||||
valid_playlogs = []
|
||||
allowed_music = set(Mai2Constants.KALEIDXSCOPE_KEY_CONDITION[2])
|
||||
for playlog_id, music_ids in playlog_dict.items():
|
||||
|
||||
if len(music_ids) != len(set(music_ids)):
|
||||
continue
|
||||
all_valid = True
|
||||
for mid in music_ids:
|
||||
if mid not in allowed_music:
|
||||
all_valid = False
|
||||
break
|
||||
if all_valid:
|
||||
valid_playlogs.append(playlog_id)
|
||||
|
||||
if valid_playlogs:
|
||||
new_kaleidxscope = {'gateId': 2, "isKeyFound": True}
|
||||
await self.data.score.put_user_kaleidxscope(data["userId"], new_kaleidxscope)
|
||||
|
||||
kaleidxscope = await self.data.score.get_user_kaleidxscope_list(data["userId"])
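A compact sketch of the gate 2 grouping check above with made-up playlog rows; the playlogId grouping stands in for one credit, and the allowed ids come from KALEIDXSCOPE_KEY_CONDITION[2] (the frame check is a separate, earlier condition):

allowed = {11102, 11234, 11300, 11529, 11542, 11612}

# Fake playlogs as (playlogId, musicId). Credit 7 plays three distinct allowed
# songs, credit 8 repeats a song, credit 9 includes a song outside the list.
fake_playlogs = [(7, 11102), (7, 11234), (7, 11300),
                 (8, 11102), (8, 11102), (8, 11529),
                 (9, 11102), (9, 11734), (9, 11300)]

by_credit = {}
for pid, mid in fake_playlogs:
    by_credit.setdefault(pid, []).append(mid)

valid = [pid for pid, mids in by_credit.items()
         if len(mids) == len(set(mids)) and all(m in allowed for m in mids)]
print(valid)  # [7] -> only credit 7 satisfies the white-door key condition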
|
||||
|
||||
|
||||
@@ -1,20 +1,16 @@
|
||||
from decimal import Decimal
|
||||
import logging
|
||||
import os
|
||||
import re
|
||||
import xml.etree.ElementTree as ET
|
||||
from typing import Any, Dict, List, Optional
|
||||
from typing import Dict, List, Optional
|
||||
from Crypto.Cipher import AES
|
||||
import zlib
|
||||
import codecs
|
||||
|
||||
from core.config import CoreConfig
|
||||
from core.data import Data
|
||||
from read import BaseReader
|
||||
from titles.mai2.const import Mai2Constants
|
||||
from titles.mai2.database import Mai2Data
|
||||
|
||||
|
||||
class Mai2Reader(BaseReader):
|
||||
def __init__(
|
||||
self,
|
||||
@@ -46,14 +42,11 @@ class Mai2Reader(BaseReader):
|
||||
|
||||
for dir in data_dirs:
|
||||
self.logger.info(f"Read from {dir}")
|
||||
await self.get_events(f"{dir}/event")
|
||||
this_opt_id = await self.read_opt_info(dir)
|
||||
await self.get_events(f"{dir}/event", this_opt_id)
|
||||
await self.disable_events(f"{dir}/information", f"{dir}/scoreRanking")
|
||||
#await self.read_music(f"{dir}/music")
|
||||
await self.read_tickets(f"{dir}/ticket")
|
||||
|
||||
if self.version >= Mai2Constants.VER_MAIMAI_DX_PRISM:
|
||||
for dir in data_dirs:
|
||||
await self.read_kaleidxscope_condition(f"{dir}/kaleidxScopeKeyCondition")
|
||||
await self.read_music(f"{dir}/music", this_opt_id)
|
||||
await self.read_tickets(f"{dir}/ticket", this_opt_id)
|
||||
|
||||
else:
|
||||
if not os.path.exists(f"{self.bin_dir}/tables"):
|
||||
@@ -183,7 +176,7 @@ class Mai2Reader(BaseReader):
|
||||
self.logger.warning("Failed load table content, skipping")
|
||||
return
|
||||
|
||||
async def get_events(self, base_dir: str) -> None:
|
||||
async def get_events(self, base_dir: str, opt_id: int = None) -> None:
|
||||
self.logger.info(f"Reading events from {base_dir}...")
|
||||
|
||||
for root, dirs, files in os.walk(base_dir):
|
||||
@@ -197,7 +190,7 @@ class Mai2Reader(BaseReader):
|
||||
event_type = int(troot.find("infoType").text)
|
||||
|
||||
await self.data.static.put_game_event(
|
||||
self.version, event_type, id, name
|
||||
self.version, event_type, id, name, opt_id
|
||||
)
|
||||
self.logger.info(f"Added event {id}...")
|
||||
|
||||
@@ -259,7 +252,7 @@ class Mai2Reader(BaseReader):
|
||||
await self.data.static.toggle_game_event(self.version, event_id, toggle=False)
|
||||
self.logger.info(f"Disabled event {event_id}...")
|
||||
|
||||
async def read_music(self, base_dir: str) -> None:
|
||||
async def read_music(self, base_dir: str, opt_id: int = None) -> None:
|
||||
self.logger.info(f"Reading music from {base_dir}...")
|
||||
|
||||
for root, dirs, files in os.walk(base_dir):
|
||||
@@ -300,13 +293,14 @@ class Mai2Reader(BaseReader):
|
||||
added_ver,
|
||||
diff_num,
|
||||
note_designer,
|
||||
opt_id
|
||||
)
|
||||
|
||||
self.logger.info(
|
||||
f"Added music id {song_id} chart {chart_id}"
|
||||
)
|
||||
|
||||
async def read_tickets(self, base_dir: str) -> None:
|
||||
async def read_tickets(self, base_dir: str, opt_id: int = None) -> None:
|
||||
self.logger.info(f"Reading tickets from {base_dir}...")
|
||||
|
||||
for root, dirs, files in os.walk(base_dir):
|
||||
@@ -321,7 +315,7 @@ class Mai2Reader(BaseReader):
|
||||
price = int(troot.find("creditNum").text)
|
||||
|
||||
await self.data.static.put_game_ticket(
|
||||
self.version, id, ticket_type, price, name
|
||||
self.version, id, ticket_type, price, name, opt_id
|
||||
)
|
||||
self.logger.info(f"Added ticket {id}...")
|
||||
|
||||
@@ -346,30 +340,50 @@ class Mai2Reader(BaseReader):
|
||||
return
|
||||
# TODO
|
||||
|
||||
async def read_opt_info(self, directory: str) -> Optional[int]:
|
||||
datacfg_file = os.path.join(directory, "DataConfig.xml")
|
||||
if not os.path.exists(datacfg_file):
|
||||
self.logger.warning(f"{datacfg_file} does not contain DataConfig.xml, opt info will not be read")
|
||||
return None
|
||||
|
||||
with open(datacfg_file, encoding="utf-8") as f:
|
||||
troot = ET.fromstring(f.read())
|
||||
|
||||
if troot.find("version") is None:
|
||||
self.logger.warning(f"{directory}/DataConfig.xml contains no Version section, opt info will not be read")
|
||||
return None
|
||||
|
||||
ver_maj = troot.find("version/major")
|
||||
ver_min = troot.find("version/minor")
|
||||
ver_rel = troot.find("version/release")
|
||||
cm_maj = troot.find("cardMakerVersion/major")
|
||||
cm_min = troot.find("cardMakerVersion/minor")
|
||||
cm_rel = troot.find("cardMakerVersion/release")
|
||||
|
||||
if ver_maj is None: # Probably not worth checking that the other sections exist
|
||||
self.logger.warning(f"{datacfg_file} contains no major item in the Version section, opt info will not be read")
|
||||
return None
|
||||
|
||||
async def read_kaleidxscope_condition(self, base_dir: str) -> None:
|
||||
self.logger.info(f"Reading KaleidxScope Key Conditions from {base_dir}...")
|
||||
if ver_min is None: # Probably not worth checking that the other sections exist
|
||||
self.logger.warning(f"{datacfg_file} contains no minor item in the Version section, opt info will not be read")
|
||||
return None
|
||||
|
||||
for root, dirs, files in os.walk(base_dir):
|
||||
for dir in dirs:
|
||||
if os.path.exists(f"{root}/{dir}/KaleidxScopeKeyCondition.xml"):
|
||||
with open(f"{root}/{dir}/KaleidxScopeKeyCondition.xml", encoding="utf-8") as f:
|
||||
troot = ET.fromstring(f.read())
|
||||
if ver_rel is None: # Probably not worth checking that the other sections exist
|
||||
self.logger.warning(f"{datacfg_file} contains no release item in the Version section, opt info will not be read")
|
||||
return None
|
||||
|
||||
opt_folder = os.path.basename(os.path.normpath(directory))
|
||||
opt_id = await self.data.static.get_opt_by_version_folder(self.version, opt_folder)
|
||||
|
||||
if not opt_id:
|
||||
opt_id = await self.data.static.put_opt(self.version, opt_folder, int(ver_rel.text), int(cm_rel.text) if cm_rel is not None else None)
|
||||
if not opt_id:
|
||||
self.logger.error(f"Failed to put opt folder info for {opt_folder}")
|
||||
return None
|
||||
else:
|
||||
opt_id = opt_id['id']
|
||||
|
||||
condition_id = int(troot.find("name").find("id").text)
|
||||
condition_name = troot.find("name").find("str").text
|
||||
|
||||
music_list = troot.find("musicIds").find("list")
|
||||
for music in music_list.findall("StringID"):
|
||||
music_id = int(music.find("id").text)
|
||||
music_name = music.find("str").text
|
||||
|
||||
await self.data.static.put_kaleidxscope_condition(
|
||||
condition_id,
|
||||
condition_name,
|
||||
music_id,
|
||||
music_name
|
||||
)
|
||||
self.logger.info(
|
||||
f"Add music {music_id} for condition {condition_id}"
|
||||
)
|
||||
self.logger.info(
|
||||
f"Opt folder {opt_folder} (Database ID {opt_id}) contains v{ver_maj.text}.{ver_min.text}.{ver_rel.text} (cm v{cm_maj.text if cm_maj is not None else 'None'}.{cm_min.text if cm_min is not None else 'None'}.{cm_rel.text if cm_rel is not None else 'None'})"
|
||||
)
|
||||
return opt_id
|
||||
|
||||
@@ -728,10 +728,11 @@ class Mai2ItemData(BaseData):
|
||||
# Do an anti-join with the mai2_item_item table to exclude any
|
||||
# items the users have already owned.
|
||||
if exclude_owned:
|
||||
sql = sql.join(
|
||||
sql = sql.outerjoin(
|
||||
item,
|
||||
(present.c.itemKind == item.c.itemKind)
|
||||
& (present.c.itemId == item.c.itemId)
|
||||
& (item.c.user == user_id)
|
||||
)
|
||||
condition &= (item.c.itemKind.is_(None) & item.c.itemId.is_(None))
|
||||
|
||||
|
||||
@@ -2,13 +2,28 @@ from core.data.schema.base import BaseData, metadata
|
||||
|
||||
from typing import Optional, Dict, List
|
||||
from sqlalchemy import Table, Column, UniqueConstraint, PrimaryKeyConstraint, and_
|
||||
from sqlalchemy.types import Integer, String, TIMESTAMP, Boolean, JSON, Float
|
||||
from sqlalchemy.types import Integer, String, TIMESTAMP, Boolean, BIGINT, Float, INTEGER, BOOLEAN, VARCHAR
|
||||
from sqlalchemy.schema import ForeignKey
|
||||
from sqlalchemy.sql import func, select
|
||||
from sqlalchemy.engine import Row
|
||||
from sqlalchemy.dialects.mysql import insert
|
||||
from sqlalchemy.sql.functions import coalesce
|
||||
from datetime import datetime
|
||||
|
||||
opts = Table(
|
||||
"mai2_static_opt",
|
||||
metadata,
|
||||
Column("id", BIGINT, primary_key=True, nullable=False),
|
||||
Column("version", INTEGER, nullable=False),
|
||||
Column("name", VARCHAR(4), nullable=False), # Axxx
|
||||
Column("sequence", INTEGER, nullable=False), # release in DataConfig.xml
|
||||
Column("cmReleaseVer", INTEGER, nullable=False),
|
||||
Column("whenRead", TIMESTAMP, nullable=False, server_default=func.now()),
|
||||
Column("isEnable", BOOLEAN, nullable=False, server_default="1"),
|
||||
UniqueConstraint("version", "name", name="mai2_static_opt_uk"),
|
||||
mysql_charset="utf8mb4",
|
||||
)
|
||||
|
||||
event = Table(
|
||||
"mai2_static_event",
|
||||
metadata,
|
||||
@@ -19,6 +34,7 @@ event = Table(
|
||||
Column("name", String(255)),
|
||||
Column("startDate", TIMESTAMP, server_default=func.now()),
|
||||
Column("enabled", Boolean, server_default="1"),
|
||||
Column("opt", ForeignKey("mai2_static_opt.id", ondelete="SET NULL", onupdate="cascade")),
|
||||
UniqueConstraint("version", "eventId", "type", name="mai2_static_event_uk"),
|
||||
mysql_charset="utf8mb4",
|
||||
)
|
||||
@@ -37,6 +53,7 @@ music = Table(
|
||||
Column("addedVersion", String(255)),
|
||||
Column("difficulty", Float),
|
||||
Column("noteDesigner", String(255)),
|
||||
Column("opt", ForeignKey("mai2_static_opt.id", ondelete="SET NULL", onupdate="cascade")),
|
||||
UniqueConstraint("songId", "chartId", "version", name="mai2_static_music_uk"),
|
||||
mysql_charset="utf8mb4",
|
||||
)
|
||||
@@ -51,6 +68,7 @@ ticket = Table(
|
||||
Column("name", String(255)),
|
||||
Column("price", Integer, server_default="1"),
|
||||
Column("enabled", Boolean, server_default="1"),
|
||||
Column("opt", ForeignKey("mai2_static_opt.id", ondelete="SET NULL", onupdate="cascade")),
|
||||
UniqueConstraint("version", "ticketId", name="mai2_static_ticket_uk"),
|
||||
mysql_charset="utf8mb4",
|
||||
)
|
||||
@@ -67,34 +85,25 @@ cards = Table(
|
||||
Column("noticeStartDate", TIMESTAMP, server_default="2018-01-01 00:00:00.0"),
|
||||
Column("noticeEndDate", TIMESTAMP, server_default="2038-01-01 00:00:00.0"),
|
||||
Column("enabled", Boolean, server_default="1"),
|
||||
Column("opt", ForeignKey("cm_static_opts.id", ondelete="SET NULL", onupdate="cascade")),
|
||||
UniqueConstraint("version", "cardId", "cardName", name="mai2_static_cards_uk"),
|
||||
mysql_charset="utf8mb4",
|
||||
)
|
||||
|
||||
kaleidxscope_condition = Table(
|
||||
"mai2_static_kaleidxscope_condition",
|
||||
metadata,
|
||||
Column("id", Integer, primary_key=True, nullable=False),
|
||||
Column("conditionId", Integer),
|
||||
Column("conditionName", String(255)),
|
||||
Column("songId", Integer),
|
||||
Column("songName", String(255)),
|
||||
UniqueConstraint("conditionId", "conditionName", "songId", "songName", name="mai2_static_kaleidxscope_uk"),
|
||||
mysql_charset="utf8mb4",
|
||||
)
|
||||
|
||||
class Mai2StaticData(BaseData):
|
||||
async def put_game_event(
|
||||
self, version: int, type: int, event_id: int, name: str
|
||||
self, version: int, type: int, event_id: int, name: str, opt_id: int = None
|
||||
) -> Optional[int]:
|
||||
sql = insert(event).values(
|
||||
version=version,
|
||||
type=type,
|
||||
eventId=event_id,
|
||||
name=name,
|
||||
opt=coalesce(event.c.opt, opt_id)
|
||||
)
|
||||
|
||||
conflict = sql.on_duplicate_key_update(eventId=event_id)
|
||||
conflict = sql.on_duplicate_key_update(eventId=event_id, opt=coalesce(event.c.opt, opt_id))
|
||||
|
||||
result = await self.execute(conflict)
|
||||
if result is None:
|
||||
@@ -147,6 +156,7 @@ class Mai2StaticData(BaseData):
|
||||
added_version: str,
|
||||
difficulty: float,
|
||||
note_designer: str,
|
||||
opt_id: int = None
|
||||
) -> None:
|
||||
sql = insert(music).values(
|
||||
version=version,
|
||||
@@ -159,6 +169,7 @@ class Mai2StaticData(BaseData):
|
||||
addedVersion=added_version,
|
||||
difficulty=difficulty,
|
||||
noteDesigner=note_designer,
|
||||
opt=coalesce(music.c.opt, opt_id)
|
||||
)
|
||||
|
||||
conflict = sql.on_duplicate_key_update(
|
||||
@@ -169,6 +180,7 @@ class Mai2StaticData(BaseData):
|
||||
addedVersion=added_version,
|
||||
difficulty=difficulty,
|
||||
noteDesigner=note_designer,
|
||||
opt=coalesce(music.c.opt, opt_id)
|
||||
)
|
||||
|
||||
result = await self.execute(conflict)
|
||||
@@ -184,6 +196,7 @@ class Mai2StaticData(BaseData):
|
||||
ticket_type: int,
|
||||
ticket_price: int,
|
||||
name: str,
|
||||
opt_id: int = None
|
||||
) -> Optional[int]:
|
||||
sql = insert(ticket).values(
|
||||
version=version,
|
||||
@@ -191,11 +204,10 @@ class Mai2StaticData(BaseData):
|
||||
kind=ticket_type,
|
||||
price=ticket_price,
|
||||
name=name,
|
||||
opt=coalesce(ticket.c.opt, opt_id)
|
||||
)
|
||||
|
||||
conflict = sql.on_duplicate_key_update(price=ticket_price)
|
||||
|
||||
conflict = sql.on_duplicate_key_update(price=ticket_price)
|
||||
conflict = sql.on_duplicate_key_update(price=ticket_price, opt=coalesce(ticket.c.opt, opt_id))
|
||||
|
||||
result = await self.execute(conflict)
|
||||
if result is None:
|
||||
@@ -240,12 +252,12 @@ class Mai2StaticData(BaseData):
|
||||
return None
|
||||
return result.fetchone()
|
||||
|
||||
async def put_card(self, version: int, card_id: int, card_name: str, **card_data) -> int:
|
||||
async def put_card(self, version: int, card_id: int, card_name: str, opt_id: int = None, **card_data) -> int:
|
||||
sql = insert(cards).values(
|
||||
version=version, cardId=card_id, cardName=card_name, **card_data
|
||||
version=version, cardId=card_id, cardName=card_name, opt=coalesce(cards.c.opt, opt_id), **card_data
|
||||
)
|
||||
|
||||
conflict = sql.on_duplicate_key_update(**card_data)
|
||||
conflict = sql.on_duplicate_key_update(opt=coalesce(cards.c.opt, opt_id), **card_data)
|
||||
|
||||
result = await self.execute(conflict)
|
||||
if result is None:
|
||||
@@ -276,34 +288,84 @@ class Mai2StaticData(BaseData):
|
||||
if not result:
|
||||
self.logger.error(f"Failed to update event {table_id} - {is_enable} {start_date}")
|
||||
|
||||
# new in prism
|
||||
async def put_kaleidxscope_condition(
|
||||
self,
|
||||
condition_id: int,
|
||||
condition_name: str,
|
||||
music_id: int,
|
||||
music_name: str
|
||||
) -> Optional[int]:
|
||||
sql = insert(kaleidxscope_condition).values(
|
||||
conditionId = condition_id,
|
||||
conditionName = condition_name,
|
||||
songId = music_id,
|
||||
songName = music_name,
|
||||
)
|
||||
|
||||
|
||||
conflict = sql.on_duplicate_key_update(conditionName=condition_name, songName=music_name)
|
||||
async def put_opt(self, version: int, folder: str, sequence: int, cm_seq: int = None) -> Optional[int]:
|
||||
sql = insert(opts).values(version=version, name=folder, sequence=sequence, cmReleaseVer=cm_seq)
|
||||
|
||||
conflict = sql.on_duplicate_key_update(sequence=sequence, whenRead=datetime.now())
|
||||
|
||||
result = await self.execute(conflict)
|
||||
if result is None:
|
||||
self.logger.warning(
|
||||
f"put_kaleidxscope_condition: Failed to insert kaleidxScope Key Condition! conditionID {condition_id} songId {music_id}"
|
||||
)
|
||||
self.logger.warning(f"Failed to insert opt! version {version} folder {folder} sequence {sequence}")
|
||||
return None
|
||||
return result.lastrowid
|
||||
|
||||
async def get_kaleidxscope_condition(self, condition_id: int) -> None:
|
||||
sql = kaleidxscope_condition.select(kaleidxscope_condition.c.conditionId == condition_id)
|
||||
result = await self.execute(sql)
|
||||
async def get_opt_by_version_folder(self, version: int, folder: str) -> Optional[Row]:
|
||||
result = await self.execute(opts.select(and_(
|
||||
opts.c.version == version,
|
||||
opts.c.name == folder,
|
||||
)))
|
||||
|
||||
if result is None:
|
||||
return None
|
||||
return result.fetchall()
|
||||
return result.fetchone()
|
||||
|
||||
async def get_opt_by_version_sequence(self, version: int, sequence: str) -> Optional[Row]:
|
||||
result = await self.execute(opts.select(and_(
|
||||
opts.c.version == version,
|
||||
opts.c.sequence == sequence,
|
||||
)))
|
||||
|
||||
if result is None:
|
||||
return None
|
||||
return result.fetchone()
|
||||
|
||||
async def get_opts_by_version(self, version: int) -> Optional[List[Row]]:
|
||||
result = await self.execute(opts.select(opts.c.version == version))
|
||||
|
||||
if result is None:
|
||||
return None
|
||||
return result.fetchall()
|
||||
|
||||
async def get_opts_enabled_by_version(self, version: int) -> Optional[List[Row]]:
|
||||
result = await self.execute(opts.select(and_(
|
||||
opts.c.version == version,
|
||||
opts.c.isEnable == True,
|
||||
)))
|
||||
|
||||
if result is None:
|
||||
return None
|
||||
return result.fetchall()
|
||||
|
||||
async def get_latest_enabled_opt_by_version(self, version: int) -> Optional[Row]:
|
||||
result = await self.execute(
|
||||
opts.select(and_(
|
||||
opts.c.version == version,
|
||||
opts.c.isEnable == True,
|
||||
)).order_by(opts.c.sequence.desc())
|
||||
)
|
||||
|
||||
if result is None:
|
||||
return None
|
||||
return result.fetchone()
|
||||
|
||||
async def get_opts(self) -> Optional[List[Row]]:
|
||||
result = await self.execute(opts.select())
|
||||
|
||||
if result is None:
|
||||
return None
|
||||
return result.fetchall()
|
||||
|
||||
|
||||
async def set_opt_enabled(self, opt_id: int, enabled: bool) -> bool:
|
||||
result = await self.execute(opts.update(opts.c.id == opt_id).values(isEnable=enabled))
|
||||
|
||||
if result is None:
|
||||
self.logger.error(f"Failed to set opt enabled status to {enabled} for opt {opt_id}")
|
||||
return False
|
||||
return True
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
from typing import Final, Dict
|
||||
from typing import Optional
|
||||
from enum import Enum
|
||||
|
||||
from core.utils import floor_to_nearest_005
|
||||
|
||||
class OngekiConstants:
|
||||
GAME_CODE = "SDDT"
|
||||
@@ -106,6 +106,24 @@ class OngekiConstants:
|
||||
"O.N.G.E.K.I. bright MEMORY Act.3",
|
||||
)
|
||||
|
||||
VERSION_LUT = {
|
||||
"100": VER_ONGEKI,
|
||||
"105": VER_ONGEKI_PLUS,
|
||||
"110": VER_ONGEKI_SUMMER,
|
||||
"115": VER_ONGEKI_SUMMER_PLUS,
|
||||
"120": VER_ONGEKI_RED,
|
||||
"125": VER_ONGEKI_RED_PLUS,
|
||||
"130": VER_ONGEKI_BRIGHT,
|
||||
"135": VER_ONGEKI_BRIGHT_MEMORY,
|
||||
"140": VER_ONGEKI_BRIGHT_MEMORY,
|
||||
"145": VER_ONGEKI_BRIGHT_MEMORY_ACT3,
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def game_ver_to_string(cls, ver: int):
|
||||
return cls.VERSION_NAMES[ver]
|
||||
|
||||
@classmethod
|
||||
def int_ver_to_game_ver(cls, ver: int) -> Optional[int]:
|
||||
""" Takes an int ver (ex 100 for 1.00) and returns an internal game version """
|
||||
return cls.VERSION_LUT.get(str(floor_to_nearest_005(ver)), None)
|
||||
|
||||
@@ -58,12 +58,13 @@ class OngekiReader(BaseReader):
            data_dirs += self.get_data_directories(self.opt_dir)

        for dir in data_dirs:
            await self.read_events(f"{dir}/event")
            await self.read_music(f"{dir}/music")
            await self.read_card(f"{dir}/card")
            await self.read_reward(f"{dir}/reward")
            this_opt_id = await self.read_opt_info(dir)
            await self.read_events(f"{dir}/event", this_opt_id)
            await self.read_music(f"{dir}/music", this_opt_id)
            await self.read_card(f"{dir}/card", this_opt_id)
            await self.read_reward(f"{dir}/reward", this_opt_id)

    async def read_card(self, base_dir: str) -> None:
    async def read_card(self, base_dir: str, opt_id: int = None) -> None:
        self.logger.info(f"Reading cards from {base_dir}...")

        for root, dirs, files in os.walk(base_dir):
@@ -73,17 +74,6 @@ class OngekiReader(BaseReader):
                    troot = ET.fromstring(f.read())

                    card_id = int(troot.find("Name").find("id").text)

                    # skip already existing cards
                    if (
                        await self.data.static.get_card(
                            OngekiConstants.VER_ONGEKI_BRIGHT_MEMORY, card_id
                        )
                        is not None
                    ):
                        self.logger.info(f"Card {card_id} already added, skipping")
                        continue

                    name = troot.find("Name").find("str").text
                    chara_id = int(troot.find("CharaID").find("id").text)
                    nick_name = troot.find("NickName").text
@@ -108,6 +98,7 @@ class OngekiReader(BaseReader):
                    await self.data.static.put_card(
                        self.parse_version(troot),
                        card_id,
                        opt_id,
                        name=name,
                        charaId=chara_id,
                        nickName=nick_name,
@@ -122,7 +113,7 @@ class OngekiReader(BaseReader):
                    )
                    self.logger.info(f"Added card {card_id}")

    async def read_events(self, base_dir: str) -> None:
    async def read_events(self, base_dir: str, opt_id: int = None) -> None:
        self.logger.info(f"Reading events from {base_dir}...")

        for root, dirs, files in os.walk(base_dir):
@@ -140,10 +131,10 @@ class OngekiReader(BaseReader):
                    if troot.find("EventType").text == "MissionEvent":
                        name = (troot.find("Event").find("MissionName").find("str").text)

                    await self.data.static.put_event(self.version, id, event_type, name)
                    await self.data.static.put_event(self.version, id, event_type, name, opt_id)
                    self.logger.info(f"Added event {id}")

    async def read_music(self, base_dir: str) -> None:
    async def read_music(self, base_dir: str, opt_id: int = None) -> None:
        self.logger.info(f"Reading music from {base_dir}...")

        for root, dirs, files in os.walk(base_dir):
@@ -178,11 +169,11 @@ class OngekiReader(BaseReader):
                    )

                    await self.data.static.put_chart(
                        version, song_id, chart_id, title, artist, genre, level
                        version, song_id, chart_id, title, artist, genre, level, opt_id
                    )
                    self.logger.info(f"Added song {song_id} chart {chart_id}")

    async def read_reward(self, base_dir: str) -> None:
    async def read_reward(self, base_dir: str, opt_id: int = None) -> None:
        self.logger.info(f"Reading rewards from {base_dir}...")

        for root, dirs, files in os.walk(base_dir):
@@ -204,5 +195,53 @@ class OngekiReader(BaseReader):
                    itemKind = OngekiConstants.REWARD_TYPES[troot.find("ItemType").text].value
                    itemId = troot.find("RewardItem").find("ItemName").find("id").text

                    await self.data.static.put_reward(self.version, rewardId, rewardname, itemKind, itemId)
                    await self.data.static.put_reward(self.version, rewardId, rewardname, itemKind, itemId, opt_id)
                    self.logger.info(f"Added reward {rewardId}")
    async def read_opt_info(self, directory: str) -> Optional[int]:
        datacfg_file = os.path.join(directory, "DataConfig.xml")
        if not os.path.exists(datacfg_file):
            self.logger.warning(f"{directory} does not contain DataConfig.xml, opt info will not be read")
            return None

        with open(datacfg_file, encoding="utf-8") as f:
            troot = ET.fromstring(f.read())

        if troot.find("version") is None:
            self.logger.warning(f"{directory}/DataConfig.xml contains no version section, opt info will not be read")
            return None

        ver_maj = troot.find("version/major")
        ver_min = troot.find("version/minor")
        ver_rel = troot.find("version/release")
        cm_maj = troot.find("cardMakerVersion/major")
        cm_min = troot.find("cardMakerVersion/minor")
        cm_rel = troot.find("cardMakerVersion/release")

        if ver_maj is None:  # Probably not worth checking that the other sections exist
            self.logger.warning(f"{datacfg_file} contains no major item in the version section, opt info will not be read")
            return None

        if ver_min is None:
            self.logger.warning(f"{datacfg_file} contains no minor item in the version section, opt info will not be read")
            return None

        if ver_rel is None:
            self.logger.warning(f"{datacfg_file} contains no release item in the version section, opt info will not be read")
            return None

        opt_folder = os.path.basename(os.path.normpath(directory))
        opt_id = await self.data.static.get_opt_by_version_folder(self.version, opt_folder)

        if not opt_id:
            opt_id = await self.data.static.put_opt(self.version, opt_folder, int(ver_rel.text), int(cm_rel.text) if cm_rel is not None else None)
            if not opt_id:
                self.logger.error(f"Failed to put opt folder info for {opt_folder}")
                return None
        else:
            opt_id = opt_id['id']

        self.logger.info(
            f"Opt folder {opt_folder} (Database ID {opt_id}) contains v{ver_maj.text}.{ver_min.text}.{ver_rel.text} (cm v{cm_maj.text if cm_maj is not None else 'None'}.{cm_min.text if cm_min is not None else 'None'}.{cm_rel.text if cm_rel is not None else 'None'})"
        )
        return opt_id
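For reference, a minimal sketch of the DataConfig.xml shape this reader expects, built from the find() paths above; the root element name and the concrete values are assumptions, not taken from an actual opt package:

    import xml.etree.ElementTree as ET

    # Hypothetical DataConfig.xml matching the find() calls in read_opt_info;
    # element names come from the diff, the values are made up for illustration.
    sample = """<DataConfig>
      <version><major>1</major><minor>45</minor><release>3</release></version>
      <cardMakerVersion><major>1</major><minor>35</minor><release>2</release></cardMakerVersion>
    </DataConfig>"""

    troot = ET.fromstring(sample)
    print(troot.find("version/release").text)            # "3" -> stored as the opt's sequence
    print(troot.find("cardMakerVersion/release").text)   # "2" -> stored as cmReleaseVer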
@@ -1,14 +1,47 @@
from typing import Dict, List, Optional
from sqlalchemy import Table, Column, UniqueConstraint, PrimaryKeyConstraint, and_
from sqlalchemy.types import Integer, String, TIMESTAMP, Boolean, JSON, Float
from sqlalchemy.types import Integer, String, TIMESTAMP, Boolean, BIGINT, Float, INTEGER, VARCHAR, BOOLEAN
from sqlalchemy.schema import ForeignKey
from sqlalchemy.sql import func, select
from sqlalchemy.engine import Row
from sqlalchemy.dialects.mysql import insert
from sqlalchemy.sql.functions import coalesce
from datetime import datetime

from core.data.schema import BaseData, metadata
from core.data.schema.arcade import machine

opts = Table(
    "ongeki_static_opt",
    metadata,
    Column("id", BIGINT, primary_key=True, nullable=False),
    Column("version", INTEGER, nullable=False),
    Column("name", VARCHAR(4), nullable=False),  # Axxx
    Column("sequence", INTEGER, nullable=False),  # release in DataConfig.xml
    Column("cmReleaseVer", INTEGER, nullable=False),
    Column("whenRead", TIMESTAMP, nullable=False, server_default=func.now()),
    Column("isEnable", BOOLEAN, nullable=False, server_default="1"),
    UniqueConstraint("version", "name", name="ongeki_static_opt_uk"),
    mysql_charset="utf8mb4",
)

cm_opts = Table(
    "cm_static_opts",
    metadata,
    Column("id", BIGINT, primary_key=True, nullable=False),
    Column("version", INTEGER, nullable=False),
    Column("name", VARCHAR(4), nullable=False),  # Axxx
    Column("sequence", INTEGER),  # Not all opts have a DataConfig.xml
    Column("gekiVersion", INTEGER),  # GEKI/DataConfig.xml
    Column("gekiReleaseVer", INTEGER),  # GEKI/DataConfig.xml
    Column("maiVersion", INTEGER),  # MAI/DataConfig.xml
    Column("maiReleaseVer", INTEGER),  # MAI/DataConfig.xml
    Column("whenRead", TIMESTAMP, nullable=False, server_default=func.now()),
    Column("isEnable", BOOLEAN, nullable=False, server_default="1"),
    UniqueConstraint("version", "name", name="cm_static_opts_uk"),
    mysql_charset="utf8mb4",
)

events = Table(
    "ongeki_static_events",
    metadata,
@@ -20,11 +53,11 @@ events = Table(
    Column("startDate", TIMESTAMP, server_default=func.now()),
    Column("endDate", TIMESTAMP, server_default=func.now()),
    Column("enabled", Boolean, server_default="1"),
    Column("opt", ForeignKey("ongeki_static_opt.id", ondelete="SET NULL", onupdate="cascade")),
    UniqueConstraint("version", "eventId", "type", name="ongeki_static_events_uk"),
    mysql_charset="utf8mb4",
)


music = Table(
    "ongeki_static_music",
    metadata,
@@ -36,6 +69,7 @@ music = Table(
    Column("artist", String(255)),
    Column("genre", String(255)),
    Column("level", Float),
    Column("opt", ForeignKey("ongeki_static_opt.id", ondelete="SET NULL", onupdate="cascade")),
    UniqueConstraint("version", "songId", "chartId", name="ongeki_static_music_uk"),
    mysql_charset="utf8mb4",
)
@@ -59,6 +93,7 @@ gachas = Table(
    Column("noticeStartDate", TIMESTAMP, server_default="2018-01-01 00:00:00.0"),
    Column("noticeEndDate", TIMESTAMP, server_default="2038-01-01 00:00:00.0"),
    Column("convertEndDate", TIMESTAMP, server_default="2038-01-01 00:00:00.0"),
    Column("opt", ForeignKey("cm_static_opts.id", ondelete="SET NULL", onupdate="cascade")),
    UniqueConstraint("version", "gachaId", "gachaName", name="ongeki_static_gachas_uk"),
    mysql_charset="utf8mb4",
)
@@ -94,6 +129,7 @@ cards = Table(
    Column("skillId", Integer, nullable=False),
    Column("choKaikaSkillId", Integer, nullable=False),
    Column("cardNumber", String(255)),
    Column("opt", ForeignKey("ongeki_static_opt.id", ondelete="SET NULL", onupdate="cascade")),
    UniqueConstraint("version", "cardId", name="ongeki_static_cards_uk"),
    mysql_charset="utf8mb4",
)
@@ -107,6 +143,7 @@ rewards = Table(
    Column("rewardname", String(255), nullable=False),
    Column("itemKind", Integer, nullable=False),
    Column("itemId", Integer, nullable=False),
    Column("opt", ForeignKey("ongeki_static_opt.id", ondelete="SET NULL", onupdate="cascade")),
    UniqueConstraint("version", "rewardId", name="ongeki_static_rewards_uk"),
    mysql_charset="utf8mb4",
)
@@ -176,10 +213,10 @@ game_point = Table(
)

class OngekiStaticData(BaseData):
    async def put_card(self, version: int, card_id: int, **card_data) -> Optional[int]:
        sql = insert(cards).values(version=version, cardId=card_id, **card_data)
    async def put_card(self, version: int, card_id: int, opt_id: int = None, **card_data) -> Optional[int]:
        sql = insert(cards).values(version=version, cardId=card_id, opt=coalesce(cards.c.opt, opt_id), **card_data)

        conflict = sql.on_duplicate_key_update(**card_data)
        conflict = sql.on_duplicate_key_update(opt=coalesce(cards.c.opt, opt_id), **card_data)

        result = await self.execute(conflict)
        if result is None:
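The coalesce(..., opt_id) pattern above is meant to keep whichever opt a row was first imported from: on the initial insert the column is NULL so opt_id is stored, and on a later duplicate-key update the existing non-NULL value wins. A rough sketch of the intent, reusing the module's insert, cards, and coalesce names; the version, card id, and opt id values are assumptions:

    # Sketch only: what the coalesce-based upsert is meant to do for the opt column.
    # First import of a card: opt is NULL, so coalesce(cards.c.opt, 3) stores 3.
    # Re-reading the same card from a newer opt: the existing non-NULL opt wins,
    # so the card stays attributed to the opt folder that first introduced it.
    stmt = insert(cards).values(version=7, cardId=100001, opt=coalesce(cards.c.opt, 3), name="Example")
    stmt = stmt.on_duplicate_key_update(opt=coalesce(cards.c.opt, 3), name="Example")
    # await self.execute(stmt)  # executed through BaseData.execute as in the methods above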
@@ -306,7 +343,7 @@ class OngekiStaticData(BaseData):
        return result.fetchall()

    async def put_event(
        self, version: int, event_id: int, event_type: int, event_name: str
        self, version: int, event_id: int, event_type: int, event_name: str, opt_id: int = None
    ) -> Optional[int]:
        sql = insert(events).values(
            version=version,
@@ -314,10 +351,11 @@ class OngekiStaticData(BaseData):
            type=event_type,
            name=event_name,
            endDate=f"2038-01-01 00:00:00",
            opt=coalesce(events.c.opt, opt_id)
        )

        conflict = sql.on_duplicate_key_update(
            name=event_name,
            name=event_name, opt=coalesce(events.c.opt, opt_id)
        )

        result = await self.execute(conflict)
@@ -363,6 +401,7 @@ class OngekiStaticData(BaseData):
        artist: str,
        genre: str,
        level: float,
        opt_id: int = None
    ) -> Optional[int]:
        sql = insert(music).values(
            version=version,
@@ -372,6 +411,7 @@ class OngekiStaticData(BaseData):
            artist=artist,
            genre=genre,
            level=level,
            opt=coalesce(music.c.opt, opt_id)
        )

        conflict = sql.on_duplicate_key_update(
@@ -379,6 +419,7 @@ class OngekiStaticData(BaseData):
            artist=artist,
            genre=genre,
            level=level,
            opt=coalesce(music.c.opt, opt_id)
        )

        result = await self.execute(conflict)
@@ -413,17 +454,21 @@ class OngekiStaticData(BaseData):
            return None
        return result.fetchone()

    async def put_reward(self, version: int, rewardId: int, rewardname: str, itemKind: int, itemId: int) -> Optional[int]:
    async def put_reward(self, version: int, rewardId: int, rewardname: str, itemKind: int, itemId: int, opt_id: int = None) -> Optional[int]:
        sql = insert(rewards).values(
            version=version,
            rewardId=rewardId,
            rewardname=rewardname,
            itemKind=itemKind,
            itemId=itemId,
        )
            version=version,
            rewardId=rewardId,
            rewardname=rewardname,
            itemKind=itemKind,
            itemId=itemId,
            opt=coalesce(rewards.c.opt, opt_id)
        )

        conflict = sql.on_duplicate_key_update(
            rewardname=rewardname,
        )
            rewardname=rewardname,
            opt=coalesce(rewards.c.opt, opt_id)
        )

        result = await self.execute(conflict)
        if result is None:
            self.logger.warning(f"Failed to insert reward! reward_id: {rewardId}")
@@ -491,3 +536,121 @@ class OngekiStaticData(BaseData):
        if result is None:
            return None
        return result.fetchall()

    async def put_opt(self, version: int, folder: str, sequence: int, cm_seq: int = None) -> Optional[int]:
        sql = insert(opts).values(version=version, name=folder, sequence=sequence, cmReleaseVer=cm_seq)

        conflict = sql.on_duplicate_key_update(sequence=sequence, whenRead=datetime.now())

        result = await self.execute(conflict)
        if result is None:
            self.logger.warning(f"Failed to insert opt! version {version} folder {folder} sequence {sequence}")
            return None
        return result.lastrowid

    async def get_opt_by_version_folder(self, version: int, folder: str) -> Optional[Row]:
        result = await self.execute(opts.select(and_(
            opts.c.version == version,
            opts.c.name == folder,
        )))

        if result is None:
            return None
        return result.fetchone()

    async def get_opt_by_version_sequence(self, version: int, sequence: str) -> Optional[Row]:
        result = await self.execute(opts.select(and_(
            opts.c.version == version,
            opts.c.sequence == sequence,
        )))

        if result is None:
            return None
        return result.fetchone()

    async def get_opts_by_version(self, version: int) -> Optional[List[Row]]:
        result = await self.execute(opts.select(opts.c.version == version))

        if result is None:
            return None
        return result.fetchall()

    async def get_opts_enabled_by_version(self, version: int) -> Optional[List[Row]]:
        result = await self.execute(opts.select(and_(
            opts.c.version == version,
            opts.c.isEnable == True,
        )))

        if result is None:
            return None
        return result.fetchall()

    async def get_latest_enabled_opt_by_version(self, version: int) -> Optional[Row]:
        result = await self.execute(
            opts.select(and_(
                opts.c.version == version,
                opts.c.isEnable == True,
            )).order_by(opts.c.sequence.desc())
        )

        if result is None:
            return None
        return result.fetchone()
    async def get_opts(self) -> Optional[List[Row]]:
        result = await self.execute(opts.select())

        if result is None:
            return None
        return result.fetchall()

    async def set_opt_enabled(self, opt_id: int, enabled: bool) -> bool:
        result = await self.execute(opts.update(opts.c.id == opt_id).values(isEnable=enabled))

        if result is None:
            self.logger.error(f"Failed to set opt enabled status to {enabled} for opt {opt_id}")
            return False
        return True
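A brief usage sketch of the enable flag, assuming static_data is an OngekiStaticData instance and that an opt with database id 3 exists; both names are assumptions for illustration:

    # Hypothetical example: disable a broken opt folder so it is excluded
    # from the enabled-opt queries above.
    ok = await static_data.set_opt_enabled(opt_id=3, enabled=False)
    if ok:
        # Disabled opts no longer show up here, and the "latest" opt falls back
        # to the highest remaining sequence for that version.
        remaining = await static_data.get_opts_enabled_by_version(OngekiConstants.VER_ONGEKI_BRIGHT_MEMORY_ACT3)
        latest = await static_data.get_latest_enabled_opt_by_version(OngekiConstants.VER_ONGEKI_BRIGHT_MEMORY_ACT3)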
    async def cm_put_opt(self, version: int, folder: str, sequence: int, geki_ver: int, geki_seq: int, mai_ver: int, mai_seq: int) -> Optional[int]:
        sql = insert(cm_opts).values(
            version=version,
            name=folder,
            sequence=sequence,
            gekiVersion=geki_ver,
            gekiReleaseVer=geki_seq,
            maiVersion=mai_ver,
            maiReleaseVer=mai_seq,
        )

        conflict = sql.on_duplicate_key_update(
            sequence=sequence,
            gekiVersion=geki_ver,
            gekiReleaseVer=geki_seq,
            maiVersion=mai_ver,
            maiReleaseVer=mai_seq,
            whenRead=datetime.now()
        )

        result = await self.execute(conflict)
        if result is None:
            self.logger.warning(f"Failed to insert opt! version {version} folder {folder} sequence {sequence}")
            return None
        return result.lastrowid
    async def cm_get_opt_by_version_folder(self, version: int, folder: str) -> Optional[Row]:
        result = await self.execute(cm_opts.select(and_(
            cm_opts.c.version == version,
            cm_opts.c.name == folder,
        )))

        if result is None:
            return None
        return result.fetchone()