mirror of
https://github.com/283375/arcaea-offline.git
synced 2025-06-30 19:56:26 +00:00
Compare commits
41 Commits
2a2a063a3c
...
0.3.0-refa
Author | SHA1 | Date | |
---|---|---|---|
2b8b13ca95
|
|||
743bbe209f
|
|||
a680a6fd7d
|
|||
ebb649aef6
|
|||
9d7054d29a
|
|||
4ea49ebeda
|
|||
113e022967
|
|||
0fd7d3aa5e
|
|||
8e9c61829d
|
|||
d143632025
|
|||
6e8ac3dee7
|
|||
779fe0130e
|
|||
5ca9a5aaa3
|
|||
2377d233b1
|
|||
3b9609ee82
|
|||
e93904bb0d
|
|||
f19ac4d8d5
|
|||
96551c61ca
|
|||
d270636862
|
|||
f10c3648a7
|
|||
6fb24d4907
|
|||
eab2a3e520
|
|||
caced6eaec
|
|||
990efee900
|
|||
10c869846c
|
|||
d97ed91631
|
|||
5e996d35d2
|
|||
bfa1472b5c
|
|||
bb163ad78d
|
|||
864f524e68
|
|||
03696650ea
|
|||
4e799034d7
|
|||
b8136bf25f
|
|||
86d7a86700
|
|||
a32453b989
|
|||
d52d234adc
|
|||
88201e2ca4
|
|||
43be27bd4a
|
|||
1c114816c0
|
|||
f6e5f45579
|
|||
a27afca8a7
|
54
.github/workflows/main.yml
vendored
Normal file
54
.github/workflows/main.yml
vendored
Normal file
@ -0,0 +1,54 @@
|
||||
name: test & lint
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- "*"
|
||||
pull_request:
|
||||
types: [opened, reopened]
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
pytest:
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"]
|
||||
fail-fast: false
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
|
||||
- name: Install dev dependencies
|
||||
run: "pip install .[dev]"
|
||||
- name: Run tests
|
||||
run: "python -m pytest -v"
|
||||
|
||||
ruff:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: "3.12"
|
||||
|
||||
- name: Install dev dependencies
|
||||
run: "pip install .[dev]"
|
||||
- name: Run linter
|
||||
run: "ruff check"
|
||||
|
||||
pyright:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: "3.12"
|
||||
|
||||
- name: Install dev dependencies
|
||||
run: "pip install .[dev]"
|
||||
- name: Run pyright
|
||||
uses: jakebailey/pyright-action@v2
|
23
.github/workflows/test.yml
vendored
23
.github/workflows/test.yml
vendored
@ -1,23 +0,0 @@
|
||||
name: Run tests
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- 'master'
|
||||
pull_request:
|
||||
types: [opened, reopened]
|
||||
workflow_dispatch:
|
||||
jobs:
|
||||
pytest:
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
python-version: ['3.8', '3.9', '3.10', '3.11']
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
|
||||
- run: 'pip install -r requirements.dev.txt .'
|
||||
- run: 'pytest -v'
|
@ -4,11 +4,10 @@ repos:
|
||||
hooks:
|
||||
- id: end-of-file-fixer
|
||||
- id: trailing-whitespace
|
||||
- repo: https://github.com/psf/black
|
||||
rev: 23.1.0
|
||||
|
||||
- repo: https://github.com/astral-sh/ruff-pre-commit
|
||||
rev: v0.11.12
|
||||
hooks:
|
||||
- id: black
|
||||
- repo: https://github.com/PyCQA/isort
|
||||
rev: 5.12.0
|
||||
hooks:
|
||||
- id: isort
|
||||
- id: ruff
|
||||
args: ["--fix"]
|
||||
- id: ruff-format
|
||||
|
@ -4,38 +4,50 @@ build-backend = "setuptools.build_meta"
|
||||
|
||||
[project]
|
||||
name = "arcaea-offline"
|
||||
version = "0.2.2"
|
||||
version = "0.3.0a0.dev0"
|
||||
authors = [{ name = "283375", email = "log_283375@163.com" }]
|
||||
description = "Manage your local Arcaea score database."
|
||||
readme = "README.md"
|
||||
requires-python = ">=3.8"
|
||||
dependencies = [
|
||||
"SQLAlchemy==2.0.20",
|
||||
"SQLAlchemy-Utils==0.41.1",
|
||||
]
|
||||
dependencies = ["SQLAlchemy==2.0.20", "SQLAlchemy-Utils==0.41.1"]
|
||||
classifiers = [
|
||||
"Development Status :: 3 - Alpha",
|
||||
"Programming Language :: Python :: 3",
|
||||
]
|
||||
|
||||
[project.optional-dependencies]
|
||||
dev = ["ruff~=0.6.8", "pre-commit~=3.3", "pytest~=7.4", "tox~=4.11"]
|
||||
|
||||
[project.urls]
|
||||
"Homepage" = "https://github.com/283375/arcaea-offline"
|
||||
"Bug Tracker" = "https://github.com/283375/arcaea-offline/issues"
|
||||
|
||||
[tool.isort]
|
||||
profile = "black"
|
||||
src_paths = ["src/arcaea_offline"]
|
||||
|
||||
[tool.pyright]
|
||||
ignore = ["build/"]
|
||||
|
||||
[tool.pylint.main]
|
||||
jobs = 0
|
||||
|
||||
[tool.pylint.logging]
|
||||
disable = [
|
||||
"missing-module-docstring",
|
||||
"missing-class-docstring",
|
||||
"missing-function-docstring",
|
||||
"not-callable", # false positive to sqlalchemy `func.*`, remove this when pylint-dev/pylint(#8138) closed
|
||||
[tool.ruff.lint]
|
||||
# Full list: https://docs.astral.sh/ruff/rules
|
||||
select = [
|
||||
"E", # pycodestyle (Error)
|
||||
"W", # pycodestyle (Warning)
|
||||
"F", # pyflakes
|
||||
"I", # isort
|
||||
"PL", # pylint
|
||||
"N", # pep8-naming
|
||||
"FBT", # flake8-boolean-trap
|
||||
"A", # flake8-builtins
|
||||
"DTZ", # flake8-datetimez
|
||||
"LOG", # flake8-logging
|
||||
"Q", # flake8-quotes
|
||||
"G", # flake8-logging-format
|
||||
"PIE", # flake8-pie
|
||||
"PT", # flake8-pytest-style
|
||||
]
|
||||
ignore = [
|
||||
"E501", # line-too-long
|
||||
]
|
||||
|
||||
[tool.ruff.lint.per-file-ignores]
|
||||
"tests/*" = [
|
||||
"PLR2004", # magic-value-comparison
|
||||
]
|
||||
|
@ -1,6 +1,4 @@
|
||||
black==23.3.0
|
||||
isort==5.12.0
|
||||
pre-commit==3.3.1
|
||||
pylint==3.0.2
|
||||
pytest==7.4.3
|
||||
tox==4.11.3
|
||||
ruff~=0.6.8
|
||||
pre-commit~=3.3
|
||||
pytest~=7.4
|
||||
tox~=4.11
|
||||
|
@ -0,0 +1 @@
|
||||
DATABASE_VERSION = 5
|
||||
|
@ -1,2 +1,3 @@
|
||||
from . import world
|
||||
from .play_result import PlayResultCalculators
|
||||
|
||||
__all__ = ["PlayResultCalculators"]
|
||||
|
@ -7,7 +7,7 @@ from arcaea_offline.constants.play_result import ScoreLowerLimits
|
||||
|
||||
class PlayResultCalculators:
|
||||
@staticmethod
|
||||
def score_possible_range(notes: int, pure: int, far: int) -> tuple[int, int]:
|
||||
def score_possible_range(notes: int, pure: int, far: int) -> Tuple[int, int]:
|
||||
"""
|
||||
Returns the possible range of score based on the given values.
|
||||
|
||||
@ -44,9 +44,9 @@ class PlayResultCalculators:
|
||||
if score < 0:
|
||||
raise ValueError("score cannot be negative")
|
||||
|
||||
if score >= 10000000:
|
||||
if score >= ScoreLowerLimits.PM:
|
||||
return Decimal(2)
|
||||
if score >= 9800000:
|
||||
if score >= ScoreLowerLimits.EX:
|
||||
return Decimal(1) + (Decimal(score - 9800000) / 200000)
|
||||
return Decimal(score - 9500000) / 300000
|
||||
|
||||
|
@ -9,3 +9,17 @@ from .partners import (
|
||||
MayaPartnerBonus,
|
||||
MithraTerceraPartnerBonus,
|
||||
)
|
||||
|
||||
__all__ = [
|
||||
"AmaneBelowExPartnerBonus",
|
||||
"AwakenedEtoPartnerBonus",
|
||||
"AwakenedIlithPartnerBonus",
|
||||
"AwakenedLunaPartnerBonus",
|
||||
"LegacyMapStepBooster",
|
||||
"MayaPartnerBonus",
|
||||
"MemoriesStepBooster",
|
||||
"MithraTerceraPartnerBonus",
|
||||
"PartnerBonus",
|
||||
"WorldMainMapCalculators",
|
||||
"WorldPlayResult",
|
||||
]
|
||||
|
@ -5,6 +5,13 @@ from ._common import StepBooster
|
||||
|
||||
|
||||
class LegacyMapStepBooster(StepBooster):
|
||||
__fragment_boost_multipliers = {
|
||||
None: Decimal("1.0"),
|
||||
100: Decimal("1.1"),
|
||||
250: Decimal("1.25"),
|
||||
500: Decimal("1.5"),
|
||||
}
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
stamina: Literal[2, 4, 6],
|
||||
@ -35,11 +42,5 @@ class LegacyMapStepBooster(StepBooster):
|
||||
|
||||
def final_value(self) -> Decimal:
|
||||
stamina_multiplier = Decimal(self.stamina)
|
||||
fragments_multiplier = Decimal(1)
|
||||
if self.fragments == 100:
|
||||
fragments_multiplier = Decimal("1.1")
|
||||
elif self.fragments == 250:
|
||||
fragments_multiplier = Decimal("1.25")
|
||||
elif self.fragments == 500:
|
||||
fragments_multiplier = Decimal("1.5")
|
||||
fragments_multiplier = self.__fragment_boost_multipliers[self.fragments]
|
||||
return stamina_multiplier * fragments_multiplier
|
||||
|
@ -52,6 +52,4 @@ class WorldMainMapCalculators:
|
||||
play_rating_sqrt = (
|
||||
Decimal(50) * step - Decimal("2.5") * partner_step_value
|
||||
) / (Decimal("2.45") * partner_step_value)
|
||||
return (
|
||||
play_rating_sqrt**2 if play_rating_sqrt >= 0 else -(play_rating_sqrt**2)
|
||||
)
|
||||
return play_rating_sqrt**2 if play_rating_sqrt >= 0 else -(play_rating_sqrt**2)
|
||||
|
@ -13,6 +13,7 @@ class ArcaeaSongSide(IntEnum):
|
||||
LIGHT = 0
|
||||
CONFLICT = 1
|
||||
COLORLESS = 2
|
||||
LEPHON = 3
|
||||
|
||||
|
||||
class ArcaeaPlayResultModifier(IntEnum):
|
||||
@ -31,6 +32,7 @@ class ArcaeaPlayResultClearType(IntEnum):
|
||||
|
||||
|
||||
class ArcaeaLanguage(Enum):
|
||||
EN = "en"
|
||||
JA = "ja"
|
||||
KO = "ko"
|
||||
ZH_HANT = "zh-Hant"
|
||||
|
@ -3,6 +3,7 @@ from dataclasses import dataclass
|
||||
|
||||
@dataclass(frozen=True)
|
||||
class ScoreLowerLimits:
|
||||
PM = 10000000
|
||||
EX_PLUS = 9900000
|
||||
EX = 9800000
|
||||
AA = 9500000
|
||||
|
@ -1 +0,0 @@
|
||||
from .db import Database
|
||||
|
@ -1,432 +0,0 @@
|
||||
import logging
|
||||
import math
|
||||
from typing import Iterable, List, Optional, Type, Union
|
||||
|
||||
from sqlalchemy import Engine, func, inspect, select
|
||||
from sqlalchemy.orm import DeclarativeBase, InstrumentedAttribute, sessionmaker
|
||||
|
||||
from arcaea_offline.external.arcsong.arcsong_json import ArcSongJsonBuilder
|
||||
from arcaea_offline.external.exports import (
|
||||
ArcaeaOfflineDEFV2_Score,
|
||||
ScoreExport,
|
||||
exporters,
|
||||
)
|
||||
from arcaea_offline.singleton import Singleton
|
||||
|
||||
from .models.v4.config import ConfigBase, Property
|
||||
from .models.v4.scores import (
|
||||
CalculatedPotential,
|
||||
Score,
|
||||
ScoreBest,
|
||||
ScoreCalculated,
|
||||
ScoresBase,
|
||||
ScoresViewBase,
|
||||
)
|
||||
from .models.v4.songs import (
|
||||
Chart,
|
||||
ChartInfo,
|
||||
Difficulty,
|
||||
DifficultyLocalized,
|
||||
Pack,
|
||||
PackLocalized,
|
||||
Song,
|
||||
SongLocalized,
|
||||
SongsBase,
|
||||
SongsViewBase,
|
||||
)
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class Database(metaclass=Singleton):
|
||||
def __init__(self, engine: Optional[Engine]):
|
||||
try:
|
||||
self.__engine
|
||||
except AttributeError:
|
||||
self.__engine = None
|
||||
|
||||
if engine is None:
|
||||
if isinstance(self.engine, Engine):
|
||||
return
|
||||
raise ValueError("No sqlalchemy.Engine instance specified before.")
|
||||
|
||||
if not isinstance(engine, Engine):
|
||||
raise ValueError(
|
||||
f"A sqlalchemy.Engine instance expected, not {repr(engine)}"
|
||||
)
|
||||
|
||||
if isinstance(self.engine, Engine):
|
||||
logger.warning(
|
||||
"A sqlalchemy.Engine instance %r has been specified "
|
||||
"and will be replaced to %r",
|
||||
self.engine,
|
||||
engine,
|
||||
)
|
||||
self.engine = engine
|
||||
|
||||
@property
|
||||
def engine(self) -> Engine:
|
||||
return self.__engine # type: ignore
|
||||
|
||||
@engine.setter
|
||||
def engine(self, value: Engine):
|
||||
if not isinstance(value, Engine):
|
||||
raise ValueError("Database.engine only accepts sqlalchemy.Engine")
|
||||
self.__engine = value
|
||||
self.__sessionmaker = sessionmaker(self.__engine)
|
||||
|
||||
@property
|
||||
def sessionmaker(self):
|
||||
return self.__sessionmaker
|
||||
|
||||
# region init
|
||||
|
||||
def init(self, checkfirst: bool = True):
|
||||
# create tables & views
|
||||
if checkfirst:
|
||||
# > https://github.com/kvesteri/sqlalchemy-utils/issues/396
|
||||
# > view.create_view() causes DuplicateTableError on
|
||||
# > Base.metadata.create_all(checkfirst=True)
|
||||
# so if `checkfirst` is True, drop these views before creating
|
||||
SongsViewBase.metadata.drop_all(self.engine)
|
||||
ScoresViewBase.metadata.drop_all(self.engine)
|
||||
|
||||
SongsBase.metadata.create_all(self.engine, checkfirst=checkfirst)
|
||||
SongsViewBase.metadata.create_all(self.engine)
|
||||
ScoresBase.metadata.create_all(self.engine, checkfirst=checkfirst)
|
||||
ScoresViewBase.metadata.create_all(self.engine)
|
||||
ConfigBase.metadata.create_all(self.engine, checkfirst=checkfirst)
|
||||
|
||||
# insert version property
|
||||
with self.sessionmaker() as session:
|
||||
stmt = select(Property.value).where(Property.key == "version")
|
||||
result = session.execute(stmt).fetchone()
|
||||
if not checkfirst or not result:
|
||||
session.add(Property(key="version", value="4"))
|
||||
session.commit()
|
||||
|
||||
def check_init(self) -> bool:
|
||||
# check table exists
|
||||
expect_tables = (
|
||||
list(SongsBase.metadata.tables.keys())
|
||||
+ list(ScoresBase.metadata.tables.keys())
|
||||
+ list(ConfigBase.metadata.tables.keys())
|
||||
+ [
|
||||
Chart.__tablename__,
|
||||
ScoreCalculated.__tablename__,
|
||||
ScoreBest.__tablename__,
|
||||
CalculatedPotential.__tablename__,
|
||||
]
|
||||
)
|
||||
return all(inspect(self.engine).has_table(t) for t in expect_tables)
|
||||
|
||||
# endregion
|
||||
|
||||
def version(self) -> Union[int, None]:
|
||||
stmt = select(Property).where(Property.key == "version")
|
||||
with self.sessionmaker() as session:
|
||||
result = session.scalar(stmt)
|
||||
return None if result is None else int(result.value)
|
||||
|
||||
# region Pack
|
||||
|
||||
def get_packs(self):
|
||||
stmt = select(Pack)
|
||||
with self.sessionmaker() as session:
|
||||
results = list(session.scalars(stmt))
|
||||
return results
|
||||
|
||||
def get_pack(self, pack_id: str):
|
||||
stmt = select(Pack).where(Pack.id == pack_id)
|
||||
with self.sessionmaker() as session:
|
||||
result = session.scalar(stmt)
|
||||
return result
|
||||
|
||||
def get_pack_localized(self, pack_id: str):
|
||||
stmt = select(PackLocalized).where(PackLocalized.id == pack_id)
|
||||
with self.sessionmaker() as session:
|
||||
result = session.scalar(stmt)
|
||||
return result
|
||||
|
||||
# endregion
|
||||
|
||||
# region Song
|
||||
|
||||
def get_songs(self):
|
||||
stmt = select(Song)
|
||||
with self.sessionmaker() as session:
|
||||
results = list(session.scalars(stmt))
|
||||
return results
|
||||
|
||||
def get_songs_by_pack_id(self, pack_id: str):
|
||||
stmt = select(Song).where(Song.set == pack_id)
|
||||
with self.sessionmaker() as session:
|
||||
results = list(session.scalars(stmt))
|
||||
return results
|
||||
|
||||
def get_song(self, song_id: str):
|
||||
stmt = select(Song).where(Song.id == song_id)
|
||||
with self.sessionmaker() as session:
|
||||
result = session.scalar(stmt)
|
||||
return result
|
||||
|
||||
def get_song_localized(self, song_id: str):
|
||||
stmt = select(SongLocalized).where(SongLocalized.id == song_id)
|
||||
with self.sessionmaker() as session:
|
||||
result = session.scalar(stmt)
|
||||
return result
|
||||
|
||||
# endregion
|
||||
|
||||
# region Difficulty
|
||||
|
||||
def get_difficulties(self):
|
||||
stmt = select(Difficulty)
|
||||
with self.sessionmaker() as session:
|
||||
results = list(session.scalars(stmt))
|
||||
return results
|
||||
|
||||
def get_difficulties_by_song_id(self, song_id: str):
|
||||
stmt = select(Difficulty).where(Difficulty.song_id == song_id)
|
||||
with self.sessionmaker() as session:
|
||||
results = list(session.scalars(stmt))
|
||||
return results
|
||||
|
||||
def get_difficulties_localized_by_song_id(self, song_id: str):
|
||||
stmt = select(DifficultyLocalized).where(DifficultyLocalized.song_id == song_id)
|
||||
with self.sessionmaker() as session:
|
||||
results = list(session.scalars(stmt))
|
||||
return results
|
||||
|
||||
def get_difficulty(self, song_id: str, rating_class: int):
|
||||
stmt = select(Difficulty).where(
|
||||
(Difficulty.song_id == song_id) & (Difficulty.rating_class == rating_class)
|
||||
)
|
||||
with self.sessionmaker() as session:
|
||||
result = session.scalar(stmt)
|
||||
return result
|
||||
|
||||
def get_difficulty_localized(self, song_id: str, rating_class: int):
|
||||
stmt = select(DifficultyLocalized).where(
|
||||
(DifficultyLocalized.song_id == song_id)
|
||||
& (DifficultyLocalized.rating_class == rating_class)
|
||||
)
|
||||
with self.sessionmaker() as session:
|
||||
result = session.scalar(stmt)
|
||||
return result
|
||||
|
||||
# endregion
|
||||
|
||||
# region ChartInfo
|
||||
|
||||
def get_chart_infos(self):
|
||||
stmt = select(ChartInfo)
|
||||
with self.sessionmaker() as session:
|
||||
results = list(session.scalars(stmt))
|
||||
return results
|
||||
|
||||
def get_chart_infos_by_song_id(self, song_id: str):
|
||||
stmt = select(ChartInfo).where(ChartInfo.song_id == song_id)
|
||||
with self.sessionmaker() as session:
|
||||
results = list(session.scalars(stmt))
|
||||
return results
|
||||
|
||||
def get_chart_info(self, song_id: str, rating_class: int):
|
||||
stmt = select(ChartInfo).where(
|
||||
(ChartInfo.song_id == song_id) & (ChartInfo.rating_class == rating_class)
|
||||
)
|
||||
with self.sessionmaker() as session:
|
||||
result = session.scalar(stmt)
|
||||
return result
|
||||
|
||||
# endregion
|
||||
|
||||
# region Chart
|
||||
|
||||
def get_charts_by_pack_id(self, pack_id: str):
|
||||
stmt = select(Chart).where(Chart.set == pack_id)
|
||||
with self.sessionmaker() as session:
|
||||
results = list(session.scalars(stmt))
|
||||
return results
|
||||
|
||||
def get_charts_by_song_id(self, song_id: str):
|
||||
stmt = select(Chart).where(Chart.song_id == song_id)
|
||||
with self.sessionmaker() as session:
|
||||
results = list(session.scalars(stmt))
|
||||
return results
|
||||
|
||||
def get_charts_by_constant(self, constant: int):
|
||||
stmt = select(Chart).where(Chart.constant == constant)
|
||||
with self.sessionmaker() as session:
|
||||
results = list(session.scalars(stmt))
|
||||
return results
|
||||
|
||||
def get_chart(self, song_id: str, rating_class: int):
|
||||
stmt = select(Chart).where(
|
||||
(Chart.song_id == song_id) & (Chart.rating_class == rating_class)
|
||||
)
|
||||
with self.sessionmaker() as session:
|
||||
result = session.scalar(stmt)
|
||||
return result
|
||||
|
||||
# endregion
|
||||
|
||||
# region Score
|
||||
|
||||
def get_scores(self):
|
||||
stmt = select(Score)
|
||||
with self.sessionmaker() as session:
|
||||
results = list(session.scalars(stmt))
|
||||
return results
|
||||
|
||||
def get_score(self, score_id: int):
|
||||
stmt = select(Score).where(Score.id == score_id)
|
||||
with self.sessionmaker() as session:
|
||||
result = session.scalar(stmt)
|
||||
return result
|
||||
|
||||
def get_score_best(self, song_id: str, rating_class: int):
|
||||
stmt = select(ScoreBest).where(
|
||||
(ScoreBest.song_id == song_id) & (ScoreBest.rating_class == rating_class)
|
||||
)
|
||||
with self.sessionmaker() as session:
|
||||
result = session.scalar(stmt)
|
||||
return result
|
||||
|
||||
def insert_score(self, score: Score):
|
||||
with self.sessionmaker() as session:
|
||||
session.add(score)
|
||||
session.commit()
|
||||
|
||||
def insert_scores(self, scores: Iterable[Score]):
|
||||
with self.sessionmaker() as session:
|
||||
session.add_all(scores)
|
||||
session.commit()
|
||||
|
||||
def update_score(self, score: Score):
|
||||
if score.id is None:
|
||||
raise ValueError(
|
||||
"Cannot determine which score to update, please specify `score.id`"
|
||||
)
|
||||
with self.sessionmaker() as session:
|
||||
session.merge(score)
|
||||
session.commit()
|
||||
|
||||
def delete_score(self, score: Score):
|
||||
with self.sessionmaker() as session:
|
||||
session.delete(score)
|
||||
session.commit()
|
||||
|
||||
def recommend_charts(self, play_result: float, bounds: float = 0.1):
|
||||
base_constant = math.ceil(play_result * 10)
|
||||
|
||||
results = []
|
||||
results_id = []
|
||||
with self.sessionmaker() as session:
|
||||
for constant in range(base_constant - 20, base_constant + 1):
|
||||
# from Pure Memory(EX+) to AA
|
||||
score_modifier = (play_result * 10 - constant) / 10
|
||||
if score_modifier >= 2.0:
|
||||
min_score = 10000000
|
||||
elif score_modifier >= 1.0:
|
||||
min_score = 200000 * (score_modifier - 1) + 9800000
|
||||
else:
|
||||
min_score = 300000 * score_modifier + 9500000
|
||||
min_score = int(min_score)
|
||||
|
||||
charts = self.get_charts_by_constant(constant)
|
||||
for chart in charts:
|
||||
score_best_stmt = select(ScoreBest).where(
|
||||
(ScoreBest.song_id == chart.song_id)
|
||||
& (ScoreBest.rating_class == chart.rating_class)
|
||||
& (ScoreBest.score >= min_score)
|
||||
& (play_result - bounds < ScoreBest.potential)
|
||||
& (ScoreBest.potential < play_result + bounds)
|
||||
)
|
||||
if session.scalar(score_best_stmt):
|
||||
chart_id = f"{chart.song_id},{chart.rating_class}"
|
||||
if chart_id not in results_id:
|
||||
results.append(chart)
|
||||
results_id.append(chart_id)
|
||||
|
||||
return results
|
||||
|
||||
# endregion
|
||||
|
||||
def get_b30(self):
|
||||
stmt = select(CalculatedPotential.b30).select_from(CalculatedPotential)
|
||||
with self.sessionmaker() as session:
|
||||
result = session.scalar(stmt)
|
||||
return result
|
||||
|
||||
# region COUNT
|
||||
|
||||
def __count_table(self, base: Type[DeclarativeBase]):
|
||||
stmt = select(func.count()).select_from(base)
|
||||
with self.sessionmaker() as session:
|
||||
result = session.scalar(stmt)
|
||||
return result or 0
|
||||
|
||||
def __count_column(self, column: InstrumentedAttribute):
|
||||
stmt = select(func.count(column))
|
||||
with self.sessionmaker() as session:
|
||||
result = session.scalar(stmt)
|
||||
return result or 0
|
||||
|
||||
def count_packs(self):
|
||||
return self.__count_column(Pack.id)
|
||||
|
||||
def count_songs(self):
|
||||
return self.__count_column(Song.id)
|
||||
|
||||
def count_difficulties(self):
|
||||
return self.__count_table(Difficulty)
|
||||
|
||||
def count_chart_infos(self):
|
||||
return self.__count_table(ChartInfo)
|
||||
|
||||
def count_complete_chart_infos(self):
|
||||
stmt = (
|
||||
select(func.count())
|
||||
.select_from(ChartInfo)
|
||||
.where((ChartInfo.constant != None) & (ChartInfo.notes != None))
|
||||
)
|
||||
with self.sessionmaker() as session:
|
||||
result = session.scalar(stmt)
|
||||
return result or 0
|
||||
|
||||
def count_charts(self):
|
||||
return self.__count_table(Chart)
|
||||
|
||||
def count_scores(self):
|
||||
return self.__count_column(Score.id)
|
||||
|
||||
def count_scores_calculated(self):
|
||||
return self.__count_table(ScoreCalculated)
|
||||
|
||||
def count_scores_best(self):
|
||||
return self.__count_table(ScoreBest)
|
||||
|
||||
# endregion
|
||||
|
||||
# region export
|
||||
|
||||
def export_scores(self) -> List[ScoreExport]:
|
||||
scores = self.get_scores()
|
||||
return [exporters.score(score) for score in scores]
|
||||
|
||||
def export_scores_def_v2(self) -> ArcaeaOfflineDEFV2_Score:
|
||||
scores = self.get_scores()
|
||||
return {
|
||||
"$schema": "https://arcaeaoffline.sevive.xyz/schemas/def/v2/score.schema.json",
|
||||
"type": "score",
|
||||
"version": 2,
|
||||
"scores": [exporters.score_def_v2(score) for score in scores],
|
||||
}
|
||||
|
||||
def generate_arcsong(self):
|
||||
with self.sessionmaker() as session:
|
||||
arcsong = ArcSongJsonBuilder(session).generate_arcsong_json()
|
||||
return arcsong
|
||||
|
||||
# endregion
|
1
src/arcaea_offline/database/migrations/README.md
Normal file
1
src/arcaea_offline/database/migrations/README.md
Normal file
@ -0,0 +1 @@
|
||||
Generic single-database configuration.
|
0
src/arcaea_offline/database/migrations/__init__.py
Normal file
0
src/arcaea_offline/database/migrations/__init__.py
Normal file
82
src/arcaea_offline/database/migrations/env.py
Normal file
82
src/arcaea_offline/database/migrations/env.py
Normal file
@ -0,0 +1,82 @@
|
||||
from logging.config import fileConfig
|
||||
|
||||
from alembic import context
|
||||
from sqlalchemy import engine_from_config, pool
|
||||
|
||||
from arcaea_offline.database.models._base import ModelBase
|
||||
|
||||
# this is the Alembic Config object, which provides
|
||||
# access to the values within the .ini file in use.
|
||||
config = context.config
|
||||
|
||||
# Interpret the config file for Python logging.
|
||||
# This line sets up loggers basically.
|
||||
if config.config_file_name is not None:
|
||||
fileConfig(config.config_file_name)
|
||||
|
||||
# add your model's MetaData object here
|
||||
# for 'autogenerate' support
|
||||
# from myapp import mymodel
|
||||
# target_metadata = mymodel.Base.metadata
|
||||
target_metadata = [ModelBase.metadata]
|
||||
|
||||
# other values from the config, defined by the needs of env.py,
|
||||
# can be acquired:
|
||||
# my_important_option = config.get_main_option("my_important_option")
|
||||
# ... etc.
|
||||
|
||||
|
||||
def run_migrations_offline() -> None:
|
||||
"""Run migrations in 'offline' mode.
|
||||
|
||||
This configures the context with just a URL
|
||||
and not an Engine, though an Engine is acceptable
|
||||
here as well. By skipping the Engine creation
|
||||
we don't even need a DBAPI to be available.
|
||||
|
||||
Calls to context.execute() here emit the given string to the
|
||||
script output.
|
||||
|
||||
"""
|
||||
url = config.get_main_option("sqlalchemy.url")
|
||||
context.configure(
|
||||
url=url,
|
||||
target_metadata=target_metadata,
|
||||
literal_binds=True,
|
||||
dialect_opts={"paramstyle": "named"},
|
||||
render_as_batch=True,
|
||||
)
|
||||
|
||||
with context.begin_transaction():
|
||||
context.run_migrations()
|
||||
|
||||
|
||||
def run_migrations_online() -> None:
|
||||
"""Run migrations in 'online' mode.
|
||||
|
||||
In this scenario we need to create an Engine
|
||||
and associate a connection with the context.
|
||||
|
||||
"""
|
||||
connectable = engine_from_config(
|
||||
config.get_section(config.config_ini_section, {}),
|
||||
prefix="sqlalchemy.",
|
||||
poolclass=pool.NullPool,
|
||||
)
|
||||
|
||||
with connectable.connect() as connection:
|
||||
context.configure(
|
||||
connection=connection,
|
||||
target_metadata=target_metadata,
|
||||
render_as_batch=True,
|
||||
transaction_per_migration=True,
|
||||
)
|
||||
|
||||
with context.begin_transaction():
|
||||
context.run_migrations()
|
||||
|
||||
|
||||
if context.is_offline_mode():
|
||||
run_migrations_offline()
|
||||
else:
|
||||
run_migrations_online()
|
28
src/arcaea_offline/database/migrations/legacies/v5.py
Normal file
28
src/arcaea_offline/database/migrations/legacies/v5.py
Normal file
@ -0,0 +1,28 @@
|
||||
from datetime import datetime, timezone
|
||||
from typing import Optional
|
||||
|
||||
from sqlalchemy import DateTime
|
||||
from sqlalchemy.types import TypeDecorator
|
||||
|
||||
|
||||
class ForceTimezoneDateTime(TypeDecorator):
|
||||
"""
|
||||
Store timezone aware timestamps as timezone naive UTC
|
||||
|
||||
https://docs.sqlalchemy.org/en/20/core/custom_types.html#store-timezone-aware-timestamps-as-timezone-naive-utc
|
||||
"""
|
||||
|
||||
impl = DateTime
|
||||
cache_ok = True
|
||||
|
||||
def process_bind_param(self, value: Optional[datetime], dialect):
|
||||
if value is not None:
|
||||
if not value.tzinfo or value.tzinfo.utcoffset(value) is None:
|
||||
raise TypeError("datetime tzinfo is required")
|
||||
value = value.astimezone(timezone.utc).replace(tzinfo=None)
|
||||
return value
|
||||
|
||||
def process_result_value(self, value: Optional[datetime], dialect):
|
||||
if value is not None:
|
||||
value = value.replace(tzinfo=timezone.utc)
|
||||
return value
|
26
src/arcaea_offline/database/migrations/script.py.mako
Normal file
26
src/arcaea_offline/database/migrations/script.py.mako
Normal file
@ -0,0 +1,26 @@
|
||||
"""${message}
|
||||
|
||||
Revision ID: ${up_revision}
|
||||
Revises: ${down_revision | comma,n}
|
||||
Create Date: ${create_date}
|
||||
|
||||
"""
|
||||
from typing import Sequence, Union
|
||||
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
${imports if imports else ""}
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision: str = ${repr(up_revision)}
|
||||
down_revision: Union[str, None] = ${repr(down_revision)}
|
||||
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
|
||||
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
|
||||
|
||||
|
||||
def upgrade() -> None:
|
||||
${upgrades if upgrades else "pass"}
|
||||
|
||||
|
||||
def downgrade() -> None:
|
||||
${downgrades if downgrades else "pass"}
|
@ -0,0 +1,506 @@
|
||||
"""v4 to v5
|
||||
|
||||
Revision ID: 0ca6733e40dc
|
||||
Revises: a3f9d48b7de3
|
||||
Create Date: 2025-05-31 11:38:25.575124
|
||||
|
||||
"""
|
||||
|
||||
from datetime import datetime, timezone
|
||||
from typing import Any, Sequence, Union
|
||||
from uuid import uuid4
|
||||
|
||||
import sqlalchemy as sa
|
||||
from alembic import context, op
|
||||
|
||||
from arcaea_offline.database.migrations.legacies.v5 import ForceTimezoneDateTime
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision: str = "0ca6733e40dc"
|
||||
down_revision: Union[str, None] = "a3f9d48b7de3"
|
||||
branch_labels: Union[str, Sequence[str], None] = None
|
||||
depends_on: Union[str, Sequence[str], None] = None
|
||||
|
||||
|
||||
def upgrade(
|
||||
*,
|
||||
data_migration: bool = True,
|
||||
data_migration_options: Any = None,
|
||||
) -> None:
|
||||
op.create_table(
|
||||
"property",
|
||||
sa.Column("key", sa.String(), nullable=False),
|
||||
sa.Column("value", sa.String(), nullable=False),
|
||||
sa.PrimaryKeyConstraint("key", name=op.f("pk_property")),
|
||||
)
|
||||
|
||||
op.create_table(
|
||||
"pack",
|
||||
sa.Column("id", sa.String(), nullable=False),
|
||||
sa.Column("name", sa.String(), nullable=True),
|
||||
sa.Column("description", sa.Text(), nullable=True),
|
||||
sa.Column("section", sa.String(), nullable=True),
|
||||
sa.Column(
|
||||
"is_world_extend", sa.Boolean(), server_default=sa.text("0"), nullable=False
|
||||
),
|
||||
sa.Column("plus_character", sa.Integer(), nullable=True),
|
||||
sa.Column("append_parent_id", sa.String(), nullable=True),
|
||||
sa.ForeignKeyConstraint(
|
||||
["append_parent_id"],
|
||||
["pack.id"],
|
||||
name=op.f("fk_pack_append_parent_id_pack"),
|
||||
onupdate="CASCADE",
|
||||
ondelete="CASCADE",
|
||||
),
|
||||
sa.PrimaryKeyConstraint("id", name=op.f("pk_pack")),
|
||||
)
|
||||
with op.batch_alter_table("pack", schema=None) as batch_op:
|
||||
batch_op.create_index(batch_op.f("ix_pack_name"), ["name"], unique=False)
|
||||
|
||||
op.create_table(
|
||||
"pack_localization",
|
||||
sa.Column("id", sa.String(), nullable=False),
|
||||
sa.Column("lang", sa.String(), nullable=False),
|
||||
sa.Column("name", sa.String(), nullable=True),
|
||||
sa.Column("description", sa.Text(), nullable=True),
|
||||
sa.ForeignKeyConstraint(
|
||||
["id"],
|
||||
["pack.id"],
|
||||
name=op.f("fk_pack_localization_id_pack"),
|
||||
onupdate="CASCADE",
|
||||
ondelete="CASCADE",
|
||||
),
|
||||
sa.PrimaryKeyConstraint("id", "lang", name=op.f("pk_pack_localization")),
|
||||
)
|
||||
op.create_table(
|
||||
"song",
|
||||
sa.Column("pack_id", sa.String(), nullable=False),
|
||||
sa.Column("id", sa.String(), nullable=False),
|
||||
sa.Column("idx", sa.Integer(), nullable=True),
|
||||
sa.Column("title", sa.String(), nullable=True),
|
||||
sa.Column("artist", sa.String(), nullable=True),
|
||||
sa.Column(
|
||||
"is_deleted", sa.Boolean(), server_default=sa.text("0"), nullable=False
|
||||
),
|
||||
sa.Column("added_at", ForceTimezoneDateTime(), nullable=False),
|
||||
sa.Column("version", sa.String(), nullable=True),
|
||||
sa.Column("bpm", sa.String(), nullable=True),
|
||||
sa.Column("bpm_base", sa.Numeric(), nullable=True),
|
||||
sa.Column(
|
||||
"is_remote", sa.Boolean(), server_default=sa.text("0"), nullable=False
|
||||
),
|
||||
sa.Column(
|
||||
"is_unlockable_in_world",
|
||||
sa.Boolean(),
|
||||
server_default=sa.text("0"),
|
||||
nullable=False,
|
||||
),
|
||||
sa.Column(
|
||||
"is_beyond_unlock_state_local",
|
||||
sa.Boolean(),
|
||||
server_default=sa.text("0"),
|
||||
nullable=False,
|
||||
),
|
||||
sa.Column("purchase", sa.String(), nullable=True),
|
||||
sa.Column("category", sa.String(), nullable=True),
|
||||
sa.Column("side", sa.Integer(), nullable=True),
|
||||
sa.Column("bg", sa.String(), nullable=True),
|
||||
sa.Column("bg_inverse", sa.String(), nullable=True),
|
||||
sa.Column("bg_day", sa.String(), nullable=True),
|
||||
sa.Column("bg_night", sa.String(), nullable=True),
|
||||
sa.Column("source", sa.String(), nullable=True),
|
||||
sa.Column("source_copyright", sa.String(), nullable=True),
|
||||
sa.ForeignKeyConstraint(
|
||||
["pack_id"],
|
||||
["pack.id"],
|
||||
name=op.f("fk_song_pack_id_pack"),
|
||||
onupdate="CASCADE",
|
||||
ondelete="CASCADE",
|
||||
),
|
||||
sa.PrimaryKeyConstraint("id", name=op.f("pk_song")),
|
||||
)
|
||||
with op.batch_alter_table("song", schema=None) as batch_op:
|
||||
batch_op.create_index(
|
||||
batch_op.f("ix_song_added_at"), ["added_at"], unique=False
|
||||
)
|
||||
batch_op.create_index(batch_op.f("ix_song_artist"), ["artist"], unique=False)
|
||||
batch_op.create_index(batch_op.f("ix_song_title"), ["title"], unique=False)
|
||||
|
||||
op.create_table(
|
||||
"difficulty",
|
||||
sa.Column("song_id", sa.String(), nullable=False),
|
||||
sa.Column("rating_class", sa.Integer(), nullable=False),
|
||||
sa.Column("rating", sa.Integer(), nullable=False),
|
||||
sa.Column(
|
||||
"is_rating_plus", sa.Boolean(), server_default=sa.text("0"), nullable=False
|
||||
),
|
||||
sa.Column("chart_designer", sa.String(), nullable=True),
|
||||
sa.Column("jacket_designer", sa.String(), nullable=True),
|
||||
sa.Column(
|
||||
"has_overriding_audio",
|
||||
sa.Boolean(),
|
||||
server_default=sa.text("0"),
|
||||
nullable=False,
|
||||
),
|
||||
sa.Column(
|
||||
"has_overriding_jacket",
|
||||
sa.Boolean(),
|
||||
server_default=sa.text("0"),
|
||||
nullable=False,
|
||||
),
|
||||
sa.Column("jacket_night", sa.String(), nullable=True),
|
||||
sa.Column("title", sa.String(), nullable=True),
|
||||
sa.Column("artist", sa.String(), nullable=True),
|
||||
sa.Column("bg", sa.String(), nullable=True),
|
||||
sa.Column("bg_inverse", sa.String(), nullable=True),
|
||||
sa.Column("bpm", sa.String(), nullable=True),
|
||||
sa.Column("bpm_base", sa.Numeric(), nullable=True),
|
||||
sa.Column("added_at", ForceTimezoneDateTime(), nullable=True),
|
||||
sa.Column("version", sa.String(), nullable=True),
|
||||
sa.Column(
|
||||
"is_legacy11", sa.Boolean(), server_default=sa.text("0"), nullable=False
|
||||
),
|
||||
sa.ForeignKeyConstraint(
|
||||
["song_id"],
|
||||
["song.id"],
|
||||
name=op.f("fk_difficulty_song_id_song"),
|
||||
onupdate="CASCADE",
|
||||
ondelete="CASCADE",
|
||||
),
|
||||
sa.PrimaryKeyConstraint("song_id", "rating_class", name=op.f("pk_difficulty")),
|
||||
)
|
||||
op.create_table(
|
||||
"song_localization",
|
||||
sa.Column("id", sa.String(), nullable=False),
|
||||
sa.Column("lang", sa.String(), nullable=False),
|
||||
sa.Column("title", sa.String(), nullable=True),
|
||||
sa.Column("source", sa.String(), nullable=True),
|
||||
sa.Column(
|
||||
"has_jacket", sa.Boolean(), server_default=sa.text("0"), nullable=False
|
||||
),
|
||||
sa.ForeignKeyConstraint(
|
||||
["id"],
|
||||
["song.id"],
|
||||
name=op.f("fk_song_localization_id_song"),
|
||||
onupdate="CASCADE",
|
||||
ondelete="CASCADE",
|
||||
),
|
||||
sa.PrimaryKeyConstraint("id", "lang", name=op.f("pk_song_localization")),
|
||||
)
|
||||
op.create_table(
|
||||
"chart_info",
|
||||
sa.Column("song_id", sa.String(), nullable=False),
|
||||
sa.Column("rating_class", sa.Integer(), nullable=False),
|
||||
sa.Column("constant", sa.Numeric(), nullable=False),
|
||||
sa.Column("notes", sa.Integer(), nullable=False),
|
||||
sa.Column(
|
||||
"added_at",
|
||||
ForceTimezoneDateTime(),
|
||||
nullable=False,
|
||||
),
|
||||
sa.Column("version", sa.String(), nullable=True),
|
||||
sa.ForeignKeyConstraint(
|
||||
["song_id", "rating_class"],
|
||||
["difficulty.song_id", "difficulty.rating_class"],
|
||||
name=op.f("fk_chart_info_song_id_difficulty"),
|
||||
onupdate="CASCADE",
|
||||
ondelete="CASCADE",
|
||||
),
|
||||
sa.PrimaryKeyConstraint(
|
||||
"song_id", "rating_class", "added_at", name=op.f("pk_chart_info")
|
||||
),
|
||||
)
|
||||
op.create_table(
|
||||
"difficulty_localization",
|
||||
sa.Column("song_id", sa.String(), nullable=False),
|
||||
sa.Column("rating_class", sa.Integer(), nullable=False),
|
||||
sa.Column("lang", sa.String(), nullable=False),
|
||||
sa.Column("title", sa.String(), nullable=True),
|
||||
sa.ForeignKeyConstraint(
|
||||
["song_id", "rating_class"],
|
||||
["difficulty.song_id", "difficulty.rating_class"],
|
||||
name=op.f("fk_difficulty_localization_song_id_difficulty"),
|
||||
onupdate="CASCADE",
|
||||
ondelete="CASCADE",
|
||||
),
|
||||
sa.PrimaryKeyConstraint(
|
||||
"song_id", "rating_class", "lang", name=op.f("pk_difficulty_localization")
|
||||
),
|
||||
)
|
||||
|
||||
op.drop_table("properties")
|
||||
op.drop_table("packs")
|
||||
op.drop_table("packs_localized")
|
||||
op.drop_table("difficulties")
|
||||
op.drop_table("songs")
|
||||
op.drop_table("songs_localized")
|
||||
op.drop_table("charts")
|
||||
op.drop_table("charts_info")
|
||||
op.drop_table("difficulties_localized")
|
||||
|
||||
op.rename_table("scores", "scores_old")
|
||||
play_result_tbl = op.create_table(
|
||||
"play_result",
|
||||
sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
|
||||
sa.Column("uuid", sa.Uuid(), nullable=False),
|
||||
sa.Column("song_id", sa.String(), nullable=False),
|
||||
sa.Column("rating_class", sa.Integer(), nullable=False),
|
||||
sa.Column("played_at", ForceTimezoneDateTime(), nullable=True),
|
||||
sa.Column("score", sa.Integer(), nullable=False),
|
||||
sa.Column("pure", sa.Integer(), nullable=True),
|
||||
sa.Column("pure_early", sa.Integer(), nullable=True),
|
||||
sa.Column("pure_late", sa.Integer(), nullable=True),
|
||||
sa.Column("far", sa.Integer(), nullable=True),
|
||||
sa.Column("far_early", sa.Integer(), nullable=True),
|
||||
sa.Column("far_late", sa.Integer(), nullable=True),
|
||||
sa.Column("lost", sa.Integer(), nullable=True),
|
||||
sa.Column("max_recall", sa.Integer(), nullable=True),
|
||||
sa.Column("clear_type", sa.Integer(), nullable=True),
|
||||
sa.Column("modifier", sa.Integer(), nullable=True),
|
||||
sa.Column("comment", sa.Text(), nullable=True),
|
||||
sa.PrimaryKeyConstraint("id", name=op.f("pk_play_result")),
|
||||
sa.UniqueConstraint("uuid", name=op.f("uq_play_result_uuid")),
|
||||
)
|
||||
|
||||
if data_migration:
|
||||
conn = op.get_bind()
|
||||
query = conn.execute(
|
||||
sa.text(
|
||||
"SELECT id, song_id, rating_class, score, pure, far, lost, "
|
||||
" `date`, max_recall, modifier, clear_type, comment "
|
||||
"FROM scores_old"
|
||||
)
|
||||
)
|
||||
batch_size = 30
|
||||
|
||||
while True:
|
||||
rows = query.fetchmany(batch_size)
|
||||
if not rows:
|
||||
break
|
||||
|
||||
rows_to_insert = []
|
||||
for row in rows:
|
||||
result = row._asdict()
|
||||
|
||||
date = result.pop("date")
|
||||
result["uuid"] = uuid4()
|
||||
result["played_at"] = (
|
||||
datetime.fromtimestamp(date, tz=timezone.utc)
|
||||
if date is not None
|
||||
else None
|
||||
)
|
||||
rows_to_insert.append(result)
|
||||
|
||||
conn.execute(sa.insert(play_result_tbl), rows_to_insert)
|
||||
|
||||
op.drop_table("scores_old")
|
||||
|
||||
|
||||
def downgrade() -> None:
|
||||
raise NotImplementedError(
|
||||
f"Downgrade not supported! ({context.get_context().get_current_revision()})"
|
||||
)
|
||||
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.create_table(
|
||||
"difficulties_localized",
|
||||
sa.Column("song_id", sa.TEXT(), nullable=False),
|
||||
sa.Column("rating_class", sa.INTEGER(), nullable=False),
|
||||
sa.Column("title_ja", sa.TEXT(), nullable=True),
|
||||
sa.Column("title_ko", sa.TEXT(), nullable=True),
|
||||
sa.Column("title_zh_hans", sa.TEXT(), nullable=True),
|
||||
sa.Column("title_zh_hant", sa.TEXT(), nullable=True),
|
||||
sa.Column("artist_ja", sa.TEXT(), nullable=True),
|
||||
sa.Column("artist_ko", sa.TEXT(), nullable=True),
|
||||
sa.Column("artist_zh_hans", sa.TEXT(), nullable=True),
|
||||
sa.Column("artist_zh_hant", sa.TEXT(), nullable=True),
|
||||
sa.ForeignKeyConstraint(
|
||||
["rating_class"],
|
||||
["difficulties.rating_class"],
|
||||
name=op.f("fk_difficulties_localized_rating_class_difficulties"),
|
||||
),
|
||||
sa.ForeignKeyConstraint(
|
||||
["song_id"],
|
||||
["difficulties.song_id"],
|
||||
name=op.f("fk_difficulties_localized_song_id_difficulties"),
|
||||
),
|
||||
sa.PrimaryKeyConstraint(
|
||||
"song_id", "rating_class", name=op.f("pk_difficulties_localized")
|
||||
),
|
||||
)
|
||||
op.create_table(
|
||||
"charts_info",
|
||||
sa.Column("song_id", sa.TEXT(), nullable=False),
|
||||
sa.Column("rating_class", sa.INTEGER(), nullable=False),
|
||||
sa.Column("constant", sa.INTEGER(), nullable=False),
|
||||
sa.Column("notes", sa.INTEGER(), nullable=True),
|
||||
sa.ForeignKeyConstraint(
|
||||
["rating_class"],
|
||||
["difficulties.rating_class"],
|
||||
name=op.f("fk_charts_info_rating_class_difficulties"),
|
||||
),
|
||||
sa.ForeignKeyConstraint(
|
||||
["song_id"],
|
||||
["difficulties.song_id"],
|
||||
name=op.f("fk_charts_info_song_id_difficulties"),
|
||||
),
|
||||
sa.PrimaryKeyConstraint("song_id", "rating_class", name=op.f("pk_charts_info")),
|
||||
)
|
||||
op.create_table(
|
||||
"charts",
|
||||
sa.Column("song_id", sa.TEXT(), nullable=False),
|
||||
sa.Column("rating_class", sa.INTEGER(), nullable=False),
|
||||
sa.Column("name_en", sa.TEXT(), nullable=False),
|
||||
sa.Column("name_jp", sa.TEXT(), nullable=True),
|
||||
sa.Column("artist", sa.TEXT(), nullable=False),
|
||||
sa.Column("bpm", sa.TEXT(), nullable=False),
|
||||
sa.Column("bpm_base", sa.REAL(), nullable=False),
|
||||
sa.Column("package_id", sa.TEXT(), nullable=False),
|
||||
sa.Column("time", sa.INTEGER(), nullable=True),
|
||||
sa.Column("side", sa.INTEGER(), nullable=False),
|
||||
sa.Column("world_unlock", sa.BOOLEAN(), nullable=False),
|
||||
sa.Column("remote_download", sa.BOOLEAN(), nullable=True),
|
||||
sa.Column("bg", sa.TEXT(), nullable=False),
|
||||
sa.Column("date", sa.INTEGER(), nullable=False),
|
||||
sa.Column("version", sa.TEXT(), nullable=False),
|
||||
sa.Column("difficulty", sa.INTEGER(), nullable=False),
|
||||
sa.Column("rating", sa.INTEGER(), nullable=False),
|
||||
sa.Column("note", sa.INTEGER(), nullable=False),
|
||||
sa.Column("chart_designer", sa.TEXT(), nullable=True),
|
||||
sa.Column("jacket_designer", sa.TEXT(), nullable=True),
|
||||
sa.Column("jacket_override", sa.BOOLEAN(), nullable=False),
|
||||
sa.Column("audio_override", sa.BOOLEAN(), nullable=False),
|
||||
sa.PrimaryKeyConstraint("song_id", "rating_class"),
|
||||
)
|
||||
op.create_table(
|
||||
"songs_localized",
|
||||
sa.Column("id", sa.TEXT(), nullable=False),
|
||||
sa.Column("title_ja", sa.TEXT(), nullable=True),
|
||||
sa.Column("title_ko", sa.TEXT(), nullable=True),
|
||||
sa.Column("title_zh_hans", sa.TEXT(), nullable=True),
|
||||
sa.Column("title_zh_hant", sa.TEXT(), nullable=True),
|
||||
sa.Column("search_title_ja", sa.TEXT(), nullable=True),
|
||||
sa.Column("search_title_ko", sa.TEXT(), nullable=True),
|
||||
sa.Column("search_title_zh_hans", sa.TEXT(), nullable=True),
|
||||
sa.Column("search_title_zh_hant", sa.TEXT(), nullable=True),
|
||||
sa.Column("search_artist_ja", sa.TEXT(), nullable=True),
|
||||
sa.Column("search_artist_ko", sa.TEXT(), nullable=True),
|
||||
sa.Column("search_artist_zh_hans", sa.TEXT(), nullable=True),
|
||||
sa.Column("search_artist_zh_hant", sa.TEXT(), nullable=True),
|
||||
sa.Column("source_ja", sa.TEXT(), nullable=True),
|
||||
sa.Column("source_ko", sa.TEXT(), nullable=True),
|
||||
sa.Column("source_zh_hans", sa.TEXT(), nullable=True),
|
||||
sa.Column("source_zh_hant", sa.TEXT(), nullable=True),
|
||||
sa.ForeignKeyConstraint(
|
||||
["id"], ["songs.id"], name=op.f("fk_songs_localized_id_songs")
|
||||
),
|
||||
sa.PrimaryKeyConstraint("id", name=op.f("pk_songs_localized")),
|
||||
)
|
||||
op.create_table(
|
||||
"packs",
|
||||
sa.Column("id", sa.TEXT(), nullable=False),
|
||||
sa.Column("name", sa.TEXT(), nullable=False),
|
||||
sa.Column("description", sa.TEXT(), nullable=True),
|
||||
sa.PrimaryKeyConstraint("id", name="fk_packs"),
|
||||
)
|
||||
op.create_table(
|
||||
"properties",
|
||||
sa.Column("key", sa.TEXT(), nullable=False),
|
||||
sa.Column("value", sa.TEXT(), nullable=False),
|
||||
sa.UniqueConstraint("key"),
|
||||
)
|
||||
op.create_table(
|
||||
"songs",
|
||||
sa.Column("idx", sa.INTEGER(), nullable=False),
|
||||
sa.Column("id", sa.TEXT(), nullable=False),
|
||||
sa.Column("title", sa.TEXT(), nullable=False),
|
||||
sa.Column("artist", sa.TEXT(), nullable=False),
|
||||
sa.Column("set", sa.TEXT(), nullable=False),
|
||||
sa.Column("bpm", sa.TEXT(), nullable=True),
|
||||
sa.Column("bpm_base", sa.FLOAT(), nullable=True),
|
||||
sa.Column("audio_preview", sa.INTEGER(), nullable=True),
|
||||
sa.Column("audio_preview_end", sa.INTEGER(), nullable=True),
|
||||
sa.Column("side", sa.INTEGER(), nullable=True),
|
||||
sa.Column("version", sa.TEXT(), nullable=True),
|
||||
sa.Column("date", sa.INTEGER(), nullable=True),
|
||||
sa.Column("bg", sa.TEXT(), nullable=True),
|
||||
sa.Column("bg_inverse", sa.TEXT(), nullable=True),
|
||||
sa.Column("bg_day", sa.TEXT(), nullable=True),
|
||||
sa.Column("bg_night", sa.TEXT(), nullable=True),
|
||||
sa.Column("source", sa.TEXT(), nullable=True),
|
||||
sa.Column("source_copyright", sa.TEXT(), nullable=True),
|
||||
sa.PrimaryKeyConstraint("id", name=op.f("songs")),
|
||||
)
|
||||
op.create_table(
|
||||
"difficulties",
|
||||
sa.Column("song_id", sa.TEXT(), nullable=False),
|
||||
sa.Column("rating_class", sa.INTEGER(), nullable=False),
|
||||
sa.Column("rating", sa.INTEGER(), nullable=False),
|
||||
sa.Column("rating_plus", sa.BOOLEAN(), nullable=False),
|
||||
sa.Column("chart_designer", sa.TEXT(), nullable=True),
|
||||
sa.Column("jacket_desginer", sa.TEXT(), nullable=True),
|
||||
sa.Column("audio_override", sa.BOOLEAN(), nullable=False),
|
||||
sa.Column("jacket_override", sa.BOOLEAN(), nullable=False),
|
||||
sa.Column("jacket_night", sa.TEXT(), nullable=True),
|
||||
sa.Column("title", sa.TEXT(), nullable=True),
|
||||
sa.Column("artist", sa.TEXT(), nullable=True),
|
||||
sa.Column("bg", sa.TEXT(), nullable=True),
|
||||
sa.Column("bg_inverse", sa.TEXT(), nullable=True),
|
||||
sa.Column("bpm", sa.TEXT(), nullable=True),
|
||||
sa.Column("bpm_base", sa.FLOAT(), nullable=True),
|
||||
sa.Column("version", sa.TEXT(), nullable=True),
|
||||
sa.Column("date", sa.INTEGER(), nullable=True),
|
||||
sa.PrimaryKeyConstraint(
|
||||
"song_id", "rating_class", name=op.f("pk_difficulties")
|
||||
),
|
||||
)
|
||||
op.create_table(
|
||||
"packs_localized",
|
||||
sa.Column("id", sa.TEXT(), nullable=False),
|
||||
sa.Column("name_ja", sa.TEXT(), nullable=True),
|
||||
sa.Column("name_ko", sa.TEXT(), nullable=True),
|
||||
sa.Column("name_zh_hans", sa.TEXT(), nullable=True),
|
||||
sa.Column("name_zh_hant", sa.TEXT(), nullable=True),
|
||||
sa.Column("description_ja", sa.TEXT(), nullable=True),
|
||||
sa.Column("description_ko", sa.TEXT(), nullable=True),
|
||||
sa.Column("description_zh_hans", sa.TEXT(), nullable=True),
|
||||
sa.Column("description_zh_hant", sa.TEXT(), nullable=True),
|
||||
sa.ForeignKeyConstraint(
|
||||
["id"], ["packs.id"], name=op.f("fk_packs_localized_id_packs")
|
||||
),
|
||||
sa.PrimaryKeyConstraint("id", name=op.f("pk_packs_localized")),
|
||||
)
|
||||
op.create_table(
|
||||
"scores",
|
||||
sa.Column("id", sa.INTEGER(), nullable=False),
|
||||
sa.Column("song_id", sa.TEXT(), nullable=False),
|
||||
sa.Column("rating_class", sa.INTEGER(), nullable=False),
|
||||
sa.Column("score", sa.INTEGER(), nullable=False),
|
||||
sa.Column("pure", sa.INTEGER(), nullable=True),
|
||||
sa.Column("far", sa.INTEGER(), nullable=True),
|
||||
sa.Column("lost", sa.INTEGER(), nullable=True),
|
||||
sa.Column("date", sa.INTEGER(), nullable=True),
|
||||
sa.Column("max_recall", sa.INTEGER(), nullable=True),
|
||||
sa.Column("modifier", sa.INTEGER(), nullable=True),
|
||||
sa.Column("clear_type", sa.INTEGER(), nullable=True),
|
||||
sa.Column("comment", sa.TEXT(), nullable=True),
|
||||
sa.PrimaryKeyConstraint("id"),
|
||||
)
|
||||
op.drop_table("difficulty_localization")
|
||||
op.drop_table("chart_info")
|
||||
op.drop_table("song_localization")
|
||||
op.drop_table("difficulty")
|
||||
with op.batch_alter_table("song", schema=None) as batch_op:
|
||||
batch_op.drop_index(batch_op.f("ix_song_title"))
|
||||
batch_op.drop_index(batch_op.f("ix_song_artist"))
|
||||
batch_op.drop_index(batch_op.f("ix_song_added_at"))
|
||||
|
||||
op.drop_table("song")
|
||||
op.drop_table("pack_localization")
|
||||
op.drop_table("property")
|
||||
op.drop_table("play_result")
|
||||
with op.batch_alter_table("pack", schema=None) as batch_op:
|
||||
batch_op.drop_index(batch_op.f("ix_pack_name"))
|
||||
|
||||
op.drop_table("pack")
|
||||
# ### end Alembic commands ###
|
@ -0,0 +1,275 @@
|
||||
"""v1 to v4
|
||||
|
||||
Revision ID: a3f9d48b7de3
|
||||
Revises:
|
||||
Create Date: 2024-11-24 00:03:07.697165
|
||||
|
||||
"""
|
||||
|
||||
from datetime import datetime, timezone
|
||||
from typing import Mapping, Optional, Sequence, TypedDict, Union
|
||||
|
||||
import sqlalchemy as sa
|
||||
from alembic import context, op
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision: str = "a3f9d48b7de3"
|
||||
down_revision: Union[str, None] = None
|
||||
branch_labels: Union[str, Sequence[str], None] = None
|
||||
depends_on: Union[str, Sequence[str], None] = None
|
||||
|
||||
|
||||
class V4DataMigrationOptions(TypedDict):
|
||||
threshold_date: Optional[datetime]
|
||||
|
||||
|
||||
def _data_migration_options(user_input: Optional[Mapping]):
|
||||
options: V4DataMigrationOptions = {
|
||||
"threshold_date": datetime(year=2017, month=1, day=23, tzinfo=timezone.utc),
|
||||
}
|
||||
|
||||
if user_input is None:
|
||||
return options
|
||||
|
||||
if not isinstance(user_input, dict):
|
||||
raise TypeError("v4 migration: data migration options should be a dict object")
|
||||
|
||||
threshold_date = user_input.get("threshold_date")
|
||||
if threshold_date is not None and not isinstance(threshold_date, datetime):
|
||||
raise ValueError(
|
||||
"v4 migration: threshold_date should be None or a datetime.datetime object"
|
||||
)
|
||||
options["threshold_date"] = threshold_date
|
||||
|
||||
return options
|
||||
|
||||
|
||||
def upgrade(
|
||||
*,
|
||||
data_migration: bool = True,
|
||||
data_migration_options: Optional[V4DataMigrationOptions] = None,
|
||||
) -> None:
|
||||
data_migration_options = _data_migration_options(data_migration_options)
|
||||
threshold_date = data_migration_options["threshold_date"]
|
||||
|
||||
op.create_table(
|
||||
"difficulties",
|
||||
sa.Column("song_id", sa.TEXT(), nullable=False),
|
||||
sa.Column("rating_class", sa.Integer(), nullable=False),
|
||||
sa.Column("rating", sa.Integer(), nullable=False),
|
||||
sa.Column("rating_plus", sa.Boolean(), nullable=False),
|
||||
sa.Column("chart_designer", sa.TEXT(), nullable=True),
|
||||
sa.Column("jacket_desginer", sa.TEXT(), nullable=True),
|
||||
sa.Column("audio_override", sa.Boolean(), nullable=False),
|
||||
sa.Column("jacket_override", sa.Boolean(), nullable=False),
|
||||
sa.Column("jacket_night", sa.TEXT(), nullable=True),
|
||||
sa.Column("title", sa.TEXT(), nullable=True),
|
||||
sa.Column("artist", sa.TEXT(), nullable=True),
|
||||
sa.Column("bg", sa.TEXT(), nullable=True),
|
||||
sa.Column("bg_inverse", sa.TEXT(), nullable=True),
|
||||
sa.Column("bpm", sa.TEXT(), nullable=True),
|
||||
sa.Column("bpm_base", sa.Float(), nullable=True),
|
||||
sa.Column("version", sa.TEXT(), nullable=True),
|
||||
sa.Column("date", sa.Integer(), nullable=True),
|
||||
sa.PrimaryKeyConstraint("song_id", "rating_class", name="pk_difficulties"),
|
||||
)
|
||||
op.create_table(
|
||||
"packs",
|
||||
sa.Column("id", sa.TEXT(), nullable=False),
|
||||
sa.Column("name", sa.TEXT(), nullable=False),
|
||||
sa.Column("description", sa.TEXT(), nullable=True),
|
||||
sa.PrimaryKeyConstraint("id", name="fk_packs"),
|
||||
)
|
||||
op.create_table(
|
||||
"songs",
|
||||
sa.Column("idx", sa.Integer(), nullable=False),
|
||||
sa.Column("id", sa.TEXT(), nullable=False),
|
||||
sa.Column("title", sa.TEXT(), nullable=False),
|
||||
sa.Column("artist", sa.TEXT(), nullable=False),
|
||||
sa.Column("set", sa.TEXT(), nullable=False),
|
||||
sa.Column("bpm", sa.TEXT(), nullable=True),
|
||||
sa.Column("bpm_base", sa.Float(), nullable=True),
|
||||
sa.Column("audio_preview", sa.Integer(), nullable=True),
|
||||
sa.Column("audio_preview_end", sa.Integer(), nullable=True),
|
||||
sa.Column("side", sa.Integer(), nullable=True),
|
||||
sa.Column("version", sa.TEXT(), nullable=True),
|
||||
sa.Column("date", sa.Integer(), nullable=True),
|
||||
sa.Column("bg", sa.TEXT(), nullable=True),
|
||||
sa.Column("bg_inverse", sa.TEXT(), nullable=True),
|
||||
sa.Column("bg_day", sa.TEXT(), nullable=True),
|
||||
sa.Column("bg_night", sa.TEXT(), nullable=True),
|
||||
sa.Column("source", sa.TEXT(), nullable=True),
|
||||
sa.Column("source_copyright", sa.TEXT(), nullable=True),
|
||||
sa.PrimaryKeyConstraint("id", name="songs"),
|
||||
)
|
||||
op.create_table(
|
||||
"charts_info",
|
||||
sa.Column("song_id", sa.TEXT(), nullable=False),
|
||||
sa.Column("rating_class", sa.Integer(), nullable=False),
|
||||
sa.Column(
|
||||
"constant",
|
||||
sa.Integer(),
|
||||
nullable=False,
|
||||
comment="real_constant * 10. For example, Crimson Throne [FTR] is 10.4, then store 104.",
|
||||
),
|
||||
sa.Column("notes", sa.Integer(), nullable=True),
|
||||
sa.ForeignKeyConstraint(
|
||||
["rating_class"],
|
||||
["difficulties.rating_class"],
|
||||
name="fk_charts_info_rating_class_difficulties",
|
||||
),
|
||||
sa.ForeignKeyConstraint(
|
||||
["song_id"],
|
||||
["difficulties.song_id"],
|
||||
name="fk_charts_info_song_id_difficulties",
|
||||
),
|
||||
sa.PrimaryKeyConstraint("song_id", "rating_class", name="pk_charts_info"),
|
||||
)
|
||||
op.create_table(
|
||||
"difficulties_localized",
|
||||
sa.Column("song_id", sa.TEXT(), nullable=False),
|
||||
sa.Column("rating_class", sa.Integer(), nullable=False),
|
||||
sa.Column("title_ja", sa.TEXT(), nullable=True),
|
||||
sa.Column("title_ko", sa.TEXT(), nullable=True),
|
||||
sa.Column("title_zh_hans", sa.TEXT(), nullable=True),
|
||||
sa.Column("title_zh_hant", sa.TEXT(), nullable=True),
|
||||
sa.Column("artist_ja", sa.TEXT(), nullable=True),
|
||||
sa.Column("artist_ko", sa.TEXT(), nullable=True),
|
||||
sa.Column("artist_zh_hans", sa.TEXT(), nullable=True),
|
||||
sa.Column("artist_zh_hant", sa.TEXT(), nullable=True),
|
||||
sa.ForeignKeyConstraint(
|
||||
["rating_class"],
|
||||
["difficulties.rating_class"],
|
||||
name="fk_difficulties_localized_rating_class_difficulties",
|
||||
),
|
||||
sa.ForeignKeyConstraint(
|
||||
["song_id"],
|
||||
["difficulties.song_id"],
|
||||
name="fk_difficulties_localized_song_id_difficulties",
|
||||
),
|
||||
sa.PrimaryKeyConstraint(
|
||||
"song_id", "rating_class", name="pk_difficulties_localized"
|
||||
),
|
||||
)
|
||||
op.create_table(
|
||||
"packs_localized",
|
||||
sa.Column("id", sa.TEXT(), nullable=False),
|
||||
sa.Column("name_ja", sa.TEXT(), nullable=True),
|
||||
sa.Column("name_ko", sa.TEXT(), nullable=True),
|
||||
sa.Column("name_zh_hans", sa.TEXT(), nullable=True),
|
||||
sa.Column("name_zh_hant", sa.TEXT(), nullable=True),
|
||||
sa.Column("description_ja", sa.TEXT(), nullable=True),
|
||||
sa.Column("description_ko", sa.TEXT(), nullable=True),
|
||||
sa.Column("description_zh_hans", sa.TEXT(), nullable=True),
|
||||
sa.Column("description_zh_hant", sa.TEXT(), nullable=True),
|
||||
sa.ForeignKeyConstraint(
|
||||
["id"],
|
||||
["packs.id"],
|
||||
name="fk_packs_localized_id_packs",
|
||||
),
|
||||
sa.PrimaryKeyConstraint("id", name="pk_packs_localized"),
|
||||
)
|
||||
op.create_table(
|
||||
"songs_localized",
|
||||
sa.Column("id", sa.TEXT(), nullable=False),
|
||||
sa.Column("title_ja", sa.TEXT(), nullable=True),
|
||||
sa.Column("title_ko", sa.TEXT(), nullable=True),
|
||||
sa.Column("title_zh_hans", sa.TEXT(), nullable=True),
|
||||
sa.Column("title_zh_hant", sa.TEXT(), nullable=True),
|
||||
sa.Column("search_title_ja", sa.TEXT(), nullable=True, comment="JSON array"),
|
||||
sa.Column("search_title_ko", sa.TEXT(), nullable=True, comment="JSON array"),
|
||||
sa.Column(
|
||||
"search_title_zh_hans", sa.TEXT(), nullable=True, comment="JSON array"
|
||||
),
|
||||
sa.Column(
|
||||
"search_title_zh_hant", sa.TEXT(), nullable=True, comment="JSON array"
|
||||
),
|
||||
sa.Column("search_artist_ja", sa.TEXT(), nullable=True, comment="JSON array"),
|
||||
sa.Column("search_artist_ko", sa.TEXT(), nullable=True, comment="JSON array"),
|
||||
sa.Column(
|
||||
"search_artist_zh_hans", sa.TEXT(), nullable=True, comment="JSON array"
|
||||
),
|
||||
sa.Column(
|
||||
"search_artist_zh_hant", sa.TEXT(), nullable=True, comment="JSON array"
|
||||
),
|
||||
sa.Column("source_ja", sa.TEXT(), nullable=True),
|
||||
sa.Column("source_ko", sa.TEXT(), nullable=True),
|
||||
sa.Column("source_zh_hans", sa.TEXT(), nullable=True),
|
||||
sa.Column("source_zh_hant", sa.TEXT(), nullable=True),
|
||||
sa.ForeignKeyConstraint(
|
||||
["id"],
|
||||
["songs.id"],
|
||||
name="fk_songs_localized_id_songs",
|
||||
),
|
||||
sa.PrimaryKeyConstraint("id", name="pk_songs_localized"),
|
||||
)
|
||||
op.drop_table("aliases")
|
||||
op.drop_table("packages")
|
||||
op.execute(sa.text("DROP VIEW IF EXISTS bests"))
|
||||
op.execute(sa.text("DROP VIEW IF EXISTS calculated"))
|
||||
op.execute(sa.text("DROP VIEW IF EXISTS calculated_potential"))
|
||||
op.execute(sa.text("DROP VIEW IF EXISTS song_id_names"))
|
||||
|
||||
op.rename_table("scores", "scores_old")
|
||||
scores_tbl = op.create_table(
|
||||
"scores",
|
||||
sa.Column("id", sa.Integer(), autoincrement=True, primary_key=True),
|
||||
sa.Column("song_id", sa.TEXT(), nullable=False),
|
||||
sa.Column("rating_class", sa.Integer(), nullable=False),
|
||||
sa.Column("score", sa.Integer(), nullable=False),
|
||||
sa.Column("pure", sa.Integer()),
|
||||
sa.Column("far", sa.Integer()),
|
||||
sa.Column("lost", sa.Integer()),
|
||||
sa.Column("date", sa.Integer()),
|
||||
sa.Column("max_recall", sa.Integer()),
|
||||
sa.Column("modifier", sa.Integer(), comment="0: NORMAL, 1: EASY, 2: HARD"),
|
||||
sa.Column(
|
||||
"clear_type",
|
||||
sa.Integer(),
|
||||
comment="0: TRACK LOST, 1: NORMAL CLEAR, 2: FULL RECALL, "
|
||||
"3: PURE MEMORY, 4: EASY CLEAR, 5: HARD CLEAR",
|
||||
),
|
||||
sa.Column("comment", sa.TEXT()),
|
||||
)
|
||||
if data_migration:
|
||||
conn = op.get_bind()
|
||||
query = conn.execute(
|
||||
sa.text(
|
||||
"SELECT id, song_id, rating_class, score, time, pure, far, lost, max_recall, clear_type "
|
||||
"FROM scores_old"
|
||||
)
|
||||
)
|
||||
batch_size = 30
|
||||
|
||||
while True:
|
||||
rows = query.fetchmany(batch_size)
|
||||
if not rows:
|
||||
break
|
||||
|
||||
rows_to_insert = []
|
||||
|
||||
for row in rows:
|
||||
result = row._asdict()
|
||||
result["date"] = datetime.fromtimestamp(
|
||||
result.pop("time"), tz=timezone.utc
|
||||
)
|
||||
|
||||
if threshold_date is not None and result["date"] <= threshold_date:
|
||||
result["date"] = None
|
||||
|
||||
result["date"] = (
|
||||
int(result["date"].timestamp())
|
||||
if result["date"] is not None
|
||||
else None
|
||||
)
|
||||
rows_to_insert.append(result)
|
||||
|
||||
conn.execute(sa.insert(scores_tbl), rows_to_insert)
|
||||
|
||||
op.drop_table("scores_old")
|
||||
|
||||
|
||||
def downgrade() -> None:
|
||||
raise NotImplementedError(
|
||||
f"Downgrade not supported! ({context.get_context().get_current_revision()})"
|
||||
)
|
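The subtle part of the data migration above is the time-to-date handling: each legacy epoch value is wrapped in an aware UTC datetime, discarded if it falls at or before the optional threshold_date, and written back as epoch seconds. A minimal standalone sketch of that per-row rule (the cutoff value below is invented for illustration; the real migration receives threshold_date from its caller):

from datetime import datetime, timezone
from typing import Optional

# Hypothetical cutoff used only for this sketch.
threshold_date = datetime(2020, 1, 1, tzinfo=timezone.utc)

def fix_legacy_date(time_value: int) -> Optional[int]:
    # Mirrors the migration loop: convert, drop suspiciously old values, convert back.
    date = datetime.fromtimestamp(time_value, tz=timezone.utc)
    if threshold_date is not None and date <= threshold_date:
        return None
    return int(date.timestamp())

assert fix_legacy_date(1) is None              # a truncated st3 timestamp is dropped
assert fix_legacy_date(1670283375) == 1670283375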
@ -0,0 +1,32 @@
from ._base import ModelBase, ModelViewBase
from .chart_info import ChartInfo
from .config import Property
from .difficulty import Difficulty, DifficultyLocalization
from .pack import Pack, PackLocalization
from .song import Song, SongLocalization

from .chart import Chart  # isort: skip
from .play_result import (
    CalculatedPotential,
    PlayResult,
    PlayResultBest,
    PlayResultCalculated,
)  # isort: skip

__all__ = [
    "CalculatedPotential",
    "Chart",
    "ChartInfo",
    "Difficulty",
    "DifficultyLocalization",
    "ModelBase",
    "ModelViewBase",
    "Pack",
    "PackLocalization",
    "PlayResult",
    "PlayResultBest",
    "PlayResultCalculated",
    "Property",
    "Song",
    "SongLocalization",
]
@ -1,15 +1,39 @@
# pylint: disable=too-few-public-methods
from datetime import datetime

from sqlalchemy import MetaData
from sqlalchemy.orm import DeclarativeBase
from sqlalchemy.orm.exc import DetachedInstanceError

from ._types import ForceTimezoneDateTime

TYPE_ANNOTATION_MAP = {
    datetime: ForceTimezoneDateTime,
}


class ModelBase(DeclarativeBase):
    type_annotation_map = TYPE_ANNOTATION_MAP
    metadata = MetaData(
        naming_convention={
            "ix": "ix_%(column_0_label)s",
            "uq": "uq_%(table_name)s_%(column_0_name)s",
            "ck": "ck_%(table_name)s_`%(constraint_name)s`",
            "fk": "fk_%(table_name)s_%(column_0_name)s_%(referred_table_name)s",
            "pk": "pk_%(table_name)s",
        }
    )


class ModelViewBase(DeclarativeBase):
    type_annotation_map = TYPE_ANNOTATION_MAP


class ReprHelper:
    # pylint: disable=no-member

    def _repr(self, **kwargs) -> str:
        """
        Helper for __repr__
        SQLAlchemy model __repr__ helper

        https://stackoverflow.com/a/55749579/16484891

@ -24,8 +48,9 @@ class ReprHelper:
                field_strings.append(f"{key}=DetachedInstanceError")
            else:
                at_least_one_attached_attribute = True

        if at_least_one_attached_attribute:
            return f"<{self.__class__.__name__}({','.join(field_strings)})>"
            return f"<{self.__class__.__name__}({', '.join(field_strings)})>"
        return f"<{self.__class__.__name__} {id(self)}>"

    def __repr__(self):
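The naming_convention attached to ModelBase.metadata above is what produces the deterministic constraint names (pk_*, fk_*_*_*) that the alembic migration earlier in this diff refers to. A minimal sketch of the names it generates, using two throwaway tables that exist only for this example:

from sqlalchemy import Column, ForeignKey, Integer, MetaData, Table
from sqlalchemy.schema import CreateTable

# Same convention as ModelBase.metadata; the tables below are illustrative only.
metadata = MetaData(
    naming_convention={
        "ix": "ix_%(column_0_label)s",
        "uq": "uq_%(table_name)s_%(column_0_name)s",
        "ck": "ck_%(table_name)s_`%(constraint_name)s`",
        "fk": "fk_%(table_name)s_%(column_0_name)s_%(referred_table_name)s",
        "pk": "pk_%(table_name)s",
    }
)

difficulty = Table("difficulty", metadata, Column("song_id", Integer, primary_key=True))
chart_info = Table(
    "chart_info",
    metadata,
    Column("song_id", Integer, ForeignKey("difficulty.song_id"), primary_key=True),
)

# The emitted DDL carries convention-generated names such as
# "pk_chart_info" and "fk_chart_info_song_id_difficulty".
print(CreateTable(chart_info))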
@ -1,46 +0,0 @@
|
||||
from datetime import datetime, timezone
|
||||
from enum import IntEnum
|
||||
from typing import Optional, Type
|
||||
|
||||
from sqlalchemy import DateTime, Integer
|
||||
from sqlalchemy.types import TypeDecorator
|
||||
|
||||
|
||||
class DbIntEnum(TypeDecorator):
|
||||
"""sqlalchemy `TypeDecorator` for `IntEnum`s"""
|
||||
|
||||
impl = Integer
|
||||
cache_ok = True
|
||||
|
||||
def __init__(self, enum_class: Type[IntEnum]):
|
||||
super().__init__()
|
||||
self.enum_class = enum_class
|
||||
|
||||
def process_bind_param(self, value: Optional[IntEnum], dialect) -> Optional[int]:
|
||||
return None if value is None else value.value
|
||||
|
||||
def process_result_value(self, value: Optional[int], dialect) -> Optional[IntEnum]:
|
||||
return None if value is None else self.enum_class(value)
|
||||
|
||||
|
||||
class TZDateTime(TypeDecorator):
|
||||
"""
|
||||
Store Timezone Aware Timestamps as Timezone Naive UTC
|
||||
|
||||
https://docs.sqlalchemy.org/en/20/core/custom_types.html#store-timezone-aware-timestamps-as-timezone-naive-utc
|
||||
"""
|
||||
|
||||
impl = DateTime
|
||||
cache_ok = True
|
||||
|
||||
def process_bind_param(self, value: Optional[datetime], dialect):
|
||||
if value is not None:
|
||||
if not value.tzinfo or value.tzinfo.utcoffset(value) is None:
|
||||
raise TypeError("tzinfo is required")
|
||||
value = value.astimezone(timezone.utc).replace(tzinfo=None)
|
||||
return value
|
||||
|
||||
def process_result_value(self, value: Optional[datetime], dialect):
|
||||
if value is not None:
|
||||
value = value.replace(tzinfo=timezone.utc)
|
||||
return value
|
28
src/arcaea_offline/database/models/_types.py
Normal file
@ -0,0 +1,28 @@
from datetime import datetime, timezone
from typing import Optional

from sqlalchemy import DateTime
from sqlalchemy.types import TypeDecorator


class ForceTimezoneDateTime(TypeDecorator):
    """
    Store timezone aware timestamps as timezone naive UTC

    https://docs.sqlalchemy.org/en/20/core/custom_types.html#store-timezone-aware-timestamps-as-timezone-naive-utc
    """

    impl = DateTime
    cache_ok = True

    def process_bind_param(self, value: Optional[datetime], dialect):
        if value is not None:
            if not value.tzinfo or value.tzinfo.utcoffset(value) is None:
                raise TypeError("datetime tzinfo is required")
            value = value.astimezone(timezone.utc).replace(tzinfo=None)
        return value

    def process_result_value(self, value: Optional[datetime], dialect):
        if value is not None:
            value = value.replace(tzinfo=timezone.utc)
        return value
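A rough round-trip sketch of how ForceTimezoneDateTime behaves against an in-memory SQLite database (the events table is invented for this example):

from datetime import datetime, timezone

from sqlalchemy import Column, Integer, MetaData, Table, create_engine, insert, select

from arcaea_offline.database.models._types import ForceTimezoneDateTime

metadata = MetaData()
events = Table(
    "events",  # illustrative table, not part of the real schema
    metadata,
    Column("id", Integer, primary_key=True),
    Column("happened_at", ForceTimezoneDateTime),
)

engine = create_engine("sqlite://")
metadata.create_all(engine)

with engine.begin() as conn:
    # Naive datetimes are rejected by process_bind_param with a TypeError;
    # aware datetimes are stored as naive UTC and come back tagged as UTC.
    conn.execute(
        insert(events).values(happened_at=datetime(2024, 1, 1, tzinfo=timezone.utc))
    )
    stored = conn.execute(select(events.c.happened_at)).scalar_one()
    assert stored.tzinfo == timezone.utc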
85
src/arcaea_offline/database/models/chart.py
Normal file
@ -0,0 +1,85 @@
|
||||
from datetime import datetime
|
||||
from typing import Optional
|
||||
|
||||
from sqlalchemy import func, select
|
||||
from sqlalchemy.orm import Mapped
|
||||
from sqlalchemy_utils import create_view
|
||||
|
||||
from ._base import ModelBase, ModelViewBase, ReprHelper
|
||||
from .chart_info import ChartInfo
|
||||
from .difficulty import Difficulty
|
||||
from .song import Song
|
||||
|
||||
|
||||
class Chart(ModelBase, ReprHelper):
|
||||
__tablename__ = "charts"
|
||||
|
||||
song_idx: Mapped[int]
|
||||
song_id: Mapped[str]
|
||||
rating_class: Mapped[int]
|
||||
rating: Mapped[int]
|
||||
is_rating_plus: Mapped[bool]
|
||||
title: Mapped[str]
|
||||
artist: Mapped[str]
|
||||
pack_id: Mapped[str]
|
||||
bpm: Mapped[Optional[str]]
|
||||
bpm_base: Mapped[Optional[float]]
|
||||
audio_preview: Mapped[Optional[int]]
|
||||
audio_preview_end: Mapped[Optional[int]]
|
||||
side: Mapped[Optional[int]]
|
||||
version: Mapped[Optional[str]]
|
||||
added_at: Mapped[Optional[datetime]]
|
||||
bg: Mapped[Optional[str]]
|
||||
bg_inverse: Mapped[Optional[str]]
|
||||
bg_day: Mapped[Optional[str]]
|
||||
bg_night: Mapped[Optional[str]]
|
||||
source: Mapped[Optional[str]]
|
||||
source_copyright: Mapped[Optional[str]]
|
||||
chart_designer: Mapped[Optional[str]]
|
||||
jacket_desginer: Mapped[Optional[str]]
|
||||
has_overriding_audio: Mapped[bool]
|
||||
has_overriding_jacket: Mapped[bool]
|
||||
jacket_night: Mapped[Optional[str]]
|
||||
constant: Mapped[int]
|
||||
notes: Mapped[Optional[int]]
|
||||
|
||||
__table__ = create_view(
|
||||
name=__tablename__,
|
||||
selectable=select(
|
||||
Song.idx.label("song_idx"),
|
||||
Difficulty.song_id,
|
||||
Difficulty.rating_class,
|
||||
Difficulty.rating,
|
||||
Difficulty.is_rating_plus,
|
||||
func.coalesce(Difficulty.title, Song.title).label("title"),
|
||||
func.coalesce(Difficulty.artist, Song.artist).label("artist"),
|
||||
Song.pack_id,
|
||||
func.coalesce(Difficulty.bpm, Song.bpm).label("bpm"),
|
||||
func.coalesce(Difficulty.bpm_base, Song.bpm_base).label("bpm_base"),
|
||||
Song.side,
|
||||
func.coalesce(Difficulty.version, Song.version).label("version"),
|
||||
func.coalesce(Difficulty.added_at, Song.added_at).label("added_at"),
|
||||
func.coalesce(Difficulty.bg, Song.bg).label("bg"),
|
||||
func.coalesce(Difficulty.bg_inverse, Song.bg_inverse).label("bg_inverse"),
|
||||
Song.bg_day,
|
||||
Song.bg_night,
|
||||
Song.source,
|
||||
Song.source_copyright,
|
||||
Difficulty.chart_designer,
|
||||
Difficulty.jacket_designer,
|
||||
Difficulty.has_overriding_audio,
|
||||
Difficulty.has_overriding_jacket,
|
||||
Difficulty.jacket_night,
|
||||
ChartInfo.constant,
|
||||
ChartInfo.notes,
|
||||
)
|
||||
.select_from(Difficulty)
|
||||
.join(
|
||||
ChartInfo,
|
||||
(Difficulty.song_id == ChartInfo.song_id)
|
||||
& (Difficulty.rating_class == ChartInfo.rating_class),
|
||||
)
|
||||
.join(Song, Difficulty.song_id == Song.id),
|
||||
metadata=ModelViewBase.metadata,
|
||||
cascade_on_drop=False,
|
||||
)
|
33
src/arcaea_offline/database/models/chart_info.py
Normal file
@ -0,0 +1,33 @@
|
||||
from datetime import datetime
|
||||
from decimal import Decimal
|
||||
from typing import TYPE_CHECKING, Optional
|
||||
|
||||
from sqlalchemy import ForeignKeyConstraint, Integer, Numeric, String
|
||||
from sqlalchemy.orm import Mapped, mapped_column, relationship
|
||||
|
||||
from ._base import ModelBase, ReprHelper
|
||||
from ._types import ForceTimezoneDateTime
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .difficulty import Difficulty
|
||||
|
||||
|
||||
class ChartInfo(ModelBase, ReprHelper):
|
||||
__tablename__ = "chart_info"
|
||||
__table_args__ = (
|
||||
ForeignKeyConstraint(
|
||||
["song_id", "rating_class"],
|
||||
["difficulty.song_id", "difficulty.rating_class"],
|
||||
onupdate="CASCADE",
|
||||
ondelete="CASCADE",
|
||||
),
|
||||
)
|
||||
|
||||
difficulty: Mapped["Difficulty"] = relationship(back_populates="chart_info_list")
|
||||
|
||||
song_id: Mapped[str] = mapped_column(String, primary_key=True)
|
||||
rating_class: Mapped[int] = mapped_column(Integer, primary_key=True)
|
||||
constant: Mapped[Decimal] = mapped_column(Numeric, nullable=False)
|
||||
notes: Mapped[int] = mapped_column(Integer)
|
||||
added_at: Mapped[datetime] = mapped_column(ForceTimezoneDateTime, primary_key=True)
|
||||
version: Mapped[Optional[str]] = mapped_column(String)
|
12
src/arcaea_offline/database/models/config.py
Normal file
@ -0,0 +1,12 @@
from sqlalchemy.orm import Mapped, mapped_column

from ._base import ModelBase, ReprHelper

__all__ = ["Property"]


class Property(ModelBase, ReprHelper):
    __tablename__ = "property"

    key: Mapped[str] = mapped_column(primary_key=True)
    value: Mapped[str] = mapped_column()
93
src/arcaea_offline/database/models/difficulty.py
Normal file
@ -0,0 +1,93 @@
|
||||
from datetime import datetime
|
||||
from decimal import Decimal
|
||||
from typing import TYPE_CHECKING, Optional
|
||||
|
||||
from sqlalchemy import (
|
||||
Boolean,
|
||||
ForeignKey,
|
||||
ForeignKeyConstraint,
|
||||
Integer,
|
||||
Numeric,
|
||||
String,
|
||||
text,
|
||||
)
|
||||
from sqlalchemy.orm import Mapped, mapped_column, relationship
|
||||
|
||||
from ._base import ModelBase, ReprHelper
|
||||
from ._types import ForceTimezoneDateTime
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .chart_info import ChartInfo
|
||||
from .song import Song
|
||||
|
||||
|
||||
class Difficulty(ModelBase, ReprHelper):
|
||||
__tablename__ = "difficulty"
|
||||
|
||||
song_id: Mapped[str] = mapped_column(
|
||||
ForeignKey("song.id", onupdate="CASCADE", ondelete="CASCADE"),
|
||||
primary_key=True,
|
||||
)
|
||||
song: Mapped["Song"] = relationship(back_populates="difficulties")
|
||||
localization_entries: Mapped[list["DifficultyLocalization"]] = relationship(
|
||||
back_populates="difficulty",
|
||||
cascade="all, delete",
|
||||
passive_deletes=True,
|
||||
)
|
||||
chart_info_list: Mapped[list["ChartInfo"]] = relationship(
|
||||
back_populates="difficulty",
|
||||
cascade="all, delete",
|
||||
passive_deletes=True,
|
||||
)
|
||||
|
||||
rating_class: Mapped[int] = mapped_column(Integer, primary_key=True)
|
||||
|
||||
rating: Mapped[int] = mapped_column(Integer, nullable=False)
|
||||
is_rating_plus: Mapped[bool] = mapped_column(
|
||||
Boolean, nullable=False, insert_default=False, server_default=text("0")
|
||||
)
|
||||
|
||||
chart_designer: Mapped[Optional[str]] = mapped_column(String)
|
||||
jacket_designer: Mapped[Optional[str]] = mapped_column(String)
|
||||
|
||||
has_overriding_audio: Mapped[bool] = mapped_column(
|
||||
Boolean, nullable=False, insert_default=False, server_default=text("0")
|
||||
)
|
||||
has_overriding_jacket: Mapped[bool] = mapped_column(
|
||||
Boolean, nullable=False, insert_default=False, server_default=text("0")
|
||||
)
|
||||
jacket_night: Mapped[Optional[str]] = mapped_column(String)
|
||||
|
||||
title: Mapped[Optional[str]] = mapped_column(String)
|
||||
artist: Mapped[Optional[str]] = mapped_column(String)
|
||||
bg: Mapped[Optional[str]] = mapped_column(String)
|
||||
bg_inverse: Mapped[Optional[str]] = mapped_column(String)
|
||||
bpm: Mapped[Optional[str]] = mapped_column(String)
|
||||
bpm_base: Mapped[Optional[Decimal]] = mapped_column(Numeric(asdecimal=True))
|
||||
added_at: Mapped[Optional[datetime]] = mapped_column(ForceTimezoneDateTime)
|
||||
version: Mapped[Optional[str]] = mapped_column(String)
|
||||
is_legacy11: Mapped[bool] = mapped_column(
|
||||
Boolean, nullable=False, insert_default=False, server_default=text("0")
|
||||
)
|
||||
|
||||
|
||||
class DifficultyLocalization(ModelBase, ReprHelper):
|
||||
__tablename__ = "difficulty_localization"
|
||||
__table_args__ = (
|
||||
ForeignKeyConstraint(
|
||||
["song_id", "rating_class"],
|
||||
["difficulty.song_id", "difficulty.rating_class"],
|
||||
onupdate="CASCADE",
|
||||
ondelete="CASCADE",
|
||||
),
|
||||
)
|
||||
|
||||
difficulty: Mapped["Difficulty"] = relationship(
|
||||
back_populates="localization_entries"
|
||||
)
|
||||
song_id: Mapped[str] = mapped_column(String, primary_key=True)
|
||||
rating_class: Mapped[int] = mapped_column(Integer, primary_key=True)
|
||||
|
||||
lang: Mapped[str] = mapped_column(String, primary_key=True)
|
||||
title: Mapped[Optional[str]] = mapped_column(String)
|
||||
artist: Mapped[Optional[str]] = mapped_column(String)
|
61
src/arcaea_offline/database/models/pack.py
Normal file
@ -0,0 +1,61 @@
|
||||
from typing import TYPE_CHECKING, Optional
|
||||
|
||||
from sqlalchemy import Boolean, ForeignKey, Integer, String, Text, text
|
||||
from sqlalchemy.orm import Mapped, mapped_column, relationship
|
||||
|
||||
from ._base import ModelBase, ReprHelper
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .song import Song
|
||||
|
||||
|
||||
class Pack(ModelBase, ReprHelper):
|
||||
__tablename__ = "pack"
|
||||
|
||||
songs: Mapped[list["Song"]] = relationship(
|
||||
back_populates="pack",
|
||||
cascade="all, delete",
|
||||
passive_deletes=True,
|
||||
)
|
||||
localized_entries: Mapped[list["PackLocalization"]] = relationship(
|
||||
back_populates="pack",
|
||||
cascade="all, delete",
|
||||
passive_deletes=True,
|
||||
)
|
||||
|
||||
id: Mapped[str] = mapped_column(String, primary_key=True)
|
||||
name: Mapped[Optional[str]] = mapped_column(String, index=True)
|
||||
description: Mapped[Optional[str]] = mapped_column(Text)
|
||||
section: Mapped[Optional[str]] = mapped_column(String)
|
||||
is_world_extend: Mapped[bool] = mapped_column(
|
||||
Boolean, nullable=False, insert_default=False, server_default=text("0")
|
||||
)
|
||||
|
||||
plus_character: Mapped[Optional[int]] = mapped_column(Integer)
|
||||
|
||||
append_parent_id: Mapped[Optional[str]] = mapped_column(
|
||||
ForeignKey("pack.id", onupdate="CASCADE", ondelete="CASCADE")
|
||||
)
|
||||
|
||||
parent: Mapped["Pack"] = relationship(
|
||||
"Pack",
|
||||
back_populates="appendages",
|
||||
cascade="all, delete",
|
||||
passive_deletes=True,
|
||||
remote_side=[id],
|
||||
)
|
||||
appendages: Mapped[list["Pack"]] = relationship("Pack", back_populates="parent")
|
||||
|
||||
|
||||
class PackLocalization(ModelBase, ReprHelper):
|
||||
__tablename__ = "pack_localization"
|
||||
|
||||
pack: Mapped["Pack"] = relationship(back_populates="localized_entries")
|
||||
id: Mapped[str] = mapped_column(
|
||||
ForeignKey("pack.id", onupdate="CASCADE", ondelete="CASCADE"),
|
||||
primary_key=True,
|
||||
)
|
||||
|
||||
lang: Mapped[str] = mapped_column(String, primary_key=True)
|
||||
name: Mapped[Optional[str]] = mapped_column(String)
|
||||
description: Mapped[Optional[str]] = mapped_column(Text)
|
192
src/arcaea_offline/database/models/play_result.py
Normal file
@ -0,0 +1,192 @@
|
||||
from datetime import datetime, timezone
|
||||
from typing import Optional
|
||||
from uuid import UUID, uuid4
|
||||
|
||||
from sqlalchemy import Integer, String, Text, Uuid, case, func, inspect, select, text
|
||||
from sqlalchemy.orm import Mapped, mapped_column
|
||||
from sqlalchemy_utils import create_view
|
||||
|
||||
from ._base import ModelBase, ModelViewBase, ReprHelper
|
||||
from .chart_info import ChartInfo
|
||||
from .difficulty import Difficulty
|
||||
|
||||
__all__ = [
|
||||
"CalculatedPotential",
|
||||
"PlayResult",
|
||||
"PlayResultBest",
|
||||
"PlayResultCalculated",
|
||||
]
|
||||
|
||||
|
||||
class PlayResult(ModelBase, ReprHelper):
|
||||
__tablename__ = "play_result"
|
||||
|
||||
id: Mapped[int] = mapped_column(autoincrement=True, primary_key=True)
|
||||
uuid: Mapped[UUID] = mapped_column(
|
||||
Uuid, nullable=False, unique=True, default=lambda: uuid4()
|
||||
)
|
||||
song_id: Mapped[str] = mapped_column(String)
|
||||
rating_class: Mapped[int] = mapped_column(Integer)
|
||||
played_at: Mapped[Optional[datetime]] = mapped_column(
|
||||
default=lambda: datetime.now(timezone.utc)
|
||||
)
|
||||
score: Mapped[int]
|
||||
pure: Mapped[Optional[int]]
|
||||
pure_early: Mapped[Optional[int]]
|
||||
pure_late: Mapped[Optional[int]]
|
||||
far: Mapped[Optional[int]]
|
||||
far_early: Mapped[Optional[int]]
|
||||
far_late: Mapped[Optional[int]]
|
||||
lost: Mapped[Optional[int]]
|
||||
|
||||
max_recall: Mapped[Optional[int]]
|
||||
clear_type: Mapped[Optional[int]]
|
||||
modifier: Mapped[Optional[int]]
|
||||
comment: Mapped[Optional[str]] = mapped_column(Text)
|
||||
|
||||
|
||||
class PlayResultCalculated(ModelViewBase, ReprHelper):
|
||||
__tablename__ = "play_results_calculated"
|
||||
|
||||
id: Mapped[int]
|
||||
uuid: Mapped[UUID]
|
||||
song_id: Mapped[str]
|
||||
rating_class: Mapped[int]
|
||||
score: Mapped[int]
|
||||
pure: Mapped[Optional[int]]
|
||||
pure_early: Mapped[Optional[int]]
|
||||
pure_late: Mapped[Optional[int]]
|
||||
shiny_pure: Mapped[Optional[int]]
|
||||
far: Mapped[Optional[int]]
|
||||
far_early: Mapped[Optional[int]]
|
||||
far_late: Mapped[Optional[int]]
|
||||
lost: Mapped[Optional[int]]
|
||||
played_at: Mapped[Optional[datetime]]
|
||||
max_recall: Mapped[Optional[int]]
|
||||
modifier: Mapped[Optional[int]]
|
||||
clear_type: Mapped[Optional[int]]
|
||||
potential: Mapped[float]
|
||||
comment: Mapped[Optional[str]]
|
||||
|
||||
__table__ = create_view(
|
||||
name=__tablename__,
|
||||
selectable=select(
|
||||
PlayResult.id,
|
||||
Difficulty.song_id,
|
||||
Difficulty.rating_class,
|
||||
PlayResult.score,
|
||||
PlayResult.pure,
|
||||
(
|
||||
case(
|
||||
(
|
||||
(
|
||||
ChartInfo.notes.is_not(None)
|
||||
& PlayResult.pure.is_not(None)
|
||||
& PlayResult.far.is_not(None)
|
||||
& (ChartInfo.notes != 0)
|
||||
),
|
||||
PlayResult.score
|
||||
- func.floor(
|
||||
(PlayResult.pure * 10000000.0 / ChartInfo.notes)
|
||||
+ (PlayResult.far * 0.5 * 10000000.0 / ChartInfo.notes)
|
||||
),
|
||||
),
|
||||
else_=text("NULL"),
|
||||
)
|
||||
).label("shiny_pure"),
|
||||
PlayResult.far,
|
||||
PlayResult.lost,
|
||||
PlayResult.played_at,
|
||||
PlayResult.max_recall,
|
||||
PlayResult.modifier,
|
||||
PlayResult.clear_type,
|
||||
case(
|
||||
(PlayResult.score >= 10000000, ChartInfo.constant / 10.0 + 2), # noqa: PLR2004
|
||||
(
|
||||
PlayResult.score >= 9800000, # noqa: PLR2004
|
||||
ChartInfo.constant / 10.0
|
||||
+ 1
|
||||
+ (PlayResult.score - 9800000) / 200000.0,
|
||||
),
|
||||
else_=func.max(
|
||||
(ChartInfo.constant / 10.0)
|
||||
+ (PlayResult.score - 9500000) / 300000.0,
|
||||
0,
|
||||
),
|
||||
).label("potential"),
|
||||
PlayResult.comment,
|
||||
)
|
||||
.select_from(Difficulty)
|
||||
.join(
|
||||
ChartInfo,
|
||||
(Difficulty.song_id == ChartInfo.song_id)
|
||||
& (Difficulty.rating_class == ChartInfo.rating_class),
|
||||
)
|
||||
.join(
|
||||
PlayResult,
|
||||
(Difficulty.song_id == PlayResult.song_id)
|
||||
& (Difficulty.rating_class == PlayResult.rating_class),
|
||||
),
|
||||
metadata=ModelViewBase.metadata,
|
||||
cascade_on_drop=False,
|
||||
)
|
||||
|
||||
|
||||
class PlayResultBest(ModelViewBase, ReprHelper):
|
||||
__tablename__ = "play_results_best"
|
||||
|
||||
id: Mapped[int]
|
||||
uuid: Mapped[UUID]
|
||||
song_id: Mapped[str]
|
||||
rating_class: Mapped[int]
|
||||
score: Mapped[int]
|
||||
pure: Mapped[Optional[int]]
|
||||
pure_early: Mapped[Optional[int]]
|
||||
pure_late: Mapped[Optional[int]]
|
||||
shiny_pure: Mapped[Optional[int]]
|
||||
far: Mapped[Optional[int]]
|
||||
far_early: Mapped[Optional[int]]
|
||||
far_late: Mapped[Optional[int]]
|
||||
lost: Mapped[Optional[int]]
|
||||
played_at: Mapped[Optional[datetime]]
|
||||
max_recall: Mapped[Optional[int]]
|
||||
modifier: Mapped[Optional[int]]
|
||||
clear_type: Mapped[Optional[int]]
|
||||
potential: Mapped[float]
|
||||
comment: Mapped[Optional[str]]
|
||||
|
||||
__table__ = create_view(
|
||||
name=__tablename__,
|
||||
selectable=select(
|
||||
*[
|
||||
col
|
||||
for col in inspect(PlayResultCalculated).columns
|
||||
if col.name != "potential"
|
||||
],
|
||||
func.max(PlayResultCalculated.potential).label("potential"),
|
||||
)
|
||||
.select_from(PlayResultCalculated)
|
||||
.group_by(PlayResultCalculated.song_id, PlayResultCalculated.rating_class)
|
||||
.order_by(PlayResultCalculated.potential.desc()),
|
||||
metadata=ModelViewBase.metadata,
|
||||
cascade_on_drop=False,
|
||||
)
|
||||
|
||||
|
||||
class CalculatedPotential(ModelViewBase, ReprHelper):
|
||||
__tablename__ = "calculated_potential"
|
||||
|
||||
b30: Mapped[float]
|
||||
|
||||
_select_bests_subquery = (
|
||||
select(PlayResultBest.potential.label("b30_sum"))
|
||||
.order_by(PlayResultBest.potential.desc())
|
||||
.limit(30)
|
||||
.subquery()
|
||||
)
|
||||
__table__ = create_view(
|
||||
name=__tablename__,
|
||||
selectable=select(func.avg(_select_bests_subquery.c.b30_sum).label("b30")),
|
||||
metadata=ModelViewBase.metadata,
|
||||
cascade_on_drop=False,
|
||||
)
|
85
src/arcaea_offline/database/models/song.py
Normal file
@ -0,0 +1,85 @@
|
||||
from datetime import datetime
|
||||
from decimal import Decimal
|
||||
from typing import TYPE_CHECKING, Optional
|
||||
|
||||
from sqlalchemy import Boolean, ForeignKey, Integer, Numeric, String, text
|
||||
from sqlalchemy.orm import Mapped, mapped_column, relationship
|
||||
|
||||
from ._base import ModelBase, ReprHelper
|
||||
from ._types import ForceTimezoneDateTime
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .difficulty import Difficulty
|
||||
from .pack import Pack
|
||||
|
||||
|
||||
class Song(ModelBase, ReprHelper):
|
||||
__tablename__ = "song"
|
||||
|
||||
pack_id: Mapped[str] = mapped_column(
|
||||
ForeignKey("pack.id", onupdate="CASCADE", ondelete="CASCADE")
|
||||
)
|
||||
pack: Mapped["Pack"] = relationship(back_populates="songs")
|
||||
difficulties: Mapped[list["Difficulty"]] = relationship(
|
||||
back_populates="song",
|
||||
cascade="all, delete",
|
||||
passive_deletes=True,
|
||||
)
|
||||
localized_entries: Mapped[list["SongLocalization"]] = relationship(
|
||||
back_populates="song",
|
||||
cascade="all, delete",
|
||||
passive_deletes=True,
|
||||
)
|
||||
|
||||
id: Mapped[str] = mapped_column(String, primary_key=True)
|
||||
idx: Mapped[Optional[int]] = mapped_column(Integer)
|
||||
title: Mapped[Optional[str]] = mapped_column(String, index=True)
|
||||
artist: Mapped[Optional[str]] = mapped_column(String, index=True)
|
||||
is_deleted: Mapped[bool] = mapped_column(
|
||||
Boolean, nullable=False, insert_default=False, server_default=text("0")
|
||||
)
|
||||
|
||||
added_at: Mapped[datetime] = mapped_column(
|
||||
ForceTimezoneDateTime, nullable=False, index=True
|
||||
)
|
||||
version: Mapped[Optional[str]] = mapped_column(String)
|
||||
|
||||
bpm: Mapped[Optional[str]] = mapped_column(String)
|
||||
bpm_base: Mapped[Optional[Decimal]] = mapped_column(Numeric(asdecimal=True))
|
||||
is_remote: Mapped[bool] = mapped_column(
|
||||
Boolean, nullable=False, insert_default=False, server_default=text("0")
|
||||
)
|
||||
is_unlockable_in_world: Mapped[bool] = mapped_column(
|
||||
Boolean, nullable=False, insert_default=False, server_default=text("0")
|
||||
)
|
||||
is_beyond_unlock_state_local: Mapped[bool] = mapped_column(
|
||||
Boolean, nullable=False, insert_default=False, server_default=text("0")
|
||||
)
|
||||
purchase: Mapped[Optional[str]] = mapped_column(String)
|
||||
category: Mapped[Optional[str]] = mapped_column(String)
|
||||
|
||||
side: Mapped[Optional[int]] = mapped_column(Integer)
|
||||
bg: Mapped[Optional[str]] = mapped_column(String)
|
||||
bg_inverse: Mapped[Optional[str]] = mapped_column(String)
|
||||
bg_day: Mapped[Optional[str]] = mapped_column(String)
|
||||
bg_night: Mapped[Optional[str]] = mapped_column(String)
|
||||
|
||||
source: Mapped[Optional[str]] = mapped_column(String)
|
||||
source_copyright: Mapped[Optional[str]] = mapped_column(String)
|
||||
|
||||
|
||||
class SongLocalization(ModelBase, ReprHelper):
|
||||
__tablename__ = "song_localization"
|
||||
|
||||
song: Mapped["Song"] = relationship(back_populates="localized_entries")
|
||||
id: Mapped[str] = mapped_column(
|
||||
ForeignKey("song.id", onupdate="CASCADE", ondelete="CASCADE"),
|
||||
primary_key=True,
|
||||
)
|
||||
|
||||
lang: Mapped[str] = mapped_column(String, primary_key=True)
|
||||
title: Mapped[Optional[str]] = mapped_column(String)
|
||||
source: Mapped[Optional[str]] = mapped_column(String)
|
||||
has_jacket: Mapped[bool] = mapped_column(
|
||||
Boolean, nullable=False, insert_default=False, server_default=text("0")
|
||||
)
|
@ -1,21 +0,0 @@
|
||||
from .config import ConfigBase, Property
|
||||
from .scores import (
|
||||
CalculatedPotential,
|
||||
Score,
|
||||
ScoreBest,
|
||||
ScoreCalculated,
|
||||
ScoresBase,
|
||||
ScoresViewBase,
|
||||
)
|
||||
from .songs import (
|
||||
Chart,
|
||||
ChartInfo,
|
||||
Difficulty,
|
||||
DifficultyLocalized,
|
||||
Pack,
|
||||
PackLocalized,
|
||||
Song,
|
||||
SongLocalized,
|
||||
SongsBase,
|
||||
SongsViewBase,
|
||||
)
|
@ -1,22 +0,0 @@
|
||||
# pylint: disable=too-few-public-methods
|
||||
|
||||
from sqlalchemy import TEXT
|
||||
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column
|
||||
|
||||
from .common import ReprHelper
|
||||
|
||||
__all__ = [
|
||||
"ConfigBase",
|
||||
"Property",
|
||||
]
|
||||
|
||||
|
||||
class ConfigBase(DeclarativeBase, ReprHelper):
|
||||
pass
|
||||
|
||||
|
||||
class Property(ConfigBase):
|
||||
__tablename__ = "properties"
|
||||
|
||||
key: Mapped[str] = mapped_column(TEXT(), primary_key=True)
|
||||
value: Mapped[str] = mapped_column(TEXT())
|
@ -1,188 +0,0 @@
|
||||
# pylint: disable=too-few-public-methods, duplicate-code
|
||||
|
||||
from typing import Optional
|
||||
|
||||
from sqlalchemy import TEXT, case, func, inspect, select, text
|
||||
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column
|
||||
from sqlalchemy_utils import create_view
|
||||
|
||||
from .common import ReprHelper
|
||||
from .songs import ChartInfo, Difficulty
|
||||
|
||||
__all__ = [
|
||||
"ScoresBase",
|
||||
"Score",
|
||||
"ScoresViewBase",
|
||||
"ScoreCalculated",
|
||||
"ScoreBest",
|
||||
"CalculatedPotential",
|
||||
]
|
||||
|
||||
|
||||
class ScoresBase(DeclarativeBase, ReprHelper):
|
||||
pass
|
||||
|
||||
|
||||
class Score(ScoresBase):
|
||||
__tablename__ = "scores"
|
||||
|
||||
id: Mapped[int] = mapped_column(autoincrement=True, primary_key=True)
|
||||
song_id: Mapped[str] = mapped_column(TEXT())
|
||||
rating_class: Mapped[int]
|
||||
score: Mapped[int]
|
||||
pure: Mapped[Optional[int]]
|
||||
far: Mapped[Optional[int]]
|
||||
lost: Mapped[Optional[int]]
|
||||
date: Mapped[Optional[int]]
|
||||
max_recall: Mapped[Optional[int]]
|
||||
modifier: Mapped[Optional[int]] = mapped_column(
|
||||
comment="0: NORMAL, 1: EASY, 2: HARD"
|
||||
)
|
||||
clear_type: Mapped[Optional[int]] = mapped_column(
|
||||
comment="0: TRACK LOST, 1: NORMAL CLEAR, 2: FULL RECALL, "
|
||||
"3: PURE MEMORY, 4: EASY CLEAR, 5: HARD CLEAR"
|
||||
)
|
||||
comment: Mapped[Optional[str]] = mapped_column(TEXT())
|
||||
|
||||
|
||||
# How to create an SQL View with SQLAlchemy?
|
||||
# https://stackoverflow.com/a/53253105/16484891
|
||||
# CC BY-SA 4.0
|
||||
|
||||
|
||||
class ScoresViewBase(DeclarativeBase, ReprHelper):
|
||||
pass
|
||||
|
||||
|
||||
class ScoreCalculated(ScoresViewBase):
|
||||
__tablename__ = "scores_calculated"
|
||||
|
||||
id: Mapped[int]
|
||||
song_id: Mapped[str]
|
||||
rating_class: Mapped[int]
|
||||
score: Mapped[int]
|
||||
pure: Mapped[Optional[int]]
|
||||
shiny_pure: Mapped[Optional[int]]
|
||||
far: Mapped[Optional[int]]
|
||||
lost: Mapped[Optional[int]]
|
||||
date: Mapped[Optional[int]]
|
||||
max_recall: Mapped[Optional[int]]
|
||||
modifier: Mapped[Optional[int]]
|
||||
clear_type: Mapped[Optional[int]]
|
||||
potential: Mapped[float]
|
||||
comment: Mapped[Optional[str]]
|
||||
|
||||
__table__ = create_view(
|
||||
name=__tablename__,
|
||||
selectable=select(
|
||||
Score.id,
|
||||
Difficulty.song_id,
|
||||
Difficulty.rating_class,
|
||||
Score.score,
|
||||
Score.pure,
|
||||
(
|
||||
case(
|
||||
(
|
||||
(
|
||||
ChartInfo.notes.is_not(None)
|
||||
& Score.pure.is_not(None)
|
||||
& Score.far.is_not(None)
|
||||
& (ChartInfo.notes != 0)
|
||||
),
|
||||
Score.score
|
||||
- func.floor(
|
||||
(Score.pure * 10000000.0 / ChartInfo.notes)
|
||||
+ (Score.far * 0.5 * 10000000.0 / ChartInfo.notes)
|
||||
),
|
||||
),
|
||||
else_=text("NULL"),
|
||||
)
|
||||
).label("shiny_pure"),
|
||||
Score.far,
|
||||
Score.lost,
|
||||
Score.date,
|
||||
Score.max_recall,
|
||||
Score.modifier,
|
||||
Score.clear_type,
|
||||
case(
|
||||
(Score.score >= 10000000, ChartInfo.constant / 10.0 + 2),
|
||||
(
|
||||
Score.score >= 9800000,
|
||||
ChartInfo.constant / 10.0 + 1 + (Score.score - 9800000) / 200000.0,
|
||||
),
|
||||
else_=func.max(
|
||||
(ChartInfo.constant / 10.0) + (Score.score - 9500000) / 300000.0,
|
||||
0,
|
||||
),
|
||||
).label("potential"),
|
||||
Score.comment,
|
||||
)
|
||||
.select_from(Difficulty)
|
||||
.join(
|
||||
ChartInfo,
|
||||
(Difficulty.song_id == ChartInfo.song_id)
|
||||
& (Difficulty.rating_class == ChartInfo.rating_class),
|
||||
)
|
||||
.join(
|
||||
Score,
|
||||
(Difficulty.song_id == Score.song_id)
|
||||
& (Difficulty.rating_class == Score.rating_class),
|
||||
),
|
||||
metadata=ScoresViewBase.metadata,
|
||||
cascade_on_drop=False,
|
||||
)
|
||||
|
||||
|
||||
class ScoreBest(ScoresViewBase):
|
||||
__tablename__ = "scores_best"
|
||||
|
||||
id: Mapped[int]
|
||||
song_id: Mapped[str]
|
||||
rating_class: Mapped[int]
|
||||
score: Mapped[int]
|
||||
pure: Mapped[Optional[int]]
|
||||
shiny_pure: Mapped[Optional[int]]
|
||||
far: Mapped[Optional[int]]
|
||||
lost: Mapped[Optional[int]]
|
||||
date: Mapped[Optional[int]]
|
||||
max_recall: Mapped[Optional[int]]
|
||||
modifier: Mapped[Optional[int]]
|
||||
clear_type: Mapped[Optional[int]]
|
||||
potential: Mapped[float]
|
||||
comment: Mapped[Optional[str]]
|
||||
|
||||
__table__ = create_view(
|
||||
name=__tablename__,
|
||||
selectable=select(
|
||||
*[
|
||||
col
|
||||
for col in inspect(ScoreCalculated).columns
|
||||
if col.name != "potential"
|
||||
],
|
||||
func.max(ScoreCalculated.potential).label("potential"),
|
||||
)
|
||||
.select_from(ScoreCalculated)
|
||||
.group_by(ScoreCalculated.song_id, ScoreCalculated.rating_class)
|
||||
.order_by(ScoreCalculated.potential.desc()),
|
||||
metadata=ScoresViewBase.metadata,
|
||||
cascade_on_drop=False,
|
||||
)
|
||||
|
||||
|
||||
class CalculatedPotential(ScoresViewBase):
|
||||
__tablename__ = "calculated_potential"
|
||||
|
||||
b30: Mapped[float]
|
||||
|
||||
_select_bests_subquery = (
|
||||
select(ScoreBest.potential.label("b30_sum"))
|
||||
.order_by(ScoreBest.potential.desc())
|
||||
.limit(30)
|
||||
.subquery()
|
||||
)
|
||||
__table__ = create_view(
|
||||
name=__tablename__,
|
||||
selectable=select(func.avg(_select_bests_subquery.c.b30_sum).label("b30")),
|
||||
metadata=ScoresViewBase.metadata,
|
||||
cascade_on_drop=False,
|
||||
)
|
@ -1,241 +0,0 @@
|
||||
# pylint: disable=too-few-public-methods, duplicate-code
|
||||
|
||||
from typing import Optional
|
||||
|
||||
from sqlalchemy import TEXT, ForeignKey, func, select
|
||||
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column
|
||||
from sqlalchemy_utils import create_view
|
||||
|
||||
from .common import ReprHelper
|
||||
|
||||
__all__ = [
|
||||
"SongsBase",
|
||||
"Pack",
|
||||
"PackLocalized",
|
||||
"Song",
|
||||
"SongLocalized",
|
||||
"Difficulty",
|
||||
"DifficultyLocalized",
|
||||
"ChartInfo",
|
||||
"SongsViewBase",
|
||||
"Chart",
|
||||
]
|
||||
|
||||
|
||||
class SongsBase(DeclarativeBase, ReprHelper):
|
||||
pass
|
||||
|
||||
|
||||
class Pack(SongsBase):
|
||||
__tablename__ = "packs"
|
||||
|
||||
id: Mapped[str] = mapped_column(TEXT(), primary_key=True)
|
||||
name: Mapped[str] = mapped_column(TEXT())
|
||||
description: Mapped[Optional[str]] = mapped_column(TEXT())
|
||||
|
||||
|
||||
class PackLocalized(SongsBase):
|
||||
__tablename__ = "packs_localized"
|
||||
|
||||
id: Mapped[str] = mapped_column(ForeignKey("packs.id"), primary_key=True)
|
||||
name_ja: Mapped[Optional[str]] = mapped_column(TEXT())
|
||||
name_ko: Mapped[Optional[str]] = mapped_column(TEXT())
|
||||
name_zh_hans: Mapped[Optional[str]] = mapped_column(TEXT())
|
||||
name_zh_hant: Mapped[Optional[str]] = mapped_column(TEXT())
|
||||
description_ja: Mapped[Optional[str]] = mapped_column(TEXT())
|
||||
description_ko: Mapped[Optional[str]] = mapped_column(TEXT())
|
||||
description_zh_hans: Mapped[Optional[str]] = mapped_column(TEXT())
|
||||
description_zh_hant: Mapped[Optional[str]] = mapped_column(TEXT())
|
||||
|
||||
|
||||
class Song(SongsBase):
|
||||
__tablename__ = "songs"
|
||||
|
||||
idx: Mapped[int]
|
||||
id: Mapped[str] = mapped_column(TEXT(), primary_key=True)
|
||||
title: Mapped[str] = mapped_column(TEXT())
|
||||
artist: Mapped[str] = mapped_column(TEXT())
|
||||
set: Mapped[str] = mapped_column(TEXT())
|
||||
bpm: Mapped[Optional[str]] = mapped_column(TEXT())
|
||||
bpm_base: Mapped[Optional[float]]
|
||||
audio_preview: Mapped[Optional[int]]
|
||||
audio_preview_end: Mapped[Optional[int]]
|
||||
side: Mapped[Optional[int]]
|
||||
version: Mapped[Optional[str]] = mapped_column(TEXT())
|
||||
date: Mapped[Optional[int]]
|
||||
bg: Mapped[Optional[str]] = mapped_column(TEXT())
|
||||
bg_inverse: Mapped[Optional[str]] = mapped_column(TEXT())
|
||||
bg_day: Mapped[Optional[str]] = mapped_column(TEXT())
|
||||
bg_night: Mapped[Optional[str]] = mapped_column(TEXT())
|
||||
source: Mapped[Optional[str]] = mapped_column(TEXT())
|
||||
source_copyright: Mapped[Optional[str]] = mapped_column(TEXT())
|
||||
|
||||
|
||||
class SongLocalized(SongsBase):
|
||||
__tablename__ = "songs_localized"
|
||||
|
||||
id: Mapped[str] = mapped_column(ForeignKey("songs.id"), primary_key=True)
|
||||
title_ja: Mapped[Optional[str]] = mapped_column(TEXT())
|
||||
title_ko: Mapped[Optional[str]] = mapped_column(TEXT())
|
||||
title_zh_hans: Mapped[Optional[str]] = mapped_column(TEXT())
|
||||
title_zh_hant: Mapped[Optional[str]] = mapped_column(TEXT())
|
||||
search_title_ja: Mapped[Optional[str]] = mapped_column(TEXT(), comment="JSON array")
|
||||
search_title_ko: Mapped[Optional[str]] = mapped_column(TEXT(), comment="JSON array")
|
||||
search_title_zh_hans: Mapped[Optional[str]] = mapped_column(
|
||||
TEXT(), comment="JSON array"
|
||||
)
|
||||
search_title_zh_hant: Mapped[Optional[str]] = mapped_column(
|
||||
TEXT(), comment="JSON array"
|
||||
)
|
||||
search_artist_ja: Mapped[Optional[str]] = mapped_column(
|
||||
TEXT(), comment="JSON array"
|
||||
)
|
||||
search_artist_ko: Mapped[Optional[str]] = mapped_column(
|
||||
TEXT(), comment="JSON array"
|
||||
)
|
||||
search_artist_zh_hans: Mapped[Optional[str]] = mapped_column(
|
||||
TEXT(), comment="JSON array"
|
||||
)
|
||||
search_artist_zh_hant: Mapped[Optional[str]] = mapped_column(
|
||||
TEXT(), comment="JSON array"
|
||||
)
|
||||
source_ja: Mapped[Optional[str]] = mapped_column(TEXT())
|
||||
source_ko: Mapped[Optional[str]] = mapped_column(TEXT())
|
||||
source_zh_hans: Mapped[Optional[str]] = mapped_column(TEXT())
|
||||
source_zh_hant: Mapped[Optional[str]] = mapped_column(TEXT())
|
||||
|
||||
|
||||
class Difficulty(SongsBase):
|
||||
__tablename__ = "difficulties"
|
||||
|
||||
song_id: Mapped[str] = mapped_column(TEXT(), primary_key=True)
|
||||
rating_class: Mapped[int] = mapped_column(primary_key=True)
|
||||
rating: Mapped[int]
|
||||
rating_plus: Mapped[bool]
|
||||
chart_designer: Mapped[Optional[str]] = mapped_column(TEXT())
|
||||
jacket_desginer: Mapped[Optional[str]] = mapped_column(TEXT())
|
||||
audio_override: Mapped[bool]
|
||||
jacket_override: Mapped[bool]
|
||||
jacket_night: Mapped[Optional[str]] = mapped_column(TEXT())
|
||||
title: Mapped[Optional[str]] = mapped_column(TEXT())
|
||||
artist: Mapped[Optional[str]] = mapped_column(TEXT())
|
||||
bg: Mapped[Optional[str]] = mapped_column(TEXT())
|
||||
bg_inverse: Mapped[Optional[str]] = mapped_column(TEXT())
|
||||
bpm: Mapped[Optional[str]] = mapped_column(TEXT())
|
||||
bpm_base: Mapped[Optional[float]]
|
||||
version: Mapped[Optional[str]] = mapped_column(TEXT())
|
||||
date: Mapped[Optional[int]]
|
||||
|
||||
|
||||
class DifficultyLocalized(SongsBase):
|
||||
__tablename__ = "difficulties_localized"
|
||||
|
||||
song_id: Mapped[str] = mapped_column(
|
||||
ForeignKey("difficulties.song_id"), primary_key=True
|
||||
)
|
||||
rating_class: Mapped[str] = mapped_column(
|
||||
ForeignKey("difficulties.rating_class"), primary_key=True
|
||||
)
|
||||
title_ja: Mapped[Optional[str]] = mapped_column(TEXT())
|
||||
title_ko: Mapped[Optional[str]] = mapped_column(TEXT())
|
||||
title_zh_hans: Mapped[Optional[str]] = mapped_column(TEXT())
|
||||
title_zh_hant: Mapped[Optional[str]] = mapped_column(TEXT())
|
||||
artist_ja: Mapped[Optional[str]] = mapped_column(TEXT())
|
||||
artist_ko: Mapped[Optional[str]] = mapped_column(TEXT())
|
||||
artist_zh_hans: Mapped[Optional[str]] = mapped_column(TEXT())
|
||||
artist_zh_hant: Mapped[Optional[str]] = mapped_column(TEXT())
|
||||
|
||||
|
||||
class ChartInfo(SongsBase):
|
||||
__tablename__ = "charts_info"
|
||||
|
||||
song_id: Mapped[str] = mapped_column(
|
||||
ForeignKey("difficulties.song_id"), primary_key=True
|
||||
)
|
||||
rating_class: Mapped[str] = mapped_column(
|
||||
ForeignKey("difficulties.rating_class"), primary_key=True
|
||||
)
|
||||
constant: Mapped[int] = mapped_column(
|
||||
comment="real_constant * 10. For example, Crimson Throne [FTR] is 10.4, then store 104."
|
||||
)
|
||||
notes: Mapped[Optional[int]]
|
||||
|
||||
|
||||
class SongsViewBase(DeclarativeBase, ReprHelper):
|
||||
pass
|
||||
|
||||
|
||||
class Chart(SongsViewBase):
|
||||
__tablename__ = "charts"
|
||||
|
||||
song_idx: Mapped[int]
|
||||
song_id: Mapped[str]
|
||||
rating_class: Mapped[int]
|
||||
rating: Mapped[int]
|
||||
rating_plus: Mapped[bool]
|
||||
title: Mapped[str]
|
||||
artist: Mapped[str]
|
||||
set: Mapped[str]
|
||||
bpm: Mapped[Optional[str]]
|
||||
bpm_base: Mapped[Optional[float]]
|
||||
audio_preview: Mapped[Optional[int]]
|
||||
audio_preview_end: Mapped[Optional[int]]
|
||||
side: Mapped[Optional[int]]
|
||||
version: Mapped[Optional[str]]
|
||||
date: Mapped[Optional[int]]
|
||||
bg: Mapped[Optional[str]]
|
||||
bg_inverse: Mapped[Optional[str]]
|
||||
bg_day: Mapped[Optional[str]]
|
||||
bg_night: Mapped[Optional[str]]
|
||||
source: Mapped[Optional[str]]
|
||||
source_copyright: Mapped[Optional[str]]
|
||||
chart_designer: Mapped[Optional[str]]
|
||||
jacket_desginer: Mapped[Optional[str]]
|
||||
audio_override: Mapped[bool]
|
||||
jacket_override: Mapped[bool]
|
||||
jacket_night: Mapped[Optional[str]]
|
||||
constant: Mapped[int]
|
||||
notes: Mapped[Optional[int]]
|
||||
|
||||
__table__ = create_view(
|
||||
name=__tablename__,
|
||||
selectable=select(
|
||||
Song.idx.label("song_idx"),
|
||||
Difficulty.song_id,
|
||||
Difficulty.rating_class,
|
||||
Difficulty.rating,
|
||||
Difficulty.rating_plus,
|
||||
func.coalesce(Difficulty.title, Song.title).label("title"),
|
||||
func.coalesce(Difficulty.artist, Song.artist).label("artist"),
|
||||
Song.set,
|
||||
func.coalesce(Difficulty.bpm, Song.bpm).label("bpm"),
|
||||
func.coalesce(Difficulty.bpm_base, Song.bpm_base).label("bpm_base"),
|
||||
Song.audio_preview,
|
||||
Song.audio_preview_end,
|
||||
Song.side,
|
||||
func.coalesce(Difficulty.version, Song.version).label("version"),
|
||||
func.coalesce(Difficulty.date, Song.date).label("date"),
|
||||
func.coalesce(Difficulty.bg, Song.bg).label("bg"),
|
||||
func.coalesce(Difficulty.bg_inverse, Song.bg_inverse).label("bg_inverse"),
|
||||
Song.bg_day,
|
||||
Song.bg_night,
|
||||
Song.source,
|
||||
Song.source_copyright,
|
||||
Difficulty.chart_designer,
|
||||
Difficulty.jacket_desginer,
|
||||
Difficulty.audio_override,
|
||||
Difficulty.jacket_override,
|
||||
Difficulty.jacket_night,
|
||||
ChartInfo.constant,
|
||||
ChartInfo.notes,
|
||||
)
|
||||
.select_from(Difficulty)
|
||||
.join(
|
||||
ChartInfo,
|
||||
(Difficulty.song_id == ChartInfo.song_id)
|
||||
& (Difficulty.rating_class == ChartInfo.rating_class),
|
||||
)
|
||||
.join(Song, Difficulty.song_id == Song.id),
|
||||
metadata=SongsViewBase.metadata,
|
||||
cascade_on_drop=False,
|
||||
)
|
@ -1 +0,0 @@
|
||||
from .api_data import AndrealImageGeneratorApiDataConverter
|
14
src/arcaea_offline/external/andreal/account.py
vendored
@ -1,14 +0,0 @@
|
||||
class AndrealImageGeneratorAccount:
|
||||
def __init__(
|
||||
self,
|
||||
name: str = "Player",
|
||||
code: int = 123456789,
|
||||
rating: int = -1,
|
||||
character: int = 5,
|
||||
character_uncapped: bool = False,
|
||||
):
|
||||
self.name = name
|
||||
self.code = code
|
||||
self.rating = rating
|
||||
self.character = character
|
||||
self.character_uncapped = character_uncapped
|
98
src/arcaea_offline/external/andreal/api_data.py
vendored
@ -1,98 +0,0 @@
|
||||
from typing import Optional, Union
|
||||
|
||||
from sqlalchemy import select
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from ...models import CalculatedPotential, ScoreBest, ScoreCalculated
|
||||
from .account import AndrealImageGeneratorAccount
|
||||
|
||||
|
||||
class AndrealImageGeneratorApiDataConverter:
|
||||
def __init__(
|
||||
self,
|
||||
session: Session,
|
||||
account: AndrealImageGeneratorAccount = AndrealImageGeneratorAccount(),
|
||||
):
|
||||
self.session = session
|
||||
self.account = account
|
||||
|
||||
def account_info(self):
|
||||
return {
|
||||
"code": self.account.code,
|
||||
"name": self.account.name,
|
||||
"is_char_uncapped": self.account.character_uncapped,
|
||||
"rating": self.account.rating,
|
||||
"character": self.account.character,
|
||||
}
|
||||
|
||||
def score(self, score: Union[ScoreCalculated, ScoreBest]):
|
||||
return {
|
||||
"score": score.score,
|
||||
"health": 75,
|
||||
"rating": score.potential,
|
||||
"song_id": score.song_id,
|
||||
"modifier": score.modifier or 0,
|
||||
"difficulty": score.rating_class,
|
||||
"clear_type": score.clear_type or 1,
|
||||
"best_clear_type": score.clear_type or 1,
|
||||
"time_played": score.date * 1000 if score.date else 0,
|
||||
"near_count": score.far,
|
||||
"miss_count": score.lost,
|
||||
"perfect_count": score.pure,
|
||||
"shiny_perfect_count": score.shiny_pure,
|
||||
}
|
||||
|
||||
def user_info(self, score: Optional[ScoreCalculated] = None):
|
||||
if not score:
|
||||
score = self.session.scalar(
|
||||
select(ScoreCalculated).order_by(ScoreCalculated.date.desc()).limit(1)
|
||||
)
|
||||
if not score:
|
||||
raise ValueError("No score available.")
|
||||
|
||||
return {
|
||||
"content": {
|
||||
"account_info": self.account_info(),
|
||||
"recent_score": [self.score(score)],
|
||||
}
|
||||
}
|
||||
|
||||
def user_best(self, song_id: str, rating_class: int):
|
||||
score = self.session.scalar(
|
||||
select(ScoreBest).where(
|
||||
(ScoreBest.song_id == song_id)
|
||||
& (ScoreBest.rating_class == rating_class)
|
||||
)
|
||||
)
|
||||
if not score:
|
||||
raise ValueError("No score available.")
|
||||
|
||||
return {
|
||||
"content": {
|
||||
"account_info": self.account_info(),
|
||||
"record": self.score(score),
|
||||
}
|
||||
}
|
||||
|
||||
def user_best30(self):
|
||||
scores = list(
|
||||
self.session.scalars(
|
||||
select(ScoreBest).order_by(ScoreBest.potential.desc()).limit(40)
|
||||
)
|
||||
)
|
||||
if not scores:
|
||||
raise ValueError("No score available.")
|
||||
best30_avg = self.session.scalar(select(CalculatedPotential.b30))
|
||||
|
||||
best30_overflow = (
|
||||
[self.score(score) for score in scores[30:40]] if len(scores) > 30 else []
|
||||
)
|
||||
|
||||
return {
|
||||
"content": {
|
||||
"account_info": self.account_info(),
|
||||
"best30_avg": best30_avg,
|
||||
"best30_list": [self.score(score) for score in scores[:30]],
|
||||
"best30_overflow": best30_overflow,
|
||||
}
|
||||
}
|
@ -1,4 +0,0 @@
|
||||
from .online import ArcaeaOnlineParser
|
||||
from .packlist import PacklistParser
|
||||
from .songlist import SonglistDifficultiesParser, SonglistParser
|
||||
from .st3 import St3ScoreParser
|
99
src/arcaea_offline/external/arcaea/common.py
vendored
@ -1,99 +0,0 @@
|
||||
import contextlib
|
||||
import json
|
||||
import math
|
||||
import time
|
||||
from os import PathLike
|
||||
from typing import Any, List, Optional, Union
|
||||
|
||||
from sqlalchemy.orm import DeclarativeBase, Session
|
||||
|
||||
|
||||
def fix_timestamp(timestamp: int) -> Union[int, None]:
|
||||
"""
|
||||
Some of the `date` column in st3 are strangely truncated. For example,
|
||||
a `1670283375` may be truncated to `167028`, even `1`. Yes, a single `1`.
|
||||
|
||||
To properly handle this situation, we check the timestamp's digits.
|
||||
If `digits < 5`, we treat this timestamp as a `None`. Otherwise, we try to
|
||||
fix the timestamp.
|
||||
|
||||
:param timestamp: a POSIX timestamp
|
||||
:return: `None` if the timestamp's digits < 5, otherwise a fixed POSIX timestamp
|
||||
"""
|
||||
# find digit length from https://stackoverflow.com/a/2189827/16484891
|
||||
# CC BY-SA 2.5
|
||||
# this might give incorrect result when timestamp > 999999999999997,
|
||||
# see https://stackoverflow.com/a/28883802/16484891 (CC BY-SA 4.0).
|
||||
# but that's way too later than 9999-12-31 23:59:59, 253402271999,
|
||||
# I don't think Arcaea would still be an active updated game by then.
|
||||
# so don't mind those small issues, just use this.
|
||||
digits = int(math.log10(abs(timestamp))) + 1 if timestamp != 0 else 1
|
||||
if digits < 5:
|
||||
return None
|
||||
timestamp_str = str(timestamp)
|
||||
current_timestamp_digits = int(math.log10(int(time.time()))) + 1
|
||||
timestamp_str = timestamp_str.ljust(current_timestamp_digits, "0")
|
||||
return int(timestamp_str, 10)
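A small sketch of how this (now removed) helper behaves, assuming it runs while POSIX timestamps still have 10 digits:

assert fix_timestamp(1670283375) == 1670283375  # already sane, left unchanged
assert fix_timestamp(167028) == 1670280000      # truncated value padded back to 10 digits
assert fix_timestamp(1) is None                 # fewer than 5 digits: treated as unknown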
|
||||
|
||||
|
||||
def to_db_value(val: Any) -> Any:
|
||||
if not val:
|
||||
return None
|
||||
return json.dumps(val, ensure_ascii=False) if isinstance(val, list) else val
|
||||
|
||||
|
||||
def is_localized(item: dict, key: str, append_localized: bool = True):
|
||||
item_key = f"{key}_localized" if append_localized else key
|
||||
subitem: Optional[dict] = item.get(item_key)
|
||||
return subitem and (
|
||||
subitem.get("ja")
|
||||
or subitem.get("ko")
|
||||
or subitem.get("zh-Hant")
|
||||
or subitem.get("zh-Hans")
|
||||
)
|
||||
|
||||
|
||||
def set_model_localized_attrs(
|
||||
model: DeclarativeBase, item: dict, model_key: str, item_key: Optional[str] = None
|
||||
):
|
||||
if item_key is None:
|
||||
item_key = f"{model_key}_localized"
|
||||
subitem: dict = item.get(item_key, {})
|
||||
if not subitem:
|
||||
return
|
||||
setattr(model, f"{model_key}_ja", to_db_value(subitem.get("ja")))
|
||||
setattr(model, f"{model_key}_ko", to_db_value(subitem.get("ko")))
|
||||
setattr(model, f"{model_key}_zh_hans", to_db_value(subitem.get("zh-Hans")))
|
||||
setattr(model, f"{model_key}_zh_hant", to_db_value(subitem.get("zh-Hant")))
|
||||
|
||||
|
||||
class ArcaeaParser:
|
||||
def __init__(self, filepath: Union[str, bytes, PathLike]):
|
||||
self.filepath = filepath
|
||||
|
||||
def read_file_text(self):
|
||||
file_handle = None
|
||||
|
||||
with contextlib.suppress(TypeError):
|
||||
# original open
|
||||
file_handle = open(self.filepath, "r", encoding="utf-8")
|
||||
|
||||
if file_handle is None:
|
||||
try:
|
||||
# or maybe a `pathlib.Path` subset
|
||||
# or an `importlib.resources.abc.Traversable` like object
|
||||
# e.g. `zipfile.Path`
|
||||
file_handle = self.filepath.open(mode="r", encoding="utf-8") # type: ignore
|
||||
except Exception as e:
|
||||
raise ValueError("Invalid `filepath`.") from e
|
||||
|
||||
with file_handle:
|
||||
return file_handle.read()
|
||||
|
||||
def parse(self) -> List[DeclarativeBase]:
|
||||
raise NotImplementedError()
|
||||
|
||||
def write_database(self, session: Session):
|
||||
results = self.parse()
|
||||
for result in results:
|
||||
session.merge(result)
|
72
src/arcaea_offline/external/arcaea/online.py
vendored
@ -1,72 +0,0 @@
|
||||
import json
|
||||
import logging
|
||||
from datetime import datetime
|
||||
from typing import Dict, List, Literal, Optional, TypedDict
|
||||
|
||||
from ...models import Score
|
||||
from .common import ArcaeaParser, fix_timestamp
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class TWebApiRatingMeScoreItem(TypedDict):
|
||||
song_id: str
|
||||
difficulty: int
|
||||
modifier: int
|
||||
rating: float
|
||||
score: int
|
||||
perfect_count: int
|
||||
near_count: int
|
||||
miss_count: int
|
||||
clear_type: int
|
||||
title: Dict[Literal["ja", "en"], str]
|
||||
artist: str
|
||||
time_played: int
|
||||
bg: str
|
||||
|
||||
|
||||
class TWebApiRatingMeValue(TypedDict):
|
||||
best_rated_scores: List[TWebApiRatingMeScoreItem]
|
||||
recent_rated_scores: List[TWebApiRatingMeScoreItem]
|
||||
|
||||
|
||||
class TWebApiRatingMeResult(TypedDict):
|
||||
success: bool
|
||||
error_code: Optional[int]
|
||||
value: Optional[TWebApiRatingMeValue]
|
||||
|
||||
|
||||
class ArcaeaOnlineParser(ArcaeaParser):
|
||||
def parse(self) -> List[Score]:
|
||||
api_result_root: TWebApiRatingMeResult = json.loads(self.read_file_text())
|
||||
|
||||
api_result_value = api_result_root.get("value")
|
||||
if not api_result_value:
|
||||
error_code = api_result_root.get("error_code")
|
||||
raise ValueError(f"Cannot parse API result, error code {error_code}")
|
||||
|
||||
best30_score_items = api_result_value.get("best_rated_scores", [])
|
||||
recent_score_items = api_result_value.get("recent_rated_scores", [])
|
||||
score_items = best30_score_items + recent_score_items
|
||||
|
||||
date_text = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
|
||||
|
||||
results: List[Score] = []
|
||||
for score_item in score_items:
|
||||
score = Score()
|
||||
score.song_id = score_item["song_id"]
|
||||
score.rating_class = score_item["difficulty"]
|
||||
score.score = score_item["score"]
|
||||
score.pure = score_item["perfect_count"]
|
||||
score.far = score_item["near_count"]
|
||||
score.lost = score_item["miss_count"]
|
||||
score.date = fix_timestamp(int(score_item["time_played"] / 1000))
|
||||
score.modifier = score_item["modifier"]
|
||||
score.clear_type = score_item["clear_type"]
|
||||
|
||||
if score.lost == 0:
|
||||
score.max_recall = score.pure + score.far
|
||||
|
||||
score.comment = f"Parsed from web API at {date_text}"
|
||||
results.append(score)
|
||||
return results
|
29
src/arcaea_offline/external/arcaea/packlist.py
vendored
@ -1,29 +0,0 @@
|
||||
import json
|
||||
from typing import List, Union
|
||||
|
||||
from ...models.songs import Pack, PackLocalized
|
||||
from .common import ArcaeaParser, is_localized, set_model_localized_attrs
|
||||
|
||||
|
||||
class PacklistParser(ArcaeaParser):
|
||||
def parse(self) -> List[Union[Pack, PackLocalized]]:
|
||||
packlist_json_root = json.loads(self.read_file_text())
|
||||
|
||||
packlist_json = packlist_json_root["packs"]
|
||||
results: List[Union[Pack, PackLocalized]] = [
|
||||
Pack(id="single", name="Memory Archive")
|
||||
]
|
||||
for item in packlist_json:
|
||||
pack = Pack()
|
||||
pack.id = item["id"]
|
||||
pack.name = item["name_localized"]["en"]
|
||||
pack.description = item["description_localized"]["en"] or None
|
||||
results.append(pack)
|
||||
|
||||
if is_localized(item, "name") or is_localized(item, "description"):
|
||||
pack_localized = PackLocalized(id=pack.id)
|
||||
set_model_localized_attrs(pack_localized, item, "name")
|
||||
set_model_localized_attrs(pack_localized, item, "description")
|
||||
results.append(pack_localized)
|
||||
|
||||
return results
|
src/arcaea_offline/external/arcaea/songlist.py (deleted, 101 lines)
@@ -1,101 +0,0 @@
import json
from typing import List, Union

from ...models.songs import Difficulty, DifficultyLocalized, Song, SongLocalized
from .common import ArcaeaParser, is_localized, set_model_localized_attrs, to_db_value


class SonglistParser(ArcaeaParser):
    def parse(
        self,
    ) -> List[Union[Song, SongLocalized, Difficulty, DifficultyLocalized]]:
        songlist_json_root = json.loads(self.read_file_text())

        songlist_json = songlist_json_root["songs"]
        results = []
        for item in songlist_json:
            song = Song()
            song.idx = item["idx"]
            song.id = item["id"]
            song.title = item["title_localized"]["en"]
            song.artist = item["artist"]
            song.bpm = item["bpm"]
            song.bpm_base = item["bpm_base"]
            song.set = item["set"]
            song.audio_preview = item["audioPreview"]
            song.audio_preview_end = item["audioPreviewEnd"]
            song.side = item["side"]
            song.version = item["version"]
            song.date = item["date"]
            song.bg = to_db_value(item.get("bg"))
            song.bg_inverse = to_db_value(item.get("bg_inverse"))
            if item.get("bg_daynight"):
                song.bg_day = to_db_value(item["bg_daynight"].get("day"))
                song.bg_night = to_db_value(item["bg_daynight"].get("night"))
            if item.get("source_localized"):
                song.source = item["source_localized"]["en"]
            song.source_copyright = to_db_value(item.get("source_copyright"))
            results.append(song)

            if (
                is_localized(item, "title")
                or is_localized(item, "search_title", append_localized=False)
                or is_localized(item, "search_artist", append_localized=False)
                or is_localized(item, "source")
            ):
                song_localized = SongLocalized(id=song.id)
                set_model_localized_attrs(song_localized, item, "title")
                set_model_localized_attrs(
                    song_localized, item, "search_title", "search_title"
                )
                set_model_localized_attrs(
                    song_localized, item, "search_artist", "search_artist"
                )
                set_model_localized_attrs(song_localized, item, "source")
                results.append(song_localized)

        return results


class SonglistDifficultiesParser(ArcaeaParser):
    def parse(self) -> List[Union[Difficulty, DifficultyLocalized]]:
        songlist_json_root = json.loads(self.read_file_text())

        songlist_json = songlist_json_root["songs"]
        results = []
        for song_item in songlist_json:
            if not song_item.get("difficulties"):
                continue

            for item in song_item["difficulties"]:
                if item["rating"] == 0:
                    continue

                chart = Difficulty(song_id=song_item["id"])
                chart.rating_class = item["ratingClass"]
                chart.rating = item["rating"]
                chart.rating_plus = item.get("ratingPlus") or False
                chart.chart_designer = item["chartDesigner"]
                chart.jacket_desginer = item.get("jacketDesigner") or None
                chart.audio_override = item.get("audioOverride") or False
                chart.jacket_override = item.get("jacketOverride") or False
                chart.jacket_night = item.get("jacketNight") or None
                chart.title = item.get("title_localized", {}).get("en") or None
                chart.artist = item.get("artist") or None
                chart.bg = item.get("bg") or None
                chart.bg_inverse = item.get("bg_inverse")
                chart.bpm = item.get("bpm") or None
                chart.bpm_base = item.get("bpm_base") or None
                chart.version = item.get("version") or None
                chart.date = item.get("date") or None
                results.append(chart)

                if is_localized(item, "title") or is_localized(item, "artist"):
                    chart_localized = DifficultyLocalized(
                        song_id=chart.song_id, rating_class=chart.rating_class
                    )
                    set_model_localized_attrs(chart_localized, item, "title")
                    set_model_localized_attrs(chart_localized, item, "artist")
                    results.append(chart_localized)

        return results
src/arcaea_offline/external/arcaea/st3.py (deleted, 73 lines)
@@ -1,73 +0,0 @@
import logging
import sqlite3
from typing import List

from sqlalchemy import select
from sqlalchemy.orm import Session

from ...models.scores import Score
from .common import ArcaeaParser, fix_timestamp

logger = logging.getLogger(__name__)


class St3ScoreParser(ArcaeaParser):
    def parse(self) -> List[Score]:
        items = []
        with sqlite3.connect(self.filepath) as st3_conn:
            cursor = st3_conn.cursor()
            db_scores = cursor.execute(
                "SELECT songId, songDifficulty, score, perfectCount, nearCount, missCount, "
                "date, modifier FROM scores"
            ).fetchall()
            for (
                song_id,
                rating_class,
                score,
                pure,
                far,
                lost,
                date,
                modifier,
            ) in db_scores:
                clear_type = cursor.execute(
                    "SELECT clearType FROM cleartypes WHERE songId = ? AND songDifficulty = ?",
                    (song_id, rating_class),
                ).fetchone()[0]

                items.append(
                    Score(
                        song_id=song_id,
                        rating_class=rating_class,
                        score=score,
                        pure=pure,
                        far=far,
                        lost=lost,
                        date=fix_timestamp(date),
                        modifier=modifier,
                        clear_type=clear_type,
                        comment="Parsed from st3",
                    )
                )

        return items

    def write_database(self, session: Session, *, skip_duplicate=True):
        parsed_scores = self.parse()
        for parsed_score in parsed_scores:
            query_score = session.scalar(
                select(Score).where(
                    (Score.song_id == parsed_score.song_id)
                    & (Score.rating_class == parsed_score.rating_class)
                    & (Score.score == parsed_score.score)
                )
            )

            if query_score and skip_duplicate:
                logger.info(
                    "%r skipped because potential duplicate item %r found.",
                    parsed_score,
                    query_score,
                )
                continue
            session.add(parsed_score)
@@ -1 +0,0 @@
from .arcsong_db import ArcsongDbParser

@@ -1,34 +0,0 @@
import sqlite3
from typing import List

from sqlalchemy.orm import Session

from ...models.songs import ChartInfo


class ArcsongDbParser:
    def __init__(self, filepath):
        self.filepath = filepath

    def parse(self) -> List[ChartInfo]:
        results = []
        with sqlite3.connect(self.filepath) as conn:
            cursor = conn.cursor()
            arcsong_db_results = cursor.execute(
                "SELECT song_id, rating_class, rating, note FROM charts"
            )
            for result in arcsong_db_results:
                chart = ChartInfo(
                    song_id=result[0],
                    rating_class=result[1],
                    constant=result[2],
                    notes=result[3] or None,
                )
                results.append(chart)

        return results

    def write_database(self, session: Session):
        results = self.parse()
        for result in results:
            session.merge(result)
src/arcaea_offline/external/arcsong/arcsong_json.py (deleted, 157 lines)
@@ -1,157 +0,0 @@
import logging
import re
from typing import List, Optional, TypedDict

from sqlalchemy import func, select
from sqlalchemy.orm import Session

from ...models import (
    ChartInfo,
    Difficulty,
    DifficultyLocalized,
    Pack,
    Song,
    SongLocalized,
)

logger = logging.getLogger(__name__)


class TArcSongJsonDifficultyItem(TypedDict):
    name_en: str
    name_jp: str
    artist: str
    bpm: str
    bpm_base: float
    set: str
    set_friendly: str
    time: int
    side: int
    world_unlock: bool
    remote_download: bool
    bg: str
    date: int
    version: str
    difficulty: int
    rating: int
    note: int
    chart_designer: str
    jacket_designer: str
    jacket_override: bool
    audio_override: bool


class TArcSongJsonSongItem(TypedDict):
    song_id: str
    difficulties: List[TArcSongJsonDifficultyItem]
    alias: List[str]


class TArcSongJson(TypedDict):
    songs: List[TArcSongJsonSongItem]


class ArcSongJsonBuilder:
    def __init__(self, session: Session):
        self.session = session

    def get_difficulty_item(
        self,
        difficulty: Difficulty,
        song: Song,
        pack: Pack,
        song_localized: Optional[SongLocalized],
    ) -> TArcSongJsonDifficultyItem:
        if "_append_" in pack.id:
            base_pack = self.session.scalar(
                select(Pack).where(Pack.id == re.sub(r"_append_.*$", "", pack.id))
            )
        else:
            base_pack = None

        difficulty_localized = self.session.scalar(
            select(DifficultyLocalized).where(
                (DifficultyLocalized.song_id == difficulty.song_id)
                & (DifficultyLocalized.rating_class == difficulty.rating_class)
            )
        )
        chart_info = self.session.scalar(
            select(ChartInfo).where(
                (ChartInfo.song_id == difficulty.song_id)
                & (ChartInfo.rating_class == difficulty.rating_class)
            )
        )

        if difficulty_localized:
            name_jp = difficulty_localized.title_ja or ""
        elif song_localized:
            name_jp = song_localized.title_ja or ""
        else:
            name_jp = ""

        return {
            "name_en": difficulty.title or song.title,
            "name_jp": name_jp,
            "artist": difficulty.artist or song.artist,
            "bpm": difficulty.bpm or song.bpm or "",
            "bpm_base": difficulty.bpm_base or song.bpm_base or 0.0,
            "set": song.set,
            "set_friendly": f"{base_pack.name} - {pack.name}"
            if base_pack
            else pack.name,
            "time": 0,
            "side": song.side or 0,
            "world_unlock": False,
            "remote_download": False,
            "bg": difficulty.bg or song.bg or "",
            "date": difficulty.date or song.date or 0,
            "version": difficulty.version or song.version or "",
            "difficulty": difficulty.rating * 2 + int(difficulty.rating_plus),
            "rating": chart_info.constant or 0 if chart_info else 0,
            "note": chart_info.notes or 0 if chart_info else 0,
            "chart_designer": difficulty.chart_designer or "",
            "jacket_designer": difficulty.jacket_desginer or "",
            "jacket_override": difficulty.jacket_override,
            "audio_override": difficulty.audio_override,
        }

    def get_song_item(self, song: Song) -> TArcSongJsonSongItem:
        difficulties = self.session.scalars(
            select(Difficulty).where(Difficulty.song_id == song.id)
        )

        pack = self.session.scalar(select(Pack).where(Pack.id == song.set))
        if not pack:
            logger.warning(
                'Cannot find pack "%s", using placeholder instead.', song.set
            )
            pack = Pack(id="unknown", name="Unknown", description="__PLACEHOLDER__")
        song_localized = self.session.scalar(
            select(SongLocalized).where(SongLocalized.id == song.id)
        )

        return {
            "song_id": song.id,
            "difficulties": [
                self.get_difficulty_item(difficulty, song, pack, song_localized)
                for difficulty in difficulties
            ],
            "alias": [],
        }

    def generate_arcsong_json(self) -> TArcSongJson:
        songs = self.session.scalars(select(Song))
        arcsong_songs = []
        for song in songs:
            proceed = self.session.scalar(
                select(func.count(Difficulty.rating_class)).where(
                    Difficulty.song_id == song.id
                )
            )

            if not proceed:
                continue

            arcsong_songs.append(self.get_song_item(song))

        return {"songs": arcsong_songs}
@@ -1 +0,0 @@
from .parser import ChartInfoDbParser

@@ -1,35 +0,0 @@
import contextlib
import sqlite3
from typing import List

from sqlalchemy.orm import Session

from ...models.songs import ChartInfo


class ChartInfoDbParser:
    def __init__(self, filepath):
        self.filepath = filepath

    def parse(self) -> List[ChartInfo]:
        results = []
        with sqlite3.connect(self.filepath) as conn:
            with contextlib.closing(conn.cursor()) as cursor:
                db_results = cursor.execute(
                    "SELECT song_id, rating_class, constant, notes FROM charts_info"
                ).fetchall()
                for result in db_results:
                    chart = ChartInfo(
                        song_id=result[0],
                        rating_class=result[1],
                        constant=result[2],
                        notes=result[3] or None,
                    )
                    results.append(chart)

        return results

    def write_database(self, session: Session):
        results = self.parse()
        for result in results:
            session.merge(result)
src/arcaea_offline/external/exporters/andreal/__init__.py (new file, 3 lines)
@@ -0,0 +1,3 @@
from .api_data import AndrealImageGeneratorApiDataExporter

__all__ = ["AndrealImageGeneratorApiDataExporter"]
src/arcaea_offline/external/exporters/andreal/api_data.py (new file, 172 lines)
@@ -0,0 +1,172 @@
import statistics
from dataclasses import dataclass
from typing import List, Optional, Union

from sqlalchemy import select
from sqlalchemy.orm import Session

from arcaea_offline.constants.enums.arcaea import ArcaeaRatingClass
from arcaea_offline.database.models import (
    PlayResultBest,
    PlayResultCalculated,
)

from .definitions import (
    AndrealImageGeneratorApiDataAccountInfo,
    AndrealImageGeneratorApiDataRoot,
    AndrealImageGeneratorApiDataScoreItem,
)


@dataclass
class AndrealImageGeneratorAccount:
    name: str = "Player"
    code: int = 123456789
    rating: int = -1
    character: int = 5
    character_uncapped: bool = False


class AndrealImageGeneratorApiDataExporter:
    @staticmethod
    def craft_account_info(
        account: AndrealImageGeneratorAccount,
    ) -> AndrealImageGeneratorApiDataAccountInfo:
        return {
            "code": account.code,
            "name": account.name,
            "is_char_uncapped": account.character_uncapped,
            "rating": account.rating,
            "character": account.character,
        }

    @staticmethod
    def craft_score_item(
        play_result: Union[PlayResultCalculated, PlayResultBest],
    ) -> AndrealImageGeneratorApiDataScoreItem:
        modifier = play_result.modifier.value if play_result.modifier else 0
        clear_type = play_result.clear_type.value if play_result.clear_type else 0

        return {
            "score": play_result.score,
            "health": 75,
            "rating": play_result.potential,
            "song_id": play_result.song_id,
            "modifier": modifier,
            "difficulty": play_result.rating_class.value,
            "clear_type": clear_type,
            "best_clear_type": clear_type,
            "time_played": int(play_result.date.timestamp() * 1000)
            if play_result.date
            else 0,
            "near_count": play_result.far,
            "miss_count": play_result.lost,
            "perfect_count": play_result.pure,
            "shiny_perfect_count": play_result.shiny_pure,
        }

    @classmethod
    def user_info(
        cls,
        play_result_calculated: PlayResultCalculated,
        account: AndrealImageGeneratorAccount = AndrealImageGeneratorAccount(),
    ) -> AndrealImageGeneratorApiDataRoot:
        return {
            "content": {
                "account_info": cls.craft_account_info(account),
                "recent_score": [cls.craft_score_item(play_result_calculated)],
            }
        }

    @classmethod
    def user_best(
        cls,
        play_result_best: PlayResultBest,
        account: AndrealImageGeneratorAccount = AndrealImageGeneratorAccount(),
    ) -> AndrealImageGeneratorApiDataRoot:
        return {
            "content": {
                "account_info": cls.craft_account_info(account),
                "record": cls.craft_score_item(play_result_best),
            }
        }

    @classmethod
    def user_best30(
        cls,
        play_results_best: List[PlayResultBest],
        account: AndrealImageGeneratorAccount = AndrealImageGeneratorAccount(),
    ) -> AndrealImageGeneratorApiDataRoot:
        play_results_best_sorted = sorted(
            play_results_best, key=lambda it: it.potential, reverse=True
        )

        best30_list = play_results_best_sorted[:30]
        best30_overflow = play_results_best_sorted[30:]

        best30_avg = statistics.fmean([it.potential for it in best30_list])

        return {
            "content": {
                "account_info": cls.craft_account_info(account),
                "best30_avg": best30_avg,
                "best30_list": [cls.craft_score_item(it) for it in best30_list],
                "best30_overflow": [cls.craft_score_item(it) for it in best30_overflow],
            }
        }

    @classmethod
    def craft_user_info(
        cls,
        session: Session,
        account: AndrealImageGeneratorAccount = AndrealImageGeneratorAccount(),
    ) -> Optional[AndrealImageGeneratorApiDataRoot]:
        play_result_calculated = session.scalar(
            select(PlayResultCalculated)
            .order_by(PlayResultCalculated.date.desc())
            .limit(1)
        )

        if play_result_calculated is None:
            return None

        return cls.user_info(play_result_calculated, account)

    @classmethod
    def craft_user_best(
        cls,
        session: Session,
        account: AndrealImageGeneratorAccount = AndrealImageGeneratorAccount(),
        *,
        song_id: str,
        rating_class: ArcaeaRatingClass,
    ):
        play_result_best = session.scalar(
            select(PlayResultBest).where(
                (PlayResultBest.song_id == song_id)
                & (PlayResultBest.rating_class == rating_class)
            )
        )

        if play_result_best is None:
            return None

        return cls.user_best(play_result_best, account)

    @classmethod
    def craft(
        cls,
        session: Session,
        account: AndrealImageGeneratorAccount = AndrealImageGeneratorAccount(),
        *,
        limit: int = 40,
    ) -> Optional[AndrealImageGeneratorApiDataRoot]:
        play_results_best = list(
            session.scalars(
                select(PlayResultBest)
                .order_by(PlayResultBest.potential.desc())
                .limit(limit)
            ).all()
        )

        return cls.user_best30(play_results_best, account)
src/arcaea_offline/external/exporters/andreal/definitions.py (new file, 38 lines)
@@ -0,0 +1,38 @@
from typing import List, Optional, TypedDict


class AndrealImageGeneratorApiDataAccountInfo(TypedDict):
    name: str
    code: int
    rating: int
    character: int
    is_char_uncapped: bool


class AndrealImageGeneratorApiDataScoreItem(TypedDict):
    score: int
    health: int
    rating: float
    song_id: str
    modifier: int
    difficulty: int
    clear_type: int
    best_clear_type: int
    time_played: int
    near_count: Optional[int]
    miss_count: Optional[int]
    perfect_count: Optional[int]
    shiny_perfect_count: Optional[int]


class AndrealImageGeneratorApiDataContent(TypedDict, total=False):
    account_info: AndrealImageGeneratorApiDataAccountInfo
    recent_score: List[AndrealImageGeneratorApiDataScoreItem]
    record: AndrealImageGeneratorApiDataScoreItem
    best30_avg: float
    best30_list: List[AndrealImageGeneratorApiDataScoreItem]
    best30_overflow: List[AndrealImageGeneratorApiDataScoreItem]


class AndrealImageGeneratorApiDataRoot(TypedDict):
    content: AndrealImageGeneratorApiDataContent
src/arcaea_offline/external/exporters/arcsong/__init__.py (new file, 3 lines)
@@ -0,0 +1,3 @@
from .json import ArcsongJsonExporter

__all__ = ["ArcsongJsonExporter"]
src/arcaea_offline/external/exporters/arcsong/definitions.py (new file, 35 lines)
@@ -0,0 +1,35 @@
from typing import List, TypedDict


class ArcsongJsonDifficultyItem(TypedDict):
    name_en: str
    name_jp: str
    artist: str
    bpm: str
    bpm_base: float
    set: str
    set_friendly: str
    time: int
    side: int
    world_unlock: bool
    remote_download: bool
    bg: str
    date: int
    version: str
    difficulty: int
    rating: int
    note: int
    chart_designer: str
    jacket_designer: str
    jacket_override: bool
    audio_override: bool


class ArcsongJsonSongItem(TypedDict):
    song_id: str
    difficulties: List[ArcsongJsonDifficultyItem]
    alias: List[str]


class ArcsongJsonRoot(TypedDict):
    songs: List[ArcsongJsonSongItem]
src/arcaea_offline/external/exporters/arcsong/json.py (new file, 105 lines)
@@ -0,0 +1,105 @@
import logging
import re
from typing import List, Optional

from sqlalchemy import select
from sqlalchemy.orm import Session

from arcaea_offline.constants.enums.arcaea import ArcaeaLanguage
from arcaea_offline.database.models import Difficulty, Pack, Song

from .definitions import ArcsongJsonDifficultyItem, ArcsongJsonRoot, ArcsongJsonSongItem

logger = logging.getLogger(__name__)


class ArcsongJsonExporter:
    @staticmethod
    def craft_difficulty_item(
        difficulty: Difficulty, *, base_pack: Optional[Pack]
    ) -> ArcsongJsonDifficultyItem:
        song = difficulty.song
        pack = song.pack
        chart_info = difficulty.chart_info

        song_localized_ja = next(
            (lo for lo in song.localized_objects if lo.lang == ArcaeaLanguage.JA),
            None,
        )
        difficulty_localized_ja = next(
            (lo for lo in difficulty.localized_objects if lo.lang == ArcaeaLanguage.JA),
            None,
        )

        if difficulty_localized_ja:
            name_jp = difficulty_localized_ja.title or ""
        elif song_localized_ja:
            name_jp = song_localized_ja.title or ""
        else:
            name_jp = ""

        if difficulty.date is not None:
            date = int(difficulty.date.timestamp())
        elif song.date is not None:
            date = int(song.date.timestamp())
        else:
            date = 0

        return {
            "name_en": difficulty.title or song.title,
            "name_jp": name_jp,
            "artist": difficulty.artist or song.artist,
            "bpm": difficulty.bpm or song.bpm or "",
            "bpm_base": difficulty.bpm_base or song.bpm_base or 0.0,
            "set": song.pack_id,
            "set_friendly": f"{base_pack.name} - {pack.name}"
            if base_pack
            else pack.name,
            "time": 0,
            "side": song.side or 0,
            "world_unlock": False,
            "remote_download": False,
            "bg": difficulty.bg or song.bg or "",
            "date": date,
            "version": difficulty.version or song.version or "",
            "difficulty": difficulty.rating * 2 + int(difficulty.rating_plus),
            "rating": chart_info.constant or 0 if chart_info else 0,
            "note": chart_info.notes or 0 if chart_info else 0,
            "chart_designer": difficulty.chart_designer or "",
            "jacket_designer": difficulty.jacket_desginer or "",
            "jacket_override": difficulty.jacket_override,
            "audio_override": difficulty.audio_override,
        }

    @classmethod
    def craft(cls, session: Session) -> ArcsongJsonRoot:
        songs = session.scalars(select(Song))

        arcsong_songs: List[ArcsongJsonSongItem] = []
        for song in songs:
            if len(song.difficulties) == 0:
                continue

            pack = song.pack
            if "_append_" in pack.id:
                base_pack = session.scalar(
                    select(Pack).where(Pack.id == re.sub(r"_append_.*$", "", pack.id))
                )
            else:
                base_pack = None

            arcsong_difficulties = []
            for difficulty in song.difficulties:
                arcsong_difficulties.append(
                    cls.craft_difficulty_item(difficulty, base_pack=base_pack)
                )

            arcsong_songs.append(
                {
                    "song_id": song.id,
                    "difficulties": arcsong_difficulties,
                    "alias": [],
                }
            )

        return {"songs": arcsong_songs}
src/arcaea_offline/external/exporters/defv2/__init__.py (new empty file)
@@ -1,22 +1,7 @@
 from typing import List, Literal, Optional, TypedDict


-class ScoreExport(TypedDict):
-    id: int
-    song_id: str
-    rating_class: int
-    score: int
-    pure: Optional[int]
-    far: Optional[int]
-    lost: Optional[int]
-    date: Optional[int]
-    max_recall: Optional[int]
-    modifier: Optional[int]
-    clear_type: Optional[int]
-    comment: Optional[str]
-
-
-class ArcaeaOfflineDEFV2_ScoreItem(TypedDict, total=False):
+class ArcaeaOfflineDEFv2PlayResultItem(TypedDict, total=False):
     id: Optional[int]
     songId: str
     ratingClass: int
@@ -32,14 +17,14 @@ class ArcaeaOfflineDEFV2_ScoreItem(TypedDict, total=False):
     comment: Optional[str]


-ArcaeaOfflineDEFV2_Score = TypedDict(
-    "ArcaeaOfflineDEFV2_Score",
+ArcaeaOfflineDEFv2PlayResultRoot = TypedDict(
+    "ArcaeaOfflineDEFv2PlayResultRoot",
     {
         "$schema": Literal[
             "https://arcaeaoffline.sevive.xyz/schemas/def/v2/score.schema.json"
         ],
         "type": Literal["score"],
         "version": Literal[2],
-        "scores": List[ArcaeaOfflineDEFV2_ScoreItem],
+        "scores": List[ArcaeaOfflineDEFv2PlayResultItem],
     },
 )
src/arcaea_offline/external/exporters/defv2/play_result.py (new file, 42 lines)
@@ -0,0 +1,42 @@
from typing import List

from arcaea_offline.database.models import PlayResult

from .definitions import (
    ArcaeaOfflineDEFv2PlayResultItem,
    ArcaeaOfflineDEFv2PlayResultRoot,
)


class ArcaeaOfflineDEFv2PlayResultExporter:
    def export(self, items: List[PlayResult]) -> ArcaeaOfflineDEFv2PlayResultRoot:
        export_items = []
        for item in items:
            export_item: ArcaeaOfflineDEFv2PlayResultItem = {
                "id": item.id,
                "songId": item.song_id,
                "ratingClass": item.rating_class.value,
                "score": item.score,
                "pure": item.pure,
                "far": item.far,
                "lost": item.lost,
                "date": int(item.date.timestamp() * 1000) if item.date else 0,
                "maxRecall": item.max_recall,
                "modifier": (
                    item.modifier.value if item.modifier is not None else None
                ),
                "clearType": (
                    item.clear_type.value if item.clear_type is not None else None
                ),
                "source": "https://arcaeaoffline.sevive.xyz/python",
                "comment": item.comment,
            }

            export_items.append(export_item)

        return {
            "$schema": "https://arcaeaoffline.sevive.xyz/schemas/def/v2/score.schema.json",
            "type": "score",
            "version": 2,
            "scores": export_items,
        }
src/arcaea_offline/external/exporters/smartrte.py (new file, 75 lines)
@@ -0,0 +1,75 @@
from typing import List, Tuple

from sqlalchemy import func
from sqlalchemy.orm import Session

from arcaea_offline.constants.enums.arcaea import ArcaeaRatingClass
from arcaea_offline.database.models import (
    ChartInfo,
    Difficulty,
    PlayResultBest,
    Song,
)
from arcaea_offline.utils.formatters.rating_class import RatingClassFormatter


class SmartRteBest30CsvExporter:
    CSV_ROWS = [
        "SongName",
        "SongId",
        "Difficulty",
        "Score",
        "Perfect",
        "Perfect+",
        "Far",
        "Lost",
        "Constant",
        "PlayRating",
    ]

    @classmethod
    def rows(cls, session: Session) -> List:
        results: List[
            Tuple[str, str, ArcaeaRatingClass, int, int, int, int, int, float]
        ] = (
            session.query(
                func.coalesce(Difficulty.title, Song.title),
                PlayResultBest.song_id,
                PlayResultBest.rating_class,
                PlayResultBest.score,
                PlayResultBest.pure,
                PlayResultBest.shiny_pure,
                PlayResultBest.far,
                PlayResultBest.lost,
                ChartInfo.constant,
                PlayResultBest.potential,
            )
            .join(
                ChartInfo,
                (ChartInfo.song_id == PlayResultBest.song_id)
                & (ChartInfo.rating_class == PlayResultBest.rating_class),
            )
            .join(Song, (Song.id == PlayResultBest.song_id))
            .join(
                Difficulty,
                (Difficulty.song_id == PlayResultBest.song_id)
                & (Difficulty.rating_class == PlayResultBest.rating_class),
            )
            .all()
        )

        csv_rows = []
        csv_rows.append(cls.CSV_ROWS.copy())
        for _result in results:
            result = list(_result)

            # replace the comma in song title because the target project
            # cannot handle quoted string
            result[0] = result[0].replace(",", "")  # type: ignore
            result[2] = RatingClassFormatter.name(result[2])  # type: ignore
            result[-2] = result[-2] / 10  # type: ignore
            result[-1] = round(result[-1], 5)  # type: ignore

            csv_rows.append(result)

        return csv_rows
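The exporter above only builds rows; writing them out is left to the caller. A minimal sketch with the standard csv module follows, where `session` is assumed to be an existing SQLAlchemy session bound to an arcaea-offline database (not part of this change):

import csv

from arcaea_offline.external.exporters.smartrte import SmartRteBest30CsvExporter

# `session` is an assumed, already-configured SQLAlchemy session.
with open("b30.csv", "w", newline="", encoding="utf-8") as fp:
    csv.writer(fp).writerows(SmartRteBest30CsvExporter.rows(session))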
@@ -1,2 +0,0 @@
from . import exporters
from .types import ArcaeaOfflineDEFV2_Score, ScoreExport
src/arcaea_offline/external/exports/exporters.py (deleted, 37 lines)
@@ -1,37 +0,0 @@
from ...models import Score
from .types import ArcaeaOfflineDEFV2_ScoreItem, ScoreExport


def score(score: Score) -> ScoreExport:
    return {
        "id": score.id,
        "song_id": score.song_id,
        "rating_class": score.rating_class,
        "score": score.score,
        "pure": score.pure,
        "far": score.far,
        "lost": score.lost,
        "date": score.date,
        "max_recall": score.max_recall,
        "modifier": score.modifier,
        "clear_type": score.clear_type,
        "comment": score.comment,
    }


def score_def_v2(score: Score) -> ArcaeaOfflineDEFV2_ScoreItem:
    return {
        "id": score.id,
        "songId": score.song_id,
        "ratingClass": score.rating_class,
        "score": score.score,
        "pure": score.pure,
        "far": score.far,
        "lost": score.lost,
        "date": score.date,
        "maxRecall": score.max_recall,
        "modifier": score.modifier,
        "clearType": score.clear_type,
        "source": None,
        "comment": score.comment,
    }
src/arcaea_offline/external/importers/arcaea/__init__.py (new file, 10 lines)
@@ -0,0 +1,10 @@
from .lists import ArcaeaPacklistParser, ArcaeaSonglistParser
from .online import ArcaeaOnlineApiParser
from .st3 import ArcaeaSt3Parser

__all__ = [
    "ArcaeaPacklistParser",
    "ArcaeaSonglistParser",
    "ArcaeaOnlineApiParser",
    "ArcaeaSt3Parser",
]
src/arcaea_offline/external/importers/arcaea/common.py (new file, 31 lines)
@@ -0,0 +1,31 @@
from typing import Union


def fix_timestamp(timestamp: int) -> Union[int, None]:
    """
    Some of the `date` column in st3 are unexpectedly truncated. For example,
    a `1670283375` may be truncated to `167028`, even a single `1`.

    To properly handle this:

    If `timestamp > 1489017600` (the release date of Arcaea), consider it's ok.

    Otherwise, if the timestamp is 'fixable'
    (`1489 <= timestamp <= 9999` or `timestamp > 14889`),
    pad zeros to the end of timestamp.
    For example, a `1566` will be padded to `1566000000`.

    Otherwise, treat the timestamp as `None`.

    :param timestamp: `date` value
    """
    if timestamp > 1489017600:  # noqa: PLR2004
        return timestamp

    timestamp_fixable = 1489 <= timestamp <= 9999 or timestamp > 14889  # noqa: PLR2004
    if not timestamp_fixable:
        return None

    timestamp_str = str(timestamp)
    timestamp_str = timestamp_str.ljust(10, "0")
    return int(timestamp_str, 10)
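A small sketch of the padding rule described in the docstring above; the values follow directly from the `ljust` logic and are illustrative only:

from arcaea_offline.external.importers.arcaea.common import fix_timestamp

assert fix_timestamp(1670283375) == 1670283375  # newer than the Arcaea release date, kept as-is
assert fix_timestamp(1566) == 1566000000        # fixable: right-padded with zeros to 10 digits
assert fix_timestamp(167028) == 1670280000      # fixable: right-padded with zeros to 10 digits
assert fix_timestamp(1) is None                 # too short to recover, treated as None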
src/arcaea_offline/external/importers/arcaea/lists.py (new file, 180 lines)
@@ -0,0 +1,180 @@
"""
packlist and songlist parsers
"""

import json
from datetime import datetime, timezone
from typing import List, Union

from arcaea_offline.constants.enums import (
    ArcaeaLanguage,
    ArcaeaRatingClass,
    ArcaeaSongSide,
)
from arcaea_offline.database.models import (
    Difficulty,
    DifficultyLocalization,
    Pack,
    PackLocalization,
    Song,
    SongLocalization,
)


class ArcaeaListParser:
    def __init__(self, list_text: str):
        self.list_text = list_text


class ArcaeaPacklistParser(ArcaeaListParser):
    def parse(self) -> List[Union[Pack, PackLocalization]]:
        root = json.loads(self.list_text)

        packs = root["packs"]
        results: List[Union[Pack, PackLocalization]] = [
            Pack(id="single", name="Memory Archive")
        ]
        for item in packs:
            pack = Pack()
            pack.id = item["id"]
            pack.name = item["name_localized"]["en"]
            pack.description = item["description_localized"]["en"] or None
            results.append(pack)

            for key in ArcaeaLanguage:
                name_localized = item["name_localized"].get(key.value, None)
                description_localized = item["description_localized"].get(
                    key.value, None
                )

                if name_localized or description_localized:
                    pack_localized = PackLocalization(id=pack.id)
                    pack_localized.lang = key.value
                    pack_localized.name = name_localized
                    pack_localized.description = description_localized
                    results.append(pack_localized)

        return results


class ArcaeaSonglistParser(ArcaeaListParser):
    def parse_songs(self) -> List[Union[Song, SongLocalization]]:
        root = json.loads(self.list_text)

        songs = root["songs"]
        results = []
        for item in songs:
            song = Song()
            song.idx = item["idx"]
            song.id = item["id"]
            song.title = item["title_localized"]["en"]
            song.artist = item["artist"]
            song.bpm = item["bpm"]
            song.bpm_base = item["bpm_base"]
            song.pack_id = item["set"]
            song.side = ArcaeaSongSide(item["side"])
            song.version = item["version"]
            song.added_at = datetime.fromtimestamp(item["date"], tz=timezone.utc)
            song.bg = item.get("bg")
            song.bg_inverse = item.get("bg_inverse")
            if item.get("bg_daynight"):
                song.bg_day = item["bg_daynight"].get("day")
                song.bg_night = item["bg_daynight"].get("night")
            if item.get("source_localized"):
                song.source = item["source_localized"]["en"]
            song.source_copyright = item.get("source_copyright")
            results.append(song)

            for lang in ArcaeaLanguage:
                # SongLocalized objects
                title_localized = item["title_localized"].get(lang.value, None)
                source_localized = item.get("source_localized", {}).get(
                    lang.value, None
                )

                if title_localized or source_localized:
                    song_localized = SongLocalization(id=song.id)
                    song_localized.lang = lang.value
                    song_localized.title = title_localized
                    song_localized.source = source_localized
                    results.append(song_localized)

                # TODO: SongSearchTitle?
                # search_titles = item.get("search_title", {}).get(lang.value, None)
                # if search_titles:
                #     for search_title in search_titles:
                #         song_search_word = SongSearchWord(
                #             id=song.id, lang=lang.value, type=1, value=search_title
                #         )
                #         results.append(song_search_word)

                # search_artists = item.get("search_artist", {}).get(lang.value, None)
                # if search_artists:
                #     for search_artist in search_artists:
                #         song_search_word = SongSearchWord(
                #             id=song.id, lang=lang.value, type=2, value=search_artist
                #         )
                #         results.append(song_search_word)

        return results

    def parse_difficulties(self) -> List[Union[Difficulty, DifficultyLocalization]]:
        root = json.loads(self.list_text)

        songs = root["songs"]
        results = []
        for song in songs:
            difficulties = song.get("difficulties")
            if not difficulties:
                continue

            for item in difficulties:
                if item["rating"] == 0:
                    continue

                difficulty = Difficulty()
                difficulty.song_id = song["id"]
                difficulty.rating_class = ArcaeaRatingClass(item["ratingClass"])
                difficulty.rating = item["rating"]
                difficulty.is_rating_plus = item.get("ratingPlus") or False
                difficulty.chart_designer = item["chartDesigner"]
                difficulty.jacket_designer = item.get("jacketDesigner") or None
                difficulty.has_overriding_audio = item.get("audioOverride") or False
                difficulty.has_overriding_jacket = item.get("jacketOverride") or False
                difficulty.jacket_night = item.get("jacketNight") or None
                difficulty.title = item.get("title_localized", {}).get("en") or None
                difficulty.artist = item.get("artist") or None
                difficulty.bg = item.get("bg") or None
                difficulty.bg_inverse = item.get("bg_inverse")
                difficulty.bpm = item.get("bpm") or None
                difficulty.bpm_base = item.get("bpm_base") or None
                difficulty.version = item.get("version") or None
                difficulty.added_at = (
                    datetime.fromtimestamp(item["date"], tz=timezone.utc)
                    if item.get("date") is not None
                    else None
                )
                results.append(difficulty)

                for lang in ArcaeaLanguage:
                    title_localized = item.get("title_localized", {}).get(
                        lang.value, None
                    )
                    artist_localized = item.get("artist_localized", {}).get(
                        lang.value, None
                    )

                    if title_localized or artist_localized:
                        difficulty_localized = DifficultyLocalization(
                            song_id=difficulty.song_id,
                            rating_class=difficulty.rating_class,
                        )
                        difficulty_localized.lang = lang.value
                        difficulty_localized.title = title_localized
                        difficulty_localized.artist = artist_localized
                        results.append(difficulty_localized)

        return results

    def parse_all(self):
        return self.parse_songs() + self.parse_difficulties()
src/arcaea_offline/external/importers/arcaea/online.py (new file, 97 lines)
@@ -0,0 +1,97 @@
import json
import logging
from datetime import datetime, timezone
from typing import Dict, List, Literal, Optional, TypedDict

from arcaea_offline.constants.enums import (
    ArcaeaPlayResultClearType,
    ArcaeaPlayResultModifier,
    ArcaeaRatingClass,
)
from arcaea_offline.database.models import PlayResult

from .common import fix_timestamp

logger = logging.getLogger(__name__)


class _RatingMePlayResultItem(TypedDict):
    song_id: str
    difficulty: int
    modifier: int
    rating: float
    score: int
    perfect_count: int
    near_count: int
    miss_count: int
    clear_type: int
    title: Dict[Literal["ja", "en"], str]
    artist: str
    time_played: int
    bg: str


class _RatingMeValue(TypedDict):
    best_rated_scores: List[_RatingMePlayResultItem]
    recent_rated_scores: List[_RatingMePlayResultItem]


class _RatingMeResponse(TypedDict):
    success: bool
    error_code: Optional[int]
    value: Optional[_RatingMeValue]


class ArcaeaOnlineApiParser:
    def __init__(self, api_result_text: str):
        self.api_result_text = api_result_text
        self.api_result: _RatingMeResponse = json.loads(api_result_text)

    def parse(self) -> List[PlayResult]:
        api_result_value = self.api_result.get("value")
        if not api_result_value:
            error_code = self.api_result.get("error_code")
            raise ValueError(
                f"Cannot parse Arcaea Online API result, error code {error_code}"
            )

        best30_items = api_result_value.get("best_rated_scores", [])
        recent_items = api_result_value.get("recent_rated_scores", [])
        items = best30_items + recent_items

        date_text = (
            datetime.now(tz=timezone.utc).astimezone().isoformat(timespec="seconds")
        )

        results: List[PlayResult] = []
        results_time_played = []
        for item in items:
            date_millis = fix_timestamp(item["time_played"])

            if date_millis in results_time_played:
                # filter out duplicate play results
                continue

            if date_millis:
                date = datetime.fromtimestamp(date_millis / 1000).astimezone()
                results_time_played.append(date_millis)
            else:
                date = None

            play_result = PlayResult()
            play_result.song_id = item["song_id"]
            play_result.rating_class = ArcaeaRatingClass(item["difficulty"])
            play_result.score = item["score"]
            play_result.pure = item["perfect_count"]
            play_result.far = item["near_count"]
            play_result.lost = item["miss_count"]
            play_result.played_at = date
            play_result.modifier = ArcaeaPlayResultModifier(item["modifier"])
            play_result.clear_type = ArcaeaPlayResultClearType(item["clear_type"])

            if play_result.lost == 0:
                play_result.max_recall = play_result.pure + play_result.far

            play_result.comment = f"Parsed from web API at {date_text}"
            results.append(play_result)
        return results
src/arcaea_offline/external/importers/arcaea/st3.py (new file, 117 lines)
@@ -0,0 +1,117 @@
"""
Game database play results importer
"""

import logging
import sqlite3
from datetime import datetime, timezone
from typing import List, overload

from arcaea_offline.constants.enums import (
    ArcaeaPlayResultClearType,
    ArcaeaPlayResultModifier,
    ArcaeaRatingClass,
)
from arcaea_offline.database.models import PlayResult

from .common import fix_timestamp

logger = logging.getLogger(__name__)


class ArcaeaSt3Parser:
    @classmethod
    @overload
    def parse(cls, db: sqlite3.Connection) -> List[PlayResult]: ...

    @classmethod
    @overload
    def parse(cls, db: sqlite3.Cursor) -> List[PlayResult]: ...

    @classmethod
    def parse(cls, db) -> List[PlayResult]:
        if isinstance(db, sqlite3.Connection):
            return cls.parse(db.cursor())

        if not isinstance(db, sqlite3.Cursor):
            raise TypeError(
                "Unknown overload of `db`. Expected `sqlite3.Connection` or `sqlite3.Cursor`."
            )

        entities = []
        query_results = db.execute("""
            SELECT s.id AS _id, s.songId, s.songDifficulty AS ratingClass, s.score,
                   s.perfectCount AS pure, s.nearCount AS far, s.missCount AS lost,
                   s.`date`, s.modifier, ct.clearType
            FROM scores s JOIN cleartypes ct
            ON s.songId = ct.songId AND s.songDifficulty = ct.songDifficulty""")
        # maybe `s.id = ct.id`?

        now = datetime.now(tz=timezone.utc)
        import_comment = (
            f"Imported from st3 at {now.astimezone().isoformat(timespec='seconds')}"
        )
        for result in query_results:
            (
                _id,
                song_id,
                rating_class,
                score,
                pure,
                far,
                lost,
                date,
                modifier,
                clear_type,
            ) = result

            try:
                rating_class_enum = ArcaeaRatingClass(rating_class)
            except ValueError:
                logger.warning(
                    "Unknown rating class [%r] at entry id %d, skipping!",
                    rating_class,
                    _id,
                )
                continue

            try:
                clear_type_enum = ArcaeaPlayResultClearType(clear_type)
            except ValueError:
                logger.warning(
                    "Unknown clear type [%r] at entry id %d, falling back to `None`!",
                    clear_type,
                    _id,
                )
                clear_type_enum = None

            try:
                modifier_enum = ArcaeaPlayResultModifier(modifier)
            except ValueError:
                logger.warning(
                    "Unknown modifier [%r] at entry id %d, falling back to `None`!",
                    modifier,
                    _id,
                )
                modifier_enum = None

            if date := fix_timestamp(date):
                date = datetime.fromtimestamp(date).astimezone()
            else:
                date = None

            play_result = PlayResult()
            play_result.song_id = song_id
            play_result.rating_class = rating_class_enum
            play_result.score = score
            play_result.pure = pure
            play_result.far = far
            play_result.lost = lost
            play_result.played_at = date
            play_result.modifier = modifier_enum
            play_result.clear_type = clear_type_enum
            play_result.comment = import_comment

            entities.append(play_result)

        return entities
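A minimal sketch of driving the importer above; the st3 filename is an assumption, and what you do with the parsed entities afterwards is outside this change:

import sqlite3

from arcaea_offline.external.importers.arcaea import ArcaeaSt3Parser

# "st3" is the game's local score database file; the path here is an assumption.
with sqlite3.connect("st3") as conn:
    play_results = ArcaeaSt3Parser.parse(conn)

for play_result in play_results:
    print(play_result.song_id, play_result.score, play_result.clear_type)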
src/arcaea_offline/external/importers/arcsong.py (new file, 38 lines)
@@ -0,0 +1,38 @@
import sqlite3
from typing import List, overload

from arcaea_offline.constants.enums.arcaea import ArcaeaRatingClass
from arcaea_offline.database.models import ChartInfo


class ArcsongDatabaseImporter:
    @classmethod
    @overload
    def parse(cls, conn: sqlite3.Connection) -> List[ChartInfo]: ...

    @classmethod
    @overload
    def parse(cls, conn: sqlite3.Cursor) -> List[ChartInfo]: ...

    @classmethod
    def parse(cls, conn) -> List[ChartInfo]:
        if isinstance(conn, sqlite3.Connection):
            return cls.parse(conn.cursor())

        assert isinstance(conn, sqlite3.Cursor)

        results = []
        db_results = conn.execute(
            "SELECT song_id, rating_class, rating, note FROM charts"
        )
        for result in db_results:
            results.append(
                ChartInfo(
                    song_id=result[0],
                    rating_class=ArcaeaRatingClass(result[1]),
                    constant=result[2],
                    notes=result[3] or None,
                )
            )

        return results
src/arcaea_offline/external/importers/chart_info_database.py (new file, 42 lines)
@@ -0,0 +1,42 @@
import sqlite3
from contextlib import closing
from typing import List, overload

from arcaea_offline.constants.enums.arcaea import ArcaeaRatingClass
from arcaea_offline.database.models import ChartInfo


class ChartInfoDatabaseParser:
    @classmethod
    @overload
    def parse(cls, conn: sqlite3.Connection) -> List[ChartInfo]: ...

    @classmethod
    @overload
    def parse(cls, conn: sqlite3.Cursor) -> List[ChartInfo]: ...

    @classmethod
    def parse(cls, conn) -> List[ChartInfo]:
        if isinstance(conn, sqlite3.Connection):
            with closing(conn.cursor()) as cur:
                return cls.parse(cur)

        if not isinstance(conn, sqlite3.Cursor):
            raise ValueError("conn must be sqlite3.Connection or sqlite3.Cursor!")

        db_items = conn.execute(
            "SELECT song_id, rating_class, constant, notes FROM charts_info"
        ).fetchall()

        results: List[ChartInfo] = []
        for item in db_items:
            (song_id, rating_class, constant, notes) = item

            chart_info = ChartInfo()
            chart_info.song_id = song_id
            chart_info.rating_class = ArcaeaRatingClass(rating_class)
            chart_info.constant = constant
            chart_info.notes = notes

            results.append(chart_info)
        return results
@@ -1 +0,0 @@
from .b30_csv import SmartRteB30CsvConverter
src/arcaea_offline/external/smartrte/b30_csv.py (deleted, 64 lines)
@@ -1,64 +0,0 @@
from sqlalchemy.orm import Session

from ...models import Chart, ScoreBest
from ...utils.rating import rating_class_to_text


class SmartRteB30CsvConverter:
    CSV_ROWS = [
        "songname",
        "songId",
        "Difficulty",
        "score",
        "Perfect",
        "criticalPerfect",
        "Far",
        "Lost",
        "Constant",
        "singlePTT",
    ]

    def __init__(
        self,
        session: Session,
    ):
        self.session = session

    def rows(self) -> list:
        csv_rows = [self.CSV_ROWS.copy()]

        with self.session as session:
            results = (
                session.query(
                    Chart.title,
                    ScoreBest.song_id,
                    ScoreBest.rating_class,
                    ScoreBest.score,
                    ScoreBest.pure,
                    ScoreBest.shiny_pure,
                    ScoreBest.far,
                    ScoreBest.lost,
                    Chart.constant,
                    ScoreBest.potential,
                )
                .join(
                    Chart,
                    (Chart.song_id == ScoreBest.song_id)
                    & (Chart.rating_class == ScoreBest.rating_class),
                )
                .all()
            )

            for result in results:
                # replace the comma in song title because the target project
                # cannot handle quoted string
                result = list(result)
                result[0] = result[0].replace(",", "")
                result[2] = rating_class_to_text(result[2])
                # divide constant to float
                result[-2] = result[-2] / 10
                # round potential
                result[-1] = round(result[-1], 5)
                csv_rows.append(result)

        return csv_rows
@@ -1,12 +0,0 @@
from typing import Generic, TypeVar

T = TypeVar("T")


class Singleton(type, Generic[T]):
    _instance = None

    def __call__(cls, *args, **kwargs) -> T:
        if cls._instance is None:
            cls._instance = super().__call__(*args, **kwargs)
        return cls._instance
@@ -1,2 +1,7 @@
 from .play_result import PlayResultFormatter
 from .rating_class import RatingClassFormatter
+
+__all__ = [
+    "PlayResultFormatter",
+    "RatingClassFormatter",
+]
@@ -1,4 +1,4 @@
-from typing import Any, Literal, overload
+from typing import Any, Dict, Literal, overload

 from arcaea_offline.constants.enums import (
     ArcaeaPlayResultClearType,
@@ -10,8 +10,8 @@ from arcaea_offline.constants.play_result import ScoreLowerLimits
 class PlayResultFormatter:
     SCORE_GRADE_FORMAT_RESULTS = Literal["EX+", "EX", "AA", "A", "B", "C", "D"]

-    @staticmethod
-    def score_grade(score: int) -> SCORE_GRADE_FORMAT_RESULTS:
+    @classmethod
+    def score_grade(cls, score: int) -> SCORE_GRADE_FORMAT_RESULTS:
         """
         Returns the score grade, e.g. EX+.

@@ -20,23 +20,21 @@ class PlayResultFormatter:
         if not isinstance(score, int):
             raise TypeError(f"Unsupported type {type(score)}, cannot format")

-        if score >= ScoreLowerLimits.EX_PLUS:
-            return "EX+"
-        elif score >= ScoreLowerLimits.EX:
-            return "EX"
-        elif score >= ScoreLowerLimits.AA:
-            return "AA"
-        elif score >= ScoreLowerLimits.A:
-            return "A"
-        elif score >= ScoreLowerLimits.B:
-            return "B"
-        elif score >= ScoreLowerLimits.C:
-            return "C"
-        elif score >= ScoreLowerLimits.D:
-            return "D"
-        else:
-            raise ValueError("score cannot be negative")
+        if score < 0:
+            raise ValueError("score cannot be negative")
+
+        score_grades: Dict[int, Literal["EX+", "EX", "AA", "A", "B", "C", "D"]] = {
+            ScoreLowerLimits.EX_PLUS: "EX+",
+            ScoreLowerLimits.EX: "EX",
+            ScoreLowerLimits.AA: "AA",
+            ScoreLowerLimits.A: "A",
+            ScoreLowerLimits.B: "B",
+            ScoreLowerLimits.C: "C",
+            ScoreLowerLimits.D: "D",
+        }
+
+        return next(value for limit, value in score_grades.items() if score >= limit)

     CLEAR_TYPE_FORMAT_RESULTS = Literal[
         "TRACK LOST",
         "NORMAL CLEAR",
@@ -56,7 +54,6 @@ class PlayResultFormatter:
         """
         Returns the uppercased clear type name, e.g. NORMAL CLEAR.
         """
-        ...

     @overload
     @classmethod
@@ -69,7 +66,6 @@ class PlayResultFormatter:

         Raises `ValueError` if the integer is negative.
         """
-        ...

     @overload
     @classmethod
@@ -77,7 +73,6 @@ class PlayResultFormatter:
         """
         Returns "None"
         """
-        ...

     @classmethod
     def clear_type(cls, clear_type: Any) -> CLEAR_TYPE_FORMAT_RESULTS:
@@ -103,7 +98,6 @@ class PlayResultFormatter:
         """
         Returns the uppercased clear type name, e.g. NORMAL CLEAR.
         """
-        ...

     @overload
     @classmethod
@@ -116,7 +110,6 @@ class PlayResultFormatter:

         Raises `ValueError` if the integer is negative.
         """
-        ...

     @overload
     @classmethod
@@ -124,7 +117,6 @@ class PlayResultFormatter:
         """
         Returns "None"
         """
-        ...

     @classmethod
     def modifier(cls, modifier: Any) -> MODIFIER_FORMAT_RESULTS:
@@ -22,7 +22,6 @@ class RatingClassFormatter:
         """
         Returns the capitalized rating class name, e.g. Future.
         """
-        ...

     @overload
     @classmethod
@@ -33,7 +32,6 @@ class RatingClassFormatter:
         The integer will be converted to `ArcaeaRatingClass` enum,
         and will return "Unknown" if the convertion fails.
         """
-        ...

     @classmethod
     def name(cls, rating_class: Any) -> NAME_FORMAT_RESULTS:
@@ -57,7 +55,6 @@ class RatingClassFormatter:
         """
         Returns the uppercased rating class name, e.g. FTR.
         """
-        ...

     @overload
     @classmethod
@@ -68,7 +65,6 @@ class RatingClassFormatter:
         The integer will be converted to `ArcaeaRatingClass` enum,
         and will return "UNK" if the convertion fails.
         """
-        ...

     @classmethod
     def abbreviation(cls, rating_class: Any) -> ABBREVIATION_FORMAT_RESULTS:
@ -3,13 +3,9 @@ from enum import IntEnum
|
||||
|
||||
|
||||
class KanaeDayNight(IntEnum):
|
||||
Day = 0
|
||||
Night = 1
|
||||
DAY = 0
|
||||
NIGHT = 1
|
||||
|
||||
|
||||
def kanae_day_night(timestamp: int) -> KanaeDayNight:
|
||||
"""
|
||||
:param timestamp: POSIX timestamp, which is passed to `datetime.fromtimestamp(timestamp)`.
|
||||
"""
|
||||
dt = datetime.fromtimestamp(timestamp)
|
||||
return KanaeDayNight.Day if 6 <= dt.hour <= 19 else KanaeDayNight.Night
|
||||
@staticmethod
|
||||
def from_datetime(dt: datetime) -> "KanaeDayNight":
|
||||
return KanaeDayNight.DAY if 6 <= dt.hour <= 19 else KanaeDayNight.NIGHT # noqa: PLR2004
|
||||
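With the day/night decision moved onto the enum, the call site becomes a plain classification of an hour; a short usage sketch, assuming from_datetime remains a staticmethod on KanaeDayNight:

from datetime import datetime

# hours 6..19 (inclusive) count as day, everything else as night
assert KanaeDayNight.from_datetime(datetime(2024, 7, 5, 7, 0)) is KanaeDayNight.DAY
assert KanaeDayNight.from_datetime(datetime(2024, 7, 5, 23, 0)) is KanaeDayNight.NIGHT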
|
@ -21,7 +21,8 @@ class TestPlayResultCalculators:
|
||||
Decimal("-0.00")
|
||||
) == Decimal("-31.67")
|
||||
|
||||
pytest.raises(ValueError, PlayResultCalculators.score_modifier, -1)
|
||||
with pytest.raises(ValueError, match="negative"):
|
||||
PlayResultCalculators.score_modifier(-1)
|
||||
|
||||
pytest.raises(TypeError, PlayResultCalculators.score_modifier, "9800000")
|
||||
pytest.raises(TypeError, PlayResultCalculators.score_modifier, None)
|
||||
@ -38,5 +39,8 @@ class TestPlayResultCalculators:
|
||||
|
||||
pytest.raises(TypeError, PlayResultCalculators.play_rating, 10002221, None)
|
||||
|
||||
pytest.raises(ValueError, PlayResultCalculators.play_rating, -1, 120)
|
||||
pytest.raises(ValueError, PlayResultCalculators.play_rating, 10002221, -1)
|
||||
with pytest.raises(ValueError, match="negative"):
|
||||
PlayResultCalculators.play_rating(-1, 120)
|
||||
|
||||
with pytest.raises(ValueError, match="negative"):
|
||||
PlayResultCalculators.play_rating(10002221, -1)
|
||||
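The tests switch from the callable form of pytest.raises to the context-manager form, which also asserts on the error message via match; a minimal sketch of the pattern (hypothetical function, not from this repository):

import pytest


def fail_on_negative(value: int) -> int:
    if value < 0:
        raise ValueError("value cannot be negative")
    return value


def test_fail_on_negative():
    # `match` is a regular expression searched against str() of the raised error
    with pytest.raises(ValueError, match="negative"):
        fail_on_negative(-1)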
|
@ -1,27 +1,53 @@
|
||||
import pytest
|
||||
from sqlalchemy import create_engine
|
||||
from sqlalchemy import create_engine, text
|
||||
from sqlalchemy.orm import sessionmaker
|
||||
|
||||
# region sqlalchemy fixtures
|
||||
# from https://medium.com/@vittorio.camisa/agile-database-integration-tests-with-python-sqlalchemy-and-factory-boy-6824e8fe33a1
|
||||
engine = create_engine("sqlite:///:memory:")
|
||||
Session = sessionmaker()
|
||||
|
||||
|
||||
@pytest.fixture(scope="module")
|
||||
@pytest.fixture(scope="session")
|
||||
def db_conn():
|
||||
connection = engine.connect()
|
||||
yield connection
|
||||
connection.close()
|
||||
conn = engine.connect()
|
||||
yield conn
|
||||
conn.close()
|
||||
|
||||
|
||||
@pytest.fixture(scope="function")
|
||||
@pytest.fixture
|
||||
def db_session(db_conn):
|
||||
transaction = db_conn.begin()
|
||||
session = Session(bind=db_conn)
|
||||
yield session
|
||||
session.close()
|
||||
transaction.rollback()
|
||||
|
||||
# drop everything
|
||||
query_tables = db_conn.execute(
|
||||
text("SELECT name FROM sqlite_master WHERE type='table'")
|
||||
).fetchall()
|
||||
for row in query_tables:
|
||||
table_name = row[0]
|
||||
db_conn.execute(text(f"DROP TABLE {table_name}"))
|
||||
|
||||
query_views = db_conn.execute(
|
||||
text("SELECT name FROM sqlite_master WHERE type='view'")
|
||||
).fetchall()
|
||||
for row in query_views:
|
||||
view_name = row[0]
|
||||
db_conn.execute(text(f"DROP VIEW {view_name}"))
|
||||
|
||||
query_indexes = db_conn.execute(
|
||||
text("SELECT name FROM sqlite_master WHERE type='index'")
|
||||
).fetchall()
|
||||
for row in query_indexes:
|
||||
index_name = row[0]
|
||||
db_conn.execute(text(f"DROP INDEX {index_name}"))
|
||||
|
||||
query_triggers = db_conn.execute(
|
||||
text("SELECT name FROM sqlite_master WHERE type='trigger'")
|
||||
).fetchall()
|
||||
for row in query_triggers:
|
||||
trigger_name = row[0]
|
||||
db_conn.execute(text(f"DROP TRIGGER {trigger_name}"))
|
||||
|
||||
|
||||
# endregion
|
||||
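With the session-scoped connection and the per-test db_session, every test starts from an empty in-memory SQLite database, and its changes are rolled back and dropped afterwards; a minimal consumer of the fixture (hypothetical test, not part of this diff):

from sqlalchemy import text


def test_uses_fresh_database(db_session):
    # anything created here is rolled back and dropped during fixture teardown
    db_session.execute(text("CREATE TABLE example (id INTEGER PRIMARY KEY)"))
    db_session.execute(text("INSERT INTO example (id) VALUES (1)"))
    rows = db_session.execute(text("SELECT id FROM example")).fetchall()
    assert rows == [(1,)]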
|
@ -1,5 +0,0 @@
|
||||
from sqlalchemy import Engine, create_engine, inspect
|
||||
|
||||
|
||||
def create_engine_in_memory():
|
||||
return create_engine("sqlite:///:memory:")
|
0
tests/db/models/relationships/__init__.py
Normal file
148
tests/db/models/relationships/test_common.py
Normal file
@ -0,0 +1,148 @@
|
||||
"""
|
||||
Database model v5 common relationships
|
||||
|
||||
┌──────┐ ┌──────┐ ┌────────────┐ ┌────────────┐
|
||||
│ Pack ◄───► Song ◄───► Difficulty ◄───┤ PlayResult │
|
||||
└──────┘ └──┬───┘ └─────▲──────┘ └────────────┘
|
||||
│ │
|
||||
│ ┌─────▼─────┐
|
||||
└───────► ChartInfo │
|
||||
└───────────┘
|
||||
"""
|
||||
|
||||
from datetime import datetime, timezone
|
||||
|
||||
from arcaea_offline.constants.enums import ArcaeaRatingClass
|
||||
from arcaea_offline.database.models import (
|
||||
ChartInfo,
|
||||
Difficulty,
|
||||
ModelBase,
|
||||
Pack,
|
||||
PlayResult,
|
||||
Song,
|
||||
)
|
||||
|
||||
|
||||
class TestSongRelationships:
|
||||
@staticmethod
|
||||
def init_db(session):
|
||||
ModelBase.metadata.create_all(session.bind)
|
||||
|
||||
def test_relationships(self, db_session):
|
||||
self.init_db(db_session)
|
||||
|
||||
song_id = "test_song"
|
||||
title_en = "Test Lorem Ipsum"
|
||||
artist_en = "Test Artist"
|
||||
|
||||
pack = Pack(
|
||||
id="test_pack",
|
||||
name="Test Pack",
|
||||
description="This is a test pack.",
|
||||
)
|
||||
|
||||
song = Song(
|
||||
idx=1,
|
||||
id=song_id,
|
||||
title=title_en,
|
||||
artist=artist_en,
|
||||
pack_id=pack.id,
|
||||
added_at=datetime(2024, 7, 5, tzinfo=timezone.utc),
|
||||
)
|
||||
|
||||
difficulty_pst = Difficulty(
|
||||
song_id=song.id,
|
||||
rating_class=ArcaeaRatingClass.PAST,
|
||||
rating=2,
|
||||
is_rating_plus=False,
|
||||
)
|
||||
chart_info_pst = ChartInfo(
|
||||
song_id=song.id,
|
||||
rating_class=ArcaeaRatingClass.PAST,
|
||||
constant=20,
|
||||
notes=200,
|
||||
added_at=datetime(2024, 7, 12, tzinfo=timezone.utc),
|
||||
)
|
||||
|
||||
difficulty_prs = Difficulty(
|
||||
song_id=song.id,
|
||||
rating_class=ArcaeaRatingClass.PRESENT,
|
||||
rating=7,
|
||||
is_rating_plus=True,
|
||||
)
|
||||
chart_info_prs = ChartInfo(
|
||||
song_id=song.id,
|
||||
rating_class=ArcaeaRatingClass.PRESENT,
|
||||
constant=78,
|
||||
notes=780,
|
||||
added_at=datetime(2024, 7, 12, tzinfo=timezone.utc),
|
||||
)
|
||||
|
||||
difficulty_ftr = Difficulty(
|
||||
song_id=song.id,
|
||||
rating_class=ArcaeaRatingClass.FUTURE,
|
||||
rating=10,
|
||||
is_rating_plus=True,
|
||||
)
|
||||
chart_info_ftr = ChartInfo(
|
||||
song_id=song.id,
|
||||
rating_class=ArcaeaRatingClass.FUTURE,
|
||||
constant=109,
|
||||
notes=1090,
|
||||
added_at=datetime(2024, 7, 12, tzinfo=timezone.utc),
|
||||
)
|
||||
|
||||
difficulty_etr = Difficulty(
|
||||
song_id=song.id,
|
||||
rating_class=ArcaeaRatingClass.ETERNAL,
|
||||
rating=9,
|
||||
is_rating_plus=True,
|
||||
)
|
||||
|
||||
play_result_ftr = PlayResult(
|
||||
song_id=song.id,
|
||||
rating_class=ArcaeaRatingClass.FUTURE,
|
||||
score=123456,
|
||||
)
|
||||
|
||||
db_session.add_all(
|
||||
[
|
||||
pack,
|
||||
song,
|
||||
difficulty_pst,
|
||||
chart_info_pst,
|
||||
difficulty_prs,
|
||||
chart_info_prs,
|
||||
difficulty_ftr,
|
||||
chart_info_ftr,
|
||||
difficulty_etr,
|
||||
play_result_ftr,
|
||||
]
|
||||
)
|
||||
db_session.commit()
|
||||
|
||||
assert pack.songs == [song]
|
||||
|
||||
assert song.pack == pack
|
||||
assert song.difficulties == [
|
||||
difficulty_pst,
|
||||
difficulty_prs,
|
||||
difficulty_ftr,
|
||||
difficulty_etr,
|
||||
]
|
||||
|
||||
assert difficulty_pst.song == song
|
||||
assert difficulty_prs.song == song
|
||||
assert difficulty_ftr.song == song
|
||||
assert difficulty_etr.song == song
|
||||
|
||||
assert difficulty_pst.chart_info_list == [chart_info_pst]
|
||||
assert difficulty_prs.chart_info_list == [chart_info_prs]
|
||||
assert difficulty_ftr.chart_info_list == [chart_info_ftr]
|
||||
assert difficulty_etr.chart_info_list == []
|
||||
|
||||
assert chart_info_pst.difficulty == difficulty_pst
|
||||
assert chart_info_prs.difficulty == difficulty_prs
|
||||
assert chart_info_ftr.difficulty == difficulty_ftr
|
||||
|
||||
# assert play_result_ftr.difficulty == difficulty_ftr
|
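The assertions above rely on bidirectional relationship() declarations between the models; a rough, heavily simplified sketch of the assumed shape (the real models in arcaea_offline.database.models define more columns and may differ):

from typing import List

from sqlalchemy import ForeignKey
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column, relationship


class Base(DeclarativeBase):
    pass


class PackSketch(Base):
    __tablename__ = "packs"
    id: Mapped[str] = mapped_column(primary_key=True)
    # collection side; back_populates keeps both ends in sync in memory
    songs: Mapped[List["SongSketch"]] = relationship(back_populates="pack")


class SongSketch(Base):
    __tablename__ = "songs"
    id: Mapped[str] = mapped_column(primary_key=True)
    pack_id: Mapped[str] = mapped_column(ForeignKey("packs.id"))
    pack: Mapped["PackSketch"] = relationship(back_populates="songs")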
66
tests/db/models/relationships/test_pack.py
Normal file
@ -0,0 +1,66 @@
|
||||
"""
|
||||
Database model v5 relationships
|
||||
|
||||
Pack <> PackLocalized
|
||||
"""
|
||||
|
||||
from arcaea_offline.constants.enums import ArcaeaLanguage
|
||||
from arcaea_offline.database.models import ModelBase, Pack, PackLocalization
|
||||
|
||||
|
||||
class TestPackRelationships:
|
||||
@staticmethod
|
||||
def init_db(session):
|
||||
ModelBase.metadata.create_all(session.bind)
|
||||
|
||||
def test_localized_objects(self, db_session):
|
||||
self.init_db(db_session)
|
||||
|
||||
pack_id = "test_pack"
|
||||
name_en = "Test Pack"
|
||||
description_en = "Travel through common database models\nfrom the unpopular framework 'Arcaea Offline'\ntogether with an ordinary partner '∅'."
|
||||
|
||||
pack = Pack(
|
||||
id=pack_id,
|
||||
name=name_en,
|
||||
description=description_en,
|
||||
)
|
||||
|
||||
description_ja = "普通のパートナー「∅」と一緒に、\n不人気フレームワーク「Arcaea Offline」より、\n一般的なデータベース・モデルを通過する。"
|
||||
pack_localized_ja = PackLocalization(
|
||||
id=pack_id,
|
||||
lang=ArcaeaLanguage.JA.value,
|
||||
name=None,
|
||||
description=description_ja,
|
||||
)
|
||||
|
||||
description_zh_hans = "与平凡的「∅」一起,\n在没人用的「Arcaea Offline」框架里,\n一同探索随处可见的数据库模型。"
|
||||
pack_localized_zh_hans = PackLocalization(
|
||||
id=pack_id,
|
||||
lang=ArcaeaLanguage.ZH_HANS.value,
|
||||
name=None,
|
||||
description=description_zh_hans,
|
||||
)
|
||||
|
||||
db_session.add_all([pack, pack_localized_ja])
|
||||
db_session.commit()
|
||||
|
||||
assert len(pack.localized_entries) == len([pack_localized_ja])
|
||||
|
||||
assert pack_localized_ja.pack.description == pack.description
|
||||
|
||||
# test back populates
|
||||
new_pack = Pack(
|
||||
id=f"{pack_id}_new",
|
||||
name="NEW",
|
||||
description="new new pack",
|
||||
)
|
||||
db_session.add(new_pack)
|
||||
|
||||
pack_localized_ja.pack = new_pack
|
||||
pack.localized_entries.append(pack_localized_zh_hans)
|
||||
db_session.commit()
|
||||
|
||||
assert pack_localized_ja.pack.id == new_pack.id
|
||||
# TODO: this fails, likely because reassigning pack_localized_ja.pack above also
# removed it from pack.localized_entries via back_populates, leaving one entry
|
||||
assert len(pack.localized_entries) == 2
|
118
tests/db/models/test_chart.py
Normal file
@ -0,0 +1,118 @@
|
||||
"""
|
||||
Database models v5
|
||||
|
||||
Chart functionalities
|
||||
- basic data handling
|
||||
- Difficulty song info overriding
|
||||
"""
|
||||
|
||||
from datetime import datetime, timezone
|
||||
|
||||
from arcaea_offline.constants.enums.arcaea import ArcaeaRatingClass
|
||||
from arcaea_offline.database.models import (
|
||||
Chart,
|
||||
ChartInfo,
|
||||
Difficulty,
|
||||
ModelBase,
|
||||
ModelViewBase,
|
||||
Pack,
|
||||
Song,
|
||||
)
|
||||
|
||||
|
||||
class TestChart:
|
||||
def init_db(self, session):
|
||||
ModelBase.metadata.create_all(session.bind)
|
||||
ModelViewBase.metadata.create_all(session.bind)
|
||||
|
||||
def test_basic(self, db_session):
|
||||
self.init_db(db_session)
|
||||
|
||||
pack_id = "test_pack"
|
||||
song_id = "test_song"
|
||||
rating_class = ArcaeaRatingClass.FUTURE
|
||||
|
||||
pack = Pack(id=pack_id, name="Test Pack")
|
||||
song = Song(
|
||||
idx=2,
|
||||
id=song_id,
|
||||
title="~TEST~",
|
||||
artist="~test~",
|
||||
pack_id=pack_id,
|
||||
added_at=datetime(2024, 7, 4, tzinfo=timezone.utc),
|
||||
)
|
||||
difficulty = Difficulty(
|
||||
song_id=song_id,
|
||||
rating_class=rating_class,
|
||||
rating=9,
|
||||
is_rating_plus=True,
|
||||
)
|
||||
chart_info = ChartInfo(
|
||||
song_id=song_id,
|
||||
rating_class=rating_class,
|
||||
constant=98,
|
||||
notes=980,
|
||||
added_at=datetime(2024, 7, 12, tzinfo=timezone.utc),
|
||||
)
|
||||
db_session.add_all([pack, song, difficulty, chart_info])
|
||||
db_session.commit()
|
||||
|
||||
chart: Chart = (
|
||||
db_session.query(Chart)
|
||||
.where((Chart.song_id == song_id) & (Chart.rating_class == rating_class))
|
||||
.one()
|
||||
)
|
||||
|
||||
# `song_id` and `rating_class` are guarded by the WHERE clause above
|
||||
assert chart.song_idx == song.idx
|
||||
assert chart.title == song.title
|
||||
assert chart.artist == song.artist
|
||||
assert chart.pack_id == song.pack_id
|
||||
assert chart.rating == difficulty.rating
|
||||
assert chart.is_rating_plus == difficulty.is_rating_plus
|
||||
assert chart.constant == chart_info.constant
|
||||
assert chart.notes == chart_info.notes
|
||||
|
||||
def test_difficulty_override(self, db_session):
|
||||
self.init_db(db_session)
|
||||
|
||||
pack_id = "test_pack"
|
||||
song_id = "test_song"
|
||||
rating_class = ArcaeaRatingClass.FUTURE
|
||||
|
||||
pack = Pack(id=pack_id, name="Test Pack")
|
||||
song = Song(
|
||||
idx=2,
|
||||
id=song_id,
|
||||
title="~TEST~",
|
||||
artist="~test~",
|
||||
pack_id=pack_id,
|
||||
added_at=datetime(2024, 7, 4, tzinfo=timezone.utc),
|
||||
)
|
||||
difficulty = Difficulty(
|
||||
song_id=song_id,
|
||||
rating_class=rating_class,
|
||||
rating=9,
|
||||
is_rating_plus=True,
|
||||
title="~TEST DIFF~",
|
||||
artist="~diff~",
|
||||
)
|
||||
chart_info = ChartInfo(
|
||||
song_id=song_id,
|
||||
rating_class=rating_class,
|
||||
constant=98,
|
||||
notes=980,
|
||||
added_at=datetime(2024, 7, 12, tzinfo=timezone.utc),
|
||||
)
|
||||
db_session.add_all([pack, song, difficulty, chart_info])
|
||||
db_session.commit()
|
||||
|
||||
chart: Chart = (
|
||||
db_session.query(Chart)
|
||||
.where((Chart.song_id == song_id) & (Chart.rating_class == rating_class))
|
||||
.one()
|
||||
)
|
||||
|
||||
assert chart.song_idx == song.idx
|
||||
assert chart.title == difficulty.title
|
||||
assert chart.artist == difficulty.artist
|
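The two tests encode the override rule behind the Chart view: a title or artist set on Difficulty replaces the Song-level value, otherwise the song's value is used; expressed as a plain coalesce (illustration only, not the view definition):

from typing import Optional


def effective_title(difficulty_title: Optional[str], song_title: str) -> str:
    # Difficulty-level overrides win when present
    return difficulty_title if difficulty_title is not None else song_title


assert effective_title(None, "~TEST~") == "~TEST~"
assert effective_title("~TEST DIFF~", "~TEST~") == "~TEST DIFF~"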
@ -2,10 +2,9 @@ from datetime import datetime, timedelta, timezone
|
||||
from enum import IntEnum
|
||||
from typing import Optional
|
||||
|
||||
from sqlalchemy import text
|
||||
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column
|
||||
|
||||
from arcaea_offline.database.models._custom_types import DbIntEnum, TZDateTime
|
||||
from arcaea_offline.database.models._types import ForceTimezoneDateTime
|
||||
|
||||
|
||||
class TestIntEnum(IntEnum):
|
||||
@ -22,43 +21,19 @@ class TestBase(DeclarativeBase):
|
||||
id: Mapped[int] = mapped_column(primary_key=True)
|
||||
|
||||
|
||||
class IntEnumTestModel(TestBase):
|
||||
__tablename__ = "test_int_enum"
|
||||
value: Mapped[Optional[TestIntEnum]] = mapped_column(DbIntEnum(TestIntEnum))
|
||||
|
||||
|
||||
class TZDatetimeTestModel(TestBase):
|
||||
class ForceTimezoneDatetimeTestModel(TestBase):
|
||||
__tablename__ = "test_tz_datetime"
|
||||
value: Mapped[Optional[datetime]] = mapped_column(TZDateTime)
|
||||
value: Mapped[Optional[datetime]] = mapped_column(ForceTimezoneDateTime)
|
||||
|
||||
|
||||
class TestCustomTypes:
|
||||
def test_int_enum(self, db_session):
|
||||
def _query_value(_id: int):
|
||||
return db_session.execute(
|
||||
text(
|
||||
f"SELECT value FROM {IntEnumTestModel.__tablename__} WHERE id = {_id}"
|
||||
)
|
||||
).one()[0]
|
||||
|
||||
TestBase.metadata.create_all(db_session.bind)
|
||||
|
||||
basic_obj = IntEnumTestModel(id=1, value=TestIntEnum.TWO)
|
||||
null_obj = IntEnumTestModel(id=2, value=None)
|
||||
db_session.add(basic_obj)
|
||||
db_session.add(null_obj)
|
||||
db_session.commit()
|
||||
|
||||
assert _query_value(1) == TestIntEnum.TWO.value
|
||||
assert _query_value(2) is None
|
||||
|
||||
def test_tz_datetime(self, db_session):
|
||||
TestBase.metadata.create_all(db_session.bind)
|
||||
def test_force_timezone_datetime(self, db_session):
|
||||
TestBase.metadata.create_all(db_session.bind, checkfirst=False)
|
||||
|
||||
dt1 = datetime.now(tz=timezone(timedelta(hours=8)))
|
||||
|
||||
basic_obj = TZDatetimeTestModel(id=1, value=dt1)
|
||||
null_obj = TZDatetimeTestModel(id=2, value=None)
|
||||
basic_obj = ForceTimezoneDatetimeTestModel(id=1, value=dt1)
|
||||
null_obj = ForceTimezoneDatetimeTestModel(id=2, value=None)
|
||||
db_session.add(basic_obj)
|
||||
db_session.add(null_obj)
|
||||
db_session.commit()
|
||||
|
@ -1,118 +0,0 @@
|
||||
from sqlalchemy import Engine
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from arcaea_offline.models.songs import (
|
||||
Chart,
|
||||
ChartInfo,
|
||||
Difficulty,
|
||||
Pack,
|
||||
Song,
|
||||
SongsBase,
|
||||
SongsViewBase,
|
||||
)
|
||||
|
||||
from ..db import create_engine_in_memory
|
||||
|
||||
|
||||
def _song(**kw):
|
||||
defaults = {"artist": "test"}
|
||||
defaults.update(kw)
|
||||
return Song(**defaults)
|
||||
|
||||
|
||||
def _difficulty(**kw):
|
||||
defaults = {"rating_plus": False, "audio_override": False, "jacket_override": False}
|
||||
defaults.update(kw)
|
||||
return Difficulty(**defaults)
|
||||
|
||||
|
||||
class Test_Chart:
|
||||
def init_db(self, engine: Engine):
|
||||
SongsBase.metadata.create_all(engine)
|
||||
SongsViewBase.metadata.create_all(engine)
|
||||
|
||||
def db(self):
|
||||
db = create_engine_in_memory()
|
||||
self.init_db(db)
|
||||
return db
|
||||
|
||||
def test_chart_info(self):
|
||||
pre_entites = [
|
||||
Pack(id="test", name="Test Pack"),
|
||||
_song(idx=0, id="song0", set="test", title="Full Chart Info"),
|
||||
_song(idx=1, id="song1", set="test", title="Partial Chart Info"),
|
||||
_song(idx=2, id="song2", set="test", title="No Chart Info"),
|
||||
_difficulty(song_id="song0", rating_class=2, rating=9),
|
||||
_difficulty(song_id="song1", rating_class=2, rating=9),
|
||||
_difficulty(song_id="song2", rating_class=2, rating=9),
|
||||
ChartInfo(song_id="song0", rating_class=2, constant=90, notes=1234),
|
||||
ChartInfo(song_id="song1", rating_class=2, constant=90),
|
||||
]
|
||||
|
||||
db = self.db()
|
||||
with Session(db) as session:
|
||||
session.add_all(pre_entites)
|
||||
session.commit()
|
||||
|
||||
chart_song0_ratingclass2 = (
|
||||
session.query(Chart)
|
||||
.where((Chart.song_id == "song0") & (Chart.rating_class == 2))
|
||||
.one()
|
||||
)
|
||||
|
||||
assert chart_song0_ratingclass2.constant == 90
|
||||
assert chart_song0_ratingclass2.notes == 1234
|
||||
|
||||
chart_song1_ratingclass2 = (
|
||||
session.query(Chart)
|
||||
.where((Chart.song_id == "song1") & (Chart.rating_class == 2))
|
||||
.one()
|
||||
)
|
||||
|
||||
assert chart_song1_ratingclass2.constant == 90
|
||||
assert chart_song1_ratingclass2.notes is None
|
||||
|
||||
chart_song2_ratingclass2 = (
|
||||
session.query(Chart)
|
||||
.where((Chart.song_id == "song2") & (Chart.rating_class == 2))
|
||||
.first()
|
||||
)
|
||||
|
||||
assert chart_song2_ratingclass2 is None
|
||||
|
||||
def test_difficulty_title_override(self):
|
||||
pre_entites = [
|
||||
Pack(id="test", name="Test Pack"),
|
||||
_song(idx=0, id="test", set="test", title="Test"),
|
||||
_difficulty(song_id="test", rating_class=0, rating=2),
|
||||
_difficulty(song_id="test", rating_class=1, rating=5),
|
||||
_difficulty(song_id="test", rating_class=2, rating=8),
|
||||
_difficulty(
|
||||
song_id="test", rating_class=3, rating=10, title="TEST ~REVIVE~"
|
||||
),
|
||||
ChartInfo(song_id="test", rating_class=0, constant=10),
|
||||
ChartInfo(song_id="test", rating_class=1, constant=10),
|
||||
ChartInfo(song_id="test", rating_class=2, constant=10),
|
||||
ChartInfo(song_id="test", rating_class=3, constant=10),
|
||||
]
|
||||
|
||||
db = self.db()
|
||||
with Session(db) as session:
|
||||
session.add_all(pre_entites)
|
||||
session.commit()
|
||||
|
||||
charts_original_title = (
|
||||
session.query(Chart)
|
||||
.where((Chart.song_id == "test") & (Chart.rating_class in [0, 1, 2]))
|
||||
.all()
|
||||
)
|
||||
|
||||
assert all(chart.title == "Test" for chart in charts_original_title)
|
||||
|
||||
chart_overrided_title = (
|
||||
session.query(Chart)
|
||||
.where((Chart.song_id == "test") & (Chart.rating_class == 3))
|
||||
.one()
|
||||
)
|
||||
|
||||
assert chart_overrided_title.title == "TEST ~REVIVE~"
|
88
tests/external/importers/arcaea/test_online.py
vendored
Normal file
@ -0,0 +1,88 @@
|
||||
import json
|
||||
from datetime import datetime, timezone
|
||||
|
||||
from arcaea_offline.constants.enums.arcaea import (
|
||||
ArcaeaPlayResultClearType,
|
||||
ArcaeaPlayResultModifier,
|
||||
ArcaeaRatingClass,
|
||||
)
|
||||
from arcaea_offline.database.models.play_result import PlayResult
|
||||
from arcaea_offline.external.importers.arcaea.online import ArcaeaOnlineApiParser
|
||||
|
||||
API_RESULT = {
|
||||
"success": True,
|
||||
"value": {
|
||||
"best_rated_scores": [
|
||||
{
|
||||
"song_id": "test1",
|
||||
"difficulty": 2,
|
||||
"modifier": 0,
|
||||
"rating": 12.5,
|
||||
"score": 9908123,
|
||||
"perfect_count": 1234,
|
||||
"near_count": 12,
|
||||
"miss_count": 4,
|
||||
"clear_type": 1,
|
||||
"title": {"ja": "テスト1", "en": "Test 1"},
|
||||
"artist": "pytest",
|
||||
"time_played": 1704067200000, # 2024-01-01 00:00:00 UTC
|
||||
"bg": "abcdefg123456hijklmn7890123opqrs",
|
||||
},
|
||||
{
|
||||
"song_id": "test2",
|
||||
"difficulty": 2,
|
||||
"modifier": 0,
|
||||
"rating": 12.0,
|
||||
"score": 9998123,
|
||||
"perfect_count": 1234,
|
||||
"near_count": 1,
|
||||
"miss_count": 0,
|
||||
"clear_type": 1,
|
||||
"title": {"ja": "テスト2", "en": "Test 2"},
|
||||
"artist": "pytest",
|
||||
"time_played": 1704067200000,
|
||||
"bg": "abcdefg123456hijklmn7890123opqrs",
|
||||
},
|
||||
],
|
||||
"recent_rated_scores": [
|
||||
{
|
||||
"song_id": "test2",
|
||||
"difficulty": 2,
|
||||
"modifier": 0,
|
||||
"rating": 12.0,
|
||||
"score": 9998123,
|
||||
"perfect_count": 1234,
|
||||
"near_count": 1,
|
||||
"miss_count": 0,
|
||||
"clear_type": 1,
|
||||
"title": {"ja": "テスト2", "en": "Test 2"},
|
||||
"artist": "pytest",
|
||||
"time_played": 1704153600000, # 2024-01-02 00:00:00 UTC
|
||||
"bg": "abcdefg123456hijklmn7890123opqrs",
|
||||
}
|
||||
],
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
class TestArcaeaOnlineApiParser:
|
||||
API_RESULT_CONTENT = json.dumps(API_RESULT, ensure_ascii=False)
|
||||
|
||||
def test_parse(self):
|
||||
play_results = ArcaeaOnlineApiParser(self.API_RESULT_CONTENT).parse()
|
||||
assert all(isinstance(item, PlayResult) for item in play_results)
|
||||
|
||||
assert len(play_results) == 2
|
||||
|
||||
test1 = next(filter(lambda x: x.song_id == "test1", play_results))
|
||||
assert test1.rating_class is ArcaeaRatingClass.FUTURE
|
||||
assert test1.score == 9908123
|
||||
assert test1.pure == 1234
|
||||
assert test1.far == 12
|
||||
assert test1.lost == 4
|
||||
assert test1.played_at == datetime(2024, 1, 1, 0, 0, 0, tzinfo=timezone.utc)
|
||||
assert test1.clear_type is ArcaeaPlayResultClearType.NORMAL_CLEAR
|
||||
assert test1.modifier is ArcaeaPlayResultModifier.NORMAL
|
||||
|
||||
test2 = next(filter(lambda x: x.song_id == "test2", play_results))
|
||||
assert test2.played_at == datetime(2024, 1, 2, 0, 0, 0, tzinfo=timezone.utc)
|
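The time_played values in the fixture are Unix timestamps in milliseconds, which is why the asserted datetimes line up after dividing by 1000; a quick worked check (not parser code):

from datetime import datetime, timezone

assert datetime.fromtimestamp(1704067200000 / 1000, tz=timezone.utc) == datetime(
    2024, 1, 1, tzinfo=timezone.utc
)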
55
tests/external/importers/arcaea/test_st3.py
vendored
Normal file
@ -0,0 +1,55 @@
|
||||
import sqlite3
|
||||
from datetime import datetime
|
||||
|
||||
import pytest
|
||||
|
||||
import tests.resources
|
||||
from arcaea_offline.constants.enums.arcaea import (
|
||||
ArcaeaPlayResultClearType,
|
||||
ArcaeaPlayResultModifier,
|
||||
ArcaeaRatingClass,
|
||||
)
|
||||
from arcaea_offline.external.importers.arcaea.st3 import ArcaeaSt3Parser
|
||||
|
||||
db = sqlite3.connect(":memory:")
|
||||
db.executescript(tests.resources.get_resource("st3.sql").read_text(encoding="utf-8"))
|
||||
|
||||
|
||||
class TestArcaeaSt3Parser:
|
||||
@property
|
||||
def play_results(self):
|
||||
return ArcaeaSt3Parser.parse(db)
|
||||
|
||||
def test_basic(self):
|
||||
play_results = self.play_results
|
||||
|
||||
assert len(play_results) == 4
|
||||
|
||||
test1 = next(filter(lambda x: x.song_id == "test1", play_results))
|
||||
assert test1.rating_class is ArcaeaRatingClass.FUTURE
|
||||
assert test1.score == 9441167
|
||||
assert test1.pure == 895
|
||||
assert test1.far == 32
|
||||
assert test1.lost == 22
|
||||
assert test1.played_at == datetime.fromtimestamp(1722100000).astimezone()
|
||||
assert test1.clear_type is ArcaeaPlayResultClearType.TRACK_LOST
|
||||
assert test1.modifier is ArcaeaPlayResultModifier.HARD
|
||||
|
||||
def test_corrupt_handling(self):
|
||||
play_results = self.play_results
|
||||
|
||||
corrupt1 = filter(lambda x: x.song_id == "corrupt1", play_results)
|
||||
# `rating_class` out of range, so this should be ignored during parsing,
|
||||
# thus is not present in the result.
|
||||
assert len(list(corrupt1)) == 0
|
||||
|
||||
corrupt2 = next(filter(lambda x: x.song_id == "corrupt2", play_results))
|
||||
assert corrupt2.clear_type is None
|
||||
assert corrupt2.modifier is None
|
||||
|
||||
date1 = next(filter(lambda x: x.song_id == "date1", play_results))
|
||||
assert date1.played_at is None
|
||||
|
||||
def test_invalid_input(self):
|
||||
pytest.raises(TypeError, ArcaeaSt3Parser.parse, "abcdefghijklmn")
|
||||
pytest.raises(TypeError, ArcaeaSt3Parser.parse, 123456)
|
45
tests/external/importers/test_arcsong.py
vendored
Normal file
@ -0,0 +1,45 @@
|
||||
import sqlite3
|
||||
|
||||
import tests.resources
|
||||
from arcaea_offline.constants.enums.arcaea import ArcaeaRatingClass
|
||||
from arcaea_offline.database.models import ChartInfo
|
||||
from arcaea_offline.external.importers.arcsong import (
|
||||
ArcsongDatabaseImporter,
|
||||
)
|
||||
|
||||
db = sqlite3.connect(":memory:")
|
||||
db.executescript(
|
||||
tests.resources.get_resource("arcsong.sql").read_text(encoding="utf-8")
|
||||
)
|
||||
|
||||
|
||||
class TestArcsongDatabaseImporter:
|
||||
def test_parse(self):
|
||||
items = ArcsongDatabaseImporter.parse(db)
|
||||
|
||||
assert all(isinstance(item, ChartInfo) for item in items)
|
||||
assert len(items) == 3
|
||||
|
||||
base1_pst = next(
|
||||
it
|
||||
for it in items
|
||||
if it.song_id == "base1" and it.rating_class is ArcaeaRatingClass.PAST
|
||||
)
|
||||
assert base1_pst.constant == 30
|
||||
assert base1_pst.notes == 500
|
||||
|
||||
base1_prs = next(
|
||||
it
|
||||
for it in items
|
||||
if it.song_id == "base1" and it.rating_class is ArcaeaRatingClass.PRESENT
|
||||
)
|
||||
assert base1_prs.constant == 60
|
||||
assert base1_prs.notes == 700
|
||||
|
||||
base1_ftr = next(
|
||||
it
|
||||
for it in items
|
||||
if it.song_id == "base1" and it.rating_class is ArcaeaRatingClass.FUTURE
|
||||
)
|
||||
assert base1_ftr.constant == 90
|
||||
assert base1_ftr.notes == 1000
|
34
tests/external/importers/test_chart_info_database.py
vendored
Normal file
@ -0,0 +1,34 @@
|
||||
import sqlite3
|
||||
|
||||
import tests.resources
|
||||
from arcaea_offline.constants.enums.arcaea import ArcaeaRatingClass
|
||||
from arcaea_offline.database.models import ChartInfo
|
||||
from arcaea_offline.external.importers.chart_info_database import (
|
||||
ChartInfoDatabaseParser,
|
||||
)
|
||||
|
||||
db = sqlite3.connect(":memory:")
|
||||
db.executescript(tests.resources.get_resource("cidb.sql").read_text(encoding="utf-8"))
|
||||
|
||||
|
||||
class TestChartInfoDatabaseParser:
|
||||
def test_parse(self):
|
||||
items = ChartInfoDatabaseParser.parse(db)
|
||||
assert all(isinstance(item, ChartInfo) for item in items)
|
||||
|
||||
assert len(items) == 3
|
||||
|
||||
test1 = next(filter(lambda x: x.song_id == "test1", items))
|
||||
assert test1.rating_class is ArcaeaRatingClass.PRESENT
|
||||
assert test1.constant == 90
|
||||
assert test1.notes == 900
|
||||
|
||||
test2 = next(filter(lambda x: x.song_id == "test2", items))
|
||||
assert test2.rating_class is ArcaeaRatingClass.FUTURE
|
||||
assert test2.constant == 95
|
||||
assert test2.notes == 950
|
||||
|
||||
test3 = next(filter(lambda x: x.song_id == "test3", items))
|
||||
assert test3.rating_class is ArcaeaRatingClass.BEYOND
|
||||
assert test3.constant == 100
|
||||
assert test3.notes is None
|
16
tests/resources/__init__.py
Normal file
@ -0,0 +1,16 @@
|
||||
import importlib.resources
|
||||
import sys
|
||||
|
||||
|
||||
def get_resource(path: str):
|
||||
"""
|
||||
A wrapper around `importlib.resources.files()`, which is not available in Python 3.8.
|
||||
"""
|
||||
if sys.version_info >= (3, 9, 0):
|
||||
with importlib.resources.as_file(
|
||||
importlib.resources.files(__name__).joinpath(path)
|
||||
) as resource_path:
|
||||
return resource_path
|
||||
|
||||
with importlib.resources.path(__name__, path) as resource_path:
|
||||
return resource_path
|
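A short usage sketch of the helper, mirroring how the importer tests above load their SQL fixtures:

import sqlite3

import tests.resources

# resolve a file that lives next to tests/resources/__init__.py
sql_path = tests.resources.get_resource("st3.sql")
db = sqlite3.connect(":memory:")
db.executescript(sql_path.read_text(encoding="utf-8"))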
40
tests/resources/arcsong.sql
Normal file
@ -0,0 +1,40 @@
|
||||
CREATE TABLE packages(
|
||||
`id` TEXT PRIMARY KEY NOT NULL,
|
||||
`name` TEXT NOT NULL DEFAULT ""
|
||||
);
|
||||
|
||||
CREATE TABLE charts(
|
||||
song_id TEXT NOT NULL DEFAULT '',
|
||||
rating_class INTEGER NOT NULL DEFAULT 0,
|
||||
name_en TEXT NOT NULL DEFAULT '',
|
||||
name_jp TEXT DEFAULT '',
|
||||
artist TEXT NOT NULL DEFAULT '',
|
||||
bpm TEXT NOT NULL DEFAULT '',
|
||||
bpm_base DOUBLE NOT NULL DEFAULT 0,
|
||||
`set` TEXT NOT NULL DEFAULT '',
|
||||
`time` INTEGER DEFAULT 0,
|
||||
side INTEGER NOT NULL DEFAULT 0,
|
||||
world_unlock BOOLEAN NOT NULL DEFAULT 0,
|
||||
remote_download BOOLEAN DEFAULT '',
|
||||
bg TEXT NOT NULL DEFAULT '',
|
||||
`date` INTEGER NOT NULL DEFAULT 0,
|
||||
`version` TEXT NOT NULL DEFAULT '',
|
||||
difficulty INTEGER NOT NULL DEFAULT 0,
|
||||
rating INTEGER NOT NULL DEFAULT 0,
|
||||
note INTEGER NOT NULL DEFAULT 0,
|
||||
chart_designer TEXT DEFAULT '',
|
||||
jacket_designer TEXT DEFAULT '',
|
||||
jacket_override BOOLEAN NOT NULL DEFAULT 0,
|
||||
audio_override BOOLEAN NOT NULL DEFAULT 0,
|
||||
PRIMARY KEY(song_id, rating_class)
|
||||
);
|
||||
|
||||
|
||||
INSERT INTO packages ("id", "name") VALUES
|
||||
('base', 'Base Pack'),
|
||||
('core', 'Core Pack');
|
||||
|
||||
INSERT INTO charts ("song_id", "rating_class", "name_en", "name_jp", "artist", "bpm", "bpm_base", "set", "time", "side", "world_unlock", "remote_download", "bg", "date", "version", "difficulty", "rating", "note", "chart_designer", "jacket_designer", "jacket_override", "audio_override") VALUES
|
||||
('base1', '0', 'Base song 1', 'ベース・ソング・ワン', 'Artist', '1024', '1024.0', 'base', '1024', '1', '1', '0', '', '1400067914', '1.0', '6', '30', '500', 'Charter', '78rwey63a', '0', '0'),
|
||||
('base1', '1', 'Base song 1', 'ベース・ソング・ワン', 'Artist', '1024', '1024.0', 'base', '1024', '1', '1', '0', '', '1400067914', '1.0', '12', '60', '700', 'Charter', '78rwey63b', '0', '0'),
|
||||
('base1', '2', 'Base song 1', 'ベース・ソング・ワン', 'Artist', '1024', '1024.0', 'base', '1024', '1', '1', '0', '', '1400067914', '1.0', '18', '90', '1000', 'Charter', '78rwey63c', '0', '0');
|
11
tests/resources/cidb.sql
Normal file
@ -0,0 +1,11 @@
|
||||
CREATE TABLE charts_info (
|
||||
song_id TEXT NOT NULL,
|
||||
rating_class INTEGER NOT NULL,
|
||||
constant INTEGER NOT NULL,
|
||||
notes INTEGER
|
||||
);
|
||||
|
||||
INSERT INTO charts_info (song_id, rating_class, constant, notes) VALUES
|
||||
("test1", 1, 90, 900),
|
||||
("test2", 2, 95, 950),
|
||||
("test3", 3, 100, NULL);
|
37
tests/resources/st3.sql
Normal file
@ -0,0 +1,37 @@
|
||||
CREATE TABLE scores (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
|
||||
version INTEGER,
|
||||
score INTEGER,
|
||||
shinyPerfectCount INTEGER,
|
||||
perfectCount INTEGER,
|
||||
nearCount INTEGER,
|
||||
missCount INTEGER,
|
||||
date INTEGER,
|
||||
songId TEXT,
|
||||
songDifficulty INTEGER,
|
||||
modifier INTEGER,
|
||||
health INTEGER,
|
||||
ct INTEGER DEFAULT 0
|
||||
);
|
||||
|
||||
CREATE TABLE cleartypes (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
|
||||
songId TEXT,
|
||||
songDifficulty INTEGER,
|
||||
clearType INTEGER,
|
||||
ct INTEGER DEFAULT 0
|
||||
);
|
||||
|
||||
INSERT INTO scores ("id", "version", "score", "shinyPerfectCount", "perfectCount", "nearCount", "missCount", "date", "songId", "songDifficulty", "modifier", "health", "ct") VALUES
|
||||
('1', '1', '9441167', '753', '895', '32', '22', '1722100000', 'test1', '2', '2', '0', '0'),
|
||||
('2', '1', '9752087', '914', '1024', '29', '12', '1722200000', 'test2', '2', '0', '100', '0'),
|
||||
('3', '1', '9750000', '900', '1000', '20', '10', '1722200000', 'corrupt1', '5', '0', '0', '0'),
|
||||
('4', '1', '9750000', '900', '1000', '20', '10', '1722200000', 'corrupt2', '2', '9', '0', '0'),
|
||||
('5', '1', '9750000', '900', '1000', '20', '10', '1', 'date1', '2', '0', '0', '0');
|
||||
|
||||
INSERT INTO cleartypes ("id", "songId", "songDifficulty", "clearType", "ct") VALUES
|
||||
('1', 'test1', '2', '0', '0'),
|
||||
('2', 'test2', '2', '1', '0'),
|
||||
('3', 'corrupt1', '5', '0', '0'),
|
||||
('4', 'corrupt2', '2', '7', '0'),
|
||||
('5', 'date1', '2', '1', '0');
|
@ -73,7 +73,9 @@ class TestPlayResultFormatter:
|
||||
assert PlayResultFormatter.score_grade(5500000) == "D"
|
||||
assert PlayResultFormatter.score_grade(0) == "D"
|
||||
|
||||
pytest.raises(ValueError, PlayResultFormatter.score_grade, -1)
|
||||
with pytest.raises(ValueError, match="negative"):
|
||||
PlayResultFormatter.score_grade(-1)
|
||||
|
||||
pytest.raises(TypeError, PlayResultFormatter.score_grade, "10001284")
|
||||
pytest.raises(TypeError, PlayResultFormatter.score_grade, [])
|
||||
pytest.raises(TypeError, PlayResultFormatter.score_grade, None)
|
||||
@ -108,7 +110,9 @@ class TestPlayResultFormatter:
|
||||
assert PlayResultFormatter.clear_type(1) == "NORMAL CLEAR"
|
||||
assert PlayResultFormatter.clear_type(6) == "UNKNOWN"
|
||||
|
||||
pytest.raises(ValueError, PlayResultFormatter.clear_type, -1)
|
||||
with pytest.raises(ValueError, match="negative"):
|
||||
PlayResultFormatter.clear_type(-1)
|
||||
|
||||
pytest.raises(TypeError, PlayResultFormatter.clear_type, "1")
|
||||
pytest.raises(TypeError, PlayResultFormatter.clear_type, [])
|
||||
|
||||
@ -121,6 +125,8 @@ class TestPlayResultFormatter:
|
||||
assert PlayResultFormatter.modifier(1) == "EASY"
|
||||
assert PlayResultFormatter.modifier(6) == "UNKNOWN"
|
||||
|
||||
pytest.raises(ValueError, PlayResultFormatter.modifier, -1)
|
||||
with pytest.raises(ValueError, match="negative"):
|
||||
PlayResultFormatter.modifier(-1)
|
||||
|
||||
pytest.raises(TypeError, PlayResultFormatter.modifier, "1")
|
||||
pytest.raises(TypeError, PlayResultFormatter.modifier, [])
|
||||
|