Mirror of https://github.com/283375/arcaea-offline.git (synced 2025-04-09 17:40:17 +00:00)

Commit d16c25726a: init
.editorconfig (Normal file, 9 lines)
@@ -0,0 +1,9 @@
root = true

[*.py]
indent_size = 4
indent_style = space

[*]
end_of_line = lf
insert_final_newline = true
.gitignore (vendored Normal file, 165 lines)
@@ -0,0 +1,165 @@
__debug*
arcsong.db
arcaea_offline.db
.vscode

# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class

# C extensions
*.so

# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST

# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
cover/

# Translations
*.mo
*.pot

# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal

# Flask stuff:
instance/
.webassets-cache

# Scrapy stuff:
.scrapy

# Sphinx documentation
docs/_build/

# PyBuilder
.pybuilder/
target/

# Jupyter Notebook
.ipynb_checkpoints

# IPython
profile_default/
ipython_config.py

# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version

# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock

# poetry
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
# This is especially recommended for binary packages to ensure reproducibility, and is more
# commonly ignored for libraries.
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
#poetry.lock

# pdm
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
#pdm.lock
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
# in version control.
# https://pdm.fming.dev/#use-with-ide
.pdm.toml

# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
__pypackages__/

# Celery stuff
celerybeat-schedule
celerybeat.pid

# SageMath parsed files
*.sage.py

# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/

# Spyder project settings
.spyderproject
.spyproject

# Rope project settings
.ropeproject

# mkdocs documentation
/site

# mypy
.mypy_cache/
.dmypy.json
dmypy.json

# Pyre type checker
.pyre/

# pytype static type analyzer
.pytype/

# Cython debug symbols
cython_debug/

# PyCharm
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
# and can be added to the global gitignore or merged into this file. For a more nuclear
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
#.idea/
README.zh_CN.md (Normal file, 19 lines)
@@ -0,0 +1,19 @@
# Arcaea Offline

> Accept everything from <i>lr<sub>lowiro</sub></i>

![](./assets/excel_eternity.png)

## WIP

This project is under active development; it is unstable and its API may change frequently.

## What is this?

This is a program that calculates the [B30 and R10](https://wiki.arcaea.cn/潜力值#整体潜力值计算) components of a player's potential in [Arcaea](https://arcaea.lowiro.com/).

## How do I use it?

This repository is fairly low-level: it provides a Python library for "underlying" operations such as working with the database. To use the library, consult the API manual (not yet written).

If you are looking for a GUI, see [283375/arcaea-offline-pyside-ui](https://github.com/283375/arcaea-offline-pyside-ui) for details.
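The potential mentioned above is derived per score by the caculated view defined later in this commit (src/arcaea_offline/database.py). A Python transcription of that formula, for illustration only; the function name and sample numbers are not part of the commit, and the rating column appears to hold the chart constant multiplied by 10, which is why the view divides it by 10.0:

def play_potential(rating: int, score: int) -> float:
    # Mirrors the CASE expression of the "caculated" view.
    constant = rating / 10.0
    if score >= 10_000_000:
        return constant + 2
    if score >= 9_800_000:
        # +1 at 9,800,000, rising linearly to +2 at 10,000,000
        return constant + 1 + (score - 9_800_000) / 200_000.0
    return max(constant, 0) + (score - 9_500_000) / 300_000.0

# Example: rating = 108 (constant 10.8), score = 9,931,193
# -> 10.8 + 1 + 131193 / 200000 ≈ 12.46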
assets/excel_eternity.png (Normal file, binary, 338 KiB)
Binary file not shown.
pyproject.toml (Normal file, 27 lines)
@@ -0,0 +1,27 @@
[build-system]
requires = ["setuptools>=61.0"]
build-backend = "setuptools.build_meta"

[project]
name = "arcaea-offline"
version = "0.1.0"
authors = [{ name = "283375", email = "log_283375@163.com" }]
description = "Calculate your B30 & R10 locally."
readme = "README.md"
requires-python = ">=3.8"
dependencies = []
classifiers = [
    "Development Status :: 3 - Alpha",
    "Programming Language :: Python :: 3",
]

[project.urls]
"Homepage" = "https://github.com/283375/arcaea-offline"
"Bug Tracker" = "https://github.com/283375/arcaea-offline/issues"

[tool.isort]
profile = "black"
src_paths = ["src/arcaea_offline"]

[tool.pyright]
ignore = ["**/__debug*.*"]
src/arcaea_offline/__init__.py (Normal file, 0 lines, empty)
src/arcaea_offline/caculate.py (Normal file, 5 lines)
@@ -0,0 +1,5 @@
from .models import Score, Caculated
from typing import List

def calculate_b30(caculated_list: List[Caculated]):
    scores = []
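The committed calculate_b30 is only a stub (it ends at scores = []). A hypothetical completion, modelled on the B30 query in src/arcaea_offline/database.py (best play per chart, then the average of the 30 highest potential values); the deduplication step, sorting key, and float return type are assumptions, not part of this commit:

from typing import Dict, List, Tuple

from arcaea_offline.models import Caculated


def calculate_b30(caculated_list: List[Caculated]) -> float:
    # Keep only the best play per (song_id, rating_class), as Database.get_b30() does.
    best: Dict[Tuple[str, int], Caculated] = {}
    for c in caculated_list:
        key = (c.song_id, c.rating_class)
        if key not in best or c.potential > best[key].potential:
            best[key] = c
    # Average the 30 highest potential values (or fewer, if fewer plays exist).
    top_30 = sorted(best.values(), key=lambda c: c.potential, reverse=True)[:30]
    return sum(c.potential for c in top_30) / len(top_30) if top_30 else 0.0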
src/arcaea_offline/database.py (Normal file, 351 lines)
@@ -0,0 +1,351 @@
import os
from .utils.singleton import Singleton
from .models import DbChartRow, DbPackageRow, DbScoreRow, DbCaculatedRow, DbAliasRow
import sqlite3
from typing import Union, List, Optional
from dataclasses import fields, is_dataclass


class Database(metaclass=Singleton):
    dbDir = os.getcwd()
    dbFilename = "arcaea_offline.db"

    def __init__(self):
        self.__conn = sqlite3.connect(os.path.join(self.dbDir, self.dbFilename))
        self.__conn.execute("PRAGMA journal_mode = WAL;")
        self.__conn.execute("PRAGMA foreign_keys = ON;")

    @property
    def conn(self):
        return self.__conn

    def validate_song_id(self, song_id):
        with self.conn:
            result = self.conn.execute(
                "SELECT song_id FROM charts WHERE song_id = ?", (song_id,)
            ).fetchall()
            return len(result) > 0

    def update_arcsong_db(self, path: Union[str, bytes]):
        arcsong_conn = sqlite3.connect(path)
        data = {}
        with arcsong_conn:
            data["charts"] = arcsong_conn.execute(
                """
                SELECT song_id, rating_class, name_en, name_jp, artist, bpm, bpm_base, "set", time, side, world_unlock, remote_download, bg, date, version, difficulty, rating, note, chart_designer, jacket_designer, jacket_override, audio_override
                FROM charts
                """
            ).fetchall()
            data["aliases"] = arcsong_conn.execute(
                """
                SELECT sid, alias
                FROM alias
                """
            ).fetchall()
            data["packages"] = arcsong_conn.execute(
                """
                SELECT id, name
                FROM packages
                """
            ).fetchall()

        with self.conn as conn:
            for table in data:
                columns = [
                    row[0]
                    for row in conn.execute(
                        f"SELECT * FROM {table} LIMIT 1"
                    ).description
                ]
                column_count = len(columns)
                assert column_count == len(data[table][0])
                columns_insert_str = ", ".join(columns)
                values_insert_str = ", ".join("?" * column_count)
                update_clauses = ", ".join(
                    [f"{column} = excluded.{column}" for column in columns]
                )
                conn.executemany(
                    f"INSERT INTO {table} ({columns_insert_str}) VALUES ({values_insert_str}) ON CONFLICT DO UPDATE SET {update_clauses}",
                    data[table],
                )

    def init(self):
        with self.conn as conn:
            create_sqls = [
                """
                CREATE TABLE IF NOT EXISTS charts (
                    song_id TEXT NOT NULL,
                    rating_class INTEGER NOT NULL,
                    name_en TEXT NOT NULL,
                    name_jp TEXT,
                    artist TEXT NOT NULL,
                    bpm TEXT NOT NULL,
                    bpm_base REAL NOT NULL,
                    package_id TEXT NOT NULL,
                    time INTEGER,
                    side INTEGER NOT NULL,
                    world_unlock BOOLEAN NOT NULL,
                    remote_download BOOLEAN,
                    bg TEXT NOT NULL,
                    date INTEGER NOT NULL,
                    version TEXT NOT NULL,
                    difficulty INTEGER NOT NULL,
                    rating INTEGER NOT NULL,
                    note INTEGER NOT NULL,
                    chart_designer TEXT,
                    jacket_designer TEXT,
                    jacket_override BOOLEAN NOT NULL,
                    audio_override BOOLEAN NOT NULL,

                    PRIMARY KEY (song_id, rating_class)
                )
                """,
                """
                CREATE TABLE IF NOT EXISTS aliases (
                    song_id TEXT NOT NULL,
                    alias TEXT NOT NULL
                )
                """,
                """
                CREATE TABLE IF NOT EXISTS packages (
                    package_id TEXT NOT NULL,
                    name TEXT NOT NULL
                )
                """,
                """
                CREATE TABLE IF NOT EXISTS scores (
                    id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
                    song_id TEXT NOT NULL,
                    rating_class INTEGER NOT NULL,
                    score INTEGER NOT NULL,
                    pure INTEGER NOT NULL,
                    far INTEGER NOT NULL,
                    lost INTEGER NOT NULL,
                    time INTEGER NOT NULL,
                    max_recall INTEGER,

                    FOREIGN KEY (song_id, rating_class) REFERENCES charts(song_id, rating_class) ON UPDATE CASCADE ON DELETE NO ACTION
                )
                """,
                """
                CREATE TABLE IF NOT EXISTS properties (
                    key TEXT NOT NULL UNIQUE,
                    value TEXT NOT NULL
                )
                """,
                """
                CREATE VIEW IF NOT EXISTS caculated AS
                SELECT
                    scores.song_id,
                    scores.rating_class,
                    scores.score,
                    scores.pure,
                    scores.far,
                    scores.lost,
                    scores.time,
                    charts.rating,
                    charts.note,
                    CAST ( ROUND( score - ( pure * 10000000 / note ) - ( far * 0.5 * 10000000 / note ) ) AS INTEGER ) AS pure_small,
                    CASE
                        WHEN score >= 10000000 THEN
                            rating / 10.0 + 2
                        WHEN score >= 9800000 THEN
                            rating / 10.0 + 1 + ( score - 9800000 ) / 200000.0
                        ELSE
                            MAX( rating / 10.0, 0 ) + ( score - 9500000 ) / 300000.0
                    END AS potential
                FROM
                    scores
                    LEFT JOIN charts ON scores.rating_class = charts.rating_class
                        AND scores.song_id = charts.song_id
                GROUP BY
                    scores.song_id,
                    scores.rating_class
                """,
            ]

            for sql in create_sqls:
                conn.execute(sql)
            conn.commit()

    def __get_columns_from_dataclass(self, dataclass) -> List[str]:
        if is_dataclass(dataclass):
            dc_fields = fields(dataclass)
            return [field.name for field in dc_fields]
        return []

    def __get_columns_clause(self, columns: List[str]):
        return ", ".join([f'"{column}"' for column in columns])

    def get_packages(self):
        with self.conn as conn:
            columns_clause = self.__get_columns_clause(
                self.__get_columns_from_dataclass(DbPackageRow)
            )
            return [
                DbPackageRow(*row)
                for row in conn.execute(
                    f"SELECT {columns_clause} FROM packages"
                ).fetchall()
            ]

    def get_aliases(self):
        with self.conn as conn:
            columns_clause = self.__get_columns_clause(
                self.__get_columns_from_dataclass(DbAliasRow)
            )
            return [
                DbAliasRow(*row)
                for row in conn.execute(
                    f"SELECT {columns_clause} FROM aliases"
                ).fetchall()
            ]

    def get_aliases_by_song_id(self, song_id: str):
        with self.conn as conn:
            columns_clause = self.__get_columns_clause(
                self.__get_columns_from_dataclass(DbAliasRow)
            )
            return [
                DbAliasRow(*row)
                for row in (
                    conn.execute(
                        f"SELECT {columns_clause} FROM aliases WHERE song_id = ?",
                        (song_id,),
                    ).fetchall()
                )
            ]

    def get_charts(self):
        with self.conn as conn:
            columns_clause = self.__get_columns_clause(
                self.__get_columns_from_dataclass(DbChartRow)
            )
            return [
                DbChartRow(*row)
                for row in conn.execute(
                    f"SELECT {columns_clause} FROM charts"
                ).fetchall()
            ]

    def get_charts_by_song_id(self, song_id: str):
        with self.conn as conn:
            columns_clause = self.__get_columns_clause(
                self.__get_columns_from_dataclass(DbChartRow)
            )
            return [
                DbChartRow(*row)
                for row in (
                    conn.execute(
                        f"SELECT {columns_clause} FROM charts WHERE song_id = ?",
                        (song_id,),
                    ).fetchall()
                )
            ]

    def get_charts_by_package_id(self, package_id: str):
        with self.conn as conn:
            columns_clause = self.__get_columns_clause(
                self.__get_columns_from_dataclass(DbChartRow)
            )
            return [
                DbChartRow(*row)
                for row in conn.execute(
                    f"SELECT {columns_clause} FROM charts WHERE package_id = ?",
                    (package_id,),
                ).fetchall()
            ]

    def get_scores(
        self,
        *,
        song_id: Optional[List[str]] = None,
        rating_class: Optional[List[int]] = None,
    ):
        with self.conn as conn:
            columns = ",".join([f"[{field.name}]" for field in fields(DbScoreRow)])
            where_clauses = []
            params = []
            if song_id:
                where_clauses.append(f"song_id IN ({','.join('?'*len(song_id))})")
                params.extend(song_id)
            if rating_class:
                where_clauses.append(
                    f"rating_class IN ({','.join('?'*len(rating_class))})"
                )
                params.extend(rating_class)
            final_sql = f"SELECT {columns} FROM scores"
            if where_clauses:
                final_sql += " WHERE "
                final_sql += " AND ".join(where_clauses)
            return [
                DbScoreRow(*row) for row in conn.execute(final_sql, params).fetchall()
            ]

    def get_caculated(
        self,
        *,
        song_id: Optional[List[str]] = None,
        rating_class: Optional[List[int]] = None,
    ):
        with self.conn as conn:
            columns = ",".join([f"[{field.name}]" for field in fields(DbCaculatedRow)])
            where_clauses = []
            params = []
            if song_id:
                where_clauses.append(f"song_id IN ({','.join('?'*len(song_id))})")
                params.extend(song_id)
            if rating_class:
                where_clauses.append(
                    f"rating_class IN ({','.join('?'*len(rating_class))})"
                )
                params.extend(rating_class)
            final_sql = f"SELECT {columns} FROM caculated"
            if where_clauses:
                final_sql += " WHERE "
                final_sql += " AND ".join(where_clauses)
            return [
                DbCaculatedRow(*row)
                for row in conn.execute(final_sql, params).fetchall()
            ]

    def get_b30(self) -> float:
        with self.conn as conn:
            return conn.execute(
                """
                WITH max_potential AS (
                    SELECT
                        song_id,
                        rating_class,
                        MAX(potential) AS max_potential
                    FROM
                        caculated
                    GROUP BY
                        song_id,
                        rating_class
                )
                SELECT
                    SUM(potential) / (COUNT(potential) * 1.0) AS b30
                FROM
                    (
                        SELECT
                            c.*
                        FROM
                            caculated c
                            JOIN max_potential m ON c.song_id = m.song_id AND c.rating_class = m.rating_class AND c.potential = m.max_potential
                        ORDER BY
                            potential DESC
                        LIMIT 30
                    ) AS top_30
                """
            ).fetchone()[0]

    def insert_score(self, score: DbScoreRow):
        columns = self.__get_columns_from_dataclass(DbScoreRow)
        columns_clause = self.__get_columns_clause(columns)
        params = [score.__getattribute__(column) for column in columns]
        with self.conn as conn:
            conn.execute(
                f"INSERT INTO scores({columns_clause}) VALUES ({', '.join('?' * len(params))})",
                params,
            )
            conn.commit()
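For orientation, a minimal usage sketch of the Database API above. The arcsong.db path and the example score values are assumptions for illustration, not part of this commit; insert_score also requires the referenced chart to exist, since foreign keys are enabled in __init__.

from arcaea_offline.database import Database
from arcaea_offline.models import DbScoreRow

db = Database()                      # Singleton metaclass: always the same instance
db.init()                            # create tables and the "caculated" view
db.update_arcsong_db("arcsong.db")   # import charts/aliases/packages (path assumed)

db.insert_score(
    DbScoreRow(
        song_id="some_song_id",      # example values only
        rating_class=2,
        score=9931193,
        pure=1001,
        far=2,
        lost=0,
        time=1670000000,
        max_recall=1003,
    )
)
print(db.get_b30())                  # average potential of the 30 best plays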
src/arcaea_offline/models.py (Normal file, 157 lines)
@@ -0,0 +1,157 @@
from dataclasses import dataclass, asdict
from typing import Optional


@dataclass
class DbChartRow:
    song_id: str
    rating_class: int
    name_en: str
    name_jp: Optional[str]
    artist: str
    bpm: str
    bpm_base: float
    package_id: str
    time: Optional[int]
    side: int
    world_unlock: bool
    remote_download: Optional[bool]
    bg: str
    date: int
    version: str
    difficulty: int
    rating: int
    note: int
    chart_designer: Optional[str]
    jacket_designer: Optional[str]
    jacket_override: bool
    audio_override: bool


@dataclass(kw_only=True)
class Chart:
    song_id: str
    rating_class: int
    name_en: str
    name_jp: Optional[str]
    artist: str
    bpm: str
    bpm_base: float
    package_id: str
    time: Optional[int]
    side: int
    world_unlock: bool
    remote_download: Optional[bool]
    bg: str
    date: int
    version: str
    difficulty: int
    rating: int
    note: int
    chart_designer: Optional[str]
    jacket_designer: Optional[str]
    jacket_override: bool
    audio_override: bool

    @classmethod
    def from_db_row(cls, row: DbChartRow):
        return cls(**asdict(row))


@dataclass
class DbAliasRow:
    song_id: str
    alias: str


@dataclass(kw_only=True)
class Alias:
    song_id: str
    alias: str

    @classmethod
    def from_db_row(cls, row: DbAliasRow):
        return cls(song_id=row.song_id, alias=row.alias)


@dataclass
class DbPackageRow:
    package_id: str
    name: str


@dataclass(kw_only=True)
class Package:
    id: str
    name: str

    @classmethod
    def from_db_row(cls, row: DbPackageRow):
        return cls(id=row.package_id, name=row.name)


@dataclass
class DbScoreRow:
    song_id: str
    rating_class: int
    score: int
    pure: int
    far: int
    lost: int
    time: int
    max_recall: Optional[int] = None


@dataclass(kw_only=True)
class Score:
    song_id: str
    rating_class: int
    score: int
    pure: int
    far: int
    lost: int
    time: int
    max_recall: Optional[int] = None

    @classmethod
    def from_db_row(cls, row: DbScoreRow):
        return cls(**asdict(row))

    def to_db_row(self):
        keys = list(self.__dataclass_fields__)
        values = [self.__getattribute__(key) for key in keys]
        return DbScoreRow(*values)


@dataclass
class DbCaculatedRow:
    song_id: str
    rating_class: int
    score: int
    pure: int
    far: int
    lost: int
    time: int
    rating: int
    note: int
    pure_small: int
    potential: float


@dataclass(kw_only=True)
class Caculated:
    song_id: str
    rating_class: int
    score: int
    pure: int
    far: int
    lost: int
    time: int
    rating: int
    note: int
    pure_small: int
    potential: float

    @classmethod
    def from_db_row(cls, row: DbCaculatedRow):
        return cls(**asdict(row))
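A small round-trip sketch for the row/model pairs defined above; the values are illustrative only. (Note that @dataclass(kw_only=True) requires Python 3.10 or later, while pyproject.toml declares requires-python >= 3.8.)

from arcaea_offline.models import DbScoreRow, Score

row = DbScoreRow("example_song", 2, 9900000, 900, 10, 2, 1670000000, 905)  # illustrative
score = Score.from_db_row(row)   # keyword-only domain model built from the row's fields
assert score.to_db_row() == row  # and back to the positional database row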
src/arcaea_offline/utils/singleton.py (Normal file, 12 lines)
@@ -0,0 +1,12 @@
from typing import TypeVar, Generic

T = TypeVar("T")


class Singleton(type, Generic[T]):
    _instance = None

    def __call__(cls, *args, **kwargs) -> T:
        if cls._instance is None:
            cls._instance = super().__call__(*args, **kwargs)
        return cls._instance
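A quick illustration of the metaclass with the Database class above: the first call constructs the object and caches it as cls._instance, and every later call returns that same object.

from arcaea_offline.database import Database

a = Database()
b = Database()
assert a is b  # Singleton.__call__ returns the cached instance on repeat calls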