Mirror of https://github.com/283375/arcaea-offline.git, synced 2025-07-01 20:26:27 +00:00
Compare commits: bb39a5912b ... master

4 commits:
- 908613306f
- 38e0d7f8d1
- 937bbe2eee
- e7398be07e
.github/workflows/main.yml (new file, vendored, 40 lines)

```yaml
name: test & lint

on:
  push:
    branches:
      - '*'
  pull_request:
    types: [opened, reopened]
  workflow_dispatch:

jobs:
  pytest:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        python-version: ['3.8', '3.9', '3.10', '3.11', '3.12']

    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python-version }}

      - name: Install dev dependencies
        run: 'pip install .[dev]'
      - name: Run tests
        run: 'pytest -v'

  ruff:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-python@v5
        with:
          python-version: '3.12'

      - name: Install dev dependencies
        run: 'pip install .[dev]'
      - name: Run linter
        run: 'ruff check'
```
.github/workflows/test.yml (deleted file, vendored, 23 lines)

```yaml
name: Run tests
on:
  push:
    branches:
      - 'master'
  pull_request:
    types: [opened, reopened]
  workflow_dispatch:
jobs:
  pytest:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        python-version: ['3.8', '3.9', '3.10', '3.11']

    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-python@v4
        with:
          python-version: ${{ matrix.python-version }}

      - run: 'pip install -r requirements.dev.txt .'
      - run: 'pytest -v'
```
.pre-commit-config.yaml

```diff
@@ -4,11 +4,10 @@ repos:
     hooks:
       - id: end-of-file-fixer
       - id: trailing-whitespace
-  - repo: https://github.com/psf/black
-    rev: 23.1.0
+  - repo: https://github.com/astral-sh/ruff-pre-commit
+    rev: v0.4.4
     hooks:
-      - id: black
-  - repo: https://github.com/PyCQA/isort
-    rev: 5.12.0
-    hooks:
-      - id: isort
+      - id: ruff
+        args: ["--fix"]
+      - id: ruff-format
```
README.md

```diff
@@ -10,9 +10,14 @@ English | [简体中文](./README.zh_Hans.md)
 
 ## WIP
 
-> **Warning**
+> [!CAUTION]
 > This project is under active development, thus it is unstable and API may change frequently.
 
+> [!IMPORTANT]
+> v0.3.0 is under development, check out [this branch](https://github.com/283375/arcaea-offline/tree/0.3.0-refactor)!
+>
+> Once v0.3.0 is ready for release, this repository will be transferred to *[ArcaeaOffline](https://github.com/ArcaeaOffline)/core-python*
+
 ## What is this?
 
 This is the core library of `Arcaea Offline`, designed to manage player scores, calculate their potential, and provide various useful tools.
```
README.zh_Hans.md

```diff
@@ -8,9 +8,14 @@
 
 ## WIP
 
-> **Warning**
+> [!CAUTION]
 > This project is in early development; stability is not guaranteed and the API may change at any time.
 
+> [!IMPORTANT]
+> v0.3.0 is being developed on [this branch](https://github.com/283375/arcaea-offline/tree/0.3.0-refactor)!
+>
+> Once v0.3.0 is ready for release, this repository will be moved to *[ArcaeaOffline](https://github.com/ArcaeaOffline)/core-python*.
+
 ## What is this?
 
 This is the core library of `Arcaea Offline`, used to maintain the score database, calculate potential, and provide some useful tools.
```
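The "What is this?" section above describes managing player scores with the library's SQLAlchemy models. For orientation, a minimal sketch of loading stored scores, illustrative only: the database URL is a placeholder, and it assumes Score is a regular SQLAlchemy declarative model exposed at the post-refactor path shown in the import changes further down.

```python
# Illustrative sketch, not part of this diff. Assumes the post-refactor model
# layout (arcaea_offline.models.scores) and an already populated SQLite database.
from sqlalchemy import create_engine, select
from sqlalchemy.orm import sessionmaker

from arcaea_offline.models.scores import Score

engine = create_engine("sqlite:///arcaea_offline.db")  # placeholder database URL
Session = sessionmaker(bind=engine)

with Session() as session:
    scores = session.scalars(select(Score)).all()
    print(f"{len(scores)} scores stored")
```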
pyproject.toml

```diff
@@ -10,32 +10,44 @@ description = "Manage your local Arcaea score database."
 readme = "README.md"
 requires-python = ">=3.8"
 dependencies = [
     "beautifulsoup4==4.12.2",
     "SQLAlchemy==2.0.20",
     "SQLAlchemy-Utils==0.41.1",
     "Whoosh==2.7.4",
 ]
 classifiers = [
     "Development Status :: 3 - Alpha",
     "Programming Language :: Python :: 3",
 ]
 
+[project.optional-dependencies]
+dev = ["ruff~=0.4", "pre-commit~=3.3", "pytest~=7.4", "tox~=4.11"]
+
 [project.urls]
 "Homepage" = "https://github.com/283375/arcaea-offline"
 "Bug Tracker" = "https://github.com/283375/arcaea-offline/issues"
 
-[tool.isort]
-profile = "black"
-src_paths = ["src/arcaea_offline"]
-
 [tool.pyright]
 ignore = ["build/"]
 
-[tool.pylint.main]
-jobs = 0
-
-[tool.pylint.logging]
-disable = [
-    "missing-module-docstring",
-    "missing-class-docstring",
-    "missing-function-docstring",
-    "not-callable", # false positive to sqlalchemy `func.*`, remove this when pylint-dev/pylint(#8138) closed
+[tool.ruff.lint]
+# Full list: https://docs.astral.sh/ruff/rules
+select = [
+    "E", # pycodestyle (Error)
+    "W", # pycodestyle (Warning)
+    "F", # pyflakes
+    "I", # isort
+    "PL", # pylint
+    "N", # pep8-naming
+    "FBT", # flake8-boolean-trap
+    "A", # flake8-builtins
+    "DTZ", # flake8-datetimez
+    "LOG", # flake8-logging
+    "Q", # flake8-quotes
+    "G", # flake8-logging-format
+    "PIE", # flake8-pie
+    "PT", # flake8-pytest-style
+]
+ignore = [
+    "E501", # line-too-long
+]
```
requirements.dev.txt

```diff
@@ -1,6 +1,4 @@
-black==23.3.0
-isort==5.12.0
-pre-commit==3.3.1
-pylint==3.0.2
-pytest==7.4.3
-tox==4.11.3
+ruff~=0.4
+pre-commit~=3.3
+pytest~=7.4
+tox~=4.11
```
requirements.txt

```diff
@@ -1,2 +1,4 @@
+beautifulsoup4==4.12.2
 SQLAlchemy==2.0.20
 SQLAlchemy-Utils==0.41.1
+Whoosh==2.7.4
```
Deleted enum modules:

```diff
@@ -1,3 +0,0 @@
-from .clear_type import ArcaeaScoreClearType
-from .modifier import ArcaeaScoreModifier
-from .rating_class import ArcaeaRatingClass
```

```diff
@@ -1,10 +0,0 @@
-from enum import IntEnum
-
-
-class ArcaeaScoreClearType(IntEnum):
-    TRACK_LOST = 0
-    NORMAL_CLEAR = 1
-    FULL_RECALL = 2
-    PURE_MEMORY = 3
-    HARD_CLEAR = 4
-    EASY_CLEAR = 5
```

```diff
@@ -1,7 +0,0 @@
-from enum import IntEnum
-
-
-class ArcaeaScoreModifier(IntEnum):
-    NORMAL = 0
-    EASY = 1
-    HARD = 2
```

```diff
@@ -1,9 +0,0 @@
-from enum import IntEnum
-
-
-class ArcaeaRatingClass(IntEnum):
-    PAST = 0
-    PRESENT = 1
-    FUTURE = 2
-    BEYOND = 3
-    ETERNAL = 4
```
Import changes in the database module:

```diff
@@ -5,16 +5,10 @@ from typing import Iterable, List, Optional, Type, Union
 from sqlalchemy import Engine, func, inspect, select
 from sqlalchemy.orm import DeclarativeBase, InstrumentedAttribute, sessionmaker
 
-from arcaea_offline.external.arcsong.arcsong_json import ArcSongJsonBuilder
-from arcaea_offline.external.exports import (
-    ArcaeaOfflineDEFV2_Score,
-    ScoreExport,
-    exporters,
-)
-from arcaea_offline.singleton import Singleton
-
-from .models.v4.config import ConfigBase, Property
-from .models.v4.scores import (
+from .external.arcsong.arcsong_json import ArcSongJsonBuilder
+from .external.exports import ArcaeaOfflineDEFV2_Score, ScoreExport, exporters
+from .models.config import ConfigBase, Property
+from .models.scores import (
     CalculatedPotential,
     Score,
     ScoreBest,
@@ -22,7 +16,7 @@ from .models.v4.scores import (
     ScoresBase,
     ScoresViewBase,
 )
-from .models.v4.songs import (
+from .models.songs import (
     Chart,
     ChartInfo,
     Difficulty,
@@ -34,6 +28,7 @@ from .models.v4.songs import (
     SongsBase,
     SongsViewBase,
 )
+from .singleton import Singleton
 
 logger = logging.getLogger(__name__)
 
```

Also removed:

```diff
@@ -1 +0,0 @@
-from .db import Database
```
src/arcaea_offline/searcher.py (new file, 111 lines)

```python
from typing import List, Union

from sqlalchemy import select
from sqlalchemy.orm import Session
from whoosh.analysis import NgramFilter, StandardAnalyzer
from whoosh.fields import ID, KEYWORD, TEXT, Schema
from whoosh.filedb.filestore import RamStorage
from whoosh.qparser import FuzzyTermPlugin, MultifieldParser, OrGroup

from .models.songs import Song, SongLocalized
from .utils.search_title import recover_search_title


class Searcher:
    def __init__(self):
        self.text_analyzer = StandardAnalyzer() | NgramFilter(minsize=2, maxsize=5)
        self.song_schema = Schema(
            song_id=ID(stored=True, unique=True),
            title=TEXT(analyzer=self.text_analyzer, spelling=True),
            artist=TEXT(analyzer=self.text_analyzer, spelling=True),
            source=TEXT(analyzer=self.text_analyzer, spelling=True),
            keywords=KEYWORD(lowercase=True, stored=True, scorable=True),
        )
        self.storage = RamStorage()
        self.index = self.storage.create_index(self.song_schema)

        self.default_query_parser = MultifieldParser(
            ["song_id", "title", "artist", "source", "keywords"],
            self.song_schema,
            group=OrGroup,
        )
        self.default_query_parser.add_plugin(FuzzyTermPlugin())

    def import_songs(self, session: Session):
        writer = self.index.writer()
        songs = list(session.scalars(select(Song)))
        song_localize_stmt = select(SongLocalized)
        for song in songs:
            stmt = song_localize_stmt.where(SongLocalized.id == song.id)
            sl = session.scalar(stmt)
            song_id = song.id
            possible_titles: List[Union[str, None]] = [song.title]
            possible_artists: List[Union[str, None]] = [song.artist]
            possible_sources: List[Union[str, None]] = [song.source]
            if sl:
                possible_titles.extend(
                    [sl.title_ja, sl.title_ko, sl.title_zh_hans, sl.title_zh_hant]
                )
                possible_titles.extend(
                    recover_search_title(sl.search_title_ja)
                    + recover_search_title(sl.search_title_ko)
                    + recover_search_title(sl.search_title_zh_hans)
                    + recover_search_title(sl.search_title_zh_hant)
                )
                possible_artists.extend(
                    recover_search_title(sl.search_artist_ja)
                    + recover_search_title(sl.search_artist_ko)
                    + recover_search_title(sl.search_artist_zh_hans)
                    + recover_search_title(sl.search_artist_zh_hant)
                )
                possible_sources.extend(
                    [
                        sl.source_ja,
                        sl.source_ko,
                        sl.source_zh_hans,
                        sl.source_zh_hant,
                    ]
                )

            # remove empty items in list
            titles = [t for t in possible_titles if t != "" and t is not None]
            artists = [t for t in possible_artists if t != "" and t is not None]
            sources = [t for t in possible_sources if t != "" and t is not None]

            writer.update_document(
                song_id=song_id,
                title=" ".join(titles),
                artist=" ".join(artists),
                source=" ".join(sources),
                keywords=" ".join([song_id] + titles + artists + sources),
            )

        writer.commit()

    def did_you_mean(self, string: str):
        results = set()

        with self.index.searcher() as searcher:
            corrector_keywords = searcher.corrector("keywords")  # type: ignore
            corrector_song_id = searcher.corrector("song_id")  # type: ignore
            corrector_title = searcher.corrector("title")  # type: ignore
            corrector_artist = searcher.corrector("artist")  # type: ignore
            corrector_source = searcher.corrector("source")  # type: ignore

            results.update(corrector_keywords.suggest(string))
            results.update(corrector_song_id.suggest(string))
            results.update(corrector_title.suggest(string))
            results.update(corrector_artist.suggest(string))
            results.update(corrector_source.suggest(string))

        if string in results:
            results.remove(string)

        return list(results)

    def search(self, string: str, *, limit: int = 10):
        query_string = f"{string}"
        query = self.default_query_parser.parse(query_string)
        with self.index.searcher() as searcher:
            results = searcher.search(query, limit=limit)
            return [result.get("song_id") for result in results]
```
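The new Searcher builds an in-memory Whoosh index over Song and SongLocalized rows and answers queries against the song_id, title, artist, source, and keywords fields. A minimal usage sketch, illustrative only: the database URL and query strings are placeholders, and it assumes the songs tables are already populated.

```python
# Illustrative usage sketch; the database URL and query strings are placeholders.
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker

from arcaea_offline.searcher import Searcher

engine = create_engine("sqlite:///arcaea_offline.db")  # placeholder database URL
Session = sessionmaker(bind=engine)

searcher = Searcher()
with Session() as session:
    searcher.import_songs(session)  # index Song / SongLocalized rows into the in-memory index

print(searcher.search("testify", limit=5))  # -> list of matching song ids
print(searcher.did_you_mean("testfy"))      # -> spelling suggestions near the query
```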