From 769ed3558817138f9037ef1107157ff1eeef4d93 Mon Sep 17 00:00:00 2001
From: 283375
Date: Sun, 16 Jul 2023 14:24:27 +0800
Subject: [PATCH] wip: split different versions of device

---
 src/arcaea_offline_ocr/crop.py               | 79 ++++++++++---------
 src/arcaea_offline_ocr/device/__init__.py    |  0
 src/arcaea_offline_ocr/device/v1/crop.py     | 64 +++++++++++++++
 .../device/v1/definition.py                  | 37 +++++++++
 src/arcaea_offline_ocr/device/v2/__init__.py |  0
 src/arcaea_offline_ocr/recognize.py          |  5 +-
 src/arcaea_offline_ocr/types.py              |  9 +++
 7 files changed, 155 insertions(+), 39 deletions(-)
 create mode 100644 src/arcaea_offline_ocr/device/__init__.py
 create mode 100644 src/arcaea_offline_ocr/device/v1/crop.py
 create mode 100644 src/arcaea_offline_ocr/device/v1/definition.py
 create mode 100644 src/arcaea_offline_ocr/device/v2/__init__.py

diff --git a/src/arcaea_offline_ocr/crop.py b/src/arcaea_offline_ocr/crop.py
index 95b95c9..1ab7fa3 100644
--- a/src/arcaea_offline_ocr/crop.py
+++ b/src/arcaea_offline_ocr/crop.py
@@ -1,53 +1,56 @@
-from typing import Any, Tuple
+from math import floor
+from typing import Tuple
+
+from numpy import all, array, count_nonzero
 
-from .device import Device
 from .types import Mat
 
-__all__ = [
-    "crop_img",
-    "crop_from_device_attr",
-    "crop_to_pure",
-    "crop_to_far",
-    "crop_to_lost",
-    "crop_to_max_recall",
-    "crop_to_rating_class",
-    "crop_to_score",
-    "crop_to_title",
-]
+__all__ = ["crop_xywh", "crop_black_edges"]
 
 
-def crop_img(img: Mat, *, top: int, left: int, bottom: int, right: int):
-    return img[top:bottom, left:right]
-
-
-def crop_from_device_attr(img: Mat, rect: Tuple[int, int, int, int]):
+def crop_xywh(mat: Mat, rect: Tuple[int, int, int, int]):
     x, y, w, h = rect
-    return crop_img(img, top=y, left=x, bottom=y + h, right=x + w)
+    return mat[y : y + h, x : x + w]
 
 
-def crop_to_pure(screenshot: Mat, device: Device):
-    return crop_from_device_attr(screenshot, device.pure)
+def is_black_edge(list_of_pixels: Mat, black_pixel=None):
+    if black_pixel is None:
+        black_pixel = array([0, 0, 0], list_of_pixels.dtype)
+    pixels = list_of_pixels.reshape([-1, 3])
+    return count_nonzero(all(pixels < black_pixel, axis=1)) > floor(len(pixels) * 0.6)
 
 
-def crop_to_far(screenshot: Mat, device: Device):
-    return crop_from_device_attr(screenshot, device.far)
+def crop_black_edges(screenshot: Mat):
+    cropped = screenshot.copy()
+    black_pixel = array([50, 50, 50], screenshot.dtype)
+    height, width = screenshot.shape[:2]
+    left = 0
+    right = width
+    top = 0
+    bottom = height
 
+    for i in range(width):
+        column = cropped[:, i]
+        if not is_black_edge(column, black_pixel):
+            break
+        left += 1
 
-def crop_to_lost(screenshot: Mat, device: Device):
-    return crop_from_device_attr(screenshot, device.lost)
+    for i in sorted(range(width), reverse=True):
+        column = cropped[:, width - i]
+        if i <= left + 1 or not is_black_edge(column, black_pixel):
+            break
+        right -= 1
 
+    for i in range(height):
+        row = cropped[i]
+        if not is_black_edge(row, black_pixel):
+            break
+        top += 1
 
-def crop_to_max_recall(screenshot: Mat, device: Device):
-    return crop_from_device_attr(screenshot, device.max_recall)
+    for i in sorted(range(height), reverse=True):
+        row = cropped[height - i]
+        if i <= top + 1 or not is_black_edge(row, black_pixel):
+            break
+        bottom -= 1
 
-
-def crop_to_rating_class(screenshot: Mat, device: Device):
-    return crop_from_device_attr(screenshot, device.rating_class)
-
-
-def crop_to_score(screenshot: Mat, device: Device):
-    return crop_from_device_attr(screenshot, device.score)
-
-
-def crop_to_title(screenshot: Mat, device: Device):
-    return crop_from_device_attr(screenshot, device.title)
+    return cropped[top:bottom, left:right]
diff --git a/src/arcaea_offline_ocr/device/__init__.py b/src/arcaea_offline_ocr/device/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/src/arcaea_offline_ocr/device/v1/crop.py b/src/arcaea_offline_ocr/device/v1/crop.py
new file mode 100644
index 0000000..5977d15
--- /dev/null
+++ b/src/arcaea_offline_ocr/device/v1/crop.py
@@ -0,0 +1,64 @@
+from math import floor
+from typing import Any, Tuple
+
+from numpy import all, array, count_nonzero
+
+from ...types import Mat
+from .definition import Device
+
+__all__ = [
+    "crop_img",
+    "crop_from_device_attr",
+    "crop_to_pure",
+    "crop_to_far",
+    "crop_to_lost",
+    "crop_to_max_recall",
+    "crop_to_rating_class",
+    "crop_to_score",
+    "crop_to_title",
+    "crop_black_edges",
+]
+
+
+def crop_img(img: Mat, *, top: int, left: int, bottom: int, right: int):
+    return img[top:bottom, left:right]
+
+
+def crop_from_device_attr(img: Mat, rect: Tuple[int, int, int, int]):
+    x, y, w, h = rect
+    return crop_img(img, top=y, left=x, bottom=y + h, right=x + w)
+
+
+def crop_to_pure(screenshot: Mat, device: Device):
+    return crop_from_device_attr(screenshot, device.pure)
+
+
+def crop_to_far(screenshot: Mat, device: Device):
+    return crop_from_device_attr(screenshot, device.far)
+
+
+def crop_to_lost(screenshot: Mat, device: Device):
+    return crop_from_device_attr(screenshot, device.lost)
+
+
+def crop_to_max_recall(screenshot: Mat, device: Device):
+    return crop_from_device_attr(screenshot, device.max_recall)
+
+
+def crop_to_rating_class(screenshot: Mat, device: Device):
+    return crop_from_device_attr(screenshot, device.rating_class)
+
+
+def crop_to_score(screenshot: Mat, device: Device):
+    return crop_from_device_attr(screenshot, device.score)
+
+
+def crop_to_title(screenshot: Mat, device: Device):
+    return crop_from_device_attr(screenshot, device.title)
+
+
+def is_black_edge(list_of_pixels: Mat, black_pixel=None):
+    if black_pixel is None:
+        black_pixel = array([0, 0, 0], list_of_pixels.dtype)
+    pixels = list_of_pixels.reshape([-1, 3])
+    return count_nonzero(all(pixels < black_pixel, axis=1)) > floor(len(pixels) * 0.6)
diff --git a/src/arcaea_offline_ocr/device/v1/definition.py b/src/arcaea_offline_ocr/device/v1/definition.py
new file mode 100644
index 0000000..7f12de1
--- /dev/null
+++ b/src/arcaea_offline_ocr/device/v1/definition.py
@@ -0,0 +1,37 @@
+from dataclasses import dataclass
+from typing import Any, Dict, Tuple
+
+__all__ = ["Device"]
+
+
+@dataclass(kw_only=True)
+class Device:
+    version: int
+    uuid: str
+    name: str
+    pure: Tuple[int, int, int, int]
+    far: Tuple[int, int, int, int]
+    lost: Tuple[int, int, int, int]
+    max_recall: Tuple[int, int, int, int]
+    rating_class: Tuple[int, int, int, int]
+    score: Tuple[int, int, int, int]
+    title: Tuple[int, int, int, int]
+
+    @classmethod
+    def from_json_object(cls, json_dict: Dict[str, Any]):
+        if json_dict["version"] == 1:
+            return cls(
+                version=1,
+                uuid=json_dict["uuid"],
+                name=json_dict["name"],
+                pure=json_dict["pure"],
+                far=json_dict["far"],
+                lost=json_dict["lost"],
+                max_recall=json_dict["max_recall"],
+                rating_class=json_dict["rating_class"],
+                score=json_dict["score"],
+                title=json_dict["title"],
+            )
+
+    def repr_info(self):
+        return f"Device(version={self.version}, uuid={repr(self.uuid)}, name={repr(self.name)})"
diff --git a/src/arcaea_offline_ocr/device/v2/__init__.py b/src/arcaea_offline_ocr/device/v2/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/src/arcaea_offline_ocr/recognize.py b/src/arcaea_offline_ocr/recognize.py
index 20a3e8a..8c666e3 100644
--- a/src/arcaea_offline_ocr/recognize.py
+++ b/src/arcaea_offline_ocr/recognize.py
@@ -4,12 +4,15 @@ from typing import Callable, Optional
 from cv2 import COLOR_BGR2HSV, GaussianBlur, cvtColor, imread
 
 from .crop import *
-from .device import Device
+
+# from .device import Device
 from .mask import *
 from .ocr import *
 from .types import Mat
 from .utils import imread_unicode
 
+Device = None
+
 __all__ = [
     "process_digits_ocr_img",
     "process_tesseract_ocr_img",
diff --git a/src/arcaea_offline_ocr/types.py b/src/arcaea_offline_ocr/types.py
index 68ff8c4..bac409b 100644
--- a/src/arcaea_offline_ocr/types.py
+++ b/src/arcaea_offline_ocr/types.py
@@ -1,4 +1,13 @@
+from typing import NamedTuple
+
 import numpy as np
 
 # from pylance
 Mat = np.ndarray[int, np.dtype[np.generic]]
+
+
+class XYWHRect(NamedTuple):
+    x: int
+    y: int
+    w: int
+    h: int
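Usage sketch (illustration only, not part of the patch): the snippet below shows how the v1 Device definition and the crop helpers touched by this commit could be wired together. The file names "device_v1.json" and "screenshot.jpg", and the idea of loading the device from a JSON file on disk, are assumptions made for the example; only the JSON keys follow what Device.from_json_object reads.

    import json

    from cv2 import imread

    from arcaea_offline_ocr.crop import crop_black_edges, crop_xywh
    from arcaea_offline_ocr.device.v1.crop import crop_to_score
    from arcaea_offline_ocr.device.v1.definition import Device

    # Hypothetical version-1 device definition stored as JSON; the keys match
    # Device.from_json_object, the values and file name are made up.
    with open("device_v1.json", encoding="utf-8") as f:
        device = Device.from_json_object(json.load(f))

    screenshot = imread("screenshot.jpg")

    # Crop one (x, y, w, h) region with the generic helper from crop.py ...
    score_img = crop_xywh(screenshot, device.score)
    # ... or with the per-field wrapper kept in device/v1/crop.py.
    score_img = crop_to_score(screenshot, device)

    # Independently, crop_black_edges trims dark border rows/columns
    # (letterboxing) from a screenshot.
    trimmed = crop_black_edges(screenshot)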