mirror of
https://github.com/283375/arcaea-offline-ocr.git
synced 2025-04-19 05:20:17 +00:00
wip: split different versions of device
This commit is contained in:
parent
d9f18a3ad8
commit
769ed35588
@ -1,53 +1,56 @@
|
|||||||
from typing import Any, Tuple
|
from math import floor
|
||||||
|
from typing import Tuple
|
||||||
|
|
||||||
|
from numpy import all, array, count_nonzero
|
||||||
|
|
||||||
from .device import Device
|
|
||||||
from .types import Mat
|
from .types import Mat
|
||||||
|
|
||||||
__all__ = [
|
__all__ = ["crop_xywh", "crop_black_edges"]
|
||||||
"crop_img",
|
|
||||||
"crop_from_device_attr",
|
|
||||||
"crop_to_pure",
|
|
||||||
"crop_to_far",
|
|
||||||
"crop_to_lost",
|
|
||||||
"crop_to_max_recall",
|
|
||||||
"crop_to_rating_class",
|
|
||||||
"crop_to_score",
|
|
||||||
"crop_to_title",
|
|
||||||
]
|
|
||||||
|
|
||||||
|
|
||||||
def crop_img(img: Mat, *, top: int, left: int, bottom: int, right: int):
|
def crop_xywh(mat: "Mat", rect: Tuple[int, int, int, int]):
    """Crop `mat` to the (x, y, w, h) rectangle described by `rect`."""
    x, y, w, h = rect
    rows = slice(y, y + h)
    cols = slice(x, x + w)
    return mat[rows, cols]
|
||||||
|
|
||||||
|
|
||||||
def crop_to_pure(screenshot: Mat, device: Device):
|
def is_black_edge(list_of_pixels: "Mat", black_pixel=None):
    """Return True when more than 60% of the given pixels count as "black".

    The input is flattened to an (N, 3) pixel list; a pixel is "black" only
    when every channel is strictly below the matching channel of
    `black_pixel` (default: all zeros, which no pixel can be below).
    """
    if black_pixel is None:
        black_pixel = array([0, 0, 0], list_of_pixels.dtype)
    pixels = list_of_pixels.reshape([-1, 3])
    return count_nonzero(all(pixels < black_pixel, axis=1)) > floor(len(pixels) * 0.6)


def crop_black_edges(screenshot: "Mat"):
    """Strip mostly-black borders from all four sides of `screenshot`.

    Scans inward from each edge and stops at the first row/column that is
    not mostly darker than (50, 50, 50); the right/bottom scans also stop
    before crossing the already-found left/top bound.
    """
    cropped = screenshot.copy()
    black_pixel = array([50, 50, 50], screenshot.dtype)
    height, width = screenshot.shape[:2]
    left = 0
    right = width
    top = 0
    bottom = height

    for i in range(width):
        column = cropped[:, i]
        if not is_black_edge(column, black_pixel):
            break
        left += 1

    # BUG FIX: scan the actual rightmost columns (index i descending).
    # The original indexed `cropped[:, width - i]`, which inspected columns
    # starting near the LEFT edge while trimming the right bound.
    for i in range(width - 1, -1, -1):
        column = cropped[:, i]
        if i <= left + 1 or not is_black_edge(column, black_pixel):
            break
        right -= 1

    for i in range(height):
        row = cropped[i]
        if not is_black_edge(row, black_pixel):
            break
        top += 1

    # BUG FIX: mirror of the column fix — scan actual bottom rows instead
    # of `cropped[height - i]`.
    for i in range(height - 1, -1, -1):
        row = cropped[i]
        if i <= top + 1 or not is_black_edge(row, black_pixel):
            break
        bottom -= 1

    return cropped[top:bottom, left:right]
|
||||||
def crop_to_rating_class(screenshot: "Mat", device: "Device"):
    """Crop `screenshot` to the device's configured rating-class region."""
    region = device.rating_class
    return crop_from_device_attr(screenshot, region)
|
|
||||||
|
|
||||||
|
|
||||||
def crop_to_score(screenshot: "Mat", device: "Device"):
    """Crop `screenshot` to the device's configured score region."""
    region = device.score
    return crop_from_device_attr(screenshot, region)
|
|
||||||
|
|
||||||
|
|
||||||
def crop_to_title(screenshot: "Mat", device: "Device"):
    """Crop `screenshot` to the device's configured title region."""
    region = device.title
    return crop_from_device_attr(screenshot, region)
|
|
||||||
|
0
src/arcaea_offline_ocr/device/__init__.py
Normal file
0
src/arcaea_offline_ocr/device/__init__.py
Normal file
64
src/arcaea_offline_ocr/device/v1/crop.py
Normal file
64
src/arcaea_offline_ocr/device/v1/crop.py
Normal file
@ -0,0 +1,64 @@
|
|||||||
|
from math import floor
|
||||||
|
from typing import Any, Tuple
|
||||||
|
|
||||||
|
from numpy import all, array, count_nonzero
|
||||||
|
|
||||||
|
from ...types import Mat
|
||||||
|
from .definition import Device
|
||||||
|
|
||||||
|
__all__ = [
|
||||||
|
"crop_img",
|
||||||
|
"crop_from_device_attr",
|
||||||
|
"crop_to_pure",
|
||||||
|
"crop_to_far",
|
||||||
|
"crop_to_lost",
|
||||||
|
"crop_to_max_recall",
|
||||||
|
"crop_to_rating_class",
|
||||||
|
"crop_to_score",
|
||||||
|
"crop_to_title",
|
||||||
|
"crop_black_edges",
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
|
def crop_img(img: "Mat", *, top: int, left: int, bottom: int, right: int):
    """Return the sub-image of `img` bounded by the four edge coordinates."""
    rows = slice(top, bottom)
    cols = slice(left, right)
    return img[rows, cols]
|
||||||
|
|
||||||
|
|
||||||
|
def crop_from_device_attr(img: "Mat", rect: Tuple[int, int, int, int]):
    """Crop `img` to an (x, y, w, h) rectangle, e.g. a Device region attribute."""
    x, y, w, h = rect
    # Equivalent to crop_img(img, top=y, left=x, bottom=y + h, right=x + w),
    # with the tiny helper inlined.
    return img[y : y + h, x : x + w]
|
||||||
|
|
||||||
|
|
||||||
|
def crop_to_pure(screenshot: "Mat", device: "Device"):
    """Crop `screenshot` to the device's configured "pure" count region."""
    region = device.pure
    return crop_from_device_attr(screenshot, region)
|
||||||
|
|
||||||
|
|
||||||
|
def crop_to_far(screenshot: "Mat", device: "Device"):
    """Crop `screenshot` to the device's configured "far" count region."""
    region = device.far
    return crop_from_device_attr(screenshot, region)
|
||||||
|
|
||||||
|
|
||||||
|
def crop_to_lost(screenshot: "Mat", device: "Device"):
    """Crop `screenshot` to the device's configured "lost" count region."""
    region = device.lost
    return crop_from_device_attr(screenshot, region)
|
||||||
|
|
||||||
|
|
||||||
|
def crop_to_max_recall(screenshot: "Mat", device: "Device"):
    """Crop `screenshot` to the device's configured max-recall region."""
    region = device.max_recall
    return crop_from_device_attr(screenshot, region)
|
||||||
|
|
||||||
|
|
||||||
|
def crop_to_rating_class(screenshot: "Mat", device: "Device"):
    """Crop `screenshot` to the device's configured rating-class region."""
    region = device.rating_class
    return crop_from_device_attr(screenshot, region)
|
||||||
|
|
||||||
|
|
||||||
|
def crop_to_score(screenshot: "Mat", device: "Device"):
    """Crop `screenshot` to the device's configured score region."""
    region = device.score
    return crop_from_device_attr(screenshot, region)
|
||||||
|
|
||||||
|
|
||||||
|
def crop_to_title(screenshot: "Mat", device: "Device"):
    """Crop `screenshot` to the device's configured title region."""
    region = device.title
    return crop_from_device_attr(screenshot, region)
|
||||||
|
|
||||||
|
|
||||||
|
def is_black_edge(list_of_pixels: "Mat", black_pixel=None):
    """Return True when more than 60% of the given pixels are "black".

    The input is flattened to an (N, 3) pixel list; a pixel counts as black
    only when every channel is strictly below the matching channel of
    `black_pixel` (default: all zeros, which no pixel can be below).
    """
    if black_pixel is None:
        black_pixel = array([0, 0, 0], list_of_pixels.dtype)
    flattened = list_of_pixels.reshape([-1, 3])
    dark_mask = all(flattened < black_pixel, axis=1)
    threshold = floor(len(flattened) * 0.6)
    return count_nonzero(dark_mask) > threshold
|
37
src/arcaea_offline_ocr/device/v1/definition.py
Normal file
37
src/arcaea_offline_ocr/device/v1/definition.py
Normal file
@ -0,0 +1,37 @@
|
|||||||
|
from dataclasses import dataclass
|
||||||
|
from typing import Any, Dict, Tuple
|
||||||
|
|
||||||
|
__all__ = ["Device"]
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass(kw_only=True)
|
||||||
|
class Device:
|
||||||
|
version: int
|
||||||
|
uuid: str
|
||||||
|
name: str
|
||||||
|
pure: Tuple[int, int, int, int]
|
||||||
|
far: Tuple[int, int, int, int]
|
||||||
|
lost: Tuple[int, int, int, int]
|
||||||
|
max_recall: Tuple[int, int, int, int]
|
||||||
|
rating_class: Tuple[int, int, int, int]
|
||||||
|
score: Tuple[int, int, int, int]
|
||||||
|
title: Tuple[int, int, int, int]
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def from_json_object(cls, json_dict: Dict[str, Any]):
|
||||||
|
if json_dict["version"] == 1:
|
||||||
|
return cls(
|
||||||
|
version=1,
|
||||||
|
uuid=json_dict["uuid"],
|
||||||
|
name=json_dict["name"],
|
||||||
|
pure=json_dict["pure"],
|
||||||
|
far=json_dict["far"],
|
||||||
|
lost=json_dict["lost"],
|
||||||
|
max_recall=json_dict["max_recall"],
|
||||||
|
rating_class=json_dict["rating_class"],
|
||||||
|
score=json_dict["score"],
|
||||||
|
title=json_dict["title"],
|
||||||
|
)
|
||||||
|
|
||||||
|
def repr_info(self):
|
||||||
|
return f"Device(version={self.version}, uuid={repr(self.uuid)}, name={repr(self.name)})"
|
0
src/arcaea_offline_ocr/device/v2/__init__.py
Normal file
0
src/arcaea_offline_ocr/device/v2/__init__.py
Normal file
@ -4,12 +4,15 @@ from typing import Callable, Optional
|
|||||||
from cv2 import COLOR_BGR2HSV, GaussianBlur, cvtColor, imread
|
from cv2 import COLOR_BGR2HSV, GaussianBlur, cvtColor, imread
|
||||||
|
|
||||||
from .crop import *
|
from .crop import *
|
||||||
from .device import Device
|
|
||||||
|
# from .device import Device
|
||||||
from .mask import *
|
from .mask import *
|
||||||
from .ocr import *
|
from .ocr import *
|
||||||
from .types import Mat
|
from .types import Mat
|
||||||
from .utils import imread_unicode
|
from .utils import imread_unicode
|
||||||
|
|
||||||
|
Device = None
|
||||||
|
|
||||||
__all__ = [
|
__all__ = [
|
||||||
"process_digits_ocr_img",
|
"process_digits_ocr_img",
|
||||||
"process_tesseract_ocr_img",
|
"process_tesseract_ocr_img",
|
||||||
|
@ -1,4 +1,13 @@
|
|||||||
|
from typing import NamedTuple
|
||||||
|
|
||||||
import numpy as np
|
import numpy as np
|
||||||
|
|
||||||
# from pylance
|
# from pylance
|
||||||
Mat = np.ndarray[int, np.dtype[np.generic]]
|
Mat = np.ndarray[int, np.dtype[np.generic]]
|
||||||
|
|
||||||
|
|
||||||
|
class XYWHRect(NamedTuple):
    """A rectangle stored as (x, y, w, h): top-left corner plus size."""

    x: int  # left edge
    y: int  # top edge
    w: int  # width
    h: int  # height
|
||||||
|
Loading…
x
Reference in New Issue
Block a user