From 67ae3009dcc3132538c8c624804bb352e12e6c0d Mon Sep 17 00:00:00 2001 From: ssttkkl Date: Sun, 26 Nov 2023 21:33:18 +0800 Subject: [PATCH] =?UTF-8?q?=E6=B7=BB=E5=8A=A0=E5=8D=95=E6=B5=8B?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .github/workflows/pr-check.yml | 35 +- nonebot_plugin_nagabus/data/mjs.py | 49 ++- nonebot_plugin_nagabus/data/naga.py | 234 ++++++------ nonebot_plugin_nagabus/data/session.py | 28 -- nonebot_plugin_nagabus/data/utils/session.py | 25 ++ nonebot_plugin_nagabus/naga/fake_api.py | 9 +- nonebot_plugin_nagabus/naga/service.py | 360 +++++++++--------- poetry.lock | 211 +++++++++- pyproject.toml | 5 + .../mjs => tests}/__init__.py | 0 tests/conftest.py | 40 ++ tests/sample_majsoul_paipu.json | 1 + tests/test_naga_service.py | 42 ++ tests/utils/__init__.py | 0 tests/utils/ob11_event.py | 53 +++ 15 files changed, 717 insertions(+), 375 deletions(-) delete mode 100644 nonebot_plugin_nagabus/data/session.py create mode 100644 nonebot_plugin_nagabus/data/utils/session.py rename {nonebot_plugin_nagabus/mjs => tests}/__init__.py (100%) create mode 100644 tests/conftest.py create mode 100644 tests/sample_majsoul_paipu.json create mode 100644 tests/test_naga_service.py create mode 100644 tests/utils/__init__.py create mode 100644 tests/utils/ob11_event.py diff --git a/.github/workflows/pr-check.yml b/.github/workflows/pr-check.yml index 6ca3d81..a18f4d5 100644 --- a/.github/workflows/pr-check.yml +++ b/.github/workflows/pr-check.yml @@ -7,7 +7,7 @@ on: pull_request: jobs: - test-api-only: + test: strategy: fail-fast: false @@ -29,36 +29,3 @@ jobs: pip install -r requirements.txt - name: Test with pytest run: pytest - -# test-full: -# -# strategy: -# fail-fast: false -# matrix: -# os: [ macos-latest, windows-latest, ubuntu-latest ] -# python-version: ["3.9", "3.10", "3.11", "3.12"] -# -# runs-on: ${{ matrix.os }} -# steps: -# - name: Checkout nonebot-plugin-access-control -# uses: actions/checkout@v3 -# with: -# repository: 'bot-ssttkkl/nonebot-plugin-access-control' -# - name: Set up Python ${{ matrix.python-version }} -# uses: actions/setup-python@v3 -# with: -# python-version: ${{ matrix.python-version }} -# - name: Install requirements -# run: | -# pip install poetry -# poetry export --with=dev --without-hashes -o requirements.txt -# pip install -r requirements.txt -# - name: Checkout nonebot-plugin-access-control-api -# uses: actions/checkout@v3 -# with: -# path: 'nonebot-plugin-access-control-api' -# - name: Install nonebot-plugin-access-control-api -# run: | -# pip install ./nonebot-plugin-access-control-api -# - name: Test with pytest -# run: pytest ./src/tests diff --git a/nonebot_plugin_nagabus/data/mjs.py b/nonebot_plugin_nagabus/data/mjs.py index 8a1c865..036280c 100644 --- a/nonebot_plugin_nagabus/data/mjs.py +++ b/nonebot_plugin_nagabus/data/mjs.py @@ -1,4 +1,6 @@ import json +from collections.abc import Awaitable +from typing import Any, Callable, Optional from nonebot import logger from sqlalchemy import select @@ -7,7 +9,7 @@ from .base import SqlModel from ..data.utils import insert -from ..data.session import get_session +from .utils.session import _use_session class MajsoulPaipuOrm(SqlModel): @@ -18,26 +20,39 @@ class MajsoulPaipuOrm(SqlModel): content: Mapped[str] +# 为了方便单测时mock实现 +_get_majsoul_paipu_delegate: Optional[Callable[[str], Awaitable[Any]]] = None + + +def _set_get_majsoul_paipu_delegate(get_majsoul_paipu_delegate): + global _get_majsoul_paipu_delegate + _get_majsoul_paipu_delegate = 
get_majsoul_paipu_delegate + + async def get_majsoul_paipu(uuid: str): - sess = get_session() + if _get_majsoul_paipu_delegate is not None: + return await _get_majsoul_paipu_delegate(uuid) - stmt = select(MajsoulPaipuOrm).where(MajsoulPaipuOrm.paipu_uuid == uuid).limit(1) - res = (await sess.execute(stmt)).scalar_one_or_none() + async with _use_session() as sess: + stmt = ( + select(MajsoulPaipuOrm).where(MajsoulPaipuOrm.paipu_uuid == uuid).limit(1) + ) + res = (await sess.execute(stmt)).scalar_one_or_none() - if res is not None: - logger.opt(colors=True).info(f"Use cached majsoul paipu {uuid}") - return json.loads(res.content) + if res is not None: + logger.opt(colors=True).info(f"Use cached majsoul paipu {uuid}") + return json.loads(res.content) - logger.opt(colors=True).info(f"Downloading majsoul paipu {uuid} ...") - data = await download_paipu(uuid) + logger.opt(colors=True).info(f"Downloading majsoul paipu {uuid} ...") + data = await download_paipu(uuid) - stmt = ( - insert(MajsoulPaipuOrm) - .values(paipu_uuid=uuid, content=json.dumps(data)) - .on_conflict_do_nothing(index_elements=[MajsoulPaipuOrm.paipu_uuid]) - ) + stmt = ( + insert(MajsoulPaipuOrm) + .values(paipu_uuid=uuid, content=json.dumps(data)) + .on_conflict_do_nothing(index_elements=[MajsoulPaipuOrm.paipu_uuid]) + ) - await sess.execute(stmt) - await sess.commit() + await sess.execute(stmt) + await sess.commit() - return data + return data diff --git a/nonebot_plugin_nagabus/data/naga.py b/nonebot_plugin_nagabus/data/naga.py index 3987e90..8540cca 100644 --- a/nonebot_plugin_nagabus/data/naga.py +++ b/nonebot_plugin_nagabus/data/naga.py @@ -9,7 +9,7 @@ from .base import SqlModel from .utils import UTCDateTime -from .session import get_session +from .utils.session import _use_session from ..naga.model import NagaReport, NagaGameRule, NagaOrderStatus @@ -55,48 +55,48 @@ class MajsoulOrderOrm(SqlModel): async def get_orders(t_begin: datetime, t_end: datetime) -> list[NagaOrderOrm]: - sess = get_session() - stmt = select(NagaOrderOrm).where( - NagaOrderOrm.create_time >= t_begin, - NagaOrderOrm.create_time < t_end, - NagaOrderOrm.status == NagaOrderStatus.ok, - ) - return list((await sess.execute(stmt)).scalars()) + async with _use_session() as sess: + stmt = select(NagaOrderOrm).where( + NagaOrderOrm.create_time >= t_begin, + NagaOrderOrm.create_time < t_end, + NagaOrderOrm.status == NagaOrderStatus.ok, + ) + return list((await sess.execute(stmt)).scalars()) async def get_local_majsoul_order( majsoul_uuid: str, kyoku: int, honba: int, model_type: str ) -> Optional[NagaOrderOrm]: - sess = get_session() - stmt = select(MajsoulOrderOrm).where( - MajsoulOrderOrm.paipu_uuid == majsoul_uuid, - MajsoulOrderOrm.kyoku == kyoku, - MajsoulOrderOrm.honba == honba, - MajsoulOrderOrm.model_type == model_type, - ) + async with _use_session() as sess: + stmt = select(MajsoulOrderOrm).where( + MajsoulOrderOrm.paipu_uuid == majsoul_uuid, + MajsoulOrderOrm.kyoku == kyoku, + MajsoulOrderOrm.honba == honba, + MajsoulOrderOrm.model_type == model_type, + ) - order_orm: Optional[MajsoulOrderOrm] = ( - await sess.execute(stmt) - ).scalar_one_or_none() - if order_orm is not None: - if ( - order_orm.order.status == NagaOrderStatus.ok - or datetime.now(tz=timezone.utc).timestamp() - - order_orm.order.update_time.timestamp() - < 90 - ): - return order_orm.order - else: # 超过90s仍未分析完成则删除重来 - logger.opt(colors=True).info( - f"Delete majsoul paipu {majsoul_uuid} " - f"(kyoku: {kyoku}, honba: {honba}) " - f"analyze order: {order_orm.naga_haihu_id}, " - 
f"because it takes over 90 seconds and still not done" - ) - await sess.delete(order_orm.order) - await sess.delete(order_orm) - await sess.commit() - return None + order_orm: Optional[MajsoulOrderOrm] = ( + await sess.execute(stmt) + ).scalar_one_or_none() + if order_orm is not None: + if ( + order_orm.order.status == NagaOrderStatus.ok + or datetime.now(tz=timezone.utc).timestamp() + - order_orm.order.update_time.timestamp() + < 90 + ): + return order_orm.order + else: # 超过90s仍未分析完成则删除重来 + logger.opt(colors=True).info( + f"Delete majsoul paipu {majsoul_uuid} " + f"(kyoku: {kyoku}, honba: {honba}) " + f"analyze order: {order_orm.naga_haihu_id}, " + f"because it takes over 90 seconds and still not done" + ) + await sess.delete(order_orm.order) + await sess.delete(order_orm) + await sess.commit() + return None async def new_local_majsoul_order( @@ -107,102 +107,104 @@ async def new_local_majsoul_order( honba: int, model_type: str, ): - sess = get_session() - order_orm = NagaOrderOrm( - haihu_id=haihu_id, - customer_id=customer_id, - cost_np=10, - source=NagaOrderSource.majsoul, - model_type=model_type, - status=NagaOrderStatus.analyzing, - create_time=datetime.now(tz=timezone.utc), - update_time=datetime.now(tz=timezone.utc), - ) + async with _use_session() as sess: + order_orm = NagaOrderOrm( + haihu_id=haihu_id, + customer_id=customer_id, + cost_np=10, + source=NagaOrderSource.majsoul, + model_type=model_type, + status=NagaOrderStatus.analyzing, + create_time=datetime.now(tz=timezone.utc), + update_time=datetime.now(tz=timezone.utc), + ) - majsoul_order_orm = MajsoulOrderOrm( - naga_haihu_id=haihu_id, - paipu_uuid=majsoul_uuid, - kyoku=kyoku, - honba=honba, - model_type=model_type, - order=order_orm, - ) + majsoul_order_orm = MajsoulOrderOrm( + naga_haihu_id=haihu_id, + paipu_uuid=majsoul_uuid, + kyoku=kyoku, + honba=honba, + model_type=model_type, + order=order_orm, + ) - sess.add(order_orm) - sess.add(majsoul_order_orm) - await sess.commit() + sess.add(order_orm) + sess.add(majsoul_order_orm) + await sess.commit() async def update_local_majsoul_order(haihu_id: str, report: NagaReport): - sess = get_session() - stmt = ( - update(NagaOrderOrm) - .where(NagaOrderOrm.haihu_id == haihu_id) - .values( - status=NagaOrderStatus.ok, - naga_report=json.dumps(report), - update_time=datetime.now(timezone.utc), + async with _use_session() as sess: + stmt = ( + update(NagaOrderOrm) + .where(NagaOrderOrm.haihu_id == haihu_id) + .values( + status=NagaOrderStatus.ok, + naga_report=json.dumps(report), + update_time=datetime.now(timezone.utc), + ) ) - ) - await sess.execute(stmt) - await sess.commit() + await sess.execute(stmt) + await sess.commit() async def get_local_order(haihu_id: str, model_type: str) -> Optional[NagaOrderOrm]: - sess = get_session() - stmt = select(NagaOrderOrm).where( - NagaOrderOrm.haihu_id == haihu_id, - NagaOrderOrm.model_type == model_type, - ) + async with _use_session() as sess: + stmt = select(NagaOrderOrm).where( + NagaOrderOrm.haihu_id == haihu_id, + NagaOrderOrm.model_type == model_type, + ) - order_orm: Optional[NagaOrderOrm] = (await sess.execute(stmt)).scalar_one_or_none() - if order_orm is not None: - if ( - order_orm.status == NagaOrderStatus.ok - or datetime.now(tz=timezone.utc).timestamp() - - order_orm.update_time.timestamp() - < 300 - ): - return order_orm - else: # 超过90s仍未分析完成则删除重来 - logger.opt(colors=True).info( - f"Delete tenhou paipu {haihu_id} analyze order " - f"because it takes over 90 seconds and still not done" - ) - await sess.delete(order_orm) - await 
sess.commit() - return None + order_orm: Optional[NagaOrderOrm] = ( + await sess.execute(stmt) + ).scalar_one_or_none() + if order_orm is not None: + if ( + order_orm.status == NagaOrderStatus.ok + or datetime.now(tz=timezone.utc).timestamp() + - order_orm.update_time.timestamp() + < 300 + ): + return order_orm + else: # 超过90s仍未分析完成则删除重来 + logger.opt(colors=True).info( + f"Delete tenhou paipu {haihu_id} analyze order " + f"because it takes over 90 seconds and still not done" + ) + await sess.delete(order_orm) + await sess.commit() + return None async def new_local_order( haihu_id: str, customer_id: int, rule: NagaGameRule, model_type: str ): - sess = get_session() - order_orm = NagaOrderOrm( - haihu_id=haihu_id, - customer_id=customer_id, - cost_np=50 if rule == NagaGameRule.hanchan else 30, - source=NagaOrderSource.tenhou, - model_type=model_type, - status=NagaOrderStatus.analyzing, - create_time=datetime.now(tz=timezone.utc), - update_time=datetime.now(tz=timezone.utc), - ) + async with _use_session() as sess: + order_orm = NagaOrderOrm( + haihu_id=haihu_id, + customer_id=customer_id, + cost_np=50 if rule == NagaGameRule.hanchan else 30, + source=NagaOrderSource.tenhou, + model_type=model_type, + status=NagaOrderStatus.analyzing, + create_time=datetime.now(tz=timezone.utc), + update_time=datetime.now(tz=timezone.utc), + ) - sess.add(order_orm) - await sess.commit() + sess.add(order_orm) + await sess.commit() async def update_local_order(haihu_id: str, report: NagaReport): - sess = get_session() - stmt = ( - update(NagaOrderOrm) - .where(NagaOrderOrm.haihu_id == haihu_id) - .values( - status=NagaOrderStatus.ok, - naga_report=json.dumps(report), - update_time=datetime.now(timezone.utc), + async with _use_session() as sess: + stmt = ( + update(NagaOrderOrm) + .where(NagaOrderOrm.haihu_id == haihu_id) + .values( + status=NagaOrderStatus.ok, + naga_report=json.dumps(report), + update_time=datetime.now(timezone.utc), + ) ) - ) - await sess.execute(stmt) - await sess.commit() + await sess.execute(stmt) + await sess.commit() diff --git a/nonebot_plugin_nagabus/data/session.py b/nonebot_plugin_nagabus/data/session.py deleted file mode 100644 index 7bc8824..0000000 --- a/nonebot_plugin_nagabus/data/session.py +++ /dev/null @@ -1,28 +0,0 @@ -from typing import Optional - -from sqlalchemy.orm import sessionmaker -from nonebot.message import run_postprocessor -from nonebot.internal.matcher import current_matcher -from nonebot_plugin_datastore.db import get_engine, post_db_init -from sqlalchemy.ext.asyncio import AsyncSession, async_scoped_session - -_session: Optional[async_scoped_session] = None - - -@post_db_init -async def _(): - global _session - session_factory = sessionmaker( - get_engine(), expire_on_commit=False, class_=AsyncSession - ) - _session = async_scoped_session(session_factory, scopefunc=current_matcher.get) - - -@run_postprocessor -async def postprocessor(): - if _session is not None: - await _session.remove() - - -def get_session() -> AsyncSession: - return _session() diff --git a/nonebot_plugin_nagabus/data/utils/session.py b/nonebot_plugin_nagabus/data/utils/session.py new file mode 100644 index 0000000..27e7f26 --- /dev/null +++ b/nonebot_plugin_nagabus/data/utils/session.py @@ -0,0 +1,25 @@ +import contextvars +from contextlib import AbstractAsyncContextManager, asynccontextmanager + +from nonebot import logger +from sqlalchemy.ext.asyncio import AsyncSession +from nonebot_plugin_datastore.db import get_engine + +_nagabus_current_session = 
contextvars.ContextVar("_nagabus_current_session") + + +@asynccontextmanager +async def _use_session() -> AbstractAsyncContextManager[AsyncSession]: + try: + yield _nagabus_current_session.get() + except LookupError: + session = AsyncSession(get_engine()) + logger.trace("sqlalchemy session was created") + token = _nagabus_current_session.set(session) + + try: + yield session + finally: + await session.close() + logger.trace("sqlalchemy session was closed") + _nagabus_current_session.reset(token) diff --git a/nonebot_plugin_nagabus/naga/fake_api.py b/nonebot_plugin_nagabus/naga/fake_api.py index d5eb2a2..bebd376 100644 --- a/nonebot_plugin_nagabus/naga/fake_api.py +++ b/nonebot_plugin_nagabus/naga/fake_api.py @@ -18,6 +18,7 @@ NagaGameRule, NagaOrderStatus, NagaReportPlayer, + NagaTonpuuModelType, NagaHanchanModelType, ) @@ -36,7 +37,7 @@ async def close(self): async def set_cookies(self, cookies: Mapping[str, str]): logger.info( - f"naga_cookies set to {'; '.join((f'{kv[0]}={kv[1]}' for kv in cookies))}" + f"naga_cookies set to {'; '.join(f'{kv[0]}={kv[1]}' for kv in cookies)}" ) async def order_report_list(self, year: int, month: int) -> OrderReportList: @@ -51,7 +52,7 @@ async def _produce_order(self, order: NagaOrder): @logger.catch async def _produce_report(self, report: NagaReport): - await asyncio.sleep(20) + await asyncio.sleep(5) self.report.insert(0, report) logger.debug( @@ -64,7 +65,7 @@ async def analyze_custom( seat: int, rule: NagaGameRule, model_type: Union[ - Sequence[NagaHanchanModelType], Sequence[NagaHanchanModelType] + Sequence[NagaHanchanModelType], Sequence[NagaTonpuuModelType] ], ): time = ( @@ -99,7 +100,7 @@ async def analyze_tenhou( seat: int, rule: NagaGameRule, model_type: Union[ - Sequence[NagaHanchanModelType], Sequence[NagaHanchanModelType] + Sequence[NagaHanchanModelType], Sequence[NagaTonpuuModelType] ], ) -> AnalyzeTenhou: for o in self.order: diff --git a/nonebot_plugin_nagabus/naga/service.py b/nonebot_plugin_nagabus/naga/service.py index 55bc78d..6ee1b73 100644 --- a/nonebot_plugin_nagabus/naga/service.py +++ b/nonebot_plugin_nagabus/naga/service.py @@ -20,6 +20,7 @@ from .utils import model_type_to_str from ..data.mjs import get_majsoul_paipu from .api import NagaApi, OrderReportList +from ..data.utils.session import _use_session from ..data.naga_cookies import get_naga_cookies, set_naga_cookies from .errors import ( OrderError, @@ -252,128 +253,132 @@ async def analyze_majsoul( None, Sequence[NagaHanchanModelType], Sequence[NagaTonpuuModelType] ] = None, ) -> NagaServiceOrder: - try: - data = await get_majsoul_paipu(majsoul_uuid) - except MajsoulDownloadError as e: - logger.opt(colors=True).warning( - f"Failed to download paipu {majsoul_uuid}, code: {e.code}" - ) - if e.code == 1203: - raise InvalidGameError(f"invalid majsoul_uuid: {majsoul_uuid}") from e + async with _use_session(): + try: + data = await get_majsoul_paipu(majsoul_uuid) + except MajsoulDownloadError as e: + logger.opt(colors=True).warning( + f"Failed to download paipu {majsoul_uuid}, code: {e.code}" + ) + if e.code == 1203: + raise InvalidGameError( + f"invalid majsoul_uuid: {majsoul_uuid}" + ) from e + else: + raise e + + if len(data["name"]) != 4: + raise UnsupportedGameError("only yonma game is supported") + + if "東" in data["rule"]["disp"]: + rule = NagaGameRule.tonpuu else: - raise e - - if len(data["name"]) != 4: - raise UnsupportedGameError("only yonma game is supported") - - if "東" in data["rule"]["disp"]: - rule = NagaGameRule.tonpuu - else: - rule = NagaGameRule.hanchan - - 
log = None - for i, log in enumerate(data["log"]): - if log[0][0] == kyoku: - if honba == -1: - # 未指定本场 - if (i == 0 or data["log"][i - 1][0][0] != kyoku) and ( - i == len(data["log"]) - 1 or data["log"][i + 1][0][0] != kyoku - ): - # 该场次只存在一个本场 - honba = log[0][1] + rule = NagaGameRule.hanchan + + log = None + for i, log in enumerate(data["log"]): + if log[0][0] == kyoku: + if honba == -1: + # 未指定本场 + if (i == 0 or data["log"][i - 1][0][0] != kyoku) and ( + i == len(data["log"]) - 1 + or data["log"][i + 1][0][0] != kyoku + ): + # 该场次只存在一个本场 + honba = log[0][1] + log = log + break + elif log[0][1] == honba: log = log - break - elif log[0][1] == honba: - log = log - break + break - if log is None: - available_kyoku_honba = [(log[0][0], log[0][1]) for log in data["log"]] - raise InvalidKyokuHonbaError(available_kyoku_honba) + if log is None: + available_kyoku_honba = [(log[0][0], log[0][1]) for log in data["log"]] + raise InvalidKyokuHonbaError(available_kyoku_honba) - model_type = self._handle_model_type(rule, model_type) - model_type_str = model_type_to_str(model_type) + model_type = self._handle_model_type(rule, model_type) + model_type_str = model_type_to_str(model_type) - haihu_id = "" - new_order = False + haihu_id = "" + new_order = False - # 加锁防止重复下单 - local_order = await get_local_majsoul_order( - majsoul_uuid, kyoku, honba, model_type_str - ) - if local_order is None: - async with self._majsoul_order_mutex: - local_order = await get_local_majsoul_order( - majsoul_uuid, kyoku, honba, model_type_str - ) - if local_order is None: - # 不存在记录,安排解析 - logger.opt(colors=True).info( - f"Ordering majsoul paipu {majsoul_uuid} " - f"(kyoku: {kyoku}, honba: {honba}) analyze..." + # 加锁防止重复下单 + local_order = await get_local_majsoul_order( + majsoul_uuid, kyoku, honba, model_type_str + ) + if local_order is None: + async with self._majsoul_order_mutex: + local_order = await get_local_majsoul_order( + majsoul_uuid, kyoku, honba, model_type_str ) - - # data["log"] = log - data = { - "title": data["title"], - "name": data["name"], - "rule": data["rule"], - "log": [log], - } - - order = await self._order_custom([data], rule, model_type) - haihu_id = order.haihu_id - - new_order = True - - session_persist_id = await get_session_persist_id(session) - await new_local_majsoul_order( - haihu_id, - session_persist_id, - majsoul_uuid, - kyoku, - honba, - model_type_str, + if local_order is None: + # 不存在记录,安排解析 + logger.opt(colors=True).info( + f"Ordering majsoul paipu {majsoul_uuid} " + f"(kyoku: {kyoku}, honba: {honba}) analyze..." 
+ ) + + # data["log"] = log + data = { + "title": data["title"], + "name": data["name"], + "rule": data["rule"], + "log": [log], + } + + order = await self._order_custom([data], rule, model_type) + haihu_id = order.haihu_id + + new_order = True + + session_persist_id = await get_session_persist_id(session) + await new_local_majsoul_order( + haihu_id, + session_persist_id, + majsoul_uuid, + kyoku, + honba, + model_type_str, + ) + + if local_order is not None: + # 存在记录 + if local_order.status == NagaOrderStatus.ok: + logger.opt(colors=True).info( + f"Found a existing majsoul paipu {majsoul_uuid} " + f"(kyoku: {kyoku}, honba: {honba}) " + f"analyze report: {local_order.haihu_id}" ) + report = NagaReport(*json.loads(local_order.naga_report)) + return NagaServiceOrder(report, 0) - if local_order is not None: - # 存在记录 - if local_order.status == NagaOrderStatus.ok: + haihu_id = local_order.naga_haihu_id logger.opt(colors=True).info( - f"Found a existing majsoul paipu {majsoul_uuid} " + f"Found a processing majsoul paipu {majsoul_uuid} " f"(kyoku: {kyoku}, honba: {honba}) " - f"analyze report: {local_order.haihu_id}" + f"analyze order: {haihu_id}" ) - report = NagaReport(*json.loads(local_order.naga_report)) - return NagaServiceOrder(report, 0) - haihu_id = local_order.naga_haihu_id + assert haihu_id != "" + logger.opt(colors=True).info( - f"Found a processing majsoul paipu {majsoul_uuid} " + f"Waiting for majsoul paipu {majsoul_uuid} " f"(kyoku: {kyoku}, honba: {honba}) " - f"analyze order: {haihu_id}" + f"analyze report: {haihu_id} ..." ) + report = await self._get_report(haihu_id) - assert haihu_id != "" - - logger.opt(colors=True).info( - f"Waiting for majsoul paipu {majsoul_uuid} " - f"(kyoku: {kyoku}, honba: {honba}) " - f"analyze report: {haihu_id} ..." - ) - report = await self._get_report(haihu_id) - - if new_order: - # 需要更新之前创建的NagaOrderOrm - logger.opt(colors=True).debug( - f"Updating majsoul paipu {majsoul_uuid} " - f"(kyoku: {kyoku}, honba: {honba}) " - f"analyze report: {haihu_id}..." - ) - await update_local_majsoul_order(haihu_id, report) - return NagaServiceOrder(report, 10) - else: - return NagaServiceOrder(report, 0) + if new_order: + # 需要更新之前创建的NagaOrderOrm + logger.opt(colors=True).debug( + f"Updating majsoul paipu {majsoul_uuid} " + f"(kyoku: {kyoku}, honba: {honba}) " + f"analyze report: {haihu_id}..." 
+ ) + await update_local_majsoul_order(haihu_id, report) + return NagaServiceOrder(report, 10) + else: + return NagaServiceOrder(report, 0) async def _order_tenhou( self, @@ -399,94 +404,99 @@ async def analyze_tenhou( None, Sequence[NagaHanchanModelType], Sequence[NagaTonpuuModelType] ] = None, ) -> NagaServiceOrder: - if not self._tenhou_haihu_id_reg.match(haihu_id): - raise InvalidGameError(f"invalid haihu_id: {haihu_id}") + async with _use_session(): + if not self._tenhou_haihu_id_reg.match(haihu_id): + raise InvalidGameError(f"invalid haihu_id: {haihu_id}") + + haihu_element = haihu_id.split("-") + if len(haihu_element) != 4: + raise InvalidGameError(f"invalid haihu_id: {haihu_id}") + + haihu_rule = int(haihu_element[1], 16) + is_yonma = not bool(haihu_rule & 16) + is_hanchan = bool(haihu_rule & 8) + is_kuitan = not bool(haihu_rule & 4) + is_online = bool(haihu_rule & 1) + + if is_yonma and is_kuitan and is_online: + rule = NagaGameRule.hanchan if is_hanchan else NagaGameRule.tonpuu + else: + raise UnsupportedGameError("only online kuitan yonma game is supported") - haihu_element = haihu_id.split("-") - if len(haihu_element) != 4: - raise InvalidGameError(f"invalid haihu_id: {haihu_id}") + model_type = self._handle_model_type(rule, model_type) + model_type_str = model_type_to_str(model_type) - haihu_rule = int(haihu_element[1], 16) - is_yonma = not bool(haihu_rule & 16) - is_hanchan = bool(haihu_rule & 8) - is_kuitan = not bool(haihu_rule & 4) - is_online = bool(haihu_rule & 1) + new_order = False - if is_yonma and is_kuitan and is_online: - rule = NagaGameRule.hanchan if is_hanchan else NagaGameRule.tonpuu - else: - raise UnsupportedGameError("only online kuitan yonma game is supported") + # 加锁防止重复下单 + local_order = await get_local_order(haihu_id, model_type_str) + if local_order is None: + async with self._tenhou_order_mutex: + local_order = await get_local_order(haihu_id, model_type_str) + if local_order is None: + # 不存在记录,安排解析 + logger.opt(colors=True).info( + f"Ordering tenhou paipu {haihu_id} analyze..." + ) - model_type = self._handle_model_type(rule, model_type) - model_type_str = model_type_to_str(model_type) + await self._order_tenhou(haihu_id, seat, rule, model_type) - new_order = False + new_order = True - # 加锁防止重复下单 - local_order = await get_local_order(haihu_id, model_type_str) - if local_order is None: - async with self._tenhou_order_mutex: - local_order = await get_local_order(haihu_id, model_type_str) - if local_order is None: - # 不存在记录,安排解析 - logger.opt(colors=True).info( - f"Ordering tenhou paipu {haihu_id} analyze..." 
- ) - - await self._order_tenhou(haihu_id, seat, rule, model_type) - - new_order = True + session_persist_id = await get_session_persist_id(session) + await new_local_order( + haihu_id, session_persist_id, rule, model_type_str + ) - session_persist_id = await get_session_persist_id(session) - await new_local_order( - haihu_id, session_persist_id, rule, model_type_str + if local_order is not None: + # 存在记录 + if local_order.status == NagaOrderStatus.ok: + logger.opt(colors=True).info( + f"Found a existing tenhou paipu {haihu_id}) " + "analyze report" ) + report = NagaReport(*json.loads(local_order.naga_report)) + return NagaServiceOrder(report, 0) - if local_order is not None: - # 存在记录 - if local_order.status == NagaOrderStatus.ok: logger.opt(colors=True).info( - f"Found a existing tenhou paipu {haihu_id}) " - "analyze report" + f"Found a processing tenhou paipu {haihu_id}) " + "analyze order" ) - report = NagaReport(*json.loads(local_order.naga_report)) - return NagaServiceOrder(report, 0) logger.opt(colors=True).info( - f"Found a processing tenhou paipu {haihu_id}) " "analyze order" + f"Waiting for tenhou paipu {haihu_id}) " f"analyze report..." ) + report = await self._get_report(haihu_id) - logger.opt(colors=True).info( - f"Waiting for tenhou paipu {haihu_id}) " f"analyze report..." - ) - report = await self._get_report(haihu_id) - - if new_order: - # 需要更新之前创建的NagaOrderOrm - logger.opt(colors=True).debug( - f"Updating tenhou paipu {haihu_id}) " "analyze report..." - ) - await update_local_order(haihu_id, report) + if new_order: + # 需要更新之前创建的NagaOrderOrm + logger.opt(colors=True).debug( + f"Updating tenhou paipu {haihu_id}) " "analyze report..." + ) + await update_local_order(haihu_id, report) - return NagaServiceOrder(report, 50) - else: - return NagaServiceOrder(report, 0) + return NagaServiceOrder(report, 50) + else: + return NagaServiceOrder(report, 0) async def statistic(self, year: int, month: int) -> list[NagaServiceUserStatistic]: - t_begin = datetime(year, month, 1) - t_end = datetime(year, month, 1) + monthdelta(months=1) - orders = await get_orders(t_begin, t_end) - - statistic = {} - - for order in orders: - if order.customer_id not in statistic: - statistic[order.customer_id] = 0 - statistic[order.customer_id] += order.cost_np - - statistic = [NagaServiceUserStatistic(x[0], x[1]) for x in statistic.items()] - statistic.sort(key=lambda x: x.cost_np, reverse=True) - return statistic + async with _use_session(): + t_begin = datetime(year, month, 1) + t_end = datetime(year, month, 1) + monthdelta(months=1) + orders = await get_orders(t_begin, t_end) + + statistic = {} + + for order in orders: + if order.customer_id not in statistic: + statistic[order.customer_id] = 0 + statistic[order.customer_id] += order.cost_np + + statistic = [ + NagaServiceUserStatistic(x[0], x[1]) for x in statistic.items() + ] + statistic.sort(key=lambda x: x.cost_np, reverse=True) + return statistic async def get_rest_np(self) -> int: return await self.api.get_rest_np() diff --git a/poetry.lock b/poetry.lock index ff8d076..165df60 100644 --- a/poetry.lock +++ b/poetry.lock @@ -190,6 +190,39 @@ types-python-dateutil = ">=2.8.10" doc = ["doc8", "sphinx (>=7.0.0)", "sphinx-autobuild", "sphinx-autodoc-typehints", "sphinx_rtd_theme (>=1.3.0)"] test = ["dateparser (>=1.0.0,<2.0.0)", "pre-commit", "pytest", "pytest-cov", "pytest-mock", "pytz (==2021.1)", "simplejson (>=3.0.0,<4.0.0)"] +[[package]] +name = "asgiref" +version = "3.7.2" +description = "ASGI specs, helper code, and adapters" +category = "dev" 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "asgiref-3.7.2-py3-none-any.whl", hash = "sha256:89b2ef2247e3b562a16eef663bc0e2e703ec6468e2fa8a5cd61cd449786d4f6e"}, + {file = "asgiref-3.7.2.tar.gz", hash = "sha256:9e0ce3aa93a819ba5b45120216b23878cf6e8525eb3848653452b4192b92afed"}, +] + +[package.dependencies] +typing-extensions = {version = ">=4", markers = "python_version < \"3.11\""} + +[package.extras] +tests = ["mypy (>=0.800)", "pytest", "pytest-asyncio"] + +[[package]] +name = "async-asgi-testclient" +version = "1.4.11" +description = "Async client for testing ASGI web applications" +category = "dev" +optional = false +python-versions = "*" +files = [ + {file = "async-asgi-testclient-1.4.11.tar.gz", hash = "sha256:4449ac85d512d661998ec61f91c9ae01851639611d748d81ae7f816736551792"}, +] + +[package.dependencies] +multidict = ">=4.0,<7.0" +requests = ">=2.21,<3.0" + [[package]] name = "async-timeout" version = "4.0.3" @@ -574,6 +607,74 @@ pyyaml = ">=5.3.1" requests = ">=2.23.0" rich = "*" +[[package]] +name = "coverage" +version = "7.3.2" +description = "Code coverage measurement for Python" +category = "dev" +optional = false +python-versions = ">=3.8" +files = [ + {file = "coverage-7.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d872145f3a3231a5f20fd48500274d7df222e291d90baa2026cc5152b7ce86bf"}, + {file = "coverage-7.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:310b3bb9c91ea66d59c53fa4989f57d2436e08f18fb2f421a1b0b6b8cc7fffda"}, + {file = "coverage-7.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f47d39359e2c3779c5331fc740cf4bce6d9d680a7b4b4ead97056a0ae07cb49a"}, + {file = "coverage-7.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aa72dbaf2c2068404b9870d93436e6d23addd8bbe9295f49cbca83f6e278179c"}, + {file = "coverage-7.3.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:beaa5c1b4777f03fc63dfd2a6bd820f73f036bfb10e925fce067b00a340d0f3f"}, + {file = "coverage-7.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:dbc1b46b92186cc8074fee9d9fbb97a9dd06c6cbbef391c2f59d80eabdf0faa6"}, + {file = "coverage-7.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:315a989e861031334d7bee1f9113c8770472db2ac484e5b8c3173428360a9148"}, + {file = "coverage-7.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d1bc430677773397f64a5c88cb522ea43175ff16f8bfcc89d467d974cb2274f9"}, + {file = "coverage-7.3.2-cp310-cp310-win32.whl", hash = "sha256:a889ae02f43aa45032afe364c8ae84ad3c54828c2faa44f3bfcafecb5c96b02f"}, + {file = "coverage-7.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:c0ba320de3fb8c6ec16e0be17ee1d3d69adcda99406c43c0409cb5c41788a611"}, + {file = "coverage-7.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ac8c802fa29843a72d32ec56d0ca792ad15a302b28ca6203389afe21f8fa062c"}, + {file = "coverage-7.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:89a937174104339e3a3ffcf9f446c00e3a806c28b1841c63edb2b369310fd074"}, + {file = "coverage-7.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e267e9e2b574a176ddb983399dec325a80dbe161f1a32715c780b5d14b5f583a"}, + {file = "coverage-7.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2443cbda35df0d35dcfb9bf8f3c02c57c1d6111169e3c85fc1fcc05e0c9f39a3"}, + {file = 
"coverage-7.3.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4175e10cc8dda0265653e8714b3174430b07c1dca8957f4966cbd6c2b1b8065a"}, + {file = "coverage-7.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0cbf38419fb1a347aaf63481c00f0bdc86889d9fbf3f25109cf96c26b403fda1"}, + {file = "coverage-7.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:5c913b556a116b8d5f6ef834038ba983834d887d82187c8f73dec21049abd65c"}, + {file = "coverage-7.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1981f785239e4e39e6444c63a98da3a1db8e971cb9ceb50a945ba6296b43f312"}, + {file = "coverage-7.3.2-cp311-cp311-win32.whl", hash = "sha256:43668cabd5ca8258f5954f27a3aaf78757e6acf13c17604d89648ecc0cc66640"}, + {file = "coverage-7.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10c39c0452bf6e694511c901426d6b5ac005acc0f78ff265dbe36bf81f808a2"}, + {file = "coverage-7.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:4cbae1051ab791debecc4a5dcc4a1ff45fc27b91b9aee165c8a27514dd160836"}, + {file = "coverage-7.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:12d15ab5833a997716d76f2ac1e4b4d536814fc213c85ca72756c19e5a6b3d63"}, + {file = "coverage-7.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c7bba973ebee5e56fe9251300c00f1579652587a9f4a5ed8404b15a0471f216"}, + {file = "coverage-7.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fe494faa90ce6381770746077243231e0b83ff3f17069d748f645617cefe19d4"}, + {file = "coverage-7.3.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6e9589bd04d0461a417562649522575d8752904d35c12907d8c9dfeba588faf"}, + {file = "coverage-7.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d51ac2a26f71da1b57f2dc81d0e108b6ab177e7d30e774db90675467c847bbdf"}, + {file = "coverage-7.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:99b89d9f76070237975b315b3d5f4d6956ae354a4c92ac2388a5695516e47c84"}, + {file = "coverage-7.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:fa28e909776dc69efb6ed975a63691bc8172b64ff357e663a1bb06ff3c9b589a"}, + {file = "coverage-7.3.2-cp312-cp312-win32.whl", hash = "sha256:289fe43bf45a575e3ab10b26d7b6f2ddb9ee2dba447499f5401cfb5ecb8196bb"}, + {file = "coverage-7.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:7dbc3ed60e8659bc59b6b304b43ff9c3ed858da2839c78b804973f613d3e92ed"}, + {file = "coverage-7.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f94b734214ea6a36fe16e96a70d941af80ff3bfd716c141300d95ebc85339738"}, + {file = "coverage-7.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:af3d828d2c1cbae52d34bdbb22fcd94d1ce715d95f1a012354a75e5913f1bda2"}, + {file = "coverage-7.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:630b13e3036e13c7adc480ca42fa7afc2a5d938081d28e20903cf7fd687872e2"}, + {file = "coverage-7.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c9eacf273e885b02a0273bb3a2170f30e2d53a6d53b72dbe02d6701b5296101c"}, + {file = "coverage-7.3.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8f17966e861ff97305e0801134e69db33b143bbfb36436efb9cfff6ec7b2fd9"}, + {file = "coverage-7.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b4275802d16882cf9c8b3d057a0839acb07ee9379fa2749eca54efbce1535b82"}, + {file = 
"coverage-7.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:72c0cfa5250f483181e677ebc97133ea1ab3eb68645e494775deb6a7f6f83901"}, + {file = "coverage-7.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:cb536f0dcd14149425996821a168f6e269d7dcd2c273a8bff8201e79f5104e76"}, + {file = "coverage-7.3.2-cp38-cp38-win32.whl", hash = "sha256:307adb8bd3abe389a471e649038a71b4eb13bfd6b7dd9a129fa856f5c695cf92"}, + {file = "coverage-7.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:88ed2c30a49ea81ea3b7f172e0269c182a44c236eb394718f976239892c0a27a"}, + {file = "coverage-7.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b631c92dfe601adf8f5ebc7fc13ced6bb6e9609b19d9a8cd59fa47c4186ad1ce"}, + {file = "coverage-7.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d3d9df4051c4a7d13036524b66ecf7a7537d14c18a384043f30a303b146164e9"}, + {file = "coverage-7.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f7363d3b6a1119ef05015959ca24a9afc0ea8a02c687fe7e2d557705375c01f"}, + {file = "coverage-7.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2f11cc3c967a09d3695d2a6f03fb3e6236622b93be7a4b5dc09166a861be6d25"}, + {file = "coverage-7.3.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:149de1d2401ae4655c436a3dced6dd153f4c3309f599c3d4bd97ab172eaf02d9"}, + {file = "coverage-7.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:3a4006916aa6fee7cd38db3bfc95aa9c54ebb4ffbfc47c677c8bba949ceba0a6"}, + {file = "coverage-7.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9028a3871280110d6e1aa2df1afd5ef003bab5fb1ef421d6dc748ae1c8ef2ebc"}, + {file = "coverage-7.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9f805d62aec8eb92bab5b61c0f07329275b6f41c97d80e847b03eb894f38d083"}, + {file = "coverage-7.3.2-cp39-cp39-win32.whl", hash = "sha256:d1c88ec1a7ff4ebca0219f5b1ef863451d828cccf889c173e1253aa84b1e07ce"}, + {file = "coverage-7.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b4767da59464bb593c07afceaddea61b154136300881844768037fd5e859353f"}, + {file = "coverage-7.3.2-pp38.pp39.pp310-none-any.whl", hash = "sha256:ae97af89f0fbf373400970c0a21eef5aa941ffeed90aee43650b81f7d7f47637"}, + {file = "coverage-7.3.2.tar.gz", hash = "sha256:be32ad29341b0170e795ca590e1c07e81fc061cb5b10c74ce7203491484404ef"}, +] + +[package.dependencies] +tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""} + +[package.extras] +toml = ["tomli"] + [[package]] name = "cycler" version = "0.12.1" @@ -1090,6 +1191,18 @@ zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""} docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] testing = ["pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-ruff", "zipp (>=3.17)"] +[[package]] +name = "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + [[package]] name = "isort" version = "5.12.0" @@ -1903,6 +2016,25 @@ httpx = ["httpx[http2] 
(>=0.20.0,<1.0.0)"] quart = ["Quart (>=0.18.0,<1.0.0)", "uvicorn[standard] (>=0.20.0,<1.0.0)"] websockets = ["websockets (>=10.0)"] +[[package]] +name = "nonebug" +version = "0.3.5" +description = "nonebot2 test framework" +category = "dev" +optional = false +python-versions = ">=3.8,<4.0" +files = [ + {file = "nonebug-0.3.5-py3-none-any.whl", hash = "sha256:588831b08b3ea42d058874214bedae646e2ab8c1ec4ae1540ff789873107a8fa"}, + {file = "nonebug-0.3.5.tar.gz", hash = "sha256:4d4bf9448cd1cbfaaabaab73dbe4ac8757e86dd92a41ef79cdece8dd61e724e2"}, +] + +[package.dependencies] +asgiref = ">=3.4.0,<4.0.0" +async-asgi-testclient = ">=1.4.8,<2.0.0" +nonebot2 = ">=2.0.0-rc.2,<3.0.0" +pytest = ">=7.0.0,<8.0.0" +typing-extensions = ">=4.0.0,<5.0.0" + [[package]] name = "noneprompt" version = "0.1.9" @@ -2072,6 +2204,22 @@ files = [ docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.1)", "sphinx-autodoc-typehints (>=1.24)"] test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)"] +[[package]] +name = "pluggy" +version = "1.3.0" +description = "plugin and hook calling mechanisms for python" +category = "dev" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pluggy-1.3.0-py3-none-any.whl", hash = "sha256:d89c696a773f8bd377d18e5ecda92b7a3793cbe66c87060a6fb58c7b6e1061f7"}, + {file = "pluggy-1.3.0.tar.gz", hash = "sha256:cf61ae8f126ac6f7c451172cf30e3e43d3ca77615509771b3a984a0730651e12"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + [[package]] name = "pre-commit" version = "3.5.0" @@ -2236,6 +2384,67 @@ files = [ [package.extras] diagrams = ["jinja2", "railroad-diagrams"] +[[package]] +name = "pytest" +version = "7.4.3" +description = "pytest: simple powerful testing with Python" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest-7.4.3-py3-none-any.whl", hash = "sha256:0d009c083ea859a71b76adf7c1d502e4bc170b80a8ef002da5806527b9591fac"}, + {file = "pytest-7.4.3.tar.gz", hash = "sha256:d989d136982de4e3b29dabcc838ad581c64e8ed52c11fbe86ddebd9da0818cd5"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=0.12,<2.0" +tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} + +[package.extras] +testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] + +[[package]] +name = "pytest-asyncio" +version = "0.21.1" +description = "Pytest support for asyncio" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest-asyncio-0.21.1.tar.gz", hash = "sha256:40a7eae6dded22c7b604986855ea48400ab15b069ae38116e8c01238e9eeb64d"}, + {file = "pytest_asyncio-0.21.1-py3-none-any.whl", hash = "sha256:8666c1c8ac02631d7c51ba282e0c69a8a452b211ffedf2599099845da5c5c37b"}, +] + +[package.dependencies] +pytest = ">=7.0.0" + +[package.extras] +docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1.0)"] +testing = ["coverage (>=6.2)", "flaky (>=3.5.0)", "hypothesis (>=5.7.1)", "mypy (>=0.931)", "pytest-trio (>=0.7.0)"] + +[[package]] +name = "pytest-cov" +version = "4.1.0" +description = "Pytest plugin for measuring coverage." 
+category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest-cov-4.1.0.tar.gz", hash = "sha256:3904b13dfbfec47f003b8e77fd5b589cd11904a21ddf1ab38a64f204d6a10ef6"}, + {file = "pytest_cov-4.1.0-py3-none-any.whl", hash = "sha256:6ba70b9e97e69fcc3fb45bfeab2d0a138fb65c4d0d6a41ef33983ad114be8c3a"}, +] + +[package.dependencies] +coverage = {version = ">=5.2.1", extras = ["toml"]} +pytest = ">=4.6" + +[package.extras] +testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"] + [[package]] name = "python-dateutil" version = "2.8.2" @@ -3126,4 +3335,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" python-versions = "^3.9" -content-hash = "b8a71186dbc8a617bfe1ad009ac8ad0c9b2ea6616f718568e42886ed384ca55b" +content-hash = "2cc1b45e3745ae67e03c30bcc4de9e99db916c8b11a26b03d3ce31e2b4e6dab1" diff --git a/pyproject.toml b/pyproject.toml index 9f29f9c..c639028 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -34,6 +34,11 @@ nonebot-plugin-escape-url = "^0.1.0" nb-cli = "^1.2.5" nonebot-plugin-orm = {extras = ["default"], version = "^0.5.0"} +nonebug = "^0.3.5" +pytest = "^7.4.3" +pytest-asyncio = "^0.21.1" +pytest-cov = "^4.1.0" + [tool.black] line-length = 88 target-version = ["py39", "py310", "py311", "py312"] diff --git a/nonebot_plugin_nagabus/mjs/__init__.py b/tests/__init__.py similarity index 100% rename from nonebot_plugin_nagabus/mjs/__init__.py rename to tests/__init__.py diff --git a/tests/conftest.py b/tests/conftest.py new file mode 100644 index 0000000..b019c91 --- /dev/null +++ b/tests/conftest.py @@ -0,0 +1,40 @@ +import pytest +import pytest_asyncio +from nonebug import NONEBOT_INIT_KWARGS + + +def pytest_configure(config: pytest.Config) -> None: + config.stash[NONEBOT_INIT_KWARGS] = { + "log_level": "DEBUG", + "datastore_database_url": "sqlite+aiosqlite:///:memory:", + "sqlalchemy_database_url": "sqlite+aiosqlite:///:memory:", + "alembic_startup_check": False, + "naga_fake_api": True, + } + + +@pytest.fixture(scope="session", autouse=True) +def _prepare_nonebot(): + import nonebot + from nonebot.adapters.onebot.v11 import Adapter + + driver = nonebot.get_driver() + driver.register_adapter(Adapter) + + nonebot.require("nonebot_plugin_nagabus") + + +_orm_inited = False + + +@pytest_asyncio.fixture(autouse=True) +async def _init_dep_plugins(_prepare_nonebot): + from nonebot_plugin_orm import init_orm + from nonebot_plugin_datastore.db import init_db + + global _orm_inited + + if not _orm_inited: + await init_orm() + await init_db() + _orm_inited = True diff --git a/tests/sample_majsoul_paipu.json b/tests/sample_majsoul_paipu.json new file mode 100644 index 0000000..a3f16e2 --- /dev/null +++ b/tests/sample_majsoul_paipu.json @@ -0,0 +1 @@ +{"ver": "2.3", "ref": "230808-2e2c24ee-b480-4789-b689-470aba0ef2e4", "ratingc": "PF4", "rule": {"disp": "玉の間南喰", "aka53": 1, "aka52": 1, "aka51": 1}, "lobby": 0, "dan": ["雀聖★2", "雀豪★2", "雀豪★3", "雀豪★2"], "rate": [4151, 2653, 2930, 2032], "sx": ["C", "C", "C", "C"], "name": ["有毒蚊子", "ムチムチの天使", "INFINI", "无铭金重"], "sc": [13300, -16.7, 57900, 47.9, 29600, 9.6, -800, -40.8], "title": ["玉の間南喰", "2023-08-08 21:47:33"], "log": [[[0, 0, 0], [25000, 25000, 25000, 25000], [39], [32], [42, 11, 11, 47, 42, 22, 14, 41, 12, 28, 16, 33, 43], [34, 52, 31, 44, 15, 18, 36, 36], [43, 47, 41, 60, 28, 22, 60, 11], [27, 19, 39, 33, 43, 43, 18, 14, 42, 12, 34, 46, 43], [13, 15, 23, 17, 36, 28, 28, 33], [39, 46, 42, 23, 60, 12, "r27", 60], [46, 29, 23, 29, 
16, 17, 31, 25, 21, 38, 18, 19, 13], [11, 24, 29, 21, 12, 37, 34, 27], [46, 31, 38, 11, 60, 13, 37, 34], [18, 13, 11, 38, 44, 26, 31, 31, 25, 12, 25, 16, 32], [47, 35, 45, 27, 26, 14, 17, 35], [60, 44, 60, 38, 35, 32, "r11", 60], ["和了", [0, 4600, 0, -2600], [1, 3, 1, "40符2飜2600点", "立直(1飜)", "裏ドラ(1飜)"]]], [[1, 0, 0], [25000, 28600, 25000, 21400], [45, 27], [47, 32], [19, 38, 16, 43, 18, 19, 31, 35, 47, 29, 38, 11, 12], [14, 47, 35, 52, 42, 35, 11, 23, "c131214", 25, 53, 34, "c171618", 44, 46, 23], [29, 31, 43, 11, 60, 47, 60, 47, 19, 19, "533535a35", 23, 34, 60, 60, 60], [24, 43, 21, 18, 14, 42, 36, 33, 14, 44, 27, 38, 34], [16, 33, 26, 45, "3333p33", 27, 18, 23, 11, 39, 31, 29, 25, 24, 47, 39, 15], [42, 21, 44, 24, 43, 60, 45, 60, 60, 60, 60, 60, 34, 60, 60, 60, 14], [28, 17, 29, 15, 39, 37, 11, 26, 38, 16, 22, 24, 31], [33, 23, 28, 27, 24, 46, 32, 41, 13, 42, 22, 17, 43, 26, 22, 44, 28], [11, 31, 29, "r33", 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60], [17, 37, 45, 12, 37, 41, 22, 28, 41, 32, 46, 51, 41], [16, 13, 31, 18, 14, 45, 19, 13, 36, 33, 37, 17, 39, 21, 43], [45, 32, 60, 41, 46, 41, 45, 41, 13, 60, 19, 60, 17, 22, 39], ["和了", [-3000, -6000, 13000, -3000], [2, 2, 2, "跳満3000-6000点", "門前清自摸和(1飜)", "平和(1飜)", "立直(1飜)", "ドラ(3飜)", "裏ドラ(0飜)"]]], [[2, 0, 0], [22000, 22600, 37000, 18400], [37], [], [43, 41, 44, 24, 45, 17, 43, 32, 28, 44, 21, 15, 41], ["p434343", "4141p41", 35, "44p4444", 19, 35, 39, 51, 33, 28], [32, 21, 60, 28, 24, 60, 60, 45, 60, 60], [45, 13, 12, 26, 41, 47, 14, 35, 45, 22, 31, 33, 24], [18, 34, 34, 25, 22, 33, 13, 37, "45p4545", 38, 36], [41, 47, 18, 31, 34, 60, 34, 35, 33, 13], [25, 17, 18, 28, 32, 21, 23, 47, 12, 34, 16, 23, 21], [38, 41, 44, 11, 14, 38, 12, 28, 22, 18], [47, 12, 60, 28, 34, 11, 32, 60, 21, 21], [26, 33, 17, 43, 45, 24, 25, 47, 37, 39, 46, 36, 38], [29, 11, 23, 42, 42, 46, 16, 27, 11], [43, 47, 29, 11, 33, 39, 45, 23, 24], ["和了", [-500, 2000, -1000, -500], [1, 1, 1, "30符2飜500-1000点", "役牌 白(1飜)", "ドラ(1飜)"]]], [[3, 0, 0], [21500, 24600, 36000, 17900], [39], [], [23, 11, 42, 16, 37, 34, 38, 14, 39, 33, 37, 32, 35], [44, 29, 18, 26, 32, 15, 34, 21, 31, 21], [60, 60, 42, 11, 23, 18, 26, 60, 32, 60], [42, 13, 13, 15, 41, 36, 28, 26, 19, 12, 25, 17, 15], [44, 34, 19, 38, 47, 22, 14, 43, 27], [42, 44, 28, 19, 60, 60, 19, 60, 17], [29, 28, 38, 52, 24, 42, 31, 18, 19, 12, 23, 46, 22], [27, 25, 41, 26, 14, 33, 36, 22, 18], [46, 42, 60, 12, 38, 14, 60, 18, 60], [13, 28, 22, 17, 36, 46, 11, 16, 46, 33, 46, 45, 33], [17, 43, 23, 18, "c121113", 29, 16, 44, "c181617", 12], [45, 60, 28, 36, 18, 60, 17, 60, 16, 60], ["和了", [-1500, 0, 0, 1500], [3, 0, 3, "30符1飜1500点", "役牌 發(1飜)"]]], [[3, 1, 0], [20000, 24600, 36000, 19400], [29, 42], [], [25, 31, 22, 15, 38, 31, 27, 26, 16, 37, 29, 45, 38], [34, 29, 43, 27, 21, 44, 16, 33, 47, 36, 31, 38, 36, 28], [45, 38, 60, 34, 29, 60, 60, 31, 60, 31, 60, 29, 15, 60], [31, 28, 26, 41, 22, 15, 23, 17, 41, 36, 18, 11, 37], [37, 23, 45, "41p4141", 26, 12, 44, "c161718", 43, 21, 41, 11, 17, 39, 34, 16], [31, 11, 15, 23, 28, 60, 60, 37, 60, 45, "41k414141", 60, 60, 60, 60, 60], [28, 42, 45, 35, 13, 26, 34, 43, 42, 15, 32, 17, 11], [39, 32, 13, 47, 47, 24, 34, 39, 45, 25, 24, 39, 42, 13, 11], [60, 43, 45, 60, 60, 11, 60, 60, 60, 28, 60, 60, 15, "r17", 60], [18, 21, 25, 44, 35, 23, 29, 19, 51, 41, 32, 36, 32], [33, 21, 23, 18, 46, 18, 24, 12, 14, 47, 52, 44, 28, 12, 46, 38], [44, 33, 29, 41, 60, 19, 51, 60, 60, 60, 25, 60, 60, 60, 60, 60], ["和了", [0, 3900, 0, -2900], [1, 3, 1, "40符2飜2600点", "場風 東(1飜)", "ドラ(1飜)"]]], [[4, 0, 0], [20000, 
28500, 35000, 16500], [12], [35], [17, 47, 18, 36, 17, 16, 26, 39, 13, 13, 15, 26, 34], [31, 23, 24, 11, 17, "c222324", 43, 26, 53, 39, 38, 29, 41, 41], [39, 31, 47, 60, 36, 34, 60, 18, 60, 60, 60, 60, 60, 60], [11, 42, 46, 14, 28, 46, 22, 23, 19, 21, 16, 24, 15], [22, 27, 29, 28, 46, 47, 44, 35, 44, 28, 32, 32, 31, 21], [19, 11, 42, 28, "r22", 60, 60, 60, 60, 60, 60, 60, 60], [41, 31, 12, 37, 38, 27, 25, 22, 36, 45, 18, 41, 32], [36, 18, 14, 47, 13, 17, 45, 43, 25, 35, 16, 29, 23], [45, 22, 32, 31, 47, 41, 41, 18, 43, 45, 35, 36, 36], [34, 26, 43, 18, 12, 32, 29, 22, 39, 47, 39, 34, 33], [42, 38, 33, 38, 19, 33, 14, 34, 12, 16, 45, 13, 45], [43, 29, 47, 42, 22, 19, 34, 18, 34, 34, 32, 45, 60], ["和了", [-2600, 6200, -1300, -1300], [1, 1, 1, "40符3飜1300-2600点", "門前清自摸和(1飜)", "役牌 發(1飜)", "立直(1飜)", "裏ドラ(0飜)"]]], [[5, 0, 0], [17400, 33700, 33700, 15200], [23], [13], [46, 45, 19, 44, 29, 24, 38, 15, 25, 16, 22, 37, 28], [19, 14, 46, 37, 29, 43, 12, 32, 41, 39, 24], [22, 44, 45, 60, 46, 46, 43, 28, 12, 29, 29], [19, 21, 11, 36, 11, 21, 18, 34, 31, 26, 28, 11, 36], [17, 46, 33, 33, 27, 36, 45, 29, 39, 42, 12, 53], [31, 60, 28, 26, 60, "r33", 60, 60, 60, 60, 60], [18, 45, 42, 47, 42, 22, 34, 28, 29, 16, 34, 51, 46], [43, 36, 21, 12, 16, 23, 38, 35, 15, 39, 45], [60, 18, 45, 60, 60, 28, 29, 46, 47, 60, 60], [39, 47, 17, 32, 47, 44, 23, 43, 13, 22, 12, 26, 33], [16, 24, 13, 37, 31, 42, 22, 52, "p474747", 14, 31], [44, 39, 26, 60, 43, 60, 60, 22, 12, 60, 60], ["和了", [-2600, 8800, -2600, -2600], [1, 1, 1, "40符3飜2600点∀", "門前清自摸和(1飜)", "立直(1飜)", "赤ドラ(1飜)", "裏ドラ(0飜)"]]], [[5, 1, 0], [14800, 41500, 31100, 12600], [43], [34], [18, 12, 41, 46, 11, 18, 32, 16, 28, 12, 44, 41, 33], ["p414141", 29, 36, 39, "18p1818", 31, 31, 42, 21, 35, 36], [28, 60, 60, 60, 33, 60, 44, 60, 11, 21, 46], [29, 26, 19, 38, 17, 34, 47, 37, 17, 53, 25, 28, 44], [23, 29, 39, 37, 21, 18, 24, 28, 11, 23, 42, 45], [47, 44, 23, 60, 60, 17, "r28", 60, 60, 60, 60, 60], [44, 21, 46, 37, 27, 42, 15, 47, 15, 18, 13, 43, 33], [45, 44, 45, 36, 31, 46, 38, 16, 24, 47, 33, 43], [44, 60, 27, 21, 18, 13, 43, 47, 42, 60, 45, 45], [32, 21, 23, 17, 25, 27, 11, 13, 19, 15, 14, 27, 22], [41, 19, 36, 22, 34, 14, 14, 19, 26, 12, 51], [60, 32, 11, 60, 25, 17, 23, 21, 22, 26, 36], ["和了", [0, 12900, 0, -11900], [1, 3, 1, "30符4飜11600点", "平和(1飜)", "立直(1飜)", "赤ドラ(1飜)", "裏ドラ(1飜)"]]], [[5, 2, 0], [14800, 53400, 31100, 700], [23], [], [34, 18, 38, 39, 27, 26, 44, 45, 28, 18, 18, 22, 34], [24, 33, 17, 41, 51, 26, "c165117", 17, 19, 42, 13, 27], [44, 45, 38, 39, 41, 33, 22, 60, 60, 60, 60, 60], [33, 31, 16, 22, 29, 33, 31, 28, 38, 41, 27, 36, 47], [14, 43, 19, 43, 15, 24, 37, 23, 38, 45, 37, 14, 22], [47, 60, 60, 60, 22, 41, 31, 31, 60, 60, 60, 14], [24, 21, 47, 21, 52, 12, 46, 47, 29, 26, 43, 25, 32], ["p474747", 17, 39, 32, 46, 19, 11, 42, 44, "2121p21", 25, 45, 18], [32, 60, 12, 60, 39, 29, 60, 60, 19, 44, 43, 60, 60], [14, 53, 39, 11, 12, 41, 24, 43, 37, 34, 35, 44, 16], [31, 36, 35, 35, 13, 22, 33, 13, 21, 42, 11, 13, 38], [41, 44, 43, 31, 39, 11, 16, 60, 60, 60, 60, 60, 22], ["和了", [-1500, 4500, -1500, -1500], [1, 1, 1, "20符3飜1300点∀", "門前清自摸和(1飜)", "平和(1飜)", "ドラ(1飜)"]]]]} \ No newline at end of file diff --git a/tests/test_naga_service.py b/tests/test_naga_service.py new file mode 100644 index 0000000..1e158b6 --- /dev/null +++ b/tests/test_naga_service.py @@ -0,0 +1,42 @@ +import json +from datetime import datetime + +import pytest +from nonebug import App + + +@pytest.mark.asyncio +async def test_service(app: App): + from nonebot_plugin_session import 
Session, SessionLevel + + from nonebot_plugin_nagabus.naga import naga + from nonebot_plugin_nagabus.utils.tz import TZ_TOKYO + from nonebot_plugin_nagabus.data.mjs import _set_get_majsoul_paipu_delegate + + async def get_majsoul_paipu(uuid): + with open("./sample_majsoul_paipu.json", encoding="utf-8") as f: + return json.load(f) + + _set_get_majsoul_paipu_delegate(get_majsoul_paipu) + + session = Session( + bot_id="12345", + bot_type="OneBot V11", + platform="qq", + level=SessionLevel.LEVEL2, + id1="23456", + id2="34567", + ) + + order = await naga.analyze_majsoul( + "231126-23433728-1ce4-4a84-b945-7ab940d15d41", 0, 0, session + ) + assert order.cost_np == 10 + + order = await naga.analyze_tenhou("2023111804gm-0029-0000-1c8568b3", 0, session) + assert order.cost_np == 50 + + cur = datetime.now(tz=TZ_TOKYO) + statistic = await naga.statistic(cur.year, cur.month) + assert len(statistic) == 1 + assert statistic[0].cost_np == 60 diff --git a/tests/utils/__init__.py b/tests/utils/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/utils/ob11_event.py b/tests/utils/ob11_event.py new file mode 100644 index 0000000..97082b0 --- /dev/null +++ b/tests/utils/ob11_event.py @@ -0,0 +1,53 @@ +import time +from random import randint +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from nonebot.adapters.onebot.v11 import GroupMessageEvent, PrivateMessageEvent + +SELF_ID = 12345 + + +def fake_ob11_group_message_event( + content: str, user_id: int = 23456, group_id=34567 +) -> "GroupMessageEvent": + from nonebot.adapters.onebot.v11.event import Sender + from nonebot.adapters.onebot.v11 import Message, GroupMessageEvent + + return GroupMessageEvent( + time=int(time.time()), + self_id=SELF_ID, + message=Message(content), + post_type="message", + message_type="group", + sub_type="", + user_id=user_id, + message_id=randint(1, 10000), + original_message=Message(content), + raw_message=content, + font=0, + sender=Sender(user_id=user_id, nickname=str(user_id)), + group_id=group_id, + ) + + +def fake_ob11_private_message_event( + content: str, user_id: int = 23456 +) -> "PrivateMessageEvent": + from nonebot.adapters.onebot.v11.event import Sender + from nonebot.adapters.onebot.v11 import Message, PrivateMessageEvent + + return PrivateMessageEvent( + time=int(time.time()), + self_id=SELF_ID, + message=Message(content), + post_type="message", + message_type="private", + sub_type="", + user_id=user_id, + message_id=randint(1, 10000), + original_message=Message(content), + raw_message=content, + font=0, + sender=Sender(user_id=user_id, nickname=str(user_id)), + )
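
A usage sketch only, not part of the patch above: the _set_get_majsoul_paipu_delegate hook that this patch adds in nonebot_plugin_nagabus/data/mjs.py is called directly in tests/test_naga_service.py; the same hook could instead be wrapped in a pytest fixture so the fake paipu source is installed before a test and removed afterwards. The fixture name and the canned paipu dict below are illustrative assumptions; resetting the delegate to None restores the real cached-download path, since get_majsoul_paipu only uses the delegate when it is not None.

import pytest


@pytest.fixture
def fake_majsoul_paipu():
    # Hypothetical fixture built on the hook introduced by this patch;
    # the canned paipu content is illustrative, not real game data.
    from nonebot_plugin_nagabus.data.mjs import _set_get_majsoul_paipu_delegate

    async def _delegate(uuid: str):
        # Return canned paipu data instead of hitting majsoul or the DB cache.
        return {
            "title": ["玉の間南喰", "2023-08-08 21:47:33"],
            "name": ["A", "B", "C", "D"],
            "rule": {"disp": "玉の間南喰"},
            "log": [],
        }

    _set_get_majsoul_paipu_delegate(_delegate)
    yield
    # Fall back to the real implementation after the test.
    _set_get_majsoul_paipu_delegate(None)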