Commit

Cache Majsoul paipu (game records) in local files
ssttkkl committed Nov 26, 2023
1 parent 0f8b8aa commit 4f21b3f
Showing 13 changed files with 195 additions and 178 deletions.
56 changes: 25 additions & 31 deletions nonebot_plugin_nagabus/data/mjs.py
@@ -1,15 +1,15 @@
import json
from typing import Any, Callable
from collections.abc import Awaitable
from typing import Any, Callable, Optional

import aiofiles
from nonebot import logger
from sqlalchemy import select
from sqlalchemy.orm import Mapped, mapped_column
from nonebot_plugin_localstore import get_cache_dir
from nonebot_plugin_majsoul.paipu import download_paipu

from .base import SqlModel
from ..data.utils import insert
from .utils.session import _use_session
from .utils.atomic_cache import get_atomic_cache


class MajsoulPaipuOrm(SqlModel):
@@ -21,38 +21,32 @@ class MajsoulPaipuOrm(SqlModel):


# Delegate kept so the implementation is easy to mock in unit tests
_get_majsoul_paipu_delegate: Optional[Callable[[str], Awaitable[Any]]] = None
_download_paipu_delegate: Callable[[str], Awaitable[Any]] = download_paipu


def _set_get_majsoul_paipu_delegate(get_majsoul_paipu_delegate):
global _get_majsoul_paipu_delegate
_get_majsoul_paipu_delegate = get_majsoul_paipu_delegate
def _set_download_paipu_delegate(download_paipu_delegate):
global _download_paipu_delegate
_download_paipu_delegate = download_paipu_delegate


async def get_majsoul_paipu(uuid: str):
if _get_majsoul_paipu_delegate is not None:
return await _get_majsoul_paipu_delegate(uuid)

async with _use_session() as sess:
stmt = (
select(MajsoulPaipuOrm).where(MajsoulPaipuOrm.paipu_uuid == uuid).limit(1)
)
res = (await sess.execute(stmt)).scalar_one_or_none()

if res is not None:
logger.opt(colors=True).info(f"Use cached majsoul paipu <y>{uuid}</y>")
return json.loads(res.content)
async def _do_get_majsoul_paipu(uuid: str):
mjs_paipu_dir = get_cache_dir("nonebot_plugin_nagabus") / "mjs_paipu"
mjs_paipu_dir.mkdir(parents=True, exist_ok=True)

paipu_file = mjs_paipu_dir / f"{uuid}.json"
if paipu_file.exists():
logger.opt(colors=True).info(f"Use cached majsoul paipu <y>{uuid}</y>")
async with aiofiles.open(paipu_file, "r", encoding="utf-8") as f:
return json.loads(await f.read())
else:
logger.opt(colors=True).info(f"Downloading majsoul paipu <y>{uuid}</y> ...")
data = await download_paipu(uuid)

stmt = (
insert(MajsoulPaipuOrm)
.values(paipu_uuid=uuid, content=json.dumps(data))
.on_conflict_do_nothing(index_elements=[MajsoulPaipuOrm.paipu_uuid])
)
data = await _download_paipu_delegate(uuid)
async with aiofiles.open(paipu_file, "w+", encoding="utf-8") as f:
await f.write(json.dumps(data))
return data

await sess.execute(stmt)
await sess.commit()

return data
async def get_majsoul_paipu(uuid: str):
return await get_atomic_cache(
f"mjs_paipu_{uuid}", lambda: _do_get_majsoul_paipu(uuid)
)
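
The new mjs.py drops the SQL-backed paipu cache in favor of one JSON file per record under the plugin's cache directory: read the file if it exists, otherwise download via download_paipu and write the result back. A minimal standalone sketch of the same read-or-fetch-and-persist pattern (cached_json and the fetch callable are illustrative names, not part of the plugin):

import json
from pathlib import Path

import aiofiles  # same async file I/O library the plugin uses


async def cached_json(path: Path, fetch) -> dict:
    """Return parsed JSON from `path` if present; otherwise await fetch() and persist the result."""
    if path.exists():
        async with aiofiles.open(path, "r", encoding="utf-8") as f:
            return json.loads(await f.read())
    data = await fetch()
    path.parent.mkdir(parents=True, exist_ok=True)
    async with aiofiles.open(path, "w", encoding="utf-8") as f:
        await f.write(json.dumps(data))
    return data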
108 changes: 46 additions & 62 deletions nonebot_plugin_nagabus/data/naga.py
@@ -1,15 +1,16 @@
import json
from enum import IntEnum
from typing import Optional
from datetime import datetime, timezone

from nonebot import logger
from nonebot_plugin_orm import AsyncSession
from sqlalchemy import ForeignKey, select, update
from sqlalchemy.orm import Mapped, relationship, mapped_column

from nonebot_plugin_nagabus.data.utils.pydantic import PydanticModel

from .base import SqlModel
from .utils import UTCDateTime
from .utils.session import _use_session
from ..naga.model import NagaReport, NagaGameRule, NagaOrderStatus


@@ -28,7 +29,7 @@ class NagaOrderOrm(SqlModel):
source: Mapped[NagaOrderSource]
model_type: Mapped[str]
status: Mapped[NagaOrderStatus]
naga_report: Mapped[Optional[str]] # json of NagaReport
naga_report: Mapped[Optional[NagaReport]] = mapped_column(PydanticModel(NagaReport))
create_time: Mapped[datetime] = mapped_column(UTCDateTime, index=True)
update_time: Mapped[datetime] = mapped_column(UTCDateTime)

@@ -54,20 +55,23 @@ class MajsoulOrderOrm(SqlModel):
)


async def get_orders(t_begin: datetime, t_end: datetime) -> list[NagaOrderOrm]:
async with _use_session() as sess:
class NagaRepository:
def __init__(self, sess: AsyncSession):
self.sess = sess

async def get_orders(
self, t_begin: datetime, t_end: datetime
) -> list[NagaOrderOrm]:
stmt = select(NagaOrderOrm).where(
NagaOrderOrm.create_time >= t_begin,
NagaOrderOrm.create_time < t_end,
NagaOrderOrm.status == NagaOrderStatus.ok,
)
return list((await sess.execute(stmt)).scalars())

return list((await self.sess.execute(stmt)).scalars())

async def get_local_majsoul_order(
majsoul_uuid: str, kyoku: int, honba: int, model_type: str
) -> Optional[NagaOrderOrm]:
async with _use_session() as sess:
async def get_local_majsoul_order(
self, majsoul_uuid: str, kyoku: int, honba: int, model_type: str
) -> Optional[NagaOrderOrm]:
stmt = select(MajsoulOrderOrm).where(
MajsoulOrderOrm.paipu_uuid == majsoul_uuid,
MajsoulOrderOrm.kyoku == kyoku,
@@ -76,7 +80,7 @@ async def get_local_majsoul_order(
)

order_orm: Optional[MajsoulOrderOrm] = (
await sess.execute(stmt)
await self.sess.execute(stmt)
).scalar_one_or_none()
if order_orm is not None:
if (
@@ -93,21 +97,20 @@ async def get_local_majsoul_order(
f"analyze order: {order_orm.naga_haihu_id}, "
f"because it takes over 90 seconds and still not done"
)
await sess.delete(order_orm.order)
await sess.delete(order_orm)
await sess.commit()
await self.sess.delete(order_orm.order)
await self.sess.delete(order_orm)
await self.sess.commit()
return None


async def new_local_majsoul_order(
haihu_id: str,
customer_id: int,
majsoul_uuid: str,
kyoku: int,
honba: int,
model_type: str,
):
async with _use_session() as sess:
async def new_local_majsoul_order(
self,
haihu_id: str,
customer_id: int,
majsoul_uuid: str,
kyoku: int,
honba: int,
model_type: str,
):
order_orm = NagaOrderOrm(
haihu_id=haihu_id,
customer_id=customer_id,
@@ -128,35 +131,20 @@ async def new_local_majsoul_order(
order=order_orm,
)

sess.add(order_orm)
sess.add(majsoul_order_orm)
await sess.commit()

self.sess.add(order_orm)
self.sess.add(majsoul_order_orm)
await self.sess.commit()

async def update_local_majsoul_order(haihu_id: str, report: NagaReport):
async with _use_session() as sess:
stmt = (
update(NagaOrderOrm)
.where(NagaOrderOrm.haihu_id == haihu_id)
.values(
status=NagaOrderStatus.ok,
naga_report=json.dumps(report),
update_time=datetime.now(timezone.utc),
)
)
await sess.execute(stmt)
await sess.commit()


async def get_local_order(haihu_id: str, model_type: str) -> Optional[NagaOrderOrm]:
async with _use_session() as sess:
async def get_local_order(
self, haihu_id: str, model_type: str
) -> Optional[NagaOrderOrm]:
stmt = select(NagaOrderOrm).where(
NagaOrderOrm.haihu_id == haihu_id,
NagaOrderOrm.model_type == model_type,
)

order_orm: Optional[NagaOrderOrm] = (
await sess.execute(stmt)
await self.sess.execute(stmt)
).scalar_one_or_none()
if order_orm is not None:
if (
@@ -171,15 +159,13 @@ async def get_local_order(haihu_id: str, model_type: str) -> Optional[NagaOrderOrm]:
f"Delete tenhou paipu <y>{haihu_id}</y> analyze order "
f"because it takes over 90 seconds and still not done"
)
await sess.delete(order_orm)
await sess.commit()
await self.sess.delete(order_orm)
await self.sess.commit()
return None


async def new_local_order(
haihu_id: str, customer_id: int, rule: NagaGameRule, model_type: str
):
async with _use_session() as sess:
async def new_local_order(
self, haihu_id: str, customer_id: int, rule: NagaGameRule, model_type: str
):
order_orm = NagaOrderOrm(
haihu_id=haihu_id,
customer_id=customer_id,
@@ -191,20 +177,18 @@ async def new_local_order(
update_time=datetime.now(tz=timezone.utc),
)

sess.add(order_orm)
await sess.commit()

self.sess.add(order_orm)
await self.sess.commit()

async def update_local_order(haihu_id: str, report: NagaReport):
async with _use_session() as sess:
async def update_local_order(self, haihu_id: str, report: NagaReport):
stmt = (
update(NagaOrderOrm)
.where(NagaOrderOrm.haihu_id == haihu_id)
.values(
status=NagaOrderStatus.ok,
naga_report=json.dumps(report),
naga_report=report,
update_time=datetime.now(timezone.utc),
)
)
await sess.execute(stmt)
await sess.commit()
await self.sess.execute(stmt)
await self.sess.commit()
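
The module-level helpers in naga.py are folded into a NagaRepository class that takes its AsyncSession from the caller instead of opening one through the now-deleted _use_session helper, and naga_report is stored through the Pydantic column type introduced below rather than as a JSON string. A hedged sketch of what a call site could look like (get_session comes from nonebot-plugin-orm; the plugin's actual wiring may differ):

from datetime import datetime, timedelta, timezone

from nonebot_plugin_orm import get_session

from nonebot_plugin_nagabus.data.naga import NagaRepository


async def recent_ok_orders():
    # The session lifetime is now owned by the caller, not by the repository.
    async with get_session() as sess:
        repo = NagaRepository(sess)
        t_end = datetime.now(timezone.utc)
        return await repo.get_orders(t_end - timedelta(days=1), t_end)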
21 changes: 21 additions & 0 deletions nonebot_plugin_nagabus/data/utils/atomic_cache.py
@@ -0,0 +1,21 @@
from typing import Callable
from collections.abc import Coroutine
from asyncio import Future, create_task

_cache: dict[str, Future] = {}
_cache_consumers: dict[str, int] = {}


async def get_atomic_cache(key: str, get_cache: Callable[[], Coroutine]):
if key not in _cache:
_cache_consumers[key] = 0
_cache[key] = create_task(get_cache())

_cache_consumers[key] += 1
try:
return await _cache[key]
finally:
_cache_consumers[key] -= 1
if _cache_consumers[key] == 0:
del _cache[key]
del _cache_consumers[key]
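
get_atomic_cache deduplicates concurrent fetches for the same key: the first caller creates the task, later callers await the same future, and the entry is evicted once the last consumer finishes. A small demonstration, assuming the helper can be imported outside the bot runtime:

import asyncio

from nonebot_plugin_nagabus.data.utils.atomic_cache import get_atomic_cache

calls = 0


async def expensive() -> int:
    global calls
    calls += 1
    await asyncio.sleep(0.1)  # stand-in for a slow download
    return 42


async def main():
    # Both awaits share one underlying task, so expensive() runs only once.
    a, b = await asyncio.gather(
        get_atomic_cache("demo", expensive),
        get_atomic_cache("demo", expensive),
    )
    assert a == b == 42 and calls == 1


asyncio.run(main())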
21 changes: 21 additions & 0 deletions nonebot_plugin_nagabus/data/utils/pydantic.py
@@ -0,0 +1,21 @@
from pydantic import BaseModel
from sqlalchemy import JSON, TypeDecorator


class PydanticModel(TypeDecorator):
impl = JSON
cache_ok = True

def __init__(self, t_model: type[BaseModel], *args, **kwargs):
super().__init__(*args, **kwargs)
self.t_model = t_model

def process_bind_param(self, value, dialect):
if value is None:
return None
return value.dict()

def process_result_value(self, value, dialect):
if value is None:
return None
return self.t_model.parse_obj(value)
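
PydanticModel is a TypeDecorator over SQLAlchemy's JSON type: it serializes with .dict() on write and rebuilds the model with parse_obj() on read (the Pydantic v1 API). A column declaration in the same style as the naga_report change above, using a hypothetical Profile payload model (SqlModel and PydanticModel are the plugin's own classes):

from typing import Optional

from pydantic import BaseModel
from sqlalchemy.orm import Mapped, mapped_column

from nonebot_plugin_nagabus.data.base import SqlModel
from nonebot_plugin_nagabus.data.utils.pydantic import PydanticModel


class Profile(BaseModel):  # hypothetical payload model for illustration
    name: str
    rating: float


class DemoUserOrm(SqlModel):
    __tablename__ = "demo_user"

    id: Mapped[int] = mapped_column(primary_key=True)
    # Stored as JSON in the database, surfaced as a Profile instance in Python.
    profile: Mapped[Optional[Profile]] = mapped_column(PydanticModel(Profile))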
25 changes: 0 additions & 25 deletions nonebot_plugin_nagabus/data/utils/session.py

This file was deleted.

3 changes: 1 addition & 2 deletions nonebot_plugin_nagabus/naga/api.py
@@ -97,9 +97,8 @@ async def analyze_tenhou(
self,
haihu_id: str,
seat: int,
rule: NagaGameRule,
model_type: Union[
Sequence[NagaHanchanModelType], Sequence[NagaHanchanModelType]
Sequence[NagaHanchanModelType], Sequence[NagaTonpuuModelType]
],
) -> AnalyzeTenhou:
data = {
