#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
VWED.data 模块 - 数据存储和缓存
"""
import json
import logging
import uuid
from typing import Any, Dict, Optional
class VWEDDataModule:
    """VWED.data module - in-memory data storage plus persistent cache helpers.

    The ``get``/``set``/``delete``/``clear``/``keys``/``has`` methods operate on
    a per-instance in-memory dict. The ``*_cache_param`` coroutines persist
    key/value pairs through the project's async SQLAlchemy session using
    soft-deleted ``VWEDDataCacheSplit`` rows (``is_deleted`` flag).
    """

    def __init__(self, script_id: str):
        """
        Args:
            script_id: Identifier of the owning script (kept for bookkeeping;
                not used by the storage operations themselves).
        """
        self.script_id = script_id
        # In-memory key/value store backing the synchronous API below.
        self._storage: Dict[str, Any] = {}

    def get(self, key: str, default=None):
        """Return the in-memory value for ``key``, or ``default`` if absent."""
        return self._storage.get(key, default)

    def set(self, key: str, value: Any):
        """Store ``value`` under ``key`` in the in-memory store."""
        self._storage[key] = value

    def delete(self, key: str):
        """Remove ``key`` from the in-memory store; no-op if it is absent."""
        # pop with a default avoids the membership-test-then-del race/duplication.
        self._storage.pop(key, None)

    def clear(self):
        """Remove every entry from the in-memory store."""
        self._storage.clear()

    def keys(self):
        """Return a list of all keys currently in the in-memory store."""
        return list(self._storage.keys())

    def has(self, key: str) -> bool:
        """Return ``True`` if ``key`` exists in the in-memory store."""
        return key in self._storage

    @staticmethod
    def _serialize(value: Any) -> str:
        """Return ``value`` unchanged if it is already a str, else its JSON encoding."""
        return value if isinstance(value, str) else json.dumps(value)

    async def get_cache_param(self, key: str) -> Optional[str]:
        """
        Fetch a persisted cache value by key.

        Args:
            key: Cache key.

        Returns:
            The stored value (a JSON-formatted string), or ``None`` when no
            active (non-soft-deleted) record exists or its value is empty.

        Raises:
            Exception: database/session errors propagate to the caller.
        """
        from sqlalchemy import select
        from data.models.datacachesplit import VWEDDataCacheSplit
        from data.session import get_async_session

        async with get_async_session() as session:
            query = select(VWEDDataCacheSplit).where(
                VWEDDataCacheSplit.data_key == key,
                VWEDDataCacheSplit.is_deleted == 0,
            )
            result = await session.execute(query)
            record = result.scalar_one_or_none()
            if not record or not record.data_value:
                return None
            return record.data_value

    async def put_cache_param(self, key: str, value: str) -> None:
        """
        Persist a key/value pair in the cache table.

        The pair is durable and can later be read back with
        :meth:`get_cache_param`. An existing active record for the same key is
        updated in place; otherwise a new record is inserted.

        Args:
            key: Cache key.
            value: Cache value; non-string values are JSON-serialized first.

        Note:
            This method never raises (deliberate best-effort contract);
            failures are logged instead of being silently discarded.
        """
        try:
            from sqlalchemy import select, insert, update
            from data.models.datacachesplit import VWEDDataCacheSplit
            from data.session import get_async_session

            async with get_async_session() as session:
                # Look for an active (non-soft-deleted) record with this key.
                query = select(VWEDDataCacheSplit).where(
                    VWEDDataCacheSplit.data_key == key,
                    VWEDDataCacheSplit.is_deleted == 0,
                )
                result = await session.execute(query)
                existing = result.scalar_one_or_none()
                if existing:
                    # Active record found: update its value in place.
                    stmt = update(VWEDDataCacheSplit).where(
                        VWEDDataCacheSplit.id == existing.id
                    ).values(
                        data_value=self._serialize(value),
                    )
                else:
                    # No active record: insert a fresh one.
                    stmt = insert(VWEDDataCacheSplit).values(
                        id=str(uuid.uuid4()),
                        data_key=key,
                        data_value=self._serialize(value),
                        is_deleted=0,
                    )
                await session.execute(stmt)
                await session.commit()
        except Exception:
            # Contract: never raise. Log so persistence failures stay diagnosable.
            logging.getLogger(__name__).exception(
                "put_cache_param failed for key %r", key
            )

    async def clear_cache_param(self, key: str) -> None:
        """
        Soft-delete the cached value for ``key`` (sets ``is_deleted = 1``).

        Args:
            key: Cache key.

        Note:
            This method never raises (deliberate best-effort contract);
            failures are logged instead of being silently discarded.
        """
        try:
            from sqlalchemy import update
            from data.models.datacachesplit import VWEDDataCacheSplit
            from data.session import get_async_session

            async with get_async_session() as session:
                # The UPDATE's WHERE clause already restricts to active records
                # with this key, so a preliminary existence SELECT is redundant:
                # updating zero rows is a harmless no-op.
                stmt = update(VWEDDataCacheSplit).where(
                    VWEDDataCacheSplit.data_key == key,
                    VWEDDataCacheSplit.is_deleted == 0,
                ).values(
                    is_deleted=1,
                )
                await session.execute(stmt)
                await session.commit()
        except Exception:
            # Contract: never raise. Log so deletion failures stay diagnosable.
            logging.getLogger(__name__).exception(
                "clear_cache_param failed for key %r", key
            )

    async def get_all_cache_params(self) -> str:
        """
        Return every active cache entry as one JSON object string.

        Returns:
            A JSON-formatted string mapping each key to its decoded value,
            or the empty string when no active records exist.

        Raises:
            Exception: database/session errors propagate to the caller.
        """
        from sqlalchemy import select
        from data.models.datacachesplit import VWEDDataCacheSplit
        from data.session import get_async_session

        async with get_async_session() as session:
            query = select(VWEDDataCacheSplit).where(
                VWEDDataCacheSplit.is_deleted == 0
            )
            result = await session.execute(query)
            records = result.scalars().all()
            if not records:
                return ""
            # Decode JSON-encoded values; fall back to the raw string when the
            # stored value is not valid JSON (or is None).
            cache_data = {}
            for record in records:
                try:
                    cache_data[record.data_key] = json.loads(record.data_value)
                except (json.JSONDecodeError, TypeError):
                    cache_data[record.data_key] = record.data_value
            return json.dumps(cache_data, ensure_ascii=False)