#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
VWED库位相关内置模块
提供在线脚本中库位操作相关功能
"""
import json
from typing import Any, Dict
from sqlalchemy import and_, select
from utils.logger import get_logger
from utils.json_parser import safe_parse_dict
from data.session import get_async_session
from data.models.operate_point import OperatePoint
from data.models.operate_point_layer import OperatePointLayer
from data.models.storage_area import StorageArea
logger = get_logger("services.online_script.location_module")
class VWEDLocationModule:
"""VWED库位模块"""
def __init__(self, script_id: str):
self.script_id = script_id
        logger.debug(f"Initializing location module, script ID: {script_id}")
async def is_point_exist(self, point_name: str) -> bool:
"""
检查站点是否存在
Args:
point_name: 站点名
Returns:
bool: True表示站点存在False表示不存在
"""
try:
async with get_async_session() as db:
result = await db.execute(
select(OperatePoint).filter(
OperatePoint.station_name == point_name,
OperatePoint.is_deleted == False
)
)
point = result.scalars().first()
return point is not None
except Exception as e:
logger.error(f"检查站点是否存在失败: {str(e)}")
raise
async def check_site_existed_by_site_id(self, site_id: str) -> bool:
"""
检查库位是否存在
Args:
site_id: 库位名称
Returns:
bool: True表示库位存在False表示不存在
"""
try:
async with get_async_session() as db:
result = await db.execute(
select(OperatePointLayer).filter(
OperatePointLayer.layer_name == site_id,
OperatePointLayer.is_deleted == False
)
)
layer = result.scalars().first()
return layer is not None
except Exception as e:
logger.error(f"检查库位是否存在失败: {str(e)}")
raise
async def check_site_group_existed_by_group_name(self, group_name: str) -> bool:
"""
检查库区是否存在
Args:
group_name: 库区名称
Returns:
bool: True表示库区存在False表示不存在
"""
try:
async with get_async_session() as db:
result = await db.execute(
select(StorageArea).filter(
StorageArea.area_name == group_name,
StorageArea.is_deleted == False
)
)
area = result.scalars().first()
return area is not None
except Exception as e:
logger.error(f"检查库区是否存在失败: {str(e)}")
raise
def _build_field_conditions(self, field_attr, field_values):
"""构建单个字段的查询条件支持列表AND逻辑和模糊匹配"""
if not isinstance(field_values, list):
field_values = [field_values]
and_conditions = []
for value in field_values:
if isinstance(value, str) and value.startswith("'") and value.endswith("'") and "%" in value:
                # Fuzzy match: value given in the form '%pattern%' (wrapped in single quotes)
pattern = value.strip("'")
and_conditions.append(field_attr.like(pattern))
else:
                # Exact match
and_conditions.append(field_attr == value)
        # Combine all conditions with AND
if len(and_conditions) == 1:
return and_conditions[0]
else:
return and_(*and_conditions)
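    # Example of the value convention handled above (hypothetical values):
    #   _build_field_conditions(OperatePointLayer.layer_name, "'A-%'")   -> layer_name LIKE 'A-%'
    #   _build_field_conditions(OperatePointLayer.layer_name, "A-01-02") -> layer_name == 'A-01-02'
    # A list of values yields the AND of the individual conditions.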
def _build_query_filters(self, conditions: Dict[str, Any]):
"""构建查询过滤条件"""
query = select(OperatePointLayer).filter(OperatePointLayer.is_deleted == False)
        # Field mapping configuration
field_mappings = {
"site_ids": OperatePointLayer.layer_name,
"area": OperatePointLayer.area_name,
"group_names": OperatePointLayer.area_name,
"content": OperatePointLayer.goods_content,
"tags": OperatePointLayer.tags,
"no": OperatePointLayer.layer_name,
"type": OperatePointLayer.location_type,
"locked_by": OperatePointLayer.locked_by,
}
        # Apply conditions for the mapped fields
for field_name, field_attr in field_mappings.items():
if field_name in conditions:
condition = self._build_field_conditions(field_attr, conditions[field_name])
query = query.filter(condition)
        # Handle boolean fields
if "filled" in conditions:
filled = conditions["filled"]
if isinstance(filled, bool):
query = query.filter(OperatePointLayer.is_occupied == filled)
elif isinstance(filled, list):
                # If a list is given, combine with AND (a list is uncommon for a boolean field)
bool_conditions = []
for val in filled:
if isinstance(val, bool):
bool_conditions.append(OperatePointLayer.is_occupied == val)
if bool_conditions:
query = query.filter(and_(*bool_conditions))
if "locked" in conditions:
locked = conditions["locked"]
if isinstance(locked, bool):
query = query.filter(OperatePointLayer.is_locked == locked)
elif isinstance(locked, list):
bool_conditions = []
for val in locked:
if isinstance(val, bool):
bool_conditions.append(OperatePointLayer.is_locked == val)
if bool_conditions:
query = query.filter(and_(*bool_conditions))
return query
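    # Sketch of a conditions dict accepted by _build_query_filters; the keys come from
    # field_mappings above, the concrete values are hypothetical:
    #   {
    #       "group_names": "AreaA",     # area_name, exact match
    #       "tags": "'%cold%'",         # quoted '%...%' value -> LIKE '%cold%'
    #       "filled": False,            # is_occupied
    #       "locked": False             # is_locked
    #   }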
async def find_sites_by_condition(self, conditions: str, sort: str) -> str:
"""
根据条件获取库位
Args:
conditions: 查询条件JSON字符串
sort: 排序方式"ASC"升序"DESC"降序
Returns:
str: 库位列表JSON字符串如果没有找到则返回"null"
"""
try:
condition_dict = safe_parse_dict(conditions, self.script_id)
if condition_dict is None:
return "null"
async with get_async_session() as db:
query = self._build_query_filters(condition_dict)
                # Sorting
if sort.upper() == "DESC":
query = query.order_by(OperatePointLayer.layer_name.desc())
else:
query = query.order_by(OperatePointLayer.layer_name.asc())
result = await db.execute(query)
results = result.scalars().all()
if not results:
return "null"
                # Convert to the response format
site_list = []
for site in results:
site_data = {
"area": site.area_name or "",
"content": site.goods_content or "",
"depth": "", # 需要根据实际需求映射
"disabled": site.is_disabled,
"filled": 1 if site.is_occupied else 0,
"group_names": site.area_name or "",
"level": "", # 需要根据实际需求映射
"locked": 1 if site.is_locked else 0,
"locked_by": site.locked_by or "",
"no": site.layer_name or "",
"preparing": False, # 需要根据实际业务逻辑确定
"row_num": "", # 需要根据实际需求映射
"site_id": site.layer_name or "",
"tags": site.tags or "",
"type": site.location_type,
"working": False # 需要根据实际业务逻辑确定
}
site_list.append(site_data)
return json.dumps(site_list, ensure_ascii=False)
except Exception as e:
logger.error(f"根据条件获取库位失败: {str(e)}")
raise
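    # Usage sketch for find_sites_by_condition (hypothetical script_id and values,
    # to be awaited inside an async context):
    #   module = VWEDLocationModule("script-001")
    #   raw = await module.find_sites_by_condition(
    #       json.dumps({"group_names": "AreaA", "filled": False}), "ASC")
    #   sites = [] if raw == "null" else json.loads(raw)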
async def find_available_sites_by_condition(self, conditions: str, sort: str) -> str:
"""
根据条件获取有效库位未禁用且同步成功的库位
Args:
conditions: 查询条件JSON字符串
sort: 排序方式"ASC"升序"DESC"降序
Returns:
str: 库位列表JSON字符串如果没有找到则返回"null"
"""
try:
condition_dict = safe_parse_dict(conditions, self.script_id)
if condition_dict is None:
return "null"
async with get_async_session() as db:
query = self._build_query_filters(condition_dict)
                # Add filters for valid locations: not disabled (and synced successfully)
                query = query.filter(
                    OperatePointLayer.is_disabled == False
                    # This assumes a sync_failed field exists; if it does not, the condition can be dropped
                    # OperatePointLayer.sync_failed == False
                )
                # Sorting
if sort.upper() == "DESC":
query = query.order_by(OperatePointLayer.layer_name.desc())
else:
query = query.order_by(OperatePointLayer.layer_name.asc())
result = await db.execute(query)
results = result.scalars().all()
if not results:
return "null"
                # Convert to the response format
site_list = []
for site in results:
site_data = {
"area": site.area_name or "",
"content": site.goods_content or "",
"depth": "",
"disabled": site.is_disabled,
"filled": 1 if site.is_occupied else 0,
"group_names": site.area_name or "",
"level": "",
"locked": 1 if site.is_locked else 0,
"locked_by": site.locked_by or "",
"no": site.layer_name or "",
"preparing": False,
"row_num": "",
"site_id": site.layer_name or "",
"tags": site.tags or "",
"type": site.location_type,
"working": False
}
site_list.append(site_data)
return json.dumps(site_list, ensure_ascii=False)
except Exception as e:
logger.error(f"根据条件获取有效库位失败: {str(e)}")
raise
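    # Same call pattern as find_sites_by_condition, but rows with is_disabled == True are
    # filtered out, e.g. (hypothetical values):
    #   raw = await module.find_available_sites_by_condition(
    #       json.dumps({"tags": "'%cold%'"}), "DESC")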
async def find_available_sites_by_ext_fields(self, conditions: str) -> str:
"""
根据扩展字段获取有效库位
Args:
conditions: 查询条件JSON字符串包含扩展字段条件
Returns:
str: 库位列表JSON字符串如果没有找到则返回"null"
"""
try:
from utils.json_parser import safe_parse_list
condition_list = safe_parse_list(conditions, self.script_id)
if condition_list is None:
return "null"
async with get_async_session() as db:
query = select(OperatePointLayer).filter(
OperatePointLayer.is_deleted == False,
OperatePointLayer.is_disabled == False
)
                # Handle extended-field conditions
                from sqlalchemy import text
                for i, condition in enumerate(condition_list):
                    attr_name = condition.get("attribute_name")
                    attr_value = condition.get("attribute_value")
                    if not attr_name:
                        continue
                    # Normalize the field name so the JSON path stays valid:
                    # names that are purely numeric or contain special characters must be quoted
                    if attr_name.isdigit() or not attr_name.isidentifier():
                        json_path = f'$.extended_fields."{attr_name}".value'
                    else:
                        json_path = f"$.extended_fields.{attr_name}.value"
if isinstance(attr_value, str) and "%" in attr_value:
                        # Fuzzy match - use JSON_UNQUOTE to strip the quotes from the JSON string value
param_name = f"pattern_{i}"
sql_condition = f"JSON_UNQUOTE(JSON_EXTRACT(config_json, '{json_path}')) LIKE :{param_name}"
query = query.filter(text(sql_condition).params(**{param_name: attr_value}))
else:
                        # Exact match
param_name = f"value_{i}"
sql_condition = f"JSON_EXTRACT(config_json, '{json_path}') = :{param_name}"
query = query.filter(text(sql_condition).params(**{param_name: attr_value}))
result = await db.execute(query)
results = result.scalars().all()
if not results:
return "null"
                # Convert to the response format
site_list = []
for site in results:
site_data = {
"area": site.area_name,
"content": site.goods_content,
"filled": 1 if site.is_occupied else 0,
"group_name": site.area_name,
"locked": 1 if site.is_locked else 0,
"locked_by": site.locked_by,
"site_id": site.layer_name,
"site_name": site.layer_name,
"tags": site.tags,
"type": site.location_type
}
site_list.append(site_data)
return json.dumps(site_list, ensure_ascii=False)
except Exception as e:
logger.error(f"根据扩展字段获取有效库位失败: {str(e)}")
raise
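    # The conditions argument is a JSON array of extended-field filters; the field names
    # below are hypothetical:
    #   [
    #       {"attribute_name": "pallet_type", "attribute_value": "EUR"},
    #       {"attribute_name": "batch_no", "attribute_value": "%2025%"}
    #   ]
    # The first entry is translated into
    #   JSON_EXTRACT(config_json, '$.extended_fields.pallet_type.value') = :value_0
    # and the second uses JSON_UNQUOTE(...) LIKE :pattern_1 because the value contains '%'.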
async def update_sites_by_condition(self, conditions: str, values: str) -> int:
"""
根据条件更新库位
Args:
conditions: 更新条件JSON字符串
values: 修改的字段JSON字符串
Returns:
int: 更新成功的数据库记录行数
"""
try:
condition_dict = safe_parse_dict(conditions, self.script_id)
value_dict = safe_parse_dict(values, self.script_id)
if condition_dict is None or value_dict is None:
return 0
async with get_async_session() as db:
from sqlalchemy import update
                # Build the update payload
                update_data = {}
                # Map incoming field names to model column names
field_mapping = {
"area": "area_name",
"content": "goods_content",
"disabled": "is_disabled",
"filled": "is_occupied",
"group_name": "area_name",
"locked": "is_locked",
"locked_by": "locked_by",
"site_id": "layer_name",
"tags": "tags",
"type": "location_type",
"preparing": None, # 暂时不支持
"working": None, # 暂时不支持
}
for key, value in value_dict.items():
if key in field_mapping and field_mapping[key] is not None:
update_data[field_mapping[key]] = value
if not update_data:
return 0
                # Check whether the storage area name is being updated; if so, related tables must be synchronized
area_name_update = update_data.get("area_name")
if area_name_update:
                    # First collect the old storage area names, used to synchronize the related tables
query_old_areas = self._build_query_filters(condition_dict)
result = await db.execute(query_old_areas)
affected_sites = result.scalars().all()
old_area_names = set()
for site in affected_sites:
if site.area_name:
old_area_names.add(site.area_name)
                # Build the update statement
filtered_query = self._build_query_filters(condition_dict)
                # Execute the update on the OperatePointLayer table
                stmt = update(OperatePointLayer).values(**update_data)
                # Apply the same filter conditions as the query
                stmt = stmt.where(filtered_query.whereclause)
result = await db.execute(stmt)
affected_rows = result.rowcount
                # If the storage area name was changed, synchronize the related tables
if area_name_update and old_area_names:
logger.debug(f"同步库区名称更新: {old_area_names} -> {area_name_update}")
# 更新 OperatePoint 表中的 area_name
from data.models.operate_point import OperatePoint
for old_area_name in old_area_names:
await db.execute(
update(OperatePoint)
.where(
and_(
OperatePoint.area_name == old_area_name,
OperatePoint.is_deleted == False
)
)
.values(area_name=area_name_update)
)
                    # Update area_name in the StorageArea table (imported at module level)
for old_area_name in old_area_names:
await db.execute(
update(StorageArea)
.where(
and_(
StorageArea.area_name == old_area_name,
StorageArea.is_deleted == False
)
)
.values(area_name=area_name_update)
)
await db.commit()
return affected_rows
except Exception as e:
logger.error(f"根据条件更新库位失败: {str(e)}")
raise
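    # Usage sketch (hypothetical values): lock all free, unlocked locations of one area and
    # record the owner; keys in `values` are translated via field_mapping above.
    #   affected = await module.update_sites_by_condition(
    #       json.dumps({"group_names": "AreaA", "filled": False, "locked": False}),
    #       json.dumps({"locked": True, "locked_by": "script-001"}))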
async def update_site_ext_field_by_id_and_ext_field_name(self, conditions: str) -> None:
"""
根据条件更新库位扩展字段值
Args:
conditions: 库位ID扩展字段名和更新值的JSON字符串
"""
try:
from utils.json_parser import safe_parse_list
condition_list = safe_parse_list(conditions, self.script_id)
if condition_list is None:
return
async with get_async_session() as db:
for condition in condition_list:
site_id = condition.get("site_id")
ext_field_name = condition.get("ext_field_name")
update_value = condition.get("update_value")
if not site_id or not ext_field_name:
continue
                    # Look up the storage location
result = await db.execute(
select(OperatePointLayer).filter(
OperatePointLayer.layer_name == site_id,
OperatePointLayer.is_deleted == False
)
)
site = result.scalars().first()
if site:
                        # Parse the existing config_json
                        config_data = {}
                        if site.config_json:
                            try:
                                config_data = json.loads(site.config_json)
                            except (json.JSONDecodeError, TypeError):
                                config_data = {}
                        # Update the extended field; keep the {"value": ...} structure that
                        # find_available_sites_by_ext_fields reads via its JSON path
                        if "extended_fields" not in config_data:
                            config_data["extended_fields"] = {}
                        existing_field = config_data["extended_fields"].get(ext_field_name)
                        if isinstance(existing_field, dict) and not isinstance(update_value, dict):
                            existing_field["value"] = update_value
                        else:
                            config_data["extended_fields"][ext_field_name] = update_value
                        # Save the updated config_json
                        site.config_json = json.dumps(config_data, ensure_ascii=False)
                # Commit so the modified config_json values are persisted
                await db.commit()
except Exception as e:
logger.error(f"根据条件更新库位扩展字段值失败: {str(e)}")
raise
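    # The conditions argument is a JSON array; each entry names a location, an extended
    # field and the value to set (field names are hypothetical):
    #   [
    #       {"site_id": "A-01-01", "ext_field_name": "pallet_type", "update_value": "EUR"}
    #   ]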
__all__ = ['VWEDLocationModule']
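# Minimal end-to-end sketch for manual testing. It assumes the surrounding project
# (database session, models and utils) is importable and a database is reachable; the
# script_id, area name and condition values below are hypothetical.
if __name__ == "__main__":
    import asyncio

    async def _demo():
        module = VWEDLocationModule("demo-script")
        if await module.check_site_group_existed_by_group_name("AreaA"):
            raw = await module.find_available_sites_by_condition(
                json.dumps({"group_names": "AreaA", "filled": False}), "ASC"
            )
            sites = [] if raw == "null" else json.loads(raw)
            print(f"Free locations in AreaA: {len(sites)}")

    asyncio.run(_demo())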