Add post process station and related resources (#195)

* Add post process station and related resources

- Created JSON configuration for post_process_station and its child post_process_deck.
- Added YAML definitions for post_process_station, bottle carriers, bottles, and deck resources.
- Implemented Python classes for bottle carriers, bottles, decks, and warehouses to manage resources in the post-processing station.
- Established a factory method for creating warehouses with customizable dimensions and layouts.
- Defined the structure and behavior of the post_process_deck and its associated warehouses.

* feat(post_process): add post_process_station and related warehouse functionality

- Introduced post_process_station.json to define the post-processing station structure.
- Implemented post_process_warehouse.py to create warehouse configurations with customizable layouts.
- Added warehouses.py for specific warehouse configurations (4x3x1).
- Updated post_process_station.yaml to reflect the new module paths for OpcUaClient (a brief usage sketch follows this list).
- Refactored bottle carriers and bottles YAML files to point to the new module paths.
- Adjusted deck.yaml to align with the new organizational structure for post_process_deck.
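
A minimal usage sketch of the reworked client described above, assuming a reachable OPC UA server and the huairou node CSV/JSON shipped in this commit; the server URL, config path, and the import path of OpcUaClient are placeholders, not part of the change itself:

```python
# The module path of OpcUaClient is assumed here; adjust to where the class lives in this repo.
from unilabos.device_comms.opcua_client.opcua_client import OpcUaClient

client = OpcUaClient(
    url="opc.tcp://192.168.1.88:4840/freeopcua/server/",  # placeholder OPC UA endpoint
    config_path="opcua_huairou.json",                     # registers nodes from the CSV and creates workflows
    use_subscription=True,                                 # data-change subscriptions feed the value cache
    cache_timeout=5.0,                                     # seconds before an on-demand read is considered stale
    subscription_interval=500,                             # subscription publishing interval in ms
)

# Workflows declared in the JSON config become instance methods.
client.trigger_grab_action(reaction_tank_number=2, raw_tank_number=3)

# Cached read (kept fresh by the subscription) vs. a forced read from the server.
print(client.get_node_value("grab_complete"))
print(client.get_node_value("grab_complete", force_read=True))

client.disconnect()
```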
Authored by Roy on 2025-12-23 18:40:09 +08:00, committed by GitHub
commit 5fec753fb9 · parent acbaff7bb7
16 changed files with 3874 additions and 261 deletions

View File

@@ -5,7 +5,6 @@ from typing import Any, Union, List, Dict, Callable, Optional, Tuple
 from pydantic import BaseModel
 from opcua import Client, ua
-from opcua.ua import NodeClass
 import pandas as pd
 import os
@@ -13,7 +12,7 @@ from unilabos.device_comms.opcua_client.node.uniopcua import Base as OpcUaNodeBa
 from unilabos.device_comms.opcua_client.node.uniopcua import Variable, Method, NodeType, DataType
 from unilabos.device_comms.universal_driver import UniversalDriver
 from unilabos.utils.log import logger
+from unilabos.devices.workstation.post_process.decks import post_process_deck

 class OpcUaNode(BaseModel):
     name: str
@@ -117,6 +116,8 @@ class BaseClient(UniversalDriver):
     _variables_to_find: Dict[str, Dict[str, Any]] = {}
     _name_mapping: Dict[str, str] = {}  # 英文名到中文名的映射
     _reverse_mapping: Dict[str, str] = {}  # 中文名到英文名的映射
+    # 直接缓存已找到的 ua.Node 对象,避免因字符串 NodeId 格式导致订阅失败
+    _found_node_objects: Dict[str, Any] = {}

     def __init__(self):
         super().__init__()
@@ -125,6 +126,9 @@
         # 初始化名称映射字典
         self._name_mapping = {}
         self._reverse_mapping = {}
+        # 初始化线程锁(在子类中会被重新创建,这里提供默认实现)
+        import threading
+        self._client_lock = threading.RLock()

     def _set_client(self, client: Optional[Client]) -> None:
         if client is None:
@@ -152,15 +156,24 @@
         if not self.client:
             raise ValueError('client is not connected')

-        logger.info('开始查找节点...')
+        logger.info(f'开始查找 {len(self._variables_to_find)}节点...')
         try:
             # 获取根节点
             root = self.client.get_root_node()
             objects = root.get_child(["0:Objects"])

+            # 记录查找前的状态
+            before_count = len(self._node_registry)
+
             # 查找节点
             self._find_nodes_recursive(objects)

+            # 记录查找后的状态
+            after_count = len(self._node_registry)
+            newly_found = after_count - before_count
+            logger.info(f"本次查找新增 {newly_found} 个节点,当前共 {after_count}")
+
             # 检查是否所有节点都已找到
             not_found = []
             for var_name, var_info in self._variables_to_find.items():
@@ -168,9 +181,13 @@
                     not_found.append(var_name)

             if not_found:
-                logger.warning(f"以下节点未找到: {', '.join(not_found)}")
+                logger.warning(f"⚠ 以下 {len(not_found)}节点未找到: {', '.join(not_found[:10])}{'...' if len(not_found) > 10 else ''}")
+                logger.warning(f"提示:请检查这些节点名称是否与服务器的 BrowseName 完全匹配(包括大小写、空格等)")
+                # 提供一个示例来帮助调试
+                if not_found:
+                    logger.info(f"尝试在服务器中查找第一个未找到的节点 '{not_found[0]}' 的相似节点...")
             else:
-                logger.info("所有节点均已找到")
+                logger.info(f"✓ 所有 {len(self._variables_to_find)}节点均已找到并注册")

         except Exception as e:
             logger.error(f"查找节点失败: {e}")
@@ -188,17 +205,20 @@
             var_info = self._variables_to_find[node_name]
             node_type = var_info.get("node_type")
             data_type = var_info.get("data_type")
+            node_id_str = str(node.nodeid)

             # 根据节点类型创建相应的对象
             if node_type == NodeType.VARIABLE:
-                self._node_registry[node_name] = Variable(self.client, node_name, str(node.nodeid), data_type)
-                logger.info(f"找到变量节点: {node_name}")
+                self._node_registry[node_name] = Variable(self.client, node_name, node_id_str, data_type)
+                logger.info(f"找到变量节点: '{node_name}', NodeId: {node_id_str}, DataType: {data_type}")
+                # 缓存真实的 ua.Node 对象用于订阅
+                self._found_node_objects[node_name] = node
             elif node_type == NodeType.METHOD:
                 # 对于方法节点需要获取父节点ID
                 parent_node = node.get_parent()
                 parent_node_id = str(parent_node.nodeid)
-                self._node_registry[node_name] = Method(self.client, node_name, str(node.nodeid), parent_node_id, data_type)
-                logger.info(f"找到方法节点: {node_name}")
+                self._node_registry[node_name] = Method(self.client, node_name, node_id_str, parent_node_id, data_type)
+                logger.info(f"找到方法节点: '{node_name}', NodeId: {node_id_str}, ParentId: {parent_node_id}")

         # 递归处理子节点
         for child in node.get_children():
@@ -296,13 +316,17 @@
         if name in self._name_mapping:
             chinese_name = self._name_mapping[name]
             if chinese_name in self._node_registry:
-                return self._node_registry[chinese_name]
+                node = self._node_registry[chinese_name]
+                logger.debug(f"使用节点: '{name}' -> '{chinese_name}', NodeId: {node.node_id}")
+                return node
             elif chinese_name in self._variables_to_find:
                 logger.warning(f"节点 {chinese_name} (英文名: {name}) 尚未找到,尝试重新查找")
                 if self.client:
                     self._find_nodes()
                     if chinese_name in self._node_registry:
-                        return self._node_registry[chinese_name]
+                        node = self._node_registry[chinese_name]
+                        logger.info(f"重新查找成功: '{chinese_name}', NodeId: {node.node_id}")
+                        return node
                 raise ValueError(f'节点 {chinese_name} (英文名: {name}) 未注册或未找到')

         # 直接使用原始名称查找
@@ -312,9 +336,14 @@
             if self.client:
                 self._find_nodes()
                 if name in self._node_registry:
-                    return self._node_registry[name]
+                    node = self._node_registry[name]
+                    logger.info(f"重新查找成功: '{name}', NodeId: {node.node_id}")
+                    return node
+            logger.error(f"❌ 节点 '{name}' 未注册或未找到。已注册节点: {list(self._node_registry.keys())[:5]}...")
            raise ValueError(f'节点 {name} 未注册或未找到')
-        return self._node_registry[name]
+        node = self._node_registry[name]
+        logger.debug(f"使用节点: '{name}', NodeId: {node.node_id}")
+        return node

     def get_node_registry(self) -> Dict[str, OpcUaNodeBase]:
         return self._node_registry
@@ -335,12 +364,13 @@
             return self

         logger.info(f'开始注册 {len(node_list)} 个节点...')
+        new_nodes_count = 0
         for node in node_list:
             if node is None:
                 continue

             if node.name in self._node_registry:
-                logger.info(f'节点 {node.name} 已存在')
+                logger.debug(f'节点 "{node.name}" 已存在于注册表')
                 exist = self._node_registry[node.name]
                 if exist.type != node.node_type:
                     raise ValueError(f'节点 {node.name} 类型 {node.node_type} 与已存在的类型 {exist.type} 不一致')
@@ -351,9 +381,10 @@
                 "node_type": node.node_type,
                 "data_type": node.data_type
             }
-            logger.info(f'添加节点 {node.name} 到待查找列表')
+            new_nodes_count += 1
+            logger.debug(f'添加节点 "{node.name}" ({node.node_type}) 到待查找列表')

-        logger.info('节点注册完成')
+        logger.info(f'节点注册完成:新增 {new_nodes_count} 个待查找节点,总计 {len(self._variables_to_find)}')

         # 如果客户端已连接,立即开始查找
         if self.client:
@@ -470,7 +501,7 @@
             val = result_dict.get("value")
             err = result_dict.get("error")
-            print(f"读取 {node_name} 返回值 = {val} (类型: {type(val).__name__}), 错误 = {err}")
+            print(f"读取 {node_name} 返回值 = {val} (类型: {type(val).__name__}, 错误 = {err}")
             return val, err
         except Exception as e:
             print(f"解析读取结果失败: {e}, 原始结果: {result_str}")
@@ -518,15 +549,25 @@
             raise ValueError("必须提供write_nodes参数")

         def execute_init_function(use_node: Callable[[str], OpcUaNodeBase]) -> bool:
-            if isinstance(write_nodes, list):
-                # 处理节点列表
-                for node_name in write_nodes:
-                    # 尝试从参数中获取同名参数的值
-                    current_value = True  # 默认值
-                    if hasattr(self, '_workflow_params') and node_name in self._workflow_params:
-                        current_value = self._workflow_params[node_name]
-                        print(f"初始化函数: 从参数获取值 {node_name} = {current_value}")
+            """根据 _workflow_params 为各节点写入真实数值。
+
+            约定:
+            - write_nodes 为 list 时: 节点名 == 参数名,从 _workflow_params[node_name] 取值;
+            - write_nodes 为 dict 时:
+              * value 为字符串且在 _workflow_params 中: 当作参数名去取值;
+              * 否则 value 视为常量直接写入。
+            """
+            params = getattr(self, "_workflow_params", {}) or {}
+            if isinstance(write_nodes, list):
+                # 节点列表形式: 节点名与参数名一致
+                for node_name in write_nodes:
+                    if node_name not in params:
+                        print(f"初始化函数: 参数中未找到 {node_name}, 跳过写入")
+                        continue
+                    current_value = params[node_name]
                     print(f"初始化函数: 写入节点 {node_name} = {current_value}")
                     input_json = json.dumps({"node_name": node_name, "value": current_value})
                     result_str = self.write_node(input_json)
@@ -538,13 +579,14 @@
                     except Exception as e:
                         print(f"初始化函数: 解析写入结果失败: {e}, 原始结果: {result_str}")
             elif isinstance(write_nodes, dict):
-                # 处理节点字典,使用指定的值
+                # 映射形式: 节点名 -> 参数名或常量
                 for node_name, node_value in write_nodes.items():
-                    # 检查值是否是字符串类型的参数名
-                    current_value = node_value
-                    if isinstance(node_value, str) and hasattr(self, '_workflow_params') and node_value in self._workflow_params:
-                        current_value = self._workflow_params[node_value]
+                    if isinstance(node_value, str) and node_value in params:
+                        current_value = params[node_value]
                         print(f"初始化函数: 从参数获取值 {node_value} = {current_value}")
+                    else:
+                        current_value = node_value
+                        print(f"初始化函数: 使用常量值 写入 {node_name} = {current_value}")

                     print(f"初始化函数: 写入节点 {node_name} = {current_value}")
                     input_json = json.dumps({"node_name": node_name, "value": current_value})
@@ -672,20 +714,20 @@
            condition_nodes: 条件节点列表 [节点名1, 节点名2]
        """
        def execute_start_function(use_node: Callable[[str], OpcUaNodeBase]) -> bool:
-            # 直接处理写入节点
+            """开始函数: 写入触发节点, 然后轮询条件节点直到满足停止条件。"""
+            params = getattr(self, "_workflow_params", {}) or {}
+
+            # 先处理写入节点(触发位等)
             if write_nodes:
                 if isinstance(write_nodes, list):
-                    # 处理节点列表,默认值都是True
-                    for i, node_name in enumerate(write_nodes):
-                        # 尝试获取与节点对应的参数值
-                        param_name = f"write_{i}"
-
-                        # 获取参数值(如果有)
-                        current_value = True  # 默认值
-                        if hasattr(self, '_workflow_params') and param_name in self._workflow_params:
-                            current_value = self._workflow_params[param_name]
-
-                        # 直接写入节点
+                    # 列表形式: 节点名与参数名一致, 若无参数则直接写 True
+                    for node_name in write_nodes:
+                        if node_name in params:
+                            current_value = params[node_name]
+                        else:
+                            current_value = True
                         print(f"直接写入节点 {node_name} = {current_value}")
                         input_json = json.dumps({"node_name": node_name, "value": current_value})
                         result_str = self.write_node(input_json)
@@ -697,14 +739,13 @@
                         except Exception as e:
                             print(f"解析直接写入结果失败: {e}, 原始结果: {result_str}")
                 elif isinstance(write_nodes, dict):
-                    # 处理节点字典,值是指定的
+                    # 字典形式: 节点名 -> 常量值(如 True/False)
                     for node_name, node_value in write_nodes.items():
-                        # 尝试获取参数值(如果节点名与参数名匹配)
-                        current_value = node_value  # 使用指定的默认值
-                        if hasattr(self, '_workflow_params') and node_name in self._workflow_params:
-                            current_value = self._workflow_params[node_name]
-
-                        # 直接写入节点
+                        if node_name in params:
+                            current_value = params[node_name]
+                        else:
+                            current_value = node_value
                         print(f"直接写入节点 {node_name} = {current_value}")
                         input_json = json.dumps({"node_name": node_name, "value": current_value})
                         result_str = self.write_node(input_json)
@@ -732,6 +773,7 @@
                 # 直接读取节点
                 result_str = self.read_node(node_name)
                 try:
+                    time.sleep(1)
                     result_str = result_str.replace("'", '"')
                     result_dict = json.loads(result_str)
                     read_res = result_dict.get("value")
@@ -1035,6 +1077,8 @@
        读取节点值的便捷方法
        返回包含result字段的字典
        """
+        # 使用锁保护客户端访问
+        with self._client_lock:
            try:
                node = self.use_node(node_name)
                value, error = node.read()
@@ -1068,6 +1112,8 @@
        eg:'{\"node_name\":\"反应罐号码\",\"value\":\"2\"}'
        返回JSON格式的字符串包含操作结果
        """
+        # 使用锁保护客户端访问
+        with self._client_lock:
            try:
                # 解析JSON格式的输入
                if not isinstance(json_input, str):
@@ -1128,13 +1174,50 @@ class BaseClient(UniversalDriver):
class OpcUaClient(BaseClient): class OpcUaClient(BaseClient):
def __init__(self, url: str, config_path: str = None, username: str = None, password: str = None, refresh_interval: float = 1.0): def __init__(
self,
url: str,
deck: Optional[Union[post_process_deck, Dict[str, Any]]] = None,
config_path: str = None,
username: str = None,
password: str = None,
use_subscription: bool = True,
cache_timeout: float = 5.0,
subscription_interval: int = 500,
*args,
**kwargs,
):
# 降低OPCUA库的日志级别 # 降低OPCUA库的日志级别
import logging import logging
logging.getLogger("opcua").setLevel(logging.WARNING) logging.getLogger("opcua").setLevel(logging.WARNING)
super().__init__() super().__init__()
# ===== 关键修改:参照 BioyondWorkstation 处理 deck =====
super().__init__()
# 处理 deck 参数
if deck is None:
self.deck = post_process_deck(setup=True)
elif isinstance(deck, dict):
self.deck = post_process_deck(setup=True)
elif hasattr(deck, 'children'):
self.deck = deck
else:
raise ValueError(f"deck 参数类型不支持: {type(deck)}")
if self.deck is None:
raise ValueError("Deck 配置不能为空")
# 统计仓库信息
warehouse_count = 0
if hasattr(self.deck, 'children'):
warehouse_count = len(self.deck.children)
logger.info(f"Deck 初始化完成,加载 {warehouse_count} 个资源")
# OPC UA 客户端初始化
client = Client(url) client = Client(url)
if username and password: if username and password:
@@ -1142,144 +1225,391 @@ class OpcUaClient(BaseClient):
client.set_password(password) client.set_password(password)
self._set_client(client) self._set_client(client)
self._connect()
# 节点值缓存和刷新相关属性 # 订阅相关属性
self._node_values = {} # 缓存节点值 self._use_subscription = use_subscription
self._refresh_interval = refresh_interval # 刷新间隔(秒) self._subscription = None
self._refresh_running = False self._subscription_handles = {}
self._refresh_thread = None self._subscription_interval = subscription_interval
# 缓存相关属性
self._node_values = {} # 修改为支持时间戳的缓存结构
self._cache_timeout = cache_timeout
# 连接状态监控
self._connection_check_interval = 30.0 # 连接检查间隔(秒)
self._connection_monitor_running = False
self._connection_monitor_thread = None
# 添加线程锁保护OPC UA客户端的并发访问
import threading
self._client_lock = threading.RLock()
# 连接到服务器
self._connect()
# 如果提供了配置文件路径,则加载配置并注册工作流 # 如果提供了配置文件路径,则加载配置并注册工作流
if config_path: if config_path:
self.load_config(config_path) self.load_config(config_path)
# 启动节点值刷新线程 # 启动连接监控
self.start_node_refresh() self._start_connection_monitor()
def _register_nodes_as_attributes(self):
"""将所有节点注册为实例属性可以通过self.node_name访问""" def _connect(self) -> None:
for node_name, node in self._node_registry.items(): """连接到OPC UA服务器"""
# 检查是否有对应的英文名称 logger.info('尝试连接到 OPC UA 服务器...')
eng_name = self._reverse_mapping.get(node_name) if self.client:
if eng_name: try:
# 如果有对应的英文名称,使用英文名称作为属性名 self.client.connect()
attr_name = eng_name logger.info('✓ 客户端已连接!')
# 连接后开始查找节点
if self._variables_to_find:
self._find_nodes()
# 如果启用订阅模式,设置订阅
if self._use_subscription:
self._setup_subscriptions()
else: else:
# 如果没有对应的英文名称,使用原始名称,但替换空格和特殊字符 logger.info("订阅模式已禁用,将使用按需读取模式")
attr_name = node_name.replace(' ', '_').replace('-', '_')
# 创建获取节点值的属性方法,使用中文名称获取节点值
def create_property_getter(node_key):
def getter(self):
# 优先从缓存获取值
if node_key in self._node_values:
return self._node_values[node_key]
# 缓存中没有则直接读取
value, _ = self.use_node(node_key).read()
return value
return getter
# 使用property装饰器将方法注册为类属性
setattr(OpcUaClient, attr_name, property(create_property_getter(node_name)))
logger.info(f"已注册节点 '{node_name}' 为属性 '{attr_name}'")
def refresh_node_values(self):
"""刷新所有节点的值到缓存"""
if not self.client:
logger.warning("客户端未初始化,无法刷新节点值")
return
try:
# 简单检查连接状态,如果不连接会抛出异常
self.client.get_namespace_array()
except Exception as e: except Exception as e:
logger.warning(f"客户端连接异常,无法刷新节点值: {e}") logger.error(f'客户端连接失败: {e}')
raise
else:
raise ValueError('客户端未初始化')
class SubscriptionHandler:
"""freeopcua订阅处理器必须实现 datachange_notification 方法"""
def __init__(self, outer):
self.outer = outer
def datachange_notification(self, node, val, data):
# 委托给外层类的处理函数
try:
self.outer._on_subscription_datachange(node, val, data)
except Exception as e:
logger.error(f"订阅数据回调处理失败: {e}")
# 可选:事件通知占位,避免库调用时报缺失
def event_notification(self, event):
pass
def _setup_subscriptions(self):
"""设置 OPC UA 订阅"""
if not self.client or not self._use_subscription:
return return
with self._client_lock:
try:
logger.info(f"开始设置订阅 (发布间隔: {self._subscription_interval}ms)...")
# 创建订阅
handler = OpcUaClient.SubscriptionHandler(self)
self._subscription = self.client.create_subscription(
self._subscription_interval,
handler
)
# 为所有变量节点创建监控项
subscribed_count = 0
skipped_count = 0
for node_name, node in self._node_registry.items(): for node_name, node in self._node_registry.items():
# 只为变量节点创建订阅
if node.type == NodeType.VARIABLE and node.node_id:
try: try:
if hasattr(node, 'read'): # 优先使用在查找阶段缓存的真实 ua.Node 对象
value, error = node.read() ua_node = self._found_node_objects.get(node_name)
if not error: if ua_node is None:
self._node_values[node_name] = value ua_node = self.client.get_node(node.node_id)
#logger.debug(f"已刷新节点 '{node_name}' 的值: {value}") handle = self._subscription.subscribe_data_change(ua_node)
self._subscription_handles[node_name] = handle
subscribed_count += 1
logger.debug(f"✓ 已订阅节点: {node_name}")
except Exception as e: except Exception as e:
logger.error(f"刷新节点 '{node_name}' 失败: {e}") skipped_count += 1
logger.warning(f"✗ 订阅节点 {node_name} 失败: {e}")
else:
skipped_count += 1
def get_node_value(self, name): logger.info(f"订阅设置完成: 成功 {subscribed_count} 个, 跳过 {skipped_count}")
"""获取节点值,支持中文名和英文名"""
# 如果提供的是英文名,转换为中文名 except Exception as e:
logger.error(f"设置订阅失败: {e}")
traceback.print_exc()
# 订阅失败时回退到按需读取模式
self._use_subscription = False
logger.warning("订阅模式设置失败,已自动切换到按需读取模式")
def _on_subscription_datachange(self, node, val, data):
"""订阅数据变化处理器(供内部 SubscriptionHandler 调用)"""
try:
node_id = str(node.nodeid)
current_time = time.time()
# 查找对应的节点名称
for node_name, node_obj in self._node_registry.items():
if node_obj.node_id == node_id:
self._node_values[node_name] = {
'value': val,
'timestamp': current_time,
'source': 'subscription'
}
logger.debug(f"订阅更新: {node_name} = {val}")
break
except Exception as e:
logger.error(f"处理订阅数据失败: {e}")
def get_node_value(self, name, use_cache=True, force_read=False):
"""
获取节点值(智能缓存版本)
参数:
name: 节点名称(支持中文名或英文名)
use_cache: 是否使用缓存
force_read: 是否强制从服务器读取(忽略缓存)
"""
# 处理名称映射
if name in self._name_mapping: if name in self._name_mapping:
chinese_name = self._name_mapping[name] chinese_name = self._name_mapping[name]
# 优先从缓存获取值 elif name in self._node_registry:
if chinese_name in self._node_values: chinese_name = name
return self._node_values[chinese_name] else:
# 缓存中没有则直接读取 raise ValueError(f"未找到名称为 '{name}' 的节点")
# 如果强制读取,直接从服务器读取
if force_read:
with self._client_lock:
value, _ = self.use_node(chinese_name).read() value, _ = self.use_node(chinese_name).read()
# 更新缓存
self._node_values[chinese_name] = {
'value': value,
'timestamp': time.time(),
'source': 'forced_read'
}
return value return value
# 如果提供的是中文名,直接使用
elif name in self._node_registry:
# 优先从缓存获取值
if name in self._node_values:
return self._node_values[name]
# 缓存中没有则直接读取
value, _ = self.use_node(name).read()
return value
else:
raise ValueError(f"未找到名称为 '{name}' 的节点")
def set_node_value(self, name, value): # 检查缓存
"""设置节点值,支持中文名和英文名""" if use_cache and chinese_name in self._node_values:
# 如果提供的是英文名,转换为中文名 cache_entry = self._node_values[chinese_name]
if name in self._name_mapping: cache_age = time.time() - cache_entry['timestamp']
chinese_name = self._name_mapping[name]
node = self.use_node(chinese_name)
# 如果提供的是中文名,直接使用
elif name in self._node_registry:
node = self.use_node(name)
else:
raise ValueError(f"未找到名称为 '{name}' 的节点")
# 写入值 # 如果是订阅模式,缓存永久有效(由订阅更新)
error = node.write(value) # 如果是按需读取模式,检查缓存超时
if cache_entry.get('source') == 'subscription' or cache_age < self._cache_timeout:
logger.debug(f"从缓存读取: {chinese_name} = {cache_entry['value']} (age: {cache_age:.2f}s, source: {cache_entry.get('source', 'unknown')})")
return cache_entry['value']
# 缓存过期或不存在,从服务器读取
with self._client_lock:
try:
value, error = self.use_node(chinese_name).read()
if not error: if not error:
# 更新缓存 # 更新缓存
if hasattr(node, 'name'): self._node_values[chinese_name] = {
self._node_values[node.name] = value 'value': value,
'timestamp': time.time(),
'source': 'on_demand_read'
}
return value
else:
logger.warning(f"读取节点 {chinese_name} 失败")
return None
except Exception as e:
logger.error(f"读取节点 {chinese_name} 出错: {e}")
return None
def set_node_value(self, name, value):
"""
设置节点值
写入成功后会立即更新本地缓存
"""
# 处理名称映射
if name in self._name_mapping:
chinese_name = self._name_mapping[name]
elif name in self._node_registry:
chinese_name = name
else:
raise ValueError(f"未找到名称为 '{name}' 的节点")
with self._client_lock:
try:
node = self.use_node(chinese_name)
error = node.write(value)
if not error:
# 写入成功,立即更新缓存
self._node_values[chinese_name] = {
'value': value,
'timestamp': time.time(),
'source': 'write'
}
logger.debug(f"写入成功: {chinese_name} = {value}")
return True return True
else:
logger.warning(f"写入节点 {chinese_name} 失败")
return False
except Exception as e:
logger.error(f"写入节点 {chinese_name} 出错: {e}")
return False return False
def _refresh_worker(self): def _check_connection(self) -> bool:
"""节点值刷新线程的工作函数""" """检查连接状态"""
self._refresh_running = True
logger.info(f"节点值刷新线程已启动,刷新间隔: {self._refresh_interval}")
while self._refresh_running:
try: try:
self.refresh_node_values() with self._client_lock:
if self.client:
# 尝试获取命名空间数组来验证连接
self.client.get_namespace_array()
return True
except Exception as e: except Exception as e:
logger.error(f"节点值刷新过程出错: {e}") logger.warning(f"连接检查失败: {e}")
return False
return False
# 等待下一次刷新 def _connection_monitor_worker(self):
time.sleep(self._refresh_interval) """连接监控线程工作函数"""
self._connection_monitor_running = True
logger.info(f"连接监控线程已启动 (检查间隔: {self._connection_check_interval}秒)")
def start_node_refresh(self): reconnect_attempts = 0
"""启动节点值刷新线程""" max_reconnect_attempts = 5
if self._refresh_thread is not None and self._refresh_thread.is_alive():
logger.warning("节点值刷新线程已在运行") while self._connection_monitor_running:
try:
# 检查连接状态
if not self._check_connection():
logger.warning("检测到连接断开,尝试重新连接...")
reconnect_attempts += 1
if reconnect_attempts <= max_reconnect_attempts:
try:
# 尝试重新连接
with self._client_lock:
if self.client:
try:
self.client.disconnect()
except:
pass
self.client.connect()
logger.info("✓ 重新连接成功")
# 重新设置订阅
if self._use_subscription:
self._setup_subscriptions()
reconnect_attempts = 0
except Exception as e:
logger.error(f"重新连接失败 (尝试 {reconnect_attempts}/{max_reconnect_attempts}): {e}")
time.sleep(5) # 重连失败后等待5秒
else:
logger.error(f"达到最大重连次数 ({max_reconnect_attempts}),停止重连")
self._connection_monitor_running = False
else:
# 连接正常,重置重连计数
reconnect_attempts = 0
except Exception as e:
logger.error(f"连接监控出错: {e}")
# 等待下次检查
time.sleep(self._connection_check_interval)
def _start_connection_monitor(self):
"""启动连接监控线程"""
if self._connection_monitor_thread is not None and self._connection_monitor_thread.is_alive():
logger.warning("连接监控线程已在运行")
return return
import threading import threading
self._refresh_thread = threading.Thread(target=self._refresh_worker, daemon=True) self._connection_monitor_thread = threading.Thread(
self._refresh_thread.start() target=self._connection_monitor_worker,
daemon=True,
name="OpcUaConnectionMonitor"
)
self._connection_monitor_thread.start()
def stop_node_refresh(self): def _stop_connection_monitor(self):
"""停止节点值刷新线程""" """停止连接监控线程"""
self._refresh_running = False self._connection_monitor_running = False
if self._refresh_thread and self._refresh_thread.is_alive(): if self._connection_monitor_thread and self._connection_monitor_thread.is_alive():
self._refresh_thread.join(timeout=2.0) self._connection_monitor_thread.join(timeout=2.0)
logger.info("节点值刷新线程已停止") logger.info("连接监控线程已停止")
def read_node(self, node_name: str) -> str:
"""
读取节点值的便捷方法(使用缓存)
返回JSON格式字符串
"""
try:
# 使用get_node_value方法自动处理缓存
value = self.get_node_value(node_name, use_cache=True)
# 获取缓存信息
chinese_name = self._name_mapping.get(node_name, node_name)
cache_info = self._node_values.get(chinese_name, {})
result = {
"value": value,
"error": False,
"node_name": node_name,
"timestamp": time.time(),
"cache_age": time.time() - cache_info.get('timestamp', time.time()),
"source": cache_info.get('source', 'unknown')
}
return json.dumps(result)
except Exception as e:
logger.error(f"读取节点 {node_name} 失败: {e}")
result = {
"value": None,
"error": True,
"node_name": node_name,
"error_message": str(e),
"timestamp": time.time()
}
return json.dumps(result)
def get_cache_stats(self) -> Dict[str, Any]:
"""获取缓存统计信息"""
current_time = time.time()
stats = {
'total_cached_nodes': len(self._node_values),
'subscription_nodes': 0,
'on_demand_nodes': 0,
'expired_nodes': 0,
'cache_timeout': self._cache_timeout,
'using_subscription': self._use_subscription
}
for node_name, cache_entry in self._node_values.items():
source = cache_entry.get('source', 'unknown')
cache_age = current_time - cache_entry['timestamp']
if source == 'subscription':
stats['subscription_nodes'] += 1
elif source in ['on_demand_read', 'forced_read', 'write']:
stats['on_demand_nodes'] += 1
if cache_age > self._cache_timeout:
stats['expired_nodes'] += 1
return stats
def print_cache_stats(self):
"""打印缓存统计信息"""
stats = self.get_cache_stats()
print("\n" + "="*80)
print("缓存统计信息")
print("="*80)
print(f"总缓存节点数: {stats['total_cached_nodes']}")
print(f"订阅模式: {'启用' if stats['using_subscription'] else '禁用'}")
print(f" - 订阅更新节点: {stats['subscription_nodes']}")
print(f" - 按需读取节点: {stats['on_demand_nodes']}")
print(f" - 已过期节点: {stats['expired_nodes']}")
print(f"缓存超时时间: {stats['cache_timeout']}")
print("="*80 + "\n")
def load_config(self, config_path: str) -> None: def load_config(self, config_path: str) -> None:
"""从JSON配置文件加载并注册工作流""" """从JSON配置文件加载并注册工作流"""
@@ -1289,42 +1619,111 @@ class OpcUaClient(BaseClient):
# 处理节点注册 # 处理节点注册
if "register_node_list_from_csv_path" in config_data: if "register_node_list_from_csv_path" in config_data:
# 获取配置文件所在目录
config_dir = os.path.dirname(os.path.abspath(config_path)) config_dir = os.path.dirname(os.path.abspath(config_path))
# 处理CSV路径如果是相对路径则相对于配置文件所在目录
if "path" in config_data["register_node_list_from_csv_path"]: if "path" in config_data["register_node_list_from_csv_path"]:
csv_path = config_data["register_node_list_from_csv_path"]["path"] csv_path = config_data["register_node_list_from_csv_path"]["path"]
if not os.path.isabs(csv_path): if not os.path.isabs(csv_path):
# 转换为绝对路径
csv_path = os.path.join(config_dir, csv_path) csv_path = os.path.join(config_dir, csv_path)
config_data["register_node_list_from_csv_path"]["path"] = csv_path config_data["register_node_list_from_csv_path"]["path"] = csv_path
# 直接使用字典
self.register_node_list_from_csv_path(**config_data["register_node_list_from_csv_path"]) self.register_node_list_from_csv_path(**config_data["register_node_list_from_csv_path"])
if self.client and self._variables_to_find:
logger.info("CSV加载完成开始查找服务器节点...")
self._find_nodes()
# 处理工作流创建 # 处理工作流创建
if "create_flow" in config_data: if "create_flow" in config_data:
# 直接传递字典列表
self.create_workflow_from_json(config_data["create_flow"]) self.create_workflow_from_json(config_data["create_flow"])
# 将工作流注册为实例方法
self.register_workflows_as_methods() self.register_workflows_as_methods()
# 将所有节点注册为属性 # 将所有节点注册为属性
self._register_nodes_as_attributes() self._register_nodes_as_attributes()
# 打印统计信息
found_count = len(self._node_registry)
total_count = len(self._variables_to_find)
if found_count < total_count:
logger.warning(f"节点查找完成:找到 {found_count}/{total_count} 个节点")
else:
logger.info(f"✓ 节点查找完成:所有 {found_count} 个节点均已找到")
# 如果使用订阅模式,重新设置订阅(确保新节点被订阅)
if self._use_subscription and found_count > 0:
self._setup_subscriptions()
logger.info(f"成功从 {config_path} 加载配置") logger.info(f"成功从 {config_path} 加载配置")
except Exception as e: except Exception as e:
logger.error(f"加载配置文件 {config_path} 失败: {e}") logger.error(f"加载配置文件 {config_path} 失败: {e}")
traceback.print_exc() traceback.print_exc()
def disconnect(self): def disconnect(self):
# 停止刷新线程 """断开连接并清理资源"""
self.stop_node_refresh() logger.info("正在断开连接...")
# 停止连接监控
self._stop_connection_monitor()
# 删除订阅
if self._subscription:
try:
with self._client_lock:
self._subscription.delete()
logger.info("订阅已删除")
except Exception as e:
logger.warning(f"删除订阅失败: {e}")
# 断开客户端连接
if self.client: if self.client:
try:
with self._client_lock:
self.client.disconnect() self.client.disconnect()
logger.info("OPC UA client disconnected") logger.info("OPC UA 客户端已断开连接")
except Exception as e:
logger.error(f"断开连接失败: {e}")
def _register_nodes_as_attributes(self):
"""将所有节点注册为实例属性"""
for node_name, node in self._node_registry.items():
if not node.node_id or node.node_id == "":
logger.warning(f"⚠ 节点 '{node_name}' 的 node_id 为空,跳过注册为属性")
continue
eng_name = self._reverse_mapping.get(node_name)
attr_name = eng_name if eng_name else node_name.replace(' ', '_').replace('-', '_')
def create_property_getter(node_key):
def getter(self):
return self.get_node_value(node_key, use_cache=True)
return getter
setattr(OpcUaClient, attr_name, property(create_property_getter(node_name)))
logger.debug(f"已注册节点 '{node_name}' 为属性 '{attr_name}'")
def post_init(self, ros_node):
"""ROS2 节点就绪后的初始化"""
if not (hasattr(self, 'deck') and self.deck):
return
if not (hasattr(ros_node, 'resource_tracker') and ros_node.resource_tracker):
logger.warning("resource_tracker 不存在,无法注册 deck")
return
# 1. 本地注册(必需)
ros_node.resource_tracker.add_resource(self.deck)
# 2. 上传云端
try:
from unilabos.ros.nodes.base_device_node import ROS2DeviceNode
ROS2DeviceNode.run_async_func(
ros_node.update_resource,
True,
resources=[self.deck]
)
logger.info("Deck 已上传到云端")
except Exception as e:
logger.error(f"上传失败: {e}")
if __name__ == '__main__': if __name__ == '__main__':
@@ -1338,8 +1737,8 @@ if __name__ == '__main__':
# 创建OPC UA客户端并加载配置 # 创建OPC UA客户端并加载配置
try: try:
client = OpcUaClient( client = OpcUaClient(
url="opc.tcp://localhost:4840/freeopcua/server/", # 替换为实际的OPC UA服务器地址 url="opc.tcp://192.168.1.88:4840/freeopcua/server/", # 替换为实际的OPC UA服务器地址
config_path=config_path # 传入配置文件路径 config_path="D:\\Uni-Lab-OS\\unilabos\\device_comms\\opcua_client\\opcua_huairou.json" # 传入配置文件路径
) )
# 列出所有已注册的工作流 # 列出所有已注册的工作流
@@ -1349,7 +1748,9 @@ if __name__ == '__main__':
# 测试trigger_grab_action工作流 - 使用英文参数名 # 测试trigger_grab_action工作流 - 使用英文参数名
print("\n测试trigger_grab_action工作流 - 使用英文参数名:") print("\n测试trigger_grab_action工作流 - 使用英文参数名:")
client.trigger_grab_action(reaction_tank_number=2, raw_tank_number=3) client.trigger_grab_action(reaction_tank_number=2, raw_tank_number=2)
# client.set_node_value("reaction_tank_number", 2)
# 读取节点值 - 使用英文节点名 # 读取节点值 - 使用英文节点名
grab_complete = client.get_node_value("grab_complete") grab_complete = client.get_node_value("grab_complete")

View File

@@ -3,7 +3,7 @@ from enum import Enum
 from abc import ABC, abstractmethod
 from typing import Tuple, Union, Optional, Any, List
-from opcua import Client, Node
+from opcua import Client, Node, ua
 from opcua.ua import NodeId, NodeClass, VariantType
@@ -47,23 +47,68 @@ class Base(ABC):
def _get_node(self) -> Node: def _get_node(self) -> Node:
if self._node is None: if self._node is None:
try: try:
# 检查是否是NumericNodeId(ns=X;i=Y)格式 # 尝试多种 NodeId 字符串格式解析,兼容不同服务器/库的输出
if "NumericNodeId" in self._node_id: # 可能的格式示例: 'ns=2;i=1234', 'ns=2;s=SomeString',
# 从字符串中提取命名空间和标识符 # 'StringNodeId(ns=4;s=OPC|变量名)', 'NumericNodeId(ns=2;i=1234)' 等
import re import re
match = re.search(r'ns=(\d+);i=(\d+)', self._node_id)
if match: nid = self._node_id
ns = int(match.group(1)) # 如果已经是 NodeId/Node 对象(库用户可能传入),直接使用
identifier = int(match.group(2)) try:
from opcua.ua import NodeId as UaNodeId
if isinstance(nid, UaNodeId):
self._node = self._client.get_node(nid)
return self._node
except Exception:
# 若导入或类型判断失败,则继续下一步
pass
# 直接以字符串形式处理
if isinstance(nid, str):
nid = nid.strip()
# 处理包含类名的格式,如 'StringNodeId(ns=4;s=...)' 或 'NumericNodeId(ns=2;i=...)'
# 提取括号内的内容
match_wrapped = re.match(r'(String|Numeric|Byte|Guid|TwoByteNode|FourByteNode)NodeId\((.*)\)', nid)
if match_wrapped:
# 提取括号内的实际 node_id 字符串
nid = match_wrapped.group(2).strip()
# 常见短格式 'ns=2;i=1234' 或 'ns=2;s=SomeString'
if re.match(r'^ns=\d+;[is]=', nid):
self._node = self._client.get_node(nid)
else:
# 尝试提取 ns 和 i 或 s
# 对于字符串标识符,可能包含特殊字符,使用非贪婪匹配
m_num = re.search(r'ns=(\d+);i=(\d+)', nid)
m_str = re.search(r'ns=(\d+);s=(.+?)(?:\)|$)', nid)
if m_num:
ns = int(m_num.group(1))
identifier = int(m_num.group(2))
node_id = NodeId(identifier, ns) node_id = NodeId(identifier, ns)
self._node = self._client.get_node(node_id) self._node = self._client.get_node(node_id)
elif m_str:
ns = int(m_str.group(1))
identifier = m_str.group(2).strip()
# 对于字符串标识符,直接使用字符串格式
node_id_str = f"ns={ns};s={identifier}"
self._node = self._client.get_node(node_id_str)
else: else:
raise ValueError(f"无法解析节点ID: {self._node_id}") # 回退:尝试直接传入字符串(有些实现接受其它格式)
try:
self._node = self._client.get_node(self._node_id)
except Exception as e:
# 输出更详细的错误信息供调试
print(f"获取节点失败(尝试直接字符串): {self._node_id}, 错误: {e}")
raise
else: else:
# 直接使用节点ID字符串 # 非字符串,尝试直接使用
self._node = self._client.get_node(self._node_id) self._node = self._client.get_node(self._node_id)
except Exception as e: except Exception as e:
print(f"获取节点失败: {self._node_id}, 错误: {e}") print(f"获取节点失败: {self._node_id}, 错误: {e}")
# 添加额外提示,帮助定位 BadNodeIdUnknown 问题
print("提示: 请确认该 node_id 是否来自当前连接的服务器地址空间," \
"以及 CSV/配置中名称与服务器 BrowseName 是否匹配。")
raise raise
return self._node return self._node
@@ -104,7 +149,56 @@ class Variable(Base):
def write(self, value: Any) -> bool: def write(self, value: Any) -> bool:
try: try:
# 如果声明了数据类型,则尝试转换并使用对应的 Variant 写入
coerced = value
try:
if self._data_type is not None:
# 基于声明的数据类型做简单类型转换
dt = self._data_type
if dt in (DataType.SBYTE, DataType.BYTE, DataType.INT16, DataType.UINT16,
DataType.INT32, DataType.UINT32, DataType.INT64, DataType.UINT64):
# 数值类型 -> int
if isinstance(value, str):
coerced = int(value)
else:
coerced = int(value)
elif dt in (DataType.FLOAT, DataType.DOUBLE):
if isinstance(value, str):
coerced = float(value)
else:
coerced = float(value)
elif dt == DataType.BOOLEAN:
if isinstance(value, str):
v = value.strip().lower()
if v in ("true", "1", "yes", "on"):
coerced = True
elif v in ("false", "0", "no", "off"):
coerced = False
else:
coerced = bool(value)
else:
coerced = bool(value)
elif dt == DataType.STRING or dt == DataType.BYTESTRING or dt == DataType.DATETIME:
coerced = str(value)
# 使用 ua.Variant 明确指定 VariantType
try:
variant = ua.Variant(coerced, dt.value)
self._get_node().set_value(variant)
except Exception:
# 回退:有些 set_value 实现接受 (value, variant_type)
try:
self._get_node().set_value(coerced, dt.value)
except Exception:
# 最后回退到直接写入(保持兼容性)
self._get_node().set_value(coerced)
else:
# 未声明数据类型,直接写入
self._get_node().set_value(value) self._get_node().set_value(value)
except Exception:
# 若在转换或按数据类型写入失败,尝试直接写入原始值并让上层捕获错误
self._get_node().set_value(value)
return False return False
except Exception as e: except Exception as e:
print(f"写入变量 {self._name} 失败: {e}") print(f"写入变量 {self._name} 失败: {e}")
@@ -120,20 +214,50 @@ class Method(Base):
def _get_parent_node(self) -> Node: def _get_parent_node(self) -> Node:
if self._parent_node is None: if self._parent_node is None:
try: try:
# 检查是否是NumericNodeId(ns=X;i=Y)格式 # 处理父节点ID使用与_get_node相同的解析逻辑
if "NumericNodeId" in self._parent_node_id:
# 从字符串中提取命名空间和标识符
import re import re
match = re.search(r'ns=(\d+);i=(\d+)', self._parent_node_id)
if match: nid = self._parent_node_id
ns = int(match.group(1))
identifier = int(match.group(2)) # 如果已经是 NodeId 对象,直接使用
try:
from opcua.ua import NodeId as UaNodeId
if isinstance(nid, UaNodeId):
self._parent_node = self._client.get_node(nid)
return self._parent_node
except Exception:
pass
# 字符串处理
if isinstance(nid, str):
nid = nid.strip()
# 处理包含类名的格式
match_wrapped = re.match(r'(String|Numeric|Byte|Guid|TwoByteNode|FourByteNode)NodeId\((.*)\)', nid)
if match_wrapped:
nid = match_wrapped.group(2).strip()
# 常见短格式
if re.match(r'^ns=\d+;[is]=', nid):
self._parent_node = self._client.get_node(nid)
else:
# 提取 ns 和 i 或 s
m_num = re.search(r'ns=(\d+);i=(\d+)', nid)
m_str = re.search(r'ns=(\d+);s=(.+?)(?:\)|$)', nid)
if m_num:
ns = int(m_num.group(1))
identifier = int(m_num.group(2))
node_id = NodeId(identifier, ns) node_id = NodeId(identifier, ns)
self._parent_node = self._client.get_node(node_id) self._parent_node = self._client.get_node(node_id)
elif m_str:
ns = int(m_str.group(1))
identifier = m_str.group(2).strip()
node_id_str = f"ns={ns};s={identifier}"
self._parent_node = self._client.get_node(node_id_str)
else: else:
raise ValueError(f"无法解析父节点ID: {self._parent_node_id}") # 回退
self._parent_node = self._client.get_node(self._parent_node_id)
else: else:
# 直接使用节点ID字符串
self._parent_node = self._client.get_node(self._parent_node_id) self._parent_node = self._client.get_node(self._parent_node_id)
except Exception as e: except Exception as e:
print(f"获取父节点失败: {self._parent_node_id}, 错误: {e}") print(f"获取父节点失败: {self._parent_node_id}, 错误: {e}")

View File

@@ -0,0 +1,93 @@
from pylabrobot.resources import create_homogeneous_resources, Coordinate, ResourceHolder, create_ordered_items_2d
from unilabos.resources.itemized_carrier import BottleCarrier
from unilabos.devices.workstation.post_process.bottles import POST_PROCESS_PolymerStation_Reagent_Bottle
# 命名约定:试剂瓶-Bottle烧杯-Beaker烧瓶-Flask,小瓶-Vial
# ============================================================================
# 聚合站PolymerStation载体定义统一入口
# ============================================================================
def POST_PROCESS_Raw_1BottleCarrier(name: str) -> BottleCarrier:
"""聚合站-单试剂瓶载架
参数:
- name: 载架名称前缀
"""
# 载架尺寸 (mm)
carrier_size_x = 127.8
carrier_size_y = 85.5
carrier_size_z = 20.0
# 烧杯/试剂瓶占位尺寸(使用圆形占位)
beaker_diameter = 60.0
# 计算中央位置
center_x = (carrier_size_x - beaker_diameter) / 2
center_y = (carrier_size_y - beaker_diameter) / 2
center_z = 5.0
carrier = BottleCarrier(
name=name,
size_x=carrier_size_x,
size_y=carrier_size_y,
size_z=carrier_size_z,
sites=create_homogeneous_resources(
klass=ResourceHolder,
locations=[Coordinate(center_x, center_y, center_z)],
resource_size_x=beaker_diameter,
resource_size_y=beaker_diameter,
name_prefix=name,
),
model="POST_PROCESS_Raw_1BottleCarrier",
)
carrier.num_items_x = 1
carrier.num_items_y = 1
carrier.num_items_z = 1
# 统一后缀采用 "flask_1" 命名(可按需调整)
carrier[0] = POST_PROCESS_PolymerStation_Reagent_Bottle(f"{name}_flask_1")
return carrier
def POST_PROCESS_Reaction_1BottleCarrier(name: str) -> BottleCarrier:
"""聚合站-单试剂瓶载架
参数:
- name: 载架名称前缀
"""
# 载架尺寸 (mm)
carrier_size_x = 127.8
carrier_size_y = 85.5
carrier_size_z = 20.0
# 烧杯/试剂瓶占位尺寸(使用圆形占位)
beaker_diameter = 60.0
# 计算中央位置
center_x = (carrier_size_x - beaker_diameter) / 2
center_y = (carrier_size_y - beaker_diameter) / 2
center_z = 5.0
carrier = BottleCarrier(
name=name,
size_x=carrier_size_x,
size_y=carrier_size_y,
size_z=carrier_size_z,
sites=create_homogeneous_resources(
klass=ResourceHolder,
locations=[Coordinate(center_x, center_y, center_z)],
resource_size_x=beaker_diameter,
resource_size_y=beaker_diameter,
name_prefix=name,
),
model="POST_PROCESS_Reaction_1BottleCarrier",
)
carrier.num_items_x = 1
carrier.num_items_y = 1
carrier.num_items_z = 1
# 统一后缀采用 "flask_1" 命名(可按需调整)
carrier[0] = POST_PROCESS_PolymerStation_Reagent_Bottle(f"{name}_flask_1")
return carrier
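
A quick sanity-check sketch for the carrier factories above, assuming the import path of this new module and relying only on the attributes the factory sets explicitly (name, model, num_items_*):

```python
# Import path of the new bottle-carrier module is assumed; adjust to its actual location in the repo.
from unilabos.devices.workstation.post_process.bottle_carriers import POST_PROCESS_Raw_1BottleCarrier

# Build a single-site carrier; the factory places one reagent bottle named "<name>_flask_1".
carrier = POST_PROCESS_Raw_1BottleCarrier("raw_carrier_1")

print(carrier.name, carrier.model)                                    # raw_carrier_1 POST_PROCESS_Raw_1BottleCarrier
print(carrier.num_items_x, carrier.num_items_y, carrier.num_items_z)  # 1 1 1, set by the factory
```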

View File

@@ -0,0 +1,20 @@
from unilabos.resources.itemized_carrier import Bottle
def POST_PROCESS_PolymerStation_Reagent_Bottle(
name: str,
diameter: float = 70.0,
height: float = 120.0,
max_volume: float = 500000.0, # 500mL
barcode: str = None,
) -> Bottle:
"""创建试剂瓶"""
return Bottle(
name=name,
diameter=diameter,
height=height,
max_volume=max_volume,
barcode=barcode,
model="POST_PROCESS_PolymerStation_Reagent_Bottle",
)

View File

@@ -0,0 +1,46 @@
from os import name
from pylabrobot.resources import Deck, Coordinate, Rotation
from unilabos.devices.workstation.post_process.warehouses import (
post_process_warehouse_4x3x1,
post_process_warehouse_4x3x1_2,
)
class post_process_deck(Deck):
def __init__(
self,
name: str = "post_process_deck",
size_x: float = 2000.0,
size_y: float = 1000.0,
size_z: float = 2670.0,
category: str = "deck",
setup: bool = True,
) -> None:
super().__init__(name=name, size_x=1700.0, size_y=1350.0, size_z=2670.0)
if setup:
self.setup()
def setup(self) -> None:
# 添加仓库
self.warehouses = {
"原料罐堆栈": post_process_warehouse_4x3x1("原料罐堆栈"),
"反应罐堆栈": post_process_warehouse_4x3x1_2("反应罐堆栈"),
}
# warehouse 的位置
self.warehouse_locations = {
"原料罐堆栈": Coordinate(350.0, 55.0, 0.0),
"反应罐堆栈": Coordinate(1000.0, 55.0, 0.0),
}
for warehouse_name, warehouse in self.warehouses.items():
self.assign_child_resource(warehouse, location=self.warehouse_locations[warehouse_name])
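
A short sketch of how this deck is consumed, mirroring what the OpcUaClient changes earlier in this diff do (build a default deck with setup=True and count its children):

```python
from unilabos.devices.workstation.post_process.decks import post_process_deck

# setup=True populates the two warehouses and assigns them as child resources of the deck.
deck = post_process_deck(setup=True)

print(list(deck.warehouses.keys()))  # ['原料罐堆栈', '反应罐堆栈']
print(len(deck.children))            # number of resources assigned to the deck
```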

View File

@@ -0,0 +1,157 @@
{
"register_node_list_from_csv_path": {
"path": "opcua_nodes_huairou.csv"
},
"create_flow": [
{
"name": "trigger_grab_action",
"description": "触发反应罐及原料罐抓取动作",
"parameters": ["reaction_tank_number", "raw_tank_number"],
"action": [
{
"init_function": {
"func_name": "init_grab_params",
"write_nodes": ["reaction_tank_number", "raw_tank_number"]
},
"start_function": {
"func_name": "start_grab",
"write_nodes": {"grab_trigger": true},
"condition_nodes": ["grab_complete"],
"stop_condition_expression": "grab_complete == True",
"timeout_seconds": 999999.0
},
"stop_function": {
"func_name": "stop_grab",
"write_nodes": {"grab_trigger": false}
}
}
]
},
{
"name": "trigger_post_processing",
"description": "触发后处理动作",
"parameters": ["atomization_fast_speed", "wash_slow_speed","injection_pump_suction_speed",
"injection_pump_push_speed","raw_liquid_suction_count","first_wash_water_amount",
"second_wash_water_amount","first_powder_mixing_time","second_powder_mixing_time",
"first_powder_wash_count","second_powder_wash_count","initial_water_amount",
"pre_filtration_mixing_time","atomization_pressure_kpa"],
"action": [
{
"init_function": {
"func_name": "init_post_processing_params",
"write_nodes": ["atomization_fast_speed", "wash_slow_speed","injection_pump_suction_speed",
"injection_pump_push_speed","raw_liquid_suction_count","first_wash_water_amount",
"second_wash_water_amount","first_powder_mixing_time","second_powder_mixing_time",
"first_powder_wash_count","second_powder_wash_count","initial_water_amount",
"pre_filtration_mixing_time","atomization_pressure_kpa"]
},
"start_function": {
"func_name": "start_post_processing",
"write_nodes": {"post_process_trigger": true},
"condition_nodes": ["post_process_complete"],
"stop_condition_expression": "post_process_complete == True",
"timeout_seconds": 999999.0
},
"stop_function": {
"func_name": "stop_post_processing",
"write_nodes": {"post_process_trigger": false}
}
}
]
},
{
"name": "trigger_cleaning_action",
"description": "触发清洗及管路吹气动作",
"parameters": ["nmp_outer_wall_cleaning_injection", "nmp_outer_wall_cleaning_count","nmp_outer_wall_cleaning_wait_time",
"nmp_outer_wall_cleaning_waste_time","nmp_inner_wall_cleaning_injection","nmp_inner_wall_cleaning_count",
"nmp_pump_cleaning_suction_count",
"nmp_inner_wall_cleaning_waste_time",
"nmp_stirrer_cleaning_injection",
"nmp_stirrer_cleaning_count",
"nmp_stirrer_cleaning_wait_time",
"nmp_stirrer_cleaning_waste_time",
"water_outer_wall_cleaning_injection",
"water_outer_wall_cleaning_count",
"water_outer_wall_cleaning_wait_time",
"water_outer_wall_cleaning_waste_time",
"water_inner_wall_cleaning_injection",
"water_inner_wall_cleaning_count",
"water_pump_cleaning_suction_count",
"water_inner_wall_cleaning_waste_time",
"water_stirrer_cleaning_injection",
"water_stirrer_cleaning_count",
"water_stirrer_cleaning_wait_time",
"water_stirrer_cleaning_waste_time",
"acetone_outer_wall_cleaning_injection",
"acetone_outer_wall_cleaning_count",
"acetone_outer_wall_cleaning_wait_time",
"acetone_outer_wall_cleaning_waste_time",
"acetone_inner_wall_cleaning_injection",
"acetone_inner_wall_cleaning_count",
"acetone_pump_cleaning_suction_count",
"acetone_inner_wall_cleaning_waste_time",
"acetone_stirrer_cleaning_injection",
"acetone_stirrer_cleaning_count",
"acetone_stirrer_cleaning_wait_time",
"acetone_stirrer_cleaning_waste_time",
"pipe_blowing_time",
"injection_pump_forward_empty_suction_count",
"injection_pump_reverse_empty_suction_count",
"filtration_liquid_selection"],
"action": [
{
"init_function": {
"func_name": "init_cleaning_params",
"write_nodes": ["nmp_outer_wall_cleaning_injection", "nmp_outer_wall_cleaning_count","nmp_outer_wall_cleaning_wait_time",
"nmp_outer_wall_cleaning_waste_time","nmp_inner_wall_cleaning_injection","nmp_inner_wall_cleaning_count",
"nmp_pump_cleaning_suction_count",
"nmp_inner_wall_cleaning_waste_time",
"nmp_stirrer_cleaning_injection",
"nmp_stirrer_cleaning_count",
"nmp_stirrer_cleaning_wait_time",
"nmp_stirrer_cleaning_waste_time",
"water_outer_wall_cleaning_injection",
"water_outer_wall_cleaning_count",
"water_outer_wall_cleaning_wait_time",
"water_outer_wall_cleaning_waste_time",
"water_inner_wall_cleaning_injection",
"water_inner_wall_cleaning_count",
"water_pump_cleaning_suction_count",
"water_inner_wall_cleaning_waste_time",
"water_stirrer_cleaning_injection",
"water_stirrer_cleaning_count",
"water_stirrer_cleaning_wait_time",
"water_stirrer_cleaning_waste_time",
"acetone_outer_wall_cleaning_injection",
"acetone_outer_wall_cleaning_count",
"acetone_outer_wall_cleaning_wait_time",
"acetone_outer_wall_cleaning_waste_time",
"acetone_inner_wall_cleaning_injection",
"acetone_inner_wall_cleaning_count",
"acetone_pump_cleaning_suction_count",
"acetone_inner_wall_cleaning_waste_time",
"acetone_stirrer_cleaning_injection",
"acetone_stirrer_cleaning_count",
"acetone_stirrer_cleaning_wait_time",
"acetone_stirrer_cleaning_waste_time",
"pipe_blowing_time",
"injection_pump_forward_empty_suction_count",
"injection_pump_reverse_empty_suction_count",
"filtration_liquid_selection"]
},
"start_function": {
"func_name": "start_cleaning",
"write_nodes": {"cleaning_and_pipe_blowing_trigger": true},
"condition_nodes": ["cleaning_complete"],
"stop_condition_expression": "cleaning_complete == True",
"timeout_seconds": 999999.0
},
"stop_function": {
"func_name": "stop_cleaning",
"write_nodes": {"cleaning_and_pipe_blowing_trigger": false}
}
}
]
}
]
}
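
A sketch of how a "create_flow" entry is exercised once this file is loaded, assuming the generated workflow methods accept keyword arguments named after the declared parameters; the parameter values and the OpcUaClient import path are illustrative placeholders:

```python
from unilabos.device_comms.opcua_client.opcua_client import OpcUaClient  # import path assumed

client = OpcUaClient(
    url="opc.tcp://192.168.1.88:4840/freeopcua/server/",  # placeholder endpoint
    config_path="opcua_huairou.json",                     # this configuration file
)

# "trigger_post_processing": each parameter name matches an EnglishName in the node CSV,
# so init_post_processing_params writes it to the corresponding OPC UA node before the
# start trigger is set and post_process_complete is polled.
client.trigger_post_processing(
    atomization_fast_speed=120.0,     # illustrative value for 搅拌桨雾化快速
    wash_slow_speed=30.0,             # illustrative value for 搅拌桨洗涤慢速
    raw_liquid_suction_count=2,       # illustrative value for 抽原液次数
    initial_water_amount=150.0,       # illustrative value for 最开始加水量
    # ...remaining parameters follow the same naming convention
)

client.disconnect()
```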

View File

@@ -0,0 +1,70 @@
Name,EnglishName,NodeType,DataType,NodeLanguage,NodeId
原料罐号码,raw_tank_number,VARIABLE,INT16,Chinese,ns=4;s=OPC|原料罐号码
反应罐号码,reaction_tank_number,VARIABLE,INT16,Chinese,ns=4;s=OPC|反应罐号码
反应罐及原料罐抓取触发,grab_trigger,VARIABLE,BOOLEAN,Chinese,ns=4;s=OPC|反应罐及原料罐抓取触发
后处理动作触发,post_process_trigger,VARIABLE,BOOLEAN,Chinese,ns=4;s=OPC|后处理动作触发
搅拌桨雾化快速,atomization_fast_speed,VARIABLE,FLOAT,Chinese,ns=4;s=OPC|搅拌桨雾化快速
搅拌桨洗涤慢速,wash_slow_speed,VARIABLE,FLOAT,Chinese,ns=4;s=OPC|搅拌桨洗涤慢速
注射泵抽液速度,injection_pump_suction_speed,VARIABLE,INT16,Chinese,ns=4;s=OPC|注射泵抽液速度
注射泵推液速度,injection_pump_push_speed,VARIABLE,INT16,Chinese,ns=4;s=OPC|注射泵推液速度
抽原液次数,raw_liquid_suction_count,VARIABLE,INT16,Chinese,ns=4;s=OPC|抽原液次数
第1次洗涤加水量,first_wash_water_amount,VARIABLE,FLOAT,Chinese,ns=4;s=OPC|第1次洗涤加水量
第2次洗涤加水量,second_wash_water_amount,VARIABLE,FLOAT,Chinese,ns=4;s=OPC|第2次洗涤加水量
第1次粉末搅拌时间,first_powder_mixing_time,VARIABLE,INT32,Chinese,ns=4;s=OPC|第1次粉末搅拌时间
第2次粉末搅拌时间,second_powder_mixing_time,VARIABLE,INT32,Chinese,ns=4;s=OPC|第2次粉末搅拌时间
第1次粉末洗涤次数,first_powder_wash_count,VARIABLE,INT16,Chinese,ns=4;s=OPC|第1次粉末洗涤次数
第2次粉末洗涤次数,second_powder_wash_count,VARIABLE,INT16,Chinese,ns=4;s=OPC|第2次粉末洗涤次数
最开始加水量,initial_water_amount,VARIABLE,FLOAT,Chinese,ns=4;s=OPC|最开始加水量
抽滤前搅拌时间,pre_filtration_mixing_time,VARIABLE,INT32,Chinese,ns=4;s=OPC|抽滤前搅拌时间
雾化压力Kpa,atomization_pressure_kpa,VARIABLE,INT16,Chinese,ns=4;s=OPC|雾化压力Kpa
清洗及管路吹气触发,cleaning_and_pipe_blowing_trigger,VARIABLE,BOOLEAN,Chinese,ns=4;s=OPC|清洗及管路吹气触发
废液桶满报警,waste_tank_full_alarm,VARIABLE,BOOLEAN,Chinese,ns=4;s=OPC|废液桶满报警
清水桶空报警,water_tank_empty_alarm,VARIABLE,BOOLEAN,Chinese,ns=4;s=OPC|清水桶空报警
NMP桶空报警,nmp_tank_empty_alarm,VARIABLE,BOOLEAN,Chinese,ns=4;s=OPC|NMP桶空报警
丙酮桶空报警,acetone_tank_empty_alarm,VARIABLE,BOOLEAN,Chinese,ns=4;s=OPC|丙酮桶空报警
门开报警,door_open_alarm,VARIABLE,BOOLEAN,Chinese,ns=4;s=OPC|门开报警
反应罐及原料罐抓取完成PLCtoPC,grab_complete,VARIABLE,BOOLEAN,Chinese,ns=4;s=OPC|反应罐及原料罐抓取完成PLCtoPC
后处理动作完成PLCtoPC,post_process_complete,VARIABLE,BOOLEAN,Chinese,ns=4;s=OPC|后处理动作完成PLCtoPC
清洗及管路吹气完成PLCtoPC,cleaning_complete,VARIABLE,BOOLEAN,Chinese,ns=4;s=OPC|清洗及管路吹气完成PLCtoPC
远程模式PLCtoPC,remote_mode,VARIABLE,BOOLEAN,Chinese,ns=4;s=OPC|远程模式PLCtoPC
设备准备就绪PLCtoPC,device_ready,VARIABLE,BOOLEAN,Chinese,ns=4;s=OPC|设备准备就绪PLCtoPC
NMP外壁清洗加注,nmp_outer_wall_cleaning_injection,VARIABLE,FLOAT,Chinese,ns=4;s=OPC|NMP外壁清洗加注
NMP外壁清洗次数,nmp_outer_wall_cleaning_count,VARIABLE,INT16,Chinese,ns=4;s=OPC|NMP外壁清洗次数
NMP外壁清洗等待时间,nmp_outer_wall_cleaning_wait_time,VARIABLE,INT32,Chinese,ns=4;s=OPC|NMP外壁清洗等待时间
NMP外壁清洗抽废时间,nmp_outer_wall_cleaning_waste_time,VARIABLE,INT32,Chinese,ns=4;s=OPC|NMP外壁清洗抽废时间
NMP内壁清洗加注,nmp_inner_wall_cleaning_injection,VARIABLE,FLOAT,Chinese,ns=4;s=OPC|NMP内壁清洗加注
NMP内壁清洗次数,nmp_inner_wall_cleaning_count,VARIABLE,INT16,Chinese,ns=4;s=OPC|NMP内壁清洗次数
NMP泵清洗抽次数,nmp_pump_cleaning_suction_count,VARIABLE,INT16,Chinese,ns=4;s=OPC|NMP泵清洗抽次数
NMP内壁清洗抽废时间,nmp_inner_wall_cleaning_waste_time,VARIABLE,INT32,Chinese,ns=4;s=OPC|NMP内壁清洗抽废时间
NMP搅拌桨清洗加注,nmp_stirrer_cleaning_injection,VARIABLE,FLOAT,Chinese,ns=4;s=OPC|NMP搅拌桨清洗加注
NMP搅拌桨清洗次数,nmp_stirrer_cleaning_count,VARIABLE,INT16,Chinese,ns=4;s=OPC|NMP搅拌桨清洗次数
NMP搅拌桨清洗等待时间,nmp_stirrer_cleaning_wait_time,VARIABLE,INT32,Chinese,ns=4;s=OPC|NMP搅拌桨清洗等待时间
NMP搅拌桨清洗抽废时间,nmp_stirrer_cleaning_waste_time,VARIABLE,INT32,Chinese,ns=4;s=OPC|NMP搅拌桨清洗抽废时间
清水外壁清洗加注,water_outer_wall_cleaning_injection,VARIABLE,FLOAT,Chinese,ns=4;s=OPC|清水外壁清洗加注
清水外壁清洗次数,water_outer_wall_cleaning_count,VARIABLE,INT16,Chinese,ns=4;s=OPC|清水外壁清洗次数
清水外壁清洗等待时间,water_outer_wall_cleaning_wait_time,VARIABLE,INT32,Chinese,ns=4;s=OPC|清水外壁清洗等待时间
清水外壁清洗抽废时间,water_outer_wall_cleaning_waste_time,VARIABLE,INT32,Chinese,ns=4;s=OPC|清水外壁清洗抽废时间
清水内壁清洗加注,water_inner_wall_cleaning_injection,VARIABLE,FLOAT,Chinese,ns=4;s=OPC|清水内壁清洗加注
清水内壁清洗次数,water_inner_wall_cleaning_count,VARIABLE,INT16,Chinese,ns=4;s=OPC|清水内壁清洗次数
清水泵清洗抽次数,water_pump_cleaning_suction_count,VARIABLE,INT16,Chinese,ns=4;s=OPC|清水泵清洗抽次数
清水内壁清洗抽废时间,water_inner_wall_cleaning_waste_time,VARIABLE,INT32,Chinese,ns=4;s=OPC|清水内壁清洗抽废时间
清水搅拌桨清洗加注,water_stirrer_cleaning_injection,VARIABLE,FLOAT,Chinese,ns=4;s=OPC|清水搅拌桨清洗加注
清水搅拌桨清洗次数,water_stirrer_cleaning_count,VARIABLE,INT16,Chinese,ns=4;s=OPC|清水搅拌桨清洗次数
清水搅拌桨清洗等待时间,water_stirrer_cleaning_wait_time,VARIABLE,INT32,Chinese,ns=4;s=OPC|清水搅拌桨清洗等待时间
清水搅拌桨清洗抽废时间,water_stirrer_cleaning_waste_time,VARIABLE,INT32,Chinese,ns=4;s=OPC|清水搅拌桨清洗抽废时间
丙酮外壁清洗加注,acetone_outer_wall_cleaning_injection,VARIABLE,FLOAT,Chinese,ns=4;s=OPC|丙酮外壁清洗加注
丙酮外壁清洗次数,acetone_outer_wall_cleaning_count,VARIABLE,INT16,Chinese,ns=4;s=OPC|丙酮外壁清洗次数
丙酮外壁清洗等待时间,acetone_outer_wall_cleaning_wait_time,VARIABLE,INT32,Chinese,ns=4;s=OPC|丙酮外壁清洗等待时间
丙酮外壁清洗抽废时间,acetone_outer_wall_cleaning_waste_time,VARIABLE,INT32,Chinese,ns=4;s=OPC|丙酮外壁清洗抽废时间
丙酮内壁清洗加注,acetone_inner_wall_cleaning_injection,VARIABLE,FLOAT,Chinese,ns=4;s=OPC|丙酮内壁清洗加注
丙酮内壁清洗次数,acetone_inner_wall_cleaning_count,VARIABLE,INT16,Chinese,ns=4;s=OPC|丙酮内壁清洗次数
丙酮泵清洗抽次数,acetone_pump_cleaning_suction_count,VARIABLE,INT16,Chinese,ns=4;s=OPC|丙酮泵清洗抽次数
丙酮内壁清洗抽废时间,acetone_inner_wall_cleaning_waste_time,VARIABLE,INT32,Chinese,ns=4;s=OPC|丙酮内壁清洗抽废时间
丙酮搅拌桨清洗加注,acetone_stirrer_cleaning_injection,VARIABLE,FLOAT,Chinese,ns=4;s=OPC|丙酮搅拌桨清洗加注
丙酮搅拌桨清洗次数,acetone_stirrer_cleaning_count,VARIABLE,INT16,Chinese,ns=4;s=OPC|丙酮搅拌桨清洗次数
丙酮搅拌桨清洗等待时间,acetone_stirrer_cleaning_wait_time,VARIABLE,INT32,Chinese,ns=4;s=OPC|丙酮搅拌桨清洗等待时间
丙酮搅拌桨清洗抽废时间,acetone_stirrer_cleaning_waste_time,VARIABLE,INT32,Chinese,ns=4;s=OPC|丙酮搅拌桨清洗抽废时间
管道吹气时间,pipe_blowing_time,VARIABLE,INT32,Chinese,ns=4;s=OPC|管道吹气时间
注射泵正向空抽次数,injection_pump_forward_empty_suction_count,VARIABLE,INT16,Chinese,ns=4;s=OPC|注射泵正向空抽次数
注射泵反向空抽次数,injection_pump_reverse_empty_suction_count,VARIABLE,INT16,Chinese,ns=4;s=OPC|注射泵反向空抽次数
抽滤液选择0水1丙酮,filtration_liquid_selection,VARIABLE,INT16,Chinese,ns=4;s=OPC|抽滤液选择0水1丙酮
55 丙酮外壁清洗加注 acetone_outer_wall_cleaning_injection VARIABLE FLOAT Chinese ns=4;s=OPC|丙酮外壁清洗加注
56 丙酮外壁清洗次数 acetone_outer_wall_cleaning_count VARIABLE INT16 Chinese ns=4;s=OPC|丙酮外壁清洗次数
57 丙酮外壁清洗等待时间 acetone_outer_wall_cleaning_wait_time VARIABLE INT32 Chinese ns=4;s=OPC|丙酮外壁清洗等待时间
58 丙酮外壁清洗抽废时间 acetone_outer_wall_cleaning_waste_time VARIABLE INT32 Chinese ns=4;s=OPC|丙酮外壁清洗抽废时间
59 丙酮内壁清洗加注 acetone_inner_wall_cleaning_injection VARIABLE FLOAT Chinese ns=4;s=OPC|丙酮内壁清洗加注
60 丙酮内壁清洗次数 acetone_inner_wall_cleaning_count VARIABLE INT16 Chinese ns=4;s=OPC|丙酮内壁清洗次数
61 丙酮泵清洗抽次数 acetone_pump_cleaning_suction_count VARIABLE INT16 Chinese ns=4;s=OPC|丙酮泵清洗抽次数
62 丙酮内壁清洗抽废时间 acetone_inner_wall_cleaning_waste_time VARIABLE INT32 Chinese ns=4;s=OPC|丙酮内壁清洗抽废时间
63 丙酮搅拌桨清洗加注 acetone_stirrer_cleaning_injection VARIABLE FLOAT Chinese ns=4;s=OPC|丙酮搅拌桨清洗加注
64 丙酮搅拌桨清洗次数 acetone_stirrer_cleaning_count VARIABLE INT16 Chinese ns=4;s=OPC|丙酮搅拌桨清洗次数
65 丙酮搅拌桨清洗等待时间 acetone_stirrer_cleaning_wait_time VARIABLE INT32 Chinese ns=4;s=OPC|丙酮搅拌桨清洗等待时间
66 丙酮搅拌桨清洗抽废时间 acetone_stirrer_cleaning_waste_time VARIABLE INT32 Chinese ns=4;s=OPC|丙酮搅拌桨清洗抽废时间
67 管道吹气时间 pipe_blowing_time VARIABLE INT32 Chinese ns=4;s=OPC|管道吹气时间
68 注射泵正向空抽次数 injection_pump_forward_empty_suction_count VARIABLE INT16 Chinese ns=4;s=OPC|注射泵正向空抽次数
69 注射泵反向空抽次数 injection_pump_reverse_empty_suction_count VARIABLE INT16 Chinese ns=4;s=OPC|注射泵反向空抽次数
70 抽滤液选择0水1丙酮 filtration_liquid_selection VARIABLE INT16 Chinese ns=4;s=OPC|抽滤液选择0水1丙酮

File diff suppressed because it is too large

View File

@@ -0,0 +1,45 @@
{
  "nodes": [
    {
      "id": "post_process_station",
      "name": "post_process_station",
      "children": [
        "post_process_deck"
      ],
      "parent": null,
      "type": "device",
      "class": "post_process_station",
      "config": {
        "url": "opc.tcp://LAPTOP-AN6QGCSD:53530/OPCUA/SimulationServer",
        "config_path": "C:\\Users\\Roy\\Desktop\\DPLC\\Uni-Lab-OS\\unilabos\\devices\\workstation\\post_process\\opcua_huairou.json",
        "deck": {
          "data": {
            "_resource_child_name": "post_process_deck",
            "_resource_type": "unilabos.devices.workstation.post_process.decks:post_process_deck"
          }
        }
      },
      "data": {}
    },
    {
      "id": "post_process_deck",
      "name": "post_process_deck",
      "sample_id": null,
      "children": [],
      "parent": "post_process_station",
      "type": "deck",
      "class": "post_process_deck",
      "position": {
        "x": 0,
        "y": 0,
        "z": 0
      },
      "config": {
        "type": "post_process_deck",
        "setup": true
      },
      "data": {}
    }
  ]
}
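
The `_resource_type` value above follows the `module:attribute` convention used by the registry files in this commit. As an illustration only (this is a hypothetical helper, not the loader unilabos actually uses), such a string can be resolved to the deck factory like this:

# Hypothetical sketch: split "package.module:attr" and import the attribute.
import importlib


def resolve_resource_type(resource_type: str):
    module_path, attr_name = resource_type.split(":")
    module = importlib.import_module(module_path)
    return getattr(module, attr_name)


deck_factory = resolve_resource_type(
    "unilabos.devices.workstation.post_process.decks:post_process_deck"
)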

View File

@@ -0,0 +1,160 @@
from typing import Dict, Optional, List, Union

from pylabrobot.resources import Coordinate
from pylabrobot.resources.carrier import ResourceHolder, create_homogeneous_resources

from unilabos.resources.itemized_carrier import ItemizedCarrier, ResourcePLR

LETTERS = "ABCDEFGHIJKLMNOPQRSTUVWXYZ"


def warehouse_factory(
    name: str,
    num_items_x: int = 1,
    num_items_y: int = 4,
    num_items_z: int = 4,
    dx: float = 137.0,
    dy: float = 96.0,
    dz: float = 120.0,
    item_dx: float = 10.0,
    item_dy: float = 10.0,
    item_dz: float = 10.0,
    resource_size_x: float = 127.0,
    resource_size_y: float = 86.0,
    resource_size_z: float = 25.0,
    removed_positions: Optional[List[int]] = None,
    empty: bool = False,
    category: str = "warehouse",
    model: Optional[str] = None,
    col_offset: int = 0,  # column start offset, used to generate names such as 5-8
    layout: str = "col-major",  # ordering mode: "col-major" = column-major, "row-major" = row-major
):
    # Build the site coordinates
    locations = []
    for layer in range(num_items_z):  # layers
        for row in range(num_items_y):  # rows
            for col in range(num_items_x):  # columns
                x = dx + col * item_dx
                # The y coordinate depends on the ordering mode
                if layout == "row-major":
                    # Row-major: row=0 (the first row) is shown at the top, so it gets the smallest y
                    y = dy + row * item_dy
                else:
                    # Column-major: keep the original logic
                    y = dy + (num_items_y - row - 1) * item_dy
                z = dz + (num_items_z - layer - 1) * item_dz
                locations.append(Coordinate(x, y, z))
    if removed_positions:
        locations = [loc for i, loc in enumerate(locations) if i not in removed_positions]
    _sites = create_homogeneous_resources(
        klass=ResourceHolder,
        locations=locations,
        resource_size_x=resource_size_x,
        resource_size_y=resource_size_y,
        resource_size_z=resource_size_z,
        name_prefix=name,
    )
    # Not used below; kept from the original implementation.
    len_x, len_y = (num_items_x, num_items_y) if num_items_z == 1 else (num_items_y, num_items_z) if num_items_x == 1 else (num_items_x, num_items_z)
    # Sites are named with numbers: the top row is 4,3,2,1 and the bottom row is 12,11,10,9.
    # The naming order must match the coordinate-generation order: layer -> row -> column.
    keys = []
    for layer in range(num_items_z):  # every layer
        for row in range(num_items_y):  # every row
            for col in range(num_items_x):  # every column
                # Reverse the row index: row=0 maps to global_row 0 (first row: 4,3,2,1),
                # row=1 to global_row 1 (second row: 8,7,6,5),
                # row=2 to global_row 2 (third row: 12,11,10,9).
                # The front end displays row=2 at the top, so the index is flipped here.
                reversed_row = num_items_y - 1 - row  # row=0 -> 2, row=1 -> 1, row=2 -> 0
                global_row = layer * num_items_y + reversed_row
                # The largest number in each row = (global_row + 1) * num_items_x + col_offset
                base_num = (global_row + 1) * num_items_x + col_offset
                # Decrease from right to left: 4, 3, 2, 1
                key = str(base_num - col)
                keys.append(key)
    sites = {key: site for key, site in zip(keys, _sites.values())}
    return WareHouse(
        name=name,
        size_x=dx + item_dx * num_items_x,
        size_y=dy + item_dy * num_items_y,
        size_z=dz + item_dz * num_items_z,
        num_items_x=num_items_x,
        num_items_y=num_items_y,
        num_items_z=num_items_z,
        ordering_layout=layout,  # forward the ordering mode to ordering_layout
        sites=sites,
        category=category,
        model=model,
    )


class WareHouse(ItemizedCarrier):
    """Stack carrier: a carrier holding 16 plate sites (4 layers x 4 rows x 1 column)."""

    def __init__(
        self,
        name: str,
        size_x: float,
        size_y: float,
        size_z: float,
        num_items_x: int,
        num_items_y: int,
        num_items_z: int,
        layout: str = "x-y",
        sites: Optional[Dict[Union[int, str], Optional[ResourcePLR]]] = None,
        category: str = "warehouse",
        model: Optional[str] = None,
        ordering_layout: str = "col-major",
        **kwargs
    ):
        super().__init__(
            name=name,
            size_x=size_x,
            size_y=size_y,
            size_z=size_z,
            # ordered_items=ordered_items,
            # ordering=ordering,
            num_items_x=num_items_x,
            num_items_y=num_items_y,
            num_items_z=num_items_z,
            layout=layout,
            sites=sites,
            category=category,
            model=model,
        )
        # Keep the ordering mode so the coordinate mapping in graphio.py can use it.
        # A dedicated attribute avoids clashing with the parent class's layout.
        self.ordering_layout = ordering_layout

    def serialize(self) -> dict:
        """Include the ordering_layout attribute when serializing."""
        data = super().serialize()
        data['ordering_layout'] = self.ordering_layout
        return data

    def get_site_by_layer_position(self, row: int, col: int, layer: int) -> ResourceHolder:
        if not (0 <= layer < 4 and 0 <= row < 4 and 0 <= col < 1):
            raise ValueError("Invalid position: layer={}, row={}, col={}".format(layer, row, col))
        site_index = layer * 4 + row * 1 + col
        return self.sites[site_index]

    def add_rack_to_position(self, row: int, col: int, layer: int, rack) -> None:
        site = self.get_site_by_layer_position(row, col, layer)
        site.assign_child_resource(rack)

    def get_rack_at_position(self, row: int, col: int, layer: int):
        site = self.get_site_by_layer_position(row, col, layer)
        return site.resource
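
A minimal usage sketch of the factory above (illustrative only, not part of the commit; it assumes pylabrobot and unilabos are installed and that the generated site names are exposed through the carrier's `sites` mapping, as the class methods suggest):

# Hypothetical example: build the 4x3x1 layout used by the post-process station
# and look up a site by its generated numeric name.
from unilabos.devices.workstation.post_process.post_process_warehouse import warehouse_factory

wh = warehouse_factory(
    name="demo_warehouse",
    num_items_x=4,
    num_items_y=3,
    num_items_z=1,
    dx=10.0, dy=10.0, dz=10.0,
    item_dx=137.0, item_dy=96.0, item_dz=120.0,
    layout="col-major",
)

# With col-major ordering the keys run "12" down to "1"; per the comments above,
# the row named "4,3,2,1" is the one the front end shows at the top.
holder = wh.sites["1"]
print(holder.location)  # Coordinate assigned to that site by the factory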

View File

@@ -0,0 +1,38 @@
from unilabos.devices.workstation.post_process.post_process_warehouse import WareHouse, warehouse_factory

# =================== Other ===================


def post_process_warehouse_4x3x1(name: str) -> WareHouse:
    """Create a 4x3x1 post_process warehouse."""
    return warehouse_factory(
        name=name,
        num_items_x=4,
        num_items_y=3,
        num_items_z=1,
        dx=10.0,
        dy=10.0,
        dz=10.0,
        item_dx=137.0,
        item_dy=96.0,
        item_dz=120.0,
        category="warehouse",
    )


def post_process_warehouse_4x3x1_2(name: str) -> WareHouse:
    """Deprecated: create a 4x3x1 post_process warehouse."""
    return warehouse_factory(
        name=name,
        num_items_x=4,
        num_items_y=3,
        num_items_z=1,
        dx=12.0,
        dy=12.0,
        dz=12.0,
        item_dx=137.0,
        item_dy=96.0,
        item_dz=120.0,
        category="warehouse",
    )

View File

@@ -0,0 +1,630 @@
post_process_station:
category:
- post_process_station
class:
action_value_mappings:
disconnect:
feedback: {}
goal:
command: {}
goal_default:
command: ''
handles: {}
result:
success: success
schema:
description: ''
properties:
feedback:
properties:
status:
type: string
required:
- status
title: SendCmd_Feedback
type: object
goal:
properties:
command:
type: string
required:
- command
title: SendCmd_Goal
type: object
result:
properties:
return_info:
type: string
success:
type: boolean
required:
- return_info
- success
title: SendCmd_Result
type: object
required:
- goal
title: SendCmd
type: object
type: SendCmd
read_node:
feedback:
result: result
goal:
command: node_name
goal_default:
command: ''
handles: {}
result:
success: success
schema:
description: ''
properties:
feedback:
properties:
status:
type: string
required:
- status
title: SendCmd_Feedback
type: object
goal:
properties:
command:
type: string
required:
- command
title: SendCmd_Goal
type: object
result:
properties:
return_info:
type: string
success:
type: boolean
required:
- return_info
- success
title: SendCmd_Result
type: object
required:
- goal
title: SendCmd
type: object
type: SendCmd
trigger_cleaning_action:
feedback: {}
goal:
acetone_inner_wall_cleaning_count: acetone_inner_wall_cleaning_count
acetone_inner_wall_cleaning_injection: acetone_inner_wall_cleaning_injection
acetone_inner_wall_cleaning_waste_time: acetone_inner_wall_cleaning_waste_time
acetone_outer_wall_cleaning_count: acetone_outer_wall_cleaning_count
acetone_outer_wall_cleaning_injection: acetone_outer_wall_cleaning_injection
acetone_outer_wall_cleaning_wait_time: acetone_outer_wall_cleaning_wait_time
acetone_outer_wall_cleaning_waste_time: acetone_outer_wall_cleaning_waste_time
acetone_pump_cleaning_suction_count: acetone_pump_cleaning_suction_count
acetone_stirrer_cleaning_count: acetone_stirrer_cleaning_count
acetone_stirrer_cleaning_injection: acetone_stirrer_cleaning_injection
acetone_stirrer_cleaning_wait_time: acetone_stirrer_cleaning_wait_time
acetone_stirrer_cleaning_waste_time: acetone_stirrer_cleaning_waste_time
filtration_liquid_selection: filtration_liquid_selection
injection_pump_forward_empty_suction_count: injection_pump_forward_empty_suction_count
injection_pump_reverse_empty_suction_count: injection_pump_reverse_empty_suction_count
nmp_inner_wall_cleaning_count: nmp_inner_wall_cleaning_count
nmp_inner_wall_cleaning_injection: nmp_inner_wall_cleaning_injection
nmp_inner_wall_cleaning_waste_time: nmp_inner_wall_cleaning_waste_time
nmp_outer_wall_cleaning_count: nmp_outer_wall_cleaning_count
nmp_outer_wall_cleaning_injection: nmp_outer_wall_cleaning_injection
nmp_outer_wall_cleaning_wait_time: nmp_outer_wall_cleaning_wait_time
nmp_outer_wall_cleaning_waste_time: nmp_outer_wall_cleaning_waste_time
nmp_pump_cleaning_suction_count: nmp_pump_cleaning_suction_count
nmp_stirrer_cleaning_count: nmp_stirrer_cleaning_count
nmp_stirrer_cleaning_injection: nmp_stirrer_cleaning_injection
nmp_stirrer_cleaning_wait_time: nmp_stirrer_cleaning_wait_time
nmp_stirrer_cleaning_waste_time: nmp_stirrer_cleaning_waste_time
pipe_blowing_time: pipe_blowing_time
water_inner_wall_cleaning_count: water_inner_wall_cleaning_count
water_inner_wall_cleaning_injection: water_inner_wall_cleaning_injection
water_inner_wall_cleaning_waste_time: water_inner_wall_cleaning_waste_time
water_outer_wall_cleaning_count: water_outer_wall_cleaning_count
water_outer_wall_cleaning_injection: water_outer_wall_cleaning_injection
water_outer_wall_cleaning_wait_time: water_outer_wall_cleaning_wait_time
water_outer_wall_cleaning_waste_time: water_outer_wall_cleaning_waste_time
water_pump_cleaning_suction_count: water_pump_cleaning_suction_count
water_stirrer_cleaning_count: water_stirrer_cleaning_count
water_stirrer_cleaning_injection: water_stirrer_cleaning_injection
water_stirrer_cleaning_wait_time: water_stirrer_cleaning_wait_time
water_stirrer_cleaning_waste_time: water_stirrer_cleaning_waste_time
goal_default:
acetone_inner_wall_cleaning_count: 0
acetone_inner_wall_cleaning_injection: 0.0
acetone_inner_wall_cleaning_waste_time: 0
acetone_outer_wall_cleaning_count: 0
acetone_outer_wall_cleaning_injection: 0.0
acetone_outer_wall_cleaning_wait_time: 0
acetone_outer_wall_cleaning_waste_time: 0
acetone_pump_cleaning_suction_count: 0
acetone_stirrer_cleaning_count: 0
acetone_stirrer_cleaning_injection: 0.0
acetone_stirrer_cleaning_wait_time: 0
acetone_stirrer_cleaning_waste_time: 0
filtration_liquid_selection: 0
injection_pump_forward_empty_suction_count: 0
injection_pump_reverse_empty_suction_count: 0
nmp_inner_wall_cleaning_count: 0
nmp_inner_wall_cleaning_injection: 0.0
nmp_inner_wall_cleaning_waste_time: 0
nmp_outer_wall_cleaning_count: 0
nmp_outer_wall_cleaning_injection: 0.0
nmp_outer_wall_cleaning_wait_time: 0
nmp_outer_wall_cleaning_waste_time: 0
nmp_pump_cleaning_suction_count: 0
nmp_stirrer_cleaning_count: 0
nmp_stirrer_cleaning_injection: 0.0
nmp_stirrer_cleaning_wait_time: 0
nmp_stirrer_cleaning_waste_time: 0
pipe_blowing_time: 0
water_inner_wall_cleaning_count: 0
water_inner_wall_cleaning_injection: 0.0
water_inner_wall_cleaning_waste_time: 0
water_outer_wall_cleaning_count: 0
water_outer_wall_cleaning_injection: 0.0
water_outer_wall_cleaning_wait_time: 0
water_outer_wall_cleaning_waste_time: 0
water_pump_cleaning_suction_count: 0
water_stirrer_cleaning_count: 0
water_stirrer_cleaning_injection: 0.0
water_stirrer_cleaning_wait_time: 0
water_stirrer_cleaning_waste_time: 0
handles: {}
result:
return_info: return_info
schema:
description: ''
properties:
feedback:
properties: {}
required: []
title: PostProcessTriggerClean_Feedback
type: object
goal:
properties:
acetone_inner_wall_cleaning_count:
maximum: 2147483647
minimum: -2147483648
type: integer
acetone_inner_wall_cleaning_injection:
type: number
acetone_inner_wall_cleaning_waste_time:
maximum: 2147483647
minimum: -2147483648
type: integer
acetone_outer_wall_cleaning_count:
maximum: 2147483647
minimum: -2147483648
type: integer
acetone_outer_wall_cleaning_injection:
type: number
acetone_outer_wall_cleaning_wait_time:
maximum: 2147483647
minimum: -2147483648
type: integer
acetone_outer_wall_cleaning_waste_time:
maximum: 2147483647
minimum: -2147483648
type: integer
acetone_pump_cleaning_suction_count:
maximum: 2147483647
minimum: -2147483648
type: integer
acetone_stirrer_cleaning_count:
maximum: 2147483647
minimum: -2147483648
type: integer
acetone_stirrer_cleaning_injection:
type: number
acetone_stirrer_cleaning_wait_time:
maximum: 2147483647
minimum: -2147483648
type: integer
acetone_stirrer_cleaning_waste_time:
maximum: 2147483647
minimum: -2147483648
type: integer
filtration_liquid_selection:
maximum: 2147483647
minimum: -2147483648
type: integer
injection_pump_forward_empty_suction_count:
maximum: 2147483647
minimum: -2147483648
type: integer
injection_pump_reverse_empty_suction_count:
maximum: 2147483647
minimum: -2147483648
type: integer
nmp_inner_wall_cleaning_count:
maximum: 2147483647
minimum: -2147483648
type: integer
nmp_inner_wall_cleaning_injection:
type: number
nmp_inner_wall_cleaning_waste_time:
maximum: 2147483647
minimum: -2147483648
type: integer
nmp_outer_wall_cleaning_count:
maximum: 2147483647
minimum: -2147483648
type: integer
nmp_outer_wall_cleaning_injection:
type: number
nmp_outer_wall_cleaning_wait_time:
maximum: 2147483647
minimum: -2147483648
type: integer
nmp_outer_wall_cleaning_waste_time:
maximum: 2147483647
minimum: -2147483648
type: integer
nmp_pump_cleaning_suction_count:
maximum: 2147483647
minimum: -2147483648
type: integer
nmp_stirrer_cleaning_count:
maximum: 2147483647
minimum: -2147483648
type: integer
nmp_stirrer_cleaning_injection:
type: number
nmp_stirrer_cleaning_wait_time:
maximum: 2147483647
minimum: -2147483648
type: integer
nmp_stirrer_cleaning_waste_time:
maximum: 2147483647
minimum: -2147483648
type: integer
pipe_blowing_time:
maximum: 2147483647
minimum: -2147483648
type: integer
water_inner_wall_cleaning_count:
maximum: 2147483647
minimum: -2147483648
type: integer
water_inner_wall_cleaning_injection:
type: number
water_inner_wall_cleaning_waste_time:
maximum: 2147483647
minimum: -2147483648
type: integer
water_outer_wall_cleaning_count:
maximum: 2147483647
minimum: -2147483648
type: integer
water_outer_wall_cleaning_injection:
type: number
water_outer_wall_cleaning_wait_time:
maximum: 2147483647
minimum: -2147483648
type: integer
water_outer_wall_cleaning_waste_time:
maximum: 2147483647
minimum: -2147483648
type: integer
water_pump_cleaning_suction_count:
maximum: 2147483647
minimum: -2147483648
type: integer
water_stirrer_cleaning_count:
maximum: 2147483647
minimum: -2147483648
type: integer
water_stirrer_cleaning_injection:
type: number
water_stirrer_cleaning_wait_time:
maximum: 2147483647
minimum: -2147483648
type: integer
water_stirrer_cleaning_waste_time:
maximum: 2147483647
minimum: -2147483648
type: integer
required:
- nmp_outer_wall_cleaning_injection
- nmp_outer_wall_cleaning_count
- nmp_outer_wall_cleaning_wait_time
- nmp_outer_wall_cleaning_waste_time
- nmp_inner_wall_cleaning_injection
- nmp_inner_wall_cleaning_count
- nmp_pump_cleaning_suction_count
- nmp_inner_wall_cleaning_waste_time
- nmp_stirrer_cleaning_injection
- nmp_stirrer_cleaning_count
- nmp_stirrer_cleaning_wait_time
- nmp_stirrer_cleaning_waste_time
- water_outer_wall_cleaning_injection
- water_outer_wall_cleaning_count
- water_outer_wall_cleaning_wait_time
- water_outer_wall_cleaning_waste_time
- water_inner_wall_cleaning_injection
- water_inner_wall_cleaning_count
- water_pump_cleaning_suction_count
- water_inner_wall_cleaning_waste_time
- water_stirrer_cleaning_injection
- water_stirrer_cleaning_count
- water_stirrer_cleaning_wait_time
- water_stirrer_cleaning_waste_time
- acetone_outer_wall_cleaning_injection
- acetone_outer_wall_cleaning_count
- acetone_outer_wall_cleaning_wait_time
- acetone_outer_wall_cleaning_waste_time
- acetone_inner_wall_cleaning_injection
- acetone_inner_wall_cleaning_count
- acetone_pump_cleaning_suction_count
- acetone_inner_wall_cleaning_waste_time
- acetone_stirrer_cleaning_injection
- acetone_stirrer_cleaning_count
- acetone_stirrer_cleaning_wait_time
- acetone_stirrer_cleaning_waste_time
- pipe_blowing_time
- injection_pump_forward_empty_suction_count
- injection_pump_reverse_empty_suction_count
- filtration_liquid_selection
title: PostProcessTriggerClean_Goal
type: object
result:
properties:
return_info:
type: string
required:
- return_info
title: PostProcessTriggerClean_Result
type: object
required:
- goal
title: PostProcessTriggerClean
type: object
type: PostProcessTriggerClean
trigger_grab_action:
feedback: {}
goal:
raw_tank_number: raw_tank_number
reaction_tank_number: reaction_tank_number
goal_default:
raw_tank_number: 0
reaction_tank_number: 0
handles: {}
result:
return_info: return_info
schema:
description: ''
properties:
feedback:
properties: {}
required: []
title: PostProcessGrab_Feedback
type: object
goal:
properties:
raw_tank_number:
maximum: 2147483647
minimum: -2147483648
type: integer
reaction_tank_number:
maximum: 2147483647
minimum: -2147483648
type: integer
required:
- reaction_tank_number
- raw_tank_number
title: PostProcessGrab_Goal
type: object
result:
properties:
return_info:
type: string
required:
- return_info
title: PostProcessGrab_Result
type: object
required:
- goal
title: PostProcessGrab
type: object
type: PostProcessGrab
trigger_post_processing:
feedback: {}
goal:
atomization_fast_speed: atomization_fast_speed
atomization_pressure_kpa: atomization_pressure_kpa
first_powder_mixing_tim: first_powder_mixing_tim
first_powder_wash_count: first_powder_wash_count
first_wash_water_amount: first_wash_water_amount
initial_water_amount: initial_water_amount
injection_pump_push_speed: injection_pump_push_speed
injection_pump_suction_speed: injection_pump_suction_speed
pre_filtration_mixing_time: pre_filtration_mixing_time
raw_liquid_suction_count: raw_liquid_suction_count
second_powder_mixing_time: second_powder_mixing_time
second_powder_wash_count: second_powder_wash_count
second_wash_water_amount: second_wash_water_amount
wash_slow_speed: wash_slow_speed
goal_default:
atomization_fast_speed: 0.0
atomization_pressure_kpa: 0
first_powder_mixing_tim: 0
first_powder_wash_count: 0
first_wash_water_amount: 0.0
initial_water_amount: 0.0
injection_pump_push_speed: 0
injection_pump_suction_speed: 0
pre_filtration_mixing_time: 0
raw_liquid_suction_count: 0
second_powder_mixing_time: 0
second_powder_wash_count: 0
second_wash_water_amount: 0.0
wash_slow_speed: 0.0
handles: {}
result:
return_info: return_info
schema:
description: ''
properties:
feedback:
properties: {}
required: []
title: PostProcessTriggerPostPro_Feedback
type: object
goal:
properties:
atomization_fast_speed:
type: number
atomization_pressure_kpa:
maximum: 2147483647
minimum: -2147483648
type: integer
first_powder_mixing_tim:
maximum: 2147483647
minimum: -2147483648
type: integer
first_powder_wash_count:
maximum: 2147483647
minimum: -2147483648
type: integer
first_wash_water_amount:
type: number
initial_water_amount:
type: number
injection_pump_push_speed:
maximum: 2147483647
minimum: -2147483648
type: integer
injection_pump_suction_speed:
maximum: 2147483647
minimum: -2147483648
type: integer
pre_filtration_mixing_time:
maximum: 2147483647
minimum: -2147483648
type: integer
raw_liquid_suction_count:
maximum: 2147483647
minimum: -2147483648
type: integer
second_powder_mixing_time:
maximum: 2147483647
minimum: -2147483648
type: integer
second_powder_wash_count:
maximum: 2147483647
minimum: -2147483648
type: integer
second_wash_water_amount:
type: number
wash_slow_speed:
type: number
required:
- atomization_fast_speed
- wash_slow_speed
- injection_pump_suction_speed
- injection_pump_push_speed
- raw_liquid_suction_count
- first_wash_water_amount
- second_wash_water_amount
- first_powder_mixing_tim
- second_powder_mixing_time
- first_powder_wash_count
- second_powder_wash_count
- initial_water_amount
- pre_filtration_mixing_time
- atomization_pressure_kpa
title: PostProcessTriggerPostPro_Goal
type: object
result:
properties:
return_info:
type: string
required:
- return_info
title: PostProcessTriggerPostPro_Result
type: object
required:
- goal
title: PostProcessTriggerPostPro
type: object
type: PostProcessTriggerPostPro
write_node:
feedback:
result: result
goal:
command: json_input
goal_default:
command: ''
handles: {}
result:
success: success
schema:
description: ''
properties:
feedback:
properties:
status:
type: string
required:
- status
title: SendCmd_Feedback
type: object
goal:
properties:
command:
type: string
required:
- command
title: SendCmd_Goal
type: object
result:
properties:
return_info:
type: string
success:
type: boolean
required:
- return_info
- success
title: SendCmd_Result
type: object
required:
- goal
title: SendCmd
type: object
type: SendCmd
module: unilabos.devices.workstation.post_process.post_process:OpcUaClient
status_types:
acetone_tank_empty_alarm: Bool
atomization_fast_speed: Float64
atomization_pressure_kpa: Int32
cleaning_complete: Bool
device_ready: Bool
door_open_alarm: Bool
grab_complete: Bool
grab_trigger: Bool
injection_pump_push_speed: Int32
injection_pump_suction_speed: Int32
nmp_tank_empty_alarm: Bool
post_process_complete: Bool
post_process_trigger: Bool
raw_tank_number: Int32
reaction_tank_number: Int32
remote_mode: Bool
wash_slow_speed: Float64
waste_tank_full_alarm: Bool
water_tank_empty_alarm: Bool
type: python
config_info: []
description: 后处理站
handles: []
icon: post_process_station.webp
init_param_schema: {}
version: 1.0.0
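
For reference, hedged examples of goal payloads that satisfy the action schemas above; the numeric values are placeholders for illustration, not validated process parameters:

# Example goal for trigger_grab_action (PostProcessGrab_Goal):
# both fields are required int32 values.
grab_goal = {
    "reaction_tank_number": 1,
    "raw_tank_number": 2,
}

# Example goal for trigger_post_processing (PostProcessTriggerPostPro_Goal).
# Field names follow the registry exactly, including the "first_powder_mixing_tim"
# spelling it declares.
post_process_goal = {
    "atomization_fast_speed": 120.0,
    "wash_slow_speed": 30.0,
    "injection_pump_suction_speed": 50,
    "injection_pump_push_speed": 50,
    "raw_liquid_suction_count": 2,
    "first_wash_water_amount": 100.0,
    "second_wash_water_amount": 100.0,
    "first_powder_mixing_tim": 60,
    "second_powder_mixing_time": 60,
    "first_powder_wash_count": 2,
    "second_powder_wash_count": 2,
    "initial_water_amount": 200.0,
    "pre_filtration_mixing_time": 60,
    "atomization_pressure_kpa": 50,
}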

View File

@@ -0,0 +1,25 @@
POST_PROCESS_Raw_1BottleCarrier:
  category:
  - bottle_carriers
  class:
    module: unilabos.devices.workstation.post_process.bottle_carriers:POST_PROCESS_Raw_1BottleCarrier
    type: pylabrobot
  description: POST_PROCESS_Raw_1BottleCarrier
  handles: []
  icon: ''
  init_param_schema: {}
  registry_type: resource
  version: 1.0.0
POST_PROCESS_Reaction_1BottleCarrier:
  category:
  - bottle_carriers
  class:
    module: unilabos.devices.workstation.post_process.bottle_carriers:POST_PROCESS_Reaction_1BottleCarrier
    type: pylabrobot
  description: POST_PROCESS_Reaction_1BottleCarrier
  handles: []
  icon: ''
  init_param_schema: {}
  registry_type: resource
  version: 1.0.0

View File

@@ -0,0 +1,11 @@
POST_PROCESS_PolymerStation_Reagent_Bottle:
  category:
  - bottles
  class:
    module: unilabos.devices.workstation.post_process.bottles:POST_PROCESS_PolymerStation_Reagent_Bottle
    type: pylabrobot
  handles: []
  icon: ''
  init_param_schema: {}
  version: 1.0.0

View File

@@ -0,0 +1,12 @@
post_process_deck:
  category:
  - post_process_deck
  class:
    module: unilabos.devices.workstation.post_process.decks:post_process_deck
    type: pylabrobot
  description: post_process_deck
  handles: []
  icon: ''
  init_param_schema: {}
  registry_type: resource
  version: 1.0.0