12 Commits

Author SHA1 Message Date
ZiWei
39cc280c91 feat: Add SyringePump (SY-03B) driver with unified serial/TCP transport for chinwe device, including registry and test configurations. 2025-12-19 03:05:11 +08:00
Xuwznln
152d3a7563 Update docs 2025-12-14 13:12:19 +08:00
Xuwznln
ef14737839 update "laiyu" missing init file. 2025-12-14 13:08:27 +08:00
Xuwznln
5d5569121c fix "laiyu" missing init file. 2025-12-14 12:55:25 +08:00
Xuwznln
d23e85ade4 fix "🐛 fix" 2025-12-14 01:17:24 +08:00
Haohui
02afafd423 🐛 fix: config file is overwritten by default args even if not set. 2025-12-12 23:55:38 +08:00
Xianwei Qi
6ac510dcd2 mix
Modified mix; fixed an error in the simulation workflow
2025-12-11 23:26:11 +08:00
Xuwznln
ed56c1eba2 reduce logs 2025-12-08 19:23:53 +08:00
Xuwznln
16ee3de086 Add workflow upload func. 2025-12-08 19:12:05 +08:00
Junhan Chang
ced961050d add unilabos/workflow and entrypoint 2025-12-07 17:50:27 +08:00
Xuwznln
11b2c99836 update version to 0.10.12
(cherry picked from commit b1cdef9185)
2025-12-04 18:47:44 +08:00
Xuwznln
04024bc8a3 fix ros2 future 2025-12-04 18:44:50 +08:00
55 changed files with 2690 additions and 987 deletions

View File

@@ -1,6 +1,6 @@
package:
  name: unilabos
-  version: 0.10.11
+  version: 0.10.12
source:
  path: ../unilabos

View File

@@ -39,7 +39,9 @@ Uni-Lab-OS recommends using `mamba` for environment management. Choose the appro
```bash
# Create new environment
-mamba create -n unilab uni-lab::unilabos -c robostack-staging -c conda-forge
+mamba create -n unilab python=3.11.11
+mamba activate unilab
+mamba install -n unilab uni-lab::unilabos -c robostack-staging -c conda-forge
```
## Install Dev Uni-Lab-OS

View File

@@ -41,7 +41,9 @@ Uni-Lab-OS 建议使用 `mamba` 管理环境。根据您的操作系统选择适
```bash
# 创建新环境
-mamba create -n unilab uni-lab::unilabos -c robostack-staging -c conda-forge
+mamba create -n unilab python=3.11.11
+mamba activate unilab
+mamba install -n unilab uni-lab::unilabos -c robostack-staging -c conda-forge
```
2. 安装开发版 Uni-Lab-OS:

View File

@@ -317,45 +317,6 @@ unilab --help
如果所有命令都正常输出,说明开发环境配置成功!
-### 开发工具推荐
-#### IDE
-- **PyCharm Professional**: 强大的 Python IDE支持远程调试
-- **VS Code**: 轻量级,配合 Python 扩展使用
-- **Vim/Emacs**: 适合终端开发
-#### 推荐的 VS Code 扩展
-- Python
-- Pylance
-- ROS
-- URDF
-- YAML
-#### 调试工具
-```bash
-# 安装调试工具
-pip install ipdb pytest pytest-cov -i https://mirrors.tuna.tsinghua.edu.cn/pypi/web/simple
-# 代码质量检查
-pip install black flake8 mypy -i https://mirrors.tuna.tsinghua.edu.cn/pypi/web/simple
-```
-### 设置 pre-commit 钩子(可选)
-```bash
-# 安装 pre-commit
-pip install pre-commit -i https://mirrors.tuna.tsinghua.edu.cn/pypi/web/simple
-# 设置钩子
-pre-commit install
-# 手动运行检查
-pre-commit run --all-files
-```
---
## 验证安装

View File

@@ -1,6 +1,6 @@
package:
  name: ros-humble-unilabos-msgs
-  version: 0.10.11
+  version: 0.10.12
source:
  path: ../../unilabos_msgs
  target_directory: src

View File

@@ -1,6 +1,6 @@
package:
  name: unilabos
-  version: "0.10.11"
+  version: "0.10.12"
source:
  path: ../..

View File

@@ -2,7 +2,6 @@ import json
import logging
import traceback
import uuid
-import xml.etree.ElementTree as ET
from typing import Any, Dict, List
import networkx as nx
@@ -25,7 +24,15 @@ class SimpleGraph:
    def add_edge(self, source, target, **attrs):
        """添加边"""
-        edge = {"source": source, "target": target, **attrs}
+        # edge = {"source": source, "target": target, **attrs}
+        edge = {
+            "source": source, "target": target,
+            "source_node_uuid": source,
+            "target_node_uuid": target,
+            "source_handle_io": "source",
+            "target_handle_io": "target",
+            **attrs
+        }
        self.edges.append(edge)
    def to_dict(self):
@@ -42,6 +49,7 @@ class SimpleGraph:
            "multigraph": False,
            "graph": {},
            "nodes": nodes_list,
+            "edges": self.edges,
            "links": self.edges,
        }
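For reference, a minimal sketch (not part of the commit) of how the expanded edge schema surfaces through `to_dict()`, assuming the `SimpleGraph` methods shown above plus the `add_node(node_id, **attrs)` helper used elsewhere in this module:

```python
# Hypothetical illustration of the new edge payload; node ids are made up.
g = SimpleGraph()
g.add_node("n1", template="demo-node")
g.add_node("n2", template="demo-node")
g.add_edge("n1", "n2", source_port="ready", target_port="ready")

doc = g.to_dict()
# "edges" and "links" now carry the same list, so consumers reading either
# key see the uuid/handle fields added above.
assert doc["edges"] == doc["links"]
assert doc["edges"][0]["source_node_uuid"] == "n1"
assert doc["edges"][0]["target_handle_io"] == "target"
```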
@@ -58,495 +66,8 @@ def extract_json_from_markdown(text: str) -> str:
    return text
def convert_to_type(val: str) -> Any:
"""将字符串值转换为适当的数据类型"""
if val == "True":
return True
if val == "False":
return False
if val == "?":
return None
if val.endswith(" g"):
return float(val.split(" ")[0])
if val.endswith("mg"):
return float(val.split("mg")[0])
elif val.endswith("mmol"):
return float(val.split("mmol")[0]) / 1000
elif val.endswith("mol"):
return float(val.split("mol")[0])
elif val.endswith("ml"):
return float(val.split("ml")[0])
elif val.endswith("RPM"):
return float(val.split("RPM")[0])
elif val.endswith(" °C"):
return float(val.split(" ")[0])
elif val.endswith(" %"):
return float(val.split(" ")[0])
return val
def refactor_data(data: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
"""统一的数据重构函数,根据操作类型自动选择模板"""
refactored_data = []
# 定义操作映射,包含生物实验和有机化学的所有操作
OPERATION_MAPPING = {
# 生物实验操作
"transfer_liquid": "SynBioFactory-liquid_handler.prcxi-transfer_liquid",
"transfer": "SynBioFactory-liquid_handler.biomek-transfer",
"incubation": "SynBioFactory-liquid_handler.biomek-incubation",
"move_labware": "SynBioFactory-liquid_handler.biomek-move_labware",
"oscillation": "SynBioFactory-liquid_handler.biomek-oscillation",
# 有机化学操作
"HeatChillToTemp": "SynBioFactory-workstation-HeatChillProtocol",
"StopHeatChill": "SynBioFactory-workstation-HeatChillStopProtocol",
"StartHeatChill": "SynBioFactory-workstation-HeatChillStartProtocol",
"HeatChill": "SynBioFactory-workstation-HeatChillProtocol",
"Dissolve": "SynBioFactory-workstation-DissolveProtocol",
"Transfer": "SynBioFactory-workstation-TransferProtocol",
"Evaporate": "SynBioFactory-workstation-EvaporateProtocol",
"Recrystallize": "SynBioFactory-workstation-RecrystallizeProtocol",
"Filter": "SynBioFactory-workstation-FilterProtocol",
"Dry": "SynBioFactory-workstation-DryProtocol",
"Add": "SynBioFactory-workstation-AddProtocol",
}
UNSUPPORTED_OPERATIONS = ["Purge", "Wait", "Stir", "ResetHandling"]
for step in data:
operation = step.get("action")
if not operation or operation in UNSUPPORTED_OPERATIONS:
continue
# 处理重复操作
if operation == "Repeat":
times = step.get("times", step.get("parameters", {}).get("times", 1))
sub_steps = step.get("steps", step.get("parameters", {}).get("steps", []))
for i in range(int(times)):
sub_data = refactor_data(sub_steps)
refactored_data.extend(sub_data)
continue
# 获取模板名称
template = OPERATION_MAPPING.get(operation)
if not template:
# 自动推断模板类型
if operation.lower() in ["transfer", "incubation", "move_labware", "oscillation"]:
template = f"SynBioFactory-liquid_handler.biomek-{operation}"
else:
template = f"SynBioFactory-workstation-{operation}Protocol"
# 创建步骤数据
step_data = {
"template": template,
"description": step.get("description", step.get("purpose", f"{operation} operation")),
"lab_node_type": "Device",
"parameters": step.get("parameters", step.get("action_args", {})),
}
refactored_data.append(step_data)
return refactored_data
def build_protocol_graph(
labware_info: List[Dict[str, Any]], protocol_steps: List[Dict[str, Any]], workstation_name: str
) -> SimpleGraph:
"""统一的协议图构建函数,根据设备类型自动选择构建逻辑"""
G = SimpleGraph()
resource_last_writer = {}
LAB_NAME = "SynBioFactory"
protocol_steps = refactor_data(protocol_steps)
# 检查协议步骤中的模板来判断协议类型
has_biomek_template = any(
("biomek" in step.get("template", "")) or ("prcxi" in step.get("template", ""))
for step in protocol_steps
)
if has_biomek_template:
# 生物实验协议图构建
for labware_id, labware in labware_info.items():
node_id = str(uuid.uuid4())
labware_attrs = labware.copy()
labware_id = labware_attrs.pop("id", labware_attrs.get("name", f"labware_{uuid.uuid4()}"))
labware_attrs["description"] = labware_id
labware_attrs["lab_node_type"] = (
"Reagent" if "Plate" in str(labware_id) else "Labware" if "Rack" in str(labware_id) else "Sample"
)
labware_attrs["device_id"] = workstation_name
G.add_node(node_id, template=f"{LAB_NAME}-host_node-create_resource", **labware_attrs)
resource_last_writer[labware_id] = f"{node_id}:labware"
# 处理协议步骤
prev_node = None
for i, step in enumerate(protocol_steps):
node_id = str(uuid.uuid4())
G.add_node(node_id, **step)
# 添加控制流边
if prev_node is not None:
G.add_edge(prev_node, node_id, source_port="ready", target_port="ready")
prev_node = node_id
# 处理物料流
params = step.get("parameters", {})
if "sources" in params and params["sources"] in resource_last_writer:
source_node, source_port = resource_last_writer[params["sources"]].split(":")
G.add_edge(source_node, node_id, source_port=source_port, target_port="labware")
if "targets" in params:
resource_last_writer[params["targets"]] = f"{node_id}:labware"
# 添加协议结束节点
end_id = str(uuid.uuid4())
G.add_node(end_id, template=f"{LAB_NAME}-liquid_handler.biomek-run_protocol")
if prev_node is not None:
G.add_edge(prev_node, end_id, source_port="ready", target_port="ready")
else:
# 有机化学协议图构建
WORKSTATION_ID = workstation_name
# 为所有labware创建资源节点
for item_id, item in labware_info.items():
# item_id = item.get("id") or item.get("name", f"item_{uuid.uuid4()}")
node_id = str(uuid.uuid4())
# 判断节点类型
if item.get("type") == "hardware" or "reactor" in str(item_id).lower():
if "reactor" not in str(item_id).lower():
continue
lab_node_type = "Sample"
description = f"Prepare Reactor: {item_id}"
liquid_type = []
liquid_volume = []
else:
lab_node_type = "Reagent"
description = f"Add Reagent to Flask: {item_id}"
liquid_type = [item_id]
liquid_volume = [1e5]
G.add_node(
node_id,
template=f"{LAB_NAME}-host_node-create_resource",
description=description,
lab_node_type=lab_node_type,
res_id=item_id,
device_id=WORKSTATION_ID,
class_name="container",
parent=WORKSTATION_ID,
bind_locations={"x": 0.0, "y": 0.0, "z": 0.0},
liquid_input_slot=[-1],
liquid_type=liquid_type,
liquid_volume=liquid_volume,
slot_on_deck="",
role=item.get("role", ""),
)
resource_last_writer[item_id] = f"{node_id}:labware"
last_control_node_id = None
# 处理协议步骤
for step in protocol_steps:
node_id = str(uuid.uuid4())
G.add_node(node_id, **step)
# 控制流
if last_control_node_id is not None:
G.add_edge(last_control_node_id, node_id, source_port="ready", target_port="ready")
last_control_node_id = node_id
# 物料流
params = step.get("parameters", {})
input_resources = {
"Vessel": params.get("vessel"),
"ToVessel": params.get("to_vessel"),
"FromVessel": params.get("from_vessel"),
"reagent": params.get("reagent"),
"solvent": params.get("solvent"),
"compound": params.get("compound"),
"sources": params.get("sources"),
"targets": params.get("targets"),
}
for target_port, resource_name in input_resources.items():
if resource_name and resource_name in resource_last_writer:
source_node, source_port = resource_last_writer[resource_name].split(":")
G.add_edge(source_node, node_id, source_port=source_port, target_port=target_port)
output_resources = {
"VesselOut": params.get("vessel"),
"FromVesselOut": params.get("from_vessel"),
"ToVesselOut": params.get("to_vessel"),
"FiltrateOut": params.get("filtrate_vessel"),
"reagent": params.get("reagent"),
"solvent": params.get("solvent"),
"compound": params.get("compound"),
"sources_out": params.get("sources"),
"targets_out": params.get("targets"),
}
for source_port, resource_name in output_resources.items():
if resource_name:
resource_last_writer[resource_name] = f"{node_id}:{source_port}"
return G
def draw_protocol_graph(protocol_graph: SimpleGraph, output_path: str):
"""
(辅助功能) 使用 networkx 和 matplotlib 绘制协议工作流图,用于可视化。
"""
if not protocol_graph:
print("Cannot draw graph: Graph object is empty.")
return
G = nx.DiGraph()
for node_id, attrs in protocol_graph.nodes.items():
label = attrs.get("description", attrs.get("template", node_id[:8]))
G.add_node(node_id, label=label, **attrs)
for edge in protocol_graph.edges:
G.add_edge(edge["source"], edge["target"])
plt.figure(figsize=(20, 15))
try:
pos = nx.nx_agraph.graphviz_layout(G, prog="dot")
except Exception:
pos = nx.shell_layout(G) # Fallback layout
node_labels = {node: data["label"] for node, data in G.nodes(data=True)}
nx.draw(
G,
pos,
with_labels=False,
node_size=2500,
node_color="skyblue",
node_shape="o",
edge_color="gray",
width=1.5,
arrowsize=15,
)
nx.draw_networkx_labels(G, pos, labels=node_labels, font_size=8, font_weight="bold")
plt.title("Chemical Protocol Workflow Graph", size=15)
plt.savefig(output_path, dpi=300, bbox_inches="tight")
plt.close()
print(f" - Visualization saved to '{output_path}'")
from networkx.drawing.nx_agraph import to_agraph
import re
COMPASS = {"n","e","s","w","ne","nw","se","sw","c"}
def _is_compass(port: str) -> bool:
return isinstance(port, str) and port.lower() in COMPASS
def draw_protocol_graph_with_ports(protocol_graph, output_path: str, rankdir: str = "LR"):
"""
使用 Graphviz 端口语法绘制协议工作流图。
- 若边上的 source_port/target_port 是 compassn/e/s/w/...),直接用 compass。
- 否则自动为节点创建 record 形状并定义命名端口 <portname>。
最终由 PyGraphviz 渲染并输出到 output_path后缀决定格式如 .png/.svg/.pdf
"""
if not protocol_graph:
print("Cannot draw graph: Graph object is empty.")
return
# 1) 先用 networkx 搭建有向图,保留端口属性
G = nx.DiGraph()
for node_id, attrs in protocol_graph.nodes.items():
label = attrs.get("description", attrs.get("template", node_id[:8]))
# 保留一个干净的“中心标签”,用于放在 record 的中间槽
G.add_node(node_id, _core_label=str(label), **{k:v for k,v in attrs.items() if k not in ("label",)})
edges_data = []
in_ports_by_node = {} # 收集命名输入端口
out_ports_by_node = {} # 收集命名输出端口
for edge in protocol_graph.edges:
u = edge["source"]
v = edge["target"]
sp = edge.get("source_port")
tp = edge.get("target_port")
# 记录到图里(保留原始端口信息)
G.add_edge(u, v, source_port=sp, target_port=tp)
edges_data.append((u, v, sp, tp))
# 如果不是 compass就按“命名端口”先归类等会儿给节点造 record
if sp and not _is_compass(sp):
out_ports_by_node.setdefault(u, set()).add(str(sp))
if tp and not _is_compass(tp):
in_ports_by_node.setdefault(v, set()).add(str(tp))
# 2) 转为 AGraph使用 Graphviz 渲染
A = to_agraph(G)
A.graph_attr.update(rankdir=rankdir, splines="true", concentrate="false", fontsize="10")
A.node_attr.update(shape="box", style="rounded,filled", fillcolor="lightyellow", color="#999999", fontname="Helvetica")
A.edge_attr.update(arrowsize="0.8", color="#666666")
# 3) 为需要命名端口的节点设置 record 形状与 label
# 左列 = 输入端口;中间 = 核心标签;右列 = 输出端口
for n in A.nodes():
node = A.get_node(n)
core = G.nodes[n].get("_core_label", n)
in_ports = sorted(in_ports_by_node.get(n, []))
out_ports = sorted(out_ports_by_node.get(n, []))
# 如果该节点涉及命名端口,则用 record否则保留原 box
if in_ports or out_ports:
def port_fields(ports):
if not ports:
return " " # 必须留一个空槽占位
# 每个端口一个小格子,<p> name
return "|".join(f"<{re.sub(r'[^A-Za-z0-9_:.|-]', '_', p)}> {p}" for p in ports)
left = port_fields(in_ports)
right = port_fields(out_ports)
# 三栏:左(入) | 中(节点名) | 右(出)
record_label = f"{{ {left} | {core} | {right} }}"
node.attr.update(shape="record", label=record_label)
else:
# 没有命名端口:普通盒子,显示核心标签
node.attr.update(label=str(core))
# 4) 给边设置 headport / tailport
# - 若端口为 compass直接用 compasse.g., headport="e"
# - 若端口为命名端口:使用在 record 中定义的 <port> 名(同名即可)
for (u, v, sp, tp) in edges_data:
e = A.get_edge(u, v)
# Graphviz 属性tail 是源head 是目标
if sp:
if _is_compass(sp):
e.attr["tailport"] = sp.lower()
else:
# 与 record label 中 <port> 名一致;特殊字符已在 label 中做了清洗
e.attr["tailport"] = re.sub(r'[^A-Za-z0-9_:.|-]', '_', str(sp))
if tp:
if _is_compass(tp):
e.attr["headport"] = tp.lower()
else:
e.attr["headport"] = re.sub(r'[^A-Za-z0-9_:.|-]', '_', str(tp))
# 可选:若想让边更贴边缘,可设置 constraint/spline 等
# e.attr["arrowhead"] = "vee"
# 5) 输出
A.draw(output_path, prog="dot")
print(f" - Port-aware workflow rendered to '{output_path}'")
def flatten_xdl_procedure(procedure_elem: ET.Element) -> List[ET.Element]:
"""展平嵌套的XDL程序结构"""
flattened_operations = []
TEMP_UNSUPPORTED_PROTOCOL = ["Purge", "Wait", "Stir", "ResetHandling"]
def extract_operations(element: ET.Element):
if element.tag not in ["Prep", "Reaction", "Workup", "Purification", "Procedure"]:
if element.tag not in TEMP_UNSUPPORTED_PROTOCOL:
flattened_operations.append(element)
for child in element:
extract_operations(child)
for child in procedure_elem:
extract_operations(child)
return flattened_operations
def parse_xdl_content(xdl_content: str) -> tuple:
"""解析XDL内容"""
try:
xdl_content_cleaned = "".join(c for c in xdl_content if c.isprintable())
root = ET.fromstring(xdl_content_cleaned)
synthesis_elem = root.find("Synthesis")
if synthesis_elem is None:
return None, None, None
# 解析硬件组件
hardware_elem = synthesis_elem.find("Hardware")
hardware = []
if hardware_elem is not None:
hardware = [{"id": c.get("id"), "type": c.get("type")} for c in hardware_elem.findall("Component")]
# 解析试剂
reagents_elem = synthesis_elem.find("Reagents")
reagents = []
if reagents_elem is not None:
reagents = [{"name": r.get("name"), "role": r.get("role", "")} for r in reagents_elem.findall("Reagent")]
# 解析程序
procedure_elem = synthesis_elem.find("Procedure")
if procedure_elem is None:
return None, None, None
flattened_operations = flatten_xdl_procedure(procedure_elem)
return hardware, reagents, flattened_operations
except ET.ParseError as e:
raise ValueError(f"Invalid XDL format: {e}")
def convert_xdl_to_dict(xdl_content: str) -> Dict[str, Any]:
"""
将XDL XML格式转换为标准的字典格式
Args:
xdl_content: XDL XML内容
Returns:
转换结果,包含步骤和器材信息
"""
try:
hardware, reagents, flattened_operations = parse_xdl_content(xdl_content)
if hardware is None:
return {"error": "Failed to parse XDL content", "success": False}
# 将XDL元素转换为字典格式
steps_data = []
for elem in flattened_operations:
# 转换参数类型
parameters = {}
for key, val in elem.attrib.items():
converted_val = convert_to_type(val)
if converted_val is not None:
parameters[key] = converted_val
step_dict = {
"operation": elem.tag,
"parameters": parameters,
"description": elem.get("purpose", f"Operation: {elem.tag}"),
}
steps_data.append(step_dict)
# 合并硬件和试剂为统一的labware_info格式
labware_data = []
labware_data.extend({"id": hw["id"], "type": "hardware", **hw} for hw in hardware)
labware_data.extend({"name": reagent["name"], "type": "reagent", **reagent} for reagent in reagents)
return {
"success": True,
"steps": steps_data,
"labware": labware_data,
"message": f"Successfully converted XDL to dict format. Found {len(steps_data)} steps and {len(labware_data)} labware items.",
}
except Exception as e:
error_msg = f"XDL conversion failed: {str(e)}"
logger.error(error_msg)
return {"error": error_msg, "success": False}
def create_workflow(

View File

@@ -4,7 +4,7 @@ package_name = 'unilabos'
setup(
    name=package_name,
-    version='0.10.11',
+    version='0.10.12',
    packages=find_packages(),
    include_package_data=True,
    install_requires=['setuptools'],

View File

Binary image changed (size unchanged: 148 KiB).

View File

Binary image changed (size unchanged: 140 KiB).

View File

Binary image changed (size unchanged: 117 KiB).

View File

@@ -0,0 +1,35 @@
import sys
from datetime import datetime
from pathlib import Path
ROOT_DIR = Path(__file__).resolve().parents[2]
if str(ROOT_DIR) not in sys.path:
sys.path.insert(0, str(ROOT_DIR))
import pytest
from unilabos.workflow.convert_from_json import (
convert_from_json,
normalize_steps as _normalize_steps,
normalize_labware as _normalize_labware,
)
from unilabos.workflow.common import draw_protocol_graph_with_ports
@pytest.mark.parametrize(
"protocol_name",
[
"example_bio",
# "bioyond_materials_liquidhandling_1",
"example_prcxi",
],
)
def test_build_protocol_graph(protocol_name):
data_path = Path(__file__).with_name(f"{protocol_name}.json")
graph = convert_from_json(data_path, workstation_name="PRCXi")
timestamp = datetime.now().strftime("%Y%m%d_%H%M")
output_path = data_path.with_name(f"{protocol_name}_graph_{timestamp}.png")
draw_protocol_graph_with_ports(graph, str(output_path))
print(graph)

View File

@@ -1 +1 @@
__version__ = "0.10.11" __version__ = "0.10.12"

View File

@@ -20,6 +20,7 @@ if unilabos_dir not in sys.path:
from unilabos.utils.banner_print import print_status, print_unilab_banner
from unilabos.config.config import load_config, BasicConfig, HTTPConfig
def load_config_from_file(config_path):
    if config_path is None:
        config_path = os.environ.get("UNILABOS_BASICCONFIG_CONFIG_PATH", None)
@@ -41,7 +42,7 @@ def convert_argv_dashes_to_underscores(args: argparse.ArgumentParser):
    for i, arg in enumerate(sys.argv):
        for option_string in option_strings:
            if arg.startswith(option_string):
-                new_arg = arg[:2] + arg[2:len(option_string)].replace("-", "_") + arg[len(option_string):]
+                new_arg = arg[:2] + arg[2 : len(option_string)].replace("-", "_") + arg[len(option_string) :]
                sys.argv[i] = new_arg
                break
@@ -49,6 +50,8 @@ def convert_argv_dashes_to_underscores(args: argparse.ArgumentParser):
def parse_args():
    """解析命令行参数"""
    parser = argparse.ArgumentParser(description="Start Uni-Lab Edge server.")
+    subparsers = parser.add_subparsers(title="Valid subcommands", dest="command")
    parser.add_argument("-g", "--graph", help="Physical setup graph file path.")
    parser.add_argument("-c", "--controllers", default=None, help="Controllers config file path.")
    parser.add_argument(
@@ -153,21 +156,54 @@ def parse_args():
        default=False,
        help="Complete registry information",
    )
# workflow upload subcommand
workflow_parser = subparsers.add_parser(
"workflow_upload",
aliases=["wf"],
help="Upload workflow from xdl/json/python files",
)
workflow_parser.add_argument(
"-f",
"--workflow_file",
type=str,
required=True,
help="Path to the workflow file (JSON format)",
)
workflow_parser.add_argument(
"-n",
"--workflow_name",
type=str,
default=None,
help="Workflow name, if not provided will use the name from file or filename",
)
workflow_parser.add_argument(
"--tags",
type=str,
nargs="*",
default=[],
help="Tags for the workflow (space-separated)",
)
workflow_parser.add_argument(
"--published",
action="store_true",
default=False,
help="Whether to publish the workflow (default: False)",
)
    return parser
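With the subparser defined above, the new subcommand should be invocable roughly as follows (a hedged sketch: flag spellings come from the parser definitions, while the file name and tag values are illustrative):

```python
# Hypothetical invocation of the new subcommand; equivalent shell form:
#   unilab workflow_upload -f my_workflow.json -n demo_workflow --tags demo test --published
import subprocess

subprocess.run(
    [
        "unilab", "workflow_upload",   # alias "wf" is registered as well
        "-f", "my_workflow.json",      # --workflow_file (required)
        "-n", "demo_workflow",         # --workflow_name (optional)
        "--tags", "demo", "test",      # space-separated tags
        "--published",                 # publish instead of keeping a draft
    ],
    check=True,
)
```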
def main():
    """主函数"""
    # 解析命令行参数
-    args = parse_args()
-    convert_argv_dashes_to_underscores(args)
-    args_dict = vars(args.parse_args())
+    parser = parse_args()
+    convert_argv_dashes_to_underscores(parser)
+    args = parser.parse_args()
+    args_dict = vars(args)
    # 环境检查 - 检查并自动安装必需的包 (可选)
    if not args_dict.get("skip_env_check", False):
        from unilabos.utils.environment_check import check_environment
+        print_status("正在进行环境依赖检查...", "info")
        if not check_environment(auto_install=True):
            print_status("环境检查失败,程序退出", "error")
            os._exit(1)
@@ -220,17 +256,18 @@ def main():
logger.info(f"Log level set to '{BasicConfig.log_level}' from config file.") logger.info(f"Log level set to '{BasicConfig.log_level}' from config file.")
configure_logger(loglevel=BasicConfig.log_level, working_dir=working_dir) configure_logger(loglevel=BasicConfig.log_level, working_dir=working_dir)
if args_dict["addr"] == "test": if args.addr != parser.get_default("addr"):
if args.addr == "test":
print_status("使用测试环境地址", "info") print_status("使用测试环境地址", "info")
HTTPConfig.remote_addr = "https://uni-lab.test.bohrium.com/api/v1" HTTPConfig.remote_addr = "https://uni-lab.test.bohrium.com/api/v1"
elif args_dict["addr"] == "uat": elif args.addr == "uat":
print_status("使用uat环境地址", "info") print_status("使用uat环境地址", "info")
HTTPConfig.remote_addr = "https://uni-lab.uat.bohrium.com/api/v1" HTTPConfig.remote_addr = "https://uni-lab.uat.bohrium.com/api/v1"
elif args_dict["addr"] == "local": elif args.addr == "local":
print_status("使用本地环境地址", "info") print_status("使用本地环境地址", "info")
HTTPConfig.remote_addr = "http://127.0.0.1:48197/api/v1" HTTPConfig.remote_addr = "http://127.0.0.1:48197/api/v1"
else: else:
HTTPConfig.remote_addr = args_dict.get("addr", "") HTTPConfig.remote_addr = args.addr
# 设置BasicConfig参数 # 设置BasicConfig参数
if args_dict.get("ak", ""): if args_dict.get("ak", ""):
@@ -239,9 +276,12 @@ def main():
if args_dict.get("sk", ""): if args_dict.get("sk", ""):
BasicConfig.sk = args_dict.get("sk", "") BasicConfig.sk = args_dict.get("sk", "")
print_status("传入了sk参数优先采用传入参数", "info") print_status("传入了sk参数优先采用传入参数", "info")
BasicConfig.working_dir = working_dir
workflow_upload = args_dict.get("command") in ("workflow_upload", "wf")
# 使用远程资源启动 # 使用远程资源启动
if args_dict["use_remote_resource"]: if not workflow_upload and args_dict["use_remote_resource"]:
print_status("使用远程资源启动", "info") print_status("使用远程资源启动", "info")
from unilabos.app.web import http_client from unilabos.app.web import http_client
@@ -254,7 +294,6 @@ def main():
    BasicConfig.port = args_dict["port"] if args_dict["port"] else BasicConfig.port
    BasicConfig.disable_browser = args_dict["disable_browser"] or BasicConfig.disable_browser
-    BasicConfig.working_dir = working_dir
    BasicConfig.is_host_mode = not args_dict.get("is_slave", False)
    BasicConfig.slave_no_host = args_dict.get("slave_no_host", False)
    BasicConfig.upload_registry = args_dict.get("upload_registry", False)
@@ -283,9 +322,31 @@ def main():
    # 注册表
    lab_registry = build_registry(
-        args_dict["registry_path"], args_dict.get("complete_registry", False), args_dict["upload_registry"]
+        args_dict["registry_path"], args_dict.get("complete_registry", False), BasicConfig.upload_registry
    )
if BasicConfig.upload_registry:
# 设备注册到服务端 - 需要 ak 和 sk
if BasicConfig.ak and BasicConfig.sk:
print_status("开始注册设备到服务端...", "info")
try:
register_devices_and_resources(lab_registry)
print_status("设备注册完成", "info")
except Exception as e:
print_status(f"设备注册失败: {e}", "error")
else:
print_status("未提供 ak 和 sk跳过设备注册", "info")
else:
print_status("本次启动注册表不报送云端,如果您需要联网调试,请在启动命令增加--upload_registry", "warning")
# 处理 workflow_upload 子命令
if workflow_upload:
from unilabos.workflow.wf_utils import handle_workflow_upload_command
handle_workflow_upload_command(args_dict)
print_status("工作流上传完成,程序退出", "info")
os._exit(0)
    if not BasicConfig.ak or not BasicConfig.sk:
        print_status("后续运行必须拥有一个实验室,请前往 https://uni-lab.bohrium.com 注册实验室!", "warning")
        os._exit(1)
@@ -362,20 +423,6 @@ def main():
args_dict["devices_config"] = resource_tree_set args_dict["devices_config"] = resource_tree_set
args_dict["graph"] = graph_res.physical_setup_graph args_dict["graph"] = graph_res.physical_setup_graph
if BasicConfig.upload_registry:
# 设备注册到服务端 - 需要 ak 和 sk
if BasicConfig.ak and BasicConfig.sk:
print_status("开始注册设备到服务端...", "info")
try:
register_devices_and_resources(lab_registry)
print_status("设备注册完成", "info")
except Exception as e:
print_status(f"设备注册失败: {e}", "error")
else:
print_status("未提供 ak 和 sk跳过设备注册", "info")
else:
print_status("本次启动注册表不报送云端,如果您需要联网调试,请在启动命令增加--upload_registry", "warning")
if args_dict["controllers"] is not None: if args_dict["controllers"] is not None:
args_dict["controllers_config"] = yaml.safe_load(open(args_dict["controllers"], encoding="utf-8")) args_dict["controllers_config"] = yaml.safe_load(open(args_dict["controllers"], encoding="utf-8"))
else: else:
@@ -390,6 +437,7 @@ def main():
        comm_client = get_communication_client()
        if "websocket" in args_dict["app_bridges"]:
            args_dict["bridges"].append(comm_client)
        def _exit(signum, frame):
            comm_client.stop()
            sys.exit(0)
@@ -431,16 +479,13 @@ def main():
                resource_visualization.start()
            except OSError as e:
                if "AMENT_PREFIX_PATH" in str(e):
-                    print_status(
-                        f"ROS 2环境未正确设置跳过3D可视化启动。错误详情: {e}",
-                        "warning"
-                    )
+                    print_status(f"ROS 2环境未正确设置跳过3D可视化启动。错误详情: {e}", "warning")
                    print_status(
                        "建议解决方案:\n"
                        "1. 激活Conda环境: conda activate unilab\n"
                        "2. 或使用 --backend simple 参数\n"
                        "3. 或使用 --visual disable 参数禁用可视化",
-                        "info"
+                        "info",
                    )
                else:
                    raise

View File

@@ -76,7 +76,8 @@ class HTTPClient:
            Dict[str, str]: 旧UUID到新UUID的映射关系 {old_uuid: new_uuid}
        """
        with open(os.path.join(BasicConfig.working_dir, "req_resource_tree_add.json"), "w", encoding="utf-8") as f:
-            f.write(json.dumps({"nodes": [x for xs in resources.dump() for x in xs], "mount_uuid": mount_uuid}, indent=4))
+            payload = {"nodes": [x for xs in resources.dump() for x in xs], "mount_uuid": mount_uuid}
+            f.write(json.dumps(payload, indent=4))
        # 从序列化数据中提取所有节点的UUID保存旧UUID
        old_uuids = {n.res_content.uuid: n for n in resources.all_nodes}
        if not self.initialized or first_add:
@@ -331,6 +332,67 @@ class HTTPClient:
logger.error(f"响应内容: {response.text}") logger.error(f"响应内容: {response.text}")
return None return None
def workflow_import(
self,
name: str,
workflow_uuid: str,
workflow_name: str,
nodes: List[Dict[str, Any]],
edges: List[Dict[str, Any]],
tags: Optional[List[str]] = None,
published: bool = False,
) -> Dict[str, Any]:
"""
导入工作流到服务器
Args:
name: 工作流名称(顶层)
workflow_uuid: 工作流UUID
workflow_name: 工作流名称data内部
nodes: 工作流节点列表
edges: 工作流边列表
tags: 工作流标签列表,默认为空列表
published: 是否发布工作流默认为False
Returns:
Dict: API响应数据包含 code 和 data (uuid, name)
"""
# target_lab_uuid 暂时使用默认值,后续由后端根据 ak/sk 获取
payload = {
"target_lab_uuid": "28c38bb0-63f6-4352-b0d8-b5b8eb1766d5",
"name": name,
"data": {
"workflow_uuid": workflow_uuid,
"workflow_name": workflow_name,
"nodes": nodes,
"edges": edges,
"tags": tags if tags is not None else [],
"published": published,
},
}
# 保存请求到文件
with open(os.path.join(BasicConfig.working_dir, "req_workflow_upload.json"), "w", encoding="utf-8") as f:
f.write(json.dumps(payload, indent=4, ensure_ascii=False))
response = requests.post(
f"{self.remote_addr}/lab/workflow/owner/import",
json=payload,
headers={"Authorization": f"Lab {self.auth}"},
timeout=60,
)
# 保存响应到文件
with open(os.path.join(BasicConfig.working_dir, "res_workflow_upload.json"), "w", encoding="utf-8") as f:
f.write(f"{response.status_code}" + "\n" + response.text)
if response.status_code == 200:
res = response.json()
if "code" in res and res["code"] != 0:
logger.error(f"导入工作流失败: {response.text}")
return res
else:
logger.error(f"导入工作流失败: {response.status_code}, {response.text}")
return {"code": response.status_code, "message": response.text}
# 创建默认客户端实例
http_client = HTTPClient()
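A minimal usage sketch (not from the commit) for the new `workflow_import` method, using the default `http_client` instance created above; the node and edge payloads are illustrative placeholders borrowing template names seen elsewhere in this changeset:

```python
# Hypothetical call to HTTPClient.workflow_import as defined above.
import uuid

nodes = [
    {"id": "n1", "template": "SynBioFactory-host_node-create_resource"},
    {"id": "n2", "template": "SynBioFactory-workstation-AddProtocol"},
]
edges = [{"source": "n1", "target": "n2", "source_port": "labware", "target_port": "labware"}]

res = http_client.workflow_import(
    name="demo workflow",              # top-level name
    workflow_uuid=str(uuid.uuid4()),
    workflow_name="demo workflow",     # name inside the data block
    nodes=nodes,
    edges=edges,
    tags=["demo"],
    published=False,
)
if res.get("code") == 0:
    print("imported:", res.get("data"))
```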

View File

@@ -438,7 +438,7 @@ class MessageProcessor:
            self.connected = True
            self.reconnect_count = 0
-            logger.info(f"[MessageProcessor] Connected to {self.websocket_url}")
+            logger.trace(f"[MessageProcessor] Connected to {self.websocket_url}")
            # 启动发送协程
            send_task = asyncio.create_task(self._send_handler())
@@ -503,7 +503,7 @@ class MessageProcessor:
    async def _send_handler(self):
        """处理发送队列中的消息"""
-        logger.debug("[MessageProcessor] Send handler started")
+        logger.trace("[MessageProcessor] Send handler started")
        try:
            while self.connected and self.websocket:
@@ -965,7 +965,7 @@ class QueueProcessor:
    def _run(self):
        """运行队列处理主循环"""
-        logger.debug("[QueueProcessor] Queue processor started")
+        logger.trace("[QueueProcessor] Queue processor started")
        while self.is_running:
            try:
@@ -1175,7 +1175,6 @@ class WebSocketClient(BaseCommunicationClient):
        else:
            url = f"{scheme}://{parsed.netloc}/api/v1/ws/schedule"
-        logger.debug(f"[WebSocketClient] URL: {url}")
        return url
    def start(self) -> None:
@@ -1188,13 +1187,11 @@ class WebSocketClient(BaseCommunicationClient):
logger.error("[WebSocketClient] WebSocket URL not configured") logger.error("[WebSocketClient] WebSocket URL not configured")
return return
logger.info(f"[WebSocketClient] Starting connection to {self.websocket_url}")
# 启动两个核心线程 # 启动两个核心线程
self.message_processor.start() self.message_processor.start()
self.queue_processor.start() self.queue_processor.start()
logger.info("[WebSocketClient] All threads started") logger.trace("[WebSocketClient] All threads started")
def stop(self) -> None: def stop(self) -> None:
"""停止WebSocket客户端""" """停止WebSocket客户端"""

View File

@@ -21,7 +21,8 @@ class BasicConfig:
    startup_json_path = None  # 填写绝对路径
    disable_browser = False  # 禁止浏览器自动打开
    port = 8002  # 本地HTTP服务
-    log_level: Literal['TRACE', 'DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'] = "DEBUG"  # 'TRACE', 'DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'
+    # 'TRACE', 'DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'
+    log_level: Literal["TRACE", "DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"] = "DEBUG"
    @classmethod
    def auth_secret(cls):
@@ -41,7 +42,7 @@ class WSConfig:
# HTTP配置
class HTTPConfig:
-    remote_addr = "http://127.0.0.1:48197/api/v1"
+    remote_addr = "https://uni-lab.bohrium.com/api/v1"
# ROS配置
@@ -65,13 +66,14 @@ def _update_config_from_module(module):
if not attr.startswith("_"): if not attr.startswith("_"):
setattr(obj, attr, getattr(getattr(module, name), attr)) setattr(obj, attr, getattr(getattr(module, name), attr))
def _update_config_from_env(): def _update_config_from_env():
prefix = "UNILABOS_" prefix = "UNILABOS_"
for env_key, env_value in os.environ.items(): for env_key, env_value in os.environ.items():
if not env_key.startswith(prefix): if not env_key.startswith(prefix):
continue continue
try: try:
key_path = env_key[len(prefix):] # Remove UNILAB_ prefix key_path = env_key[len(prefix) :] # Remove UNILAB_ prefix
class_field = key_path.upper().split("_", 1) class_field = key_path.upper().split("_", 1)
if len(class_field) != 2: if len(class_field) != 2:
logger.warning(f"[ENV] 环境变量格式不正确:{env_key}") logger.warning(f"[ENV] 环境变量格式不正确:{env_key}")

View File

@@ -989,6 +989,18 @@ class LiquidHandlerAbstract(LiquidHandlerMiddleware):
        else:
            dis_vols = [float(v) for v in dis_vols]
# 统一混合次数为标量,防止数组/列表与 int 比较时报错
if mix_times is not None and not isinstance(mix_times, (int, float)):
try:
mix_times = mix_times[0] if len(mix_times) > 0 else None
except Exception:
try:
mix_times = next(iter(mix_times))
except Exception:
pass
if mix_times is not None:
mix_times = int(mix_times)
        # 识别传输模式
        num_sources = len(sources)
        num_targets = len(targets)
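The added guard normalizes list-like `mix_times` values to a scalar `int` before any comparison; a standalone sketch of the same coercion (illustrative, outside the class):

```python
# Standalone restatement of the mix_times coercion added above (not repo code).
def normalize_mix_times(mix_times):
    if mix_times is not None and not isinstance(mix_times, (int, float)):
        try:
            mix_times = mix_times[0] if len(mix_times) > 0 else None
        except Exception:
            try:
                mix_times = next(iter(mix_times))
            except Exception:
                pass
    if mix_times is not None:
        mix_times = int(mix_times)
    return mix_times

assert normalize_mix_times([3, 3, 3]) == 3   # array/list input collapses to a scalar
assert normalize_mix_times(2.0) == 2
assert normalize_mix_times([]) is None
assert normalize_mix_times(None) is None
```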

View File

@@ -5,6 +5,7 @@ import json
import os
import socket
import time
+import uuid
from typing import Any, List, Dict, Optional, Tuple, TypedDict, Union, Sequence, Iterator, Literal
from pylabrobot.liquid_handling import (
@@ -856,7 +857,30 @@ class PRCXI9300Api:
    def _raw_request(self, payload: str) -> str:
        if self.debug:
-            return " "
+            # 调试/仿真模式下直接返回可解析的模拟 JSON避免后续 json.loads 报错
try:
req = json.loads(payload)
method = req.get("MethodName")
except Exception:
method = None
data: Any = True
if method in {"AddSolution"}:
data = str(uuid.uuid4())
elif method in {"AddWorkTabletMatrix", "AddWorkTabletMatrix2"}:
data = {"Success": True, "Message": "debug mock"}
elif method in {"GetErrorCode"}:
data = ""
elif method in {"RemoveErrorCodet", "Reset", "Start", "LoadSolution", "Pause", "Resume", "Stop"}:
data = True
elif method in {"GetStepStateList", "GetStepStatus", "GetStepState"}:
data = []
elif method in {"GetLocation"}:
data = {"X": 0, "Y": 0, "Z": 0}
elif method in {"GetResetStatus"}:
data = False
return json.dumps({"Success": True, "Msg": "debug mock", "Data": data})
        with contextlib.closing(socket.socket()) as sock:
            sock.settimeout(self.timeout)
            sock.connect((self.host, self.port))
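With the debug branch above, `_raw_request` never opens a socket and always hands back parseable JSON (previously it returned a bare space, which broke the downstream `json.loads`). An illustrative look at the mocked reply for a `GetLocation` request:

```python
# Illustration only: shape of the debug-mode reply per the branch table above.
import json

mock_reply = json.dumps({"Success": True, "Msg": "debug mock", "Data": {"X": 0, "Y": 0, "Z": 0}})
parsed = json.loads(mock_reply)   # downstream json.loads no longer fails
assert parsed["Success"] and parsed["Data"] == {"X": 0, "Y": 0, "Z": 0}
```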

View File

@@ -1,282 +1,636 @@
-import sys
-import threading
-import serial
-import serial.tools.list_ports
-import re
-import time
-from typing import Optional, List, Dict, Tuple
+# -*- coding: utf-8 -*-
+"""
+Contains drivers for:
+1. SyringePump: Runze Fluid SY-03B (ASCII)
+2. EmmMotor: Emm V5.0 Closed-loop Stepper (Modbus-RTU variant)
+3. XKCSensor: XKC Non-contact Level Sensor (Modbus-RTU)
+"""
+import socket
+import serial
+import time
+import threading
+import struct
+import re
+import traceback
+import queue
+from typing import Optional, Dict, List, Any
class ChinweDevice: try:
""" from unilabos.device_comms.universal_driver import UniversalDriver
ChinWe设备控制类 except ImportError:
提供串口通信、电机控制、传感器数据读取等功能 import logging
""" class UniversalDriver:
def __init__(self):
self.logger = logging.getLogger(self.__class__.__name__)
def __init__(self, port: str, baudrate: int = 115200, debug: bool = False): def execute_command_from_outer(self, command: str):
""" pass
初始化ChinWe设备
Args: # ==============================================================================
port: 串口名称如果为None则自动检测 # 1. Transport Layer (通信层)
baudrate: 波特率默认115200 # ==============================================================================
class TransportManager:
""" """
self.debug = debug 统一通信管理类。
自动识别 串口 (Serial) 或 网络 (TCP) 连接。
"""
def __init__(self, port: str, baudrate: int = 9600, timeout: float = 3.0, logger=None):
self.port = port self.port = port
self.baudrate = baudrate self.baudrate = baudrate
self.serial_port: Optional[serial.Serial] = None self.timeout = timeout
self._voltage: float = 0.0 self.logger = logger
self._ec_value: float = 0.0 self.lock = threading.RLock() # 线程锁,确保多设备共用一个连接时不冲突
self._ec_adc_value: int = 0
self.is_tcp = False
self.serial = None
self.socket = None
# 简单判断: 如果包含 ':' (如 192.168.1.1:8899) 或者看起来像 IP则认为是 TCP
if ':' in self.port or (self.port.count('.') == 3 and not self.port.startswith('/')):
self.is_tcp = True
self._connect_tcp()
else:
self._connect_serial()
def _log(self, msg):
if self.logger:
pass
# self.logger.debug(f"[Transport] {msg}")
def _connect_tcp(self):
try:
if ':' in self.port:
host, p = self.port.split(':')
self.tcp_host = host
self.tcp_port = int(p)
else:
self.tcp_host = self.port
self.tcp_port = 8899 # 默认端口
# if self.logger: self.logger.info(f"Connecting TCP {self.tcp_host}:{self.tcp_port} ...")
self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.socket.settimeout(self.timeout)
self.socket.connect((self.tcp_host, self.tcp_port))
except Exception as e:
raise ConnectionError(f"TCP connection failed: {e}")
def _connect_serial(self):
try:
# if self.logger: self.logger.info(f"Opening Serial {self.port} (Baud: {self.baudrate}) ...")
self.serial = serial.Serial(
port=self.port,
baudrate=self.baudrate,
timeout=self.timeout
)
except Exception as e:
raise ConnectionError(f"Serial open failed: {e}")
def close(self):
"""关闭连接"""
if self.is_tcp and self.socket:
try: self.socket.close()
except: pass
elif not self.is_tcp and self.serial and self.serial.is_open:
self.serial.close()
def clear_buffer(self):
"""清空缓冲区 (Thread-safe)"""
with self.lock:
if self.is_tcp:
self.socket.setblocking(False)
try:
while True:
if not self.socket.recv(1024): break
except: pass
finally: self.socket.settimeout(self.timeout)
else:
self.serial.reset_input_buffer()
def write(self, data: bytes):
"""发送原始字节"""
with self.lock:
if self.is_tcp:
self.socket.sendall(data)
else:
self.serial.write(data)
def read(self, size: int) -> bytes:
"""读取指定长度字节"""
if self.is_tcp:
data = b''
start = time.time()
while len(data) < size:
if time.time() - start > self.timeout: break
try:
chunk = self.socket.recv(size - len(data))
if not chunk: break
data += chunk
except socket.timeout: break
return data
else:
return self.serial.read(size)
def send_ascii_command(self, command: str) -> str:
"""
发送 ASCII 字符串命令 (如注射泵指令),读取直到 '\r'
"""
with self.lock:
data = command.encode('ascii') if isinstance(command, str) else command
self.clear_buffer()
self.write(data)
# Read until \r
if self.is_tcp:
resp = b''
start = time.time()
while True:
if time.time() - start > self.timeout: break
try:
char = self.socket.recv(1)
if not char: break
resp += char
if char == b'\r': break
except: break
return resp.decode('ascii', errors='ignore').strip()
else:
return self.serial.read_until(b'\r').decode('ascii', errors='ignore').strip()
# ==============================================================================
# 2. Syringe Pump Driver (注射泵)
# ==============================================================================
class SyringePump:
"""SY-03B 注射泵驱动 (ASCII协议)"""
CMD_INITIALIZE = "Z{speed},{drain_port},{output_port}R"
CMD_SWITCH_VALVE = "I{port}R"
CMD_ASPIRATE = "P{vol}R"
CMD_DISPENSE = "D{vol}R"
CMD_DISPENSE_ALL = "A0R"
CMD_STOP = "TR"
CMD_QUERY_STATUS = "Q"
CMD_QUERY_PLUNGER = "?0"
def __init__(self, device_id: int, transport: TransportManager):
if not 1 <= device_id <= 15:
pass # Allow all IDs for now
self.id = str(device_id)
self.transport = transport
def _send(self, template: str, **kwargs) -> str:
cmd = f"/{self.id}" + template.format(**kwargs) + "\r"
return self.transport.send_ascii_command(cmd)
def is_busy(self) -> bool:
"""查询繁忙状态"""
resp = self._send(self.CMD_QUERY_STATUS)
# 响应如 /0` (Ready, 0x60) 或 /0@ (Busy, 0x40)
if len(resp) >= 3:
status_byte = ord(resp[2])
# Bit 5: 1=Ready, 0=Busy
return (status_byte & 0x20) == 0
return False
def wait_until_idle(self, timeout=30):
"""阻塞等待直到空闲"""
start = time.time()
while time.time() - start < timeout:
if not self.is_busy(): return
time.sleep(0.5)
# raise TimeoutError(f"Pump {self.id} wait idle timeout")
pass
def initialize(self, drain_port=0, output_port=0, speed=10):
"""初始化"""
self._send(self.CMD_INITIALIZE, speed=speed, drain_port=drain_port, output_port=output_port)
def switch_valve(self, port: int):
"""切换阀门 (1-8)"""
self._send(self.CMD_SWITCH_VALVE, port=port)
def aspirate(self, steps: int):
"""吸液 (相对步数)"""
self._send(self.CMD_ASPIRATE, vol=steps)
def dispense(self, steps: int):
"""排液 (相对步数)"""
self._send(self.CMD_DISPENSE, vol=steps)
def stop(self):
"""停止"""
self._send(self.CMD_STOP)
def get_position(self) -> int:
"""获取柱塞位置 (步数)"""
resp = self._send(self.CMD_QUERY_PLUNGER)
m = re.search(r'\d+', resp)
return int(m.group()) if m else -1
# ==============================================================================
# 3. Stepper Motor Driver (步进电机)
# ==============================================================================
class EmmMotor:
"""Emm V5.0 闭环步进电机驱动"""
def __init__(self, device_id: int, transport: TransportManager):
self.id = device_id
self.transport = transport
def _send(self, func_code: int, payload: list) -> bytes:
with self.transport.lock:
self.transport.clear_buffer()
# 格式: [ID] [Func] [Data...] [Check=0x6B]
body = [self.id, func_code] + payload
body.append(0x6B) # Checksum
self.transport.write(bytes(body))
# 根据指令不同,读取不同长度响应
read_len = 10 if func_code in [0x31, 0x32, 0x35, 0x24, 0x27] else 4
return self.transport.read(read_len)
def enable(self, on=True):
"""使能 (True=锁轴, False=松轴)"""
state = 1 if on else 0
self._send(0xF3, [0xAB, state, 0])
def run_speed(self, speed_rpm: int, direction=0, acc=10):
"""速度模式运行"""
sp = struct.pack('>H', int(speed_rpm))
self._send(0xF6, [direction, sp[0], sp[1], acc, 0])
def run_position(self, pulses: int, speed_rpm: int, direction=0, acc=10, absolute=False):
"""位置模式运行"""
sp = struct.pack('>H', int(speed_rpm))
pl = struct.pack('>I', int(pulses))
is_abs = 1 if absolute else 0
self._send(0xFD, [direction, sp[0], sp[1], acc, pl[0], pl[1], pl[2], pl[3], is_abs, 0])
def stop(self):
"""停止"""
self._send(0xFE, [0x98, 0])
def set_zero(self):
"""清零位置"""
self._send(0x0A, [])
def get_position(self) -> int:
"""获取当前脉冲位置"""
resp = self._send(0x32, [])
if len(resp) >= 8:
sign = resp[2]
val = struct.unpack('>I', resp[3:7])[0]
return -val if sign == 1 else val
return 0
# ==============================================================================
# 4. Liquid Sensor Driver (液位传感器)
# ==============================================================================
class XKCSensor:
"""XKC RS485 液位传感器 (Modbus RTU)"""
def __init__(self, device_id: int, transport: TransportManager, threshold: int = 300):
self.id = device_id
self.transport = transport
self.threshold = threshold
def _crc(self, data: bytes) -> bytes:
crc = 0xFFFF
for byte in data:
crc ^= byte
for _ in range(8):
if crc & 0x0001: crc = (crc >> 1) ^ 0xA001
else: crc >>= 1
return struct.pack('<H', crc)
def read_level(self) -> Optional[Dict[str, Any]]:
"""
读取液位。
返回: {'level': bool, 'rssi': int}
"""
with self.transport.lock:
self.transport.clear_buffer()
# Modbus Read Registers: 01 03 00 01 00 02 CRC
payload = struct.pack('>HH', 0x0001, 0x0002)
msg = struct.pack('BB', self.id, 0x03) + payload
msg += self._crc(msg)
self.transport.write(msg)
# Read header
h = self.transport.read(3) # Addr, Func, Len
if len(h) < 3: return None
length = h[2]
# Read body + CRC
body = self.transport.read(length + 2)
if len(body) < length + 2:
# Firmware bug fix specific to some modules
if len(body) == 4 and length == 4:
pass
else:
return None
data = body[:-2]
if len(data) == 2:
rssi = data[1]
elif len(data) >= 4:
rssi = (data[2] << 8) | data[3]
else:
return None
return {
'level': rssi > self.threshold,
'rssi': rssi
}
# ==============================================================================
# 5. Main Device Class (ChinweDevice)
# ==============================================================================
class ChinweDevice(UniversalDriver):
"""
ChinWe 工作站主驱动
继承自 UniversalDriver管理所有子设备泵、电机、传感器
"""
def __init__(self, port: str = "192.168.1.200:8899", baudrate: int = 9600,
pump_ids: List[int] = None, motor_ids: List[int] = None,
sensor_id: int = 6, sensor_threshold: int = 300):
"""
初始化 ChinWe 工作站
:param port: 串口号 或 IP:Port
:param baudrate: 串口波特率
:param pump_ids: 注射泵 ID列表 (默认 [1, 2, 3])
:param motor_ids: 步进电机 ID列表 (默认 [4, 5])
:param sensor_id: 液位传感器 ID (默认 6)
"""
super().__init__()
self.port = port
self.baudrate = baudrate
self.mgr = None
self._is_connected = False self._is_connected = False
# 默认配置
if pump_ids is None: pump_ids = [1, 2, 3]
if motor_ids is None: motor_ids = [4, 5]
# 配置信息
self.pump_ids = pump_ids
self.motor_ids = motor_ids
self.sensor_id = sensor_id
self.sensor_threshold = sensor_threshold
# 子设备实例容器
self.pumps: Dict[int, SyringePump] = {}
self.motors: Dict[int, EmmMotor] = {}
self.sensor: Optional[XKCSensor] = None
# 轮询线程控制
self._stop_event = threading.Event()
self._poll_thread = None
# 实时状态缓存
self.status_cache = {
"sensor_rssi": 0,
"sensor_level": False,
"connected": False
}
# 自动连接
if self.port:
self.connect() self.connect()
def connect(self) -> bool:
if self._is_connected: return True
try:
self.logger.info(f"Connecting to {self.port}...")
self.mgr = TransportManager(self.port, baudrate=self.baudrate, logger=self.logger)
# 初始化所有泵
for pid in self.pump_ids:
self.pumps[pid] = SyringePump(pid, self.mgr)
# 初始化所有电机
for mid in self.motor_ids:
self.motors[mid] = EmmMotor(mid, self.mgr)
# 初始化传感器
self.sensor = XKCSensor(self.sensor_id, self.mgr, self.sensor_threshold)
self._is_connected = True
self.status_cache["connected"] = True
# 启动轮询线程
self._start_polling()
return True
except Exception as e:
self.logger.error(f"Connection failed: {e}")
self._is_connected = False
self.status_cache["connected"] = False
return False
def disconnect(self):
self._stop_event.set()
if self._poll_thread:
self._poll_thread.join(timeout=2.0)
if self.mgr:
self.mgr.close()
self._is_connected = False
self.status_cache["connected"] = False
self.logger.info("Disconnected.")
def _start_polling(self):
"""启动传感器轮询线程"""
if self._poll_thread and self._poll_thread.is_alive():
return
self._stop_event.clear()
self._poll_thread = threading.Thread(target=self._polling_loop, daemon=True, name="ChinwePoll")
self._poll_thread.start()
def _polling_loop(self):
"""轮询主循环"""
self.logger.info("Sensor polling started.")
error_count = 0
while not self._stop_event.is_set():
if not self._is_connected or not self.sensor:
time.sleep(1)
continue
try:
# 获取传感器数据
data = self.sensor.read_level()
if data:
self.status_cache["sensor_rssi"] = data['rssi']
self.status_cache["sensor_level"] = data['level']
error_count = 0
else:
error_count += 1
# 降低轮询频率防止总线拥塞
time.sleep(0.2)
except Exception as e:
error_count += 1
if error_count > 10: # 连续错误记录日志
# self.logger.error(f"Polling error: {e}")
error_count = 0
time.sleep(1)
# --- 对外暴露属性 (Properties) ---
@property
def sensor_level(self) -> bool:
return self.status_cache["sensor_level"]
@property
def sensor_rssi(self) -> int:
return self.status_cache["sensor_rssi"]
@property @property
def is_connected(self) -> bool: def is_connected(self) -> bool:
"""获取连接状态""" return self._is_connected
return self._is_connected and self.serial_port and self.serial_port.is_open
@property # --- 对外功能指令 (Actions) ---
def voltage(self) -> float:
"""获取电源电压值"""
return self._voltage
@property def pump_initialize(self, pump_id: int, drain_port=0, output_port=0, speed=10):
def ec_value(self) -> float: """指定泵初始化"""
"""获取电导率值 (ms/cm)""" pump_id = int(pump_id)
return self._ec_value if pump_id in self.pumps:
self.pumps[pump_id].initialize(drain_port, output_port, speed)
@property self.pumps[pump_id].wait_until_idle()
def ec_adc_value(self) -> int:
"""获取EC ADC原始值"""
return self._ec_adc_value
@property
def device_status(self) -> Dict[str, any]:
"""
获取设备状态信息
Returns:
包含设备状态的字典
"""
return {
"connected": self.is_connected,
"port": self.port,
"baudrate": self.baudrate,
"voltage": self.voltage,
"ec_value": self.ec_value,
"ec_adc_value": self.ec_adc_value
}
def connect(self, port: Optional[str] = None, baudrate: Optional[int] = None) -> bool:
"""
连接到串口设备
Args:
port: 串口名称如果为None则使用初始化时的port或自动检测
baudrate: 波特率如果为None则使用初始化时的baudrate
Returns:
连接是否成功
"""
if self.is_connected:
return True return True
target_port = port or self.port
target_baudrate = baudrate or self.baudrate
try:
self.serial_port = serial.Serial(target_port, target_baudrate, timeout=0.5)
self._is_connected = True
self.port = target_port
self.baudrate = target_baudrate
connect_allow_times = 5
while not self.serial_port.is_open and connect_allow_times > 0:
time.sleep(0.5)
connect_allow_times -= 1
print(f"尝试连接到 {target_port} @ {target_baudrate},剩余尝试次数: {connect_allow_times}", self.debug)
raise ValueError("串口未打开,请检查设备连接")
print(f"已连接到 {target_port} @ {target_baudrate}", self.debug)
threading.Thread(target=self._read_data, daemon=True).start()
return True
except Exception as e:
print(f"ChinweDevice连接失败: {e}")
self._is_connected = False
return False return False
def disconnect(self) -> bool: def pump_aspirate(self, pump_id: int, volume: int, valve_port: int):
""" """
断开串口连接 泵吸液 (阻塞)
:param valve_port: 阀门端口 (1-8)
Returns:
断开是否成功
""" """
if self.serial_port and self.serial_port.is_open: pump_id = int(pump_id)
try: valve_port = int(valve_port)
self.serial_port.close() if pump_id in self.pumps:
self._is_connected = False pump = self.pumps[pump_id]
print("已断开串口连接") # 1. 切换阀门
pump.switch_valve(valve_port)
pump.wait_until_idle()
# 2. 吸液
pump.aspirate(volume)
pump.wait_until_idle()
return True return True
except Exception as e:
print(f"断开连接失败: {e}")
return False
return True
def _send_motor_command(self, command: str) -> bool:
"""
发送电机控制命令
Args:
command: 电机命令字符串,例如 "M 1 CW 1.5"
Returns:
发送是否成功
"""
if not self.is_connected:
print("设备未连接")
return False return False
# ----- removed (old serial-command implementation) -----
        try:
            self.serial_port.write((command + "\n").encode('utf-8'))
            print(f"发送命令: {command}")
            return True
        except Exception as e:
            print(f"发送命令失败: {e}")
            return False

    def rotate_motor(self, motor_id: int, turns: float, clockwise: bool = True) -> bool:
        """
        使电机转动指定圈数

        Args:
            motor_id: 电机ID1, 2, 3...
            turns: 转动圈数,支持小数
            clockwise: True为顺时针False为逆时针

        Returns:
            命令发送是否成功
        """
        if clockwise:
            command = f"M {motor_id} CW {turns}"
        else:
            command = f"M {motor_id} CCW {turns}"
        return self._send_motor_command(command)

    def set_motor_speed(self, motor_id: int, speed: float) -> bool:
        """
        设置电机转速(如果设备支持)

        Args:
            motor_id: 电机ID1, 2, 3...
            speed: 转速值

        Returns:
            命令发送是否成功
        """
        command = f"M {motor_id} SPEED {speed}"
        return self._send_motor_command(command)

    def _read_data(self) -> List[str]:
        """
        读取串口数据并解析

        Returns:
            读取到的数据行列表
        """
        print("开始读取串口数据...")
        if not self.is_connected:
            return []
        data_lines = []
        try:
            while self.serial_port.in_waiting:
                time.sleep(0.1)  # 等待数据稳定
                try:
                    line = self.serial_port.readline().decode('utf-8', errors='ignore').strip()
                    if line:
                        data_lines.append(line)
                        self._parse_sensor_data(line)
                except Exception as ex:
                    print(f"解码数据错误: {ex}")
        except Exception as e:
            print(f"读取串口数据错误: {e}")
        return data_lines

    def _parse_sensor_data(self, line: str) -> None:
        """
        解析传感器数据

        Args:
            line: 接收到的数据行
        """
        # 解析电源电压
        if "电源电压" in line:
            try:
                val = float(line.split("")[1].replace("V", "").strip())
                self._voltage = val
                if self.debug:
                    print(f"电源电压更新: {val}V")
            except Exception:
                pass
        # 解析电导率和ADC原始值支持两种格式
        if "电导率" in line and "ADC原始值" in line:
            try:
                # 支持格式如电导率2.50ms/cm, ADC原始值2052
                ec_match = re.search(r"电导率[:]\s*([\d\.]+)", line)
                adc_match = re.search(r"ADC原始值[:]\s*(\d+)", line)
                if ec_match:
                    ec_val = float(ec_match.group(1))
                    self._ec_value = ec_val
                    if self.debug:
                        print(f"电导率更新: {ec_val:.2f} ms/cm")
                if adc_match:
                    adc_val = int(adc_match.group(1))
                    self._ec_adc_value = adc_val
                    if self.debug:
                        print(f"EC ADC原始值更新: {adc_val}")
            except Exception:
                pass
        # 仅电导率无ADC原始值
        elif "电导率" in line:
            try:
                val = float(line.split("")[1].replace("ms/cm", "").strip())
                self._ec_value = val
                if self.debug:
                    print(f"电导率更新: {val:.2f} ms/cm")
            except Exception:
                pass
        # 仅ADC原始值如有分开回传场景
        elif "ADC原始值" in line:
            try:
                adc_val = int(line.split("")[1].strip())
                self._ec_adc_value = adc_val
                if self.debug:
                    print(f"EC ADC原始值更新: {adc_val}")
            except Exception:
                pass


def spin_when_ec_ge_0():
    pass


def main():
    """测试函数"""
    print("=== ChinWe设备测试 ===")
    # 创建设备实例
    device = ChinweDevice("/dev/tty.usbserial-A5069RR4", debug=True)
    try:
        # 测试5: 发送电机命令
        print("\n5. 发送电机命令测试:")
        print("   5.3 使用通用函数控制电机20顺时针转2圈:")
        device.rotate_motor(2, 20.0, clockwise=True)
        time.sleep(0.5)
    finally:
        time.sleep(10)
        # 测试7: 断开连接
        print("\n7. 断开连接:")
        device.disconnect()


if __name__ == "__main__":
    main()

# ----- added (new blocking pump / motor / sensor API) -----
    def pump_dispense(self, pump_id: int, volume: int, valve_port: int):
        """
        泵排液 (阻塞)
        :param valve_port: 阀门端口 (1-8)
        """
        pump_id = int(pump_id)
        valve_port = int(valve_port)
        if pump_id in self.pumps:
            pump = self.pumps[pump_id]
            # 1. 切换阀门
            pump.switch_valve(valve_port)
            pump.wait_until_idle()
            # 2. 排液
            pump.dispense(volume)
            pump.wait_until_idle()
            return True
        return False

    def pump_valve(self, pump_id: int, port: int):
        """泵切换阀门 (阻塞)"""
        pump_id = int(pump_id)
        port = int(port)
        if pump_id in self.pumps:
            pump = self.pumps[pump_id]
            pump.switch_valve(port)
            pump.wait_until_idle()
            return True
        return False

    def motor_run_continuous(self, motor_id: int, speed: int, direction: str = "顺时针"):
        """
        电机一直旋转 (速度模式)
        :param direction: "顺时针" or "逆时针"
        """
        motor_id = int(motor_id)
        if motor_id not in self.motors:
            return False
        dir_val = 0 if direction == "顺时针" else 1
        self.motors[motor_id].run_speed(speed, dir_val)
        return True

    def motor_rotate_quarter(self, motor_id: int, speed: int = 60, direction: str = "顺时针"):
        """
        电机旋转1/4圈 (阻塞)
        假设电机设置为 3200 脉冲/圈1/4圈 = 800脉冲
        """
        motor_id = int(motor_id)
        if motor_id not in self.motors:
            return False
        pulses = 800
        dir_val = 0 if direction == "顺时针" else 1
        self.motors[motor_id].run_position(pulses, speed, dir_val, absolute=False)
        # 预估时间阻塞 (单位: 分钟 -> 秒)
        # Time(s) = revs / (RPM/60). revs = 0.25. time = 15 / RPM.
        estimated_time = 15.0 / max(1, speed)
        time.sleep(estimated_time + 0.5)
        return True

    def motor_stop(self, motor_id: int):
        """电机停止"""
        motor_id = int(motor_id)
        if motor_id in self.motors:
            self.motors[motor_id].stop()
            return True
        return False

    def wait_sensor_level(self, target_state: str = "有液", timeout: int = 30) -> bool:
        """
        等待传感器达到指定电平
        :param target_state: "有液" or "无液"
        """
        target_bool = True if target_state == "有液" else False
        self.logger.info(f"Wait sensor: {target_state} ({target_bool}), timeout: {timeout}")
        start = time.time()
        while time.time() - start < timeout:
            if self.sensor_level == target_bool:
                return True
            time.sleep(0.1)
        self.logger.warning("Wait sensor level timeout")
        return False

    def execute_command_from_outer(self, command_dict: Dict[str, Any]) -> bool:
        """支持标准 JSON 指令调用"""
        return super().execute_command_from_outer(command_dict)


if __name__ == "__main__":
    # Test
    logging.basicConfig(level=logging.INFO)
    dev = ChinweDevice(port="192.168.31.201:8899")
    try:
        if dev.is_connected:
            print(f"Status: Level={dev.sensor_level}, RSSI={dev.sensor_rssi}")
            # Test pump 1
            # dev.pump_valve(1, 1)
            # dev.pump_move(1, 1000, "aspirate")
            # Test motor 4
            # dev.motor_run(4, 60, 0, 2)
            for _ in range(5):
                print(f"Level={dev.sensor_level}, RSSI={dev.sensor_rssi}")
                time.sleep(1)
    finally:
        dev.disconnect()
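For orientation, a minimal sketch of how the new blocking calls compose into a drain step; the port string, pump/motor IDs, volume and timeout below are illustrative, not taken from the commit:

# Sketch (assumed values): stir with motor 4, dispense via pump 1, wait for the sensor to report "无液".
dev = ChinweDevice(port="192.168.31.201:8899")
try:
    dev.motor_run_continuous(4, speed=60, direction="顺时针")  # start stirring
    dev.pump_dispense(1, volume=1000, valve_port=2)            # blocking: switches the valve, then dispenses
    dev.wait_sensor_level(target_state="无液", timeout=60)     # block until no liquid is detected
    dev.motor_stop(4)
finally:
    dev.disconnect()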

View File

@@ -174,35 +174,6 @@ bioyond_dispensing_station:
title: query_resource_by_name参数
type: object
type: UniLabJsonCommand
auto-transfer_materials_to_reaction_station:
feedback: {}
goal: {}
goal_default:
target_device_id: null
transfer_groups: null
handles: {}
placeholder_keys: {}
result: {}
schema:
description: ''
properties:
feedback: {}
goal:
properties:
target_device_id:
type: string
transfer_groups:
type: array
required:
- target_device_id
- transfer_groups
type: object
result: {}
required:
- goal
title: transfer_materials_to_reaction_station参数
type: object
type: UniLabJsonCommand
auto-workflow_sample_locations:
feedback: {}
goal: {}

View File

@@ -0,0 +1,323 @@
separator.chinwe:
category:
- separator
- chinwe
class:
action_value_mappings:
motor_rotate_quarter:
goal:
direction: 顺时针
motor_id: 4
speed: 60
handles: {}
schema:
description: 电机旋转 1/4 圈
properties:
goal:
properties:
direction:
default: 顺时针
description: 旋转方向
enum:
- 顺时针
- 逆时针
type: string
motor_id:
default: '4'
description: 选择电机 (4:搅拌, 5:旋钮)
enum:
- '4'
- '5'
type: string
speed:
default: 60
description: 速度 (RPM)
type: integer
required:
- motor_id
- speed
type: object
type: UniLabJsonCommand
motor_run_continuous:
goal:
direction: 顺时针
motor_id: 4
speed: 60
handles: {}
schema:
description: 电机一直旋转 (速度模式)
properties:
goal:
properties:
direction:
default: 顺时针
description: 旋转方向
enum:
- 顺时针
- 逆时针
type: string
motor_id:
default: '4'
description: 选择电机 (4:搅拌, 5:旋钮)
enum:
- '4'
- '5'
type: string
speed:
default: 60
description: 速度 (RPM)
type: integer
required:
- motor_id
- speed
type: object
type: UniLabJsonCommand
motor_stop:
goal:
motor_id: 4
handles: {}
schema:
description: 停止指定步进电机
properties:
goal:
properties:
motor_id:
default: '4'
description: 选择电机
enum:
- '4'
- '5'
title: '注: 4=搅拌, 5=旋钮'
type: string
required:
- motor_id
type: object
type: UniLabJsonCommand
pump_aspirate:
goal:
pump_id: 1
valve_port: 1
volume: 1000
handles: {}
schema:
description: 注射泵吸液
properties:
goal:
properties:
pump_id:
default: '1'
description: 选择泵
enum:
- '1'
- '2'
- '3'
type: string
valve_port:
default: '1'
description: 阀门端口
enum:
- '1'
- '2'
- '3'
- '4'
- '5'
- '6'
- '7'
- '8'
type: string
volume:
default: 1000
description: 吸液步数
type: integer
required:
- pump_id
- volume
- valve_port
type: object
type: UniLabJsonCommand
pump_dispense:
goal:
pump_id: 1
valve_port: 1
volume: 1000
handles: {}
schema:
description: 注射泵排液
properties:
goal:
properties:
pump_id:
default: '1'
description: 选择泵
enum:
- '1'
- '2'
- '3'
type: string
valve_port:
default: '1'
description: 阀门端口
enum:
- '1'
- '2'
- '3'
- '4'
- '5'
- '6'
- '7'
- '8'
type: string
volume:
default: 1000
description: 排液步数
type: integer
required:
- pump_id
- volume
- valve_port
type: object
type: UniLabJsonCommand
pump_initialize:
goal:
drain_port: 0
output_port: 0
pump_id: 1
speed: 10
handles: {}
schema:
description: 初始化指定注射泵
properties:
goal:
properties:
drain_port:
default: 0
description: 排液口索引
type: integer
output_port:
default: 0
description: 输出口索引
type: integer
pump_id:
default: '1'
description: 选择泵
enum:
- '1'
- '2'
- '3'
title: '注: 1号泵, 2号泵, 3号泵'
type: string
speed:
default: 10
description: 运动速度
type: integer
required:
- pump_id
type: object
type: UniLabJsonCommand
pump_valve:
goal:
port: 1
pump_id: 1
handles: {}
schema:
description: 切换指定泵的阀门端口
properties:
goal:
properties:
port:
default: '1'
description: 阀门端口号 (1-8)
enum:
- '1'
- '2'
- '3'
- '4'
- '5'
- '6'
- '7'
- '8'
type: string
pump_id:
default: '1'
description: 选择泵
enum:
- '1'
- '2'
- '3'
type: string
required:
- pump_id
- port
type: object
type: UniLabJsonCommand
wait_sensor_level:
goal:
target_state: 有液
timeout: 30
handles: {}
schema:
description: 等待传感器液位条件
properties:
goal:
properties:
target_state:
default: 有液
description: 目标液位状态
enum:
- 有液
- 无液
type: string
timeout:
default: 30
description: 超时时间 (秒)
type: integer
required:
- target_state
type: object
type: UniLabJsonCommand
module: unilabos.devices.separator.chinwe:ChinweDevice
status_types:
is_connected: bool
sensor_level: bool
sensor_rssi: int
type: python
config_info: []
description: ChinWe 简易工作站控制器 (3泵, 2电机, 1传感器)
handles: []
icon: ''
init_param_schema:
goal:
baudrate:
default: 9600
description: 串口波特率
type: integer
motor_ids:
default:
- 4
- 5
description: 步进电机ID列表
items:
type: integer
type: array
port:
default: 192.168.1.200:8899
description: 串口号或 IP:Port
type: string
pump_ids:
default:
- 1
- 2
- 3
description: 注射泵ID列表
items:
type: integer
type: array
sensor_id:
default: 6
description: XKC传感器ID
type: integer
sensor_threshold:
default: 300
description: 传感器液位判定阈值
type: integer
version: 2.1.0
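A hedged sketch of driving one of these registry actions through the driver's JSON entry point; the exact envelope accepted by execute_command_from_outer is not shown in this diff, so the dict layout below is an assumption based on the schema's goal fields:

# Assumption: the command dict names the action and carries the schema's goal fields.
command = {
    "action": "pump_dispense",
    "goal": {"pump_id": "1", "volume": 1000, "valve_port": "1"},
}
ok = device.execute_command_from_outer(command)  # device: a connected ChinweDevice instance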

View File

@@ -9333,7 +9333,34 @@ liquid_handler.prcxi:
touch_tip: false
use_channels:
- 0
handles:
input:
- data_key: liquid
data_source: handle
data_type: resource
handler_key: sources
label: sources
- data_key: liquid
data_source: executor
data_type: resource
handler_key: targets
label: targets
- data_key: liquid
data_source: executor
data_type: resource
handler_key: tip_rack
label: tip_rack
output:
- data_key: liquid
data_source: handle
data_type: resource
handler_key: sources_out
label: sources
- data_key: liquid
data_source: executor
data_type: resource
handler_key: targets_out
label: targets
placeholder_keys:
sources: unilabos_resources
targets: unilabos_resources
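These handler_keys are the endpoints that workflow edges refer to; a sketch of the edge dict that WorkflowGraph.add_edge (added later in this changeset) emits when one transfer's targets_out feeds the next transfer's sources, with illustrative node UUIDs:

edge = {
    "source": "node-a", "target": "node-b",
    "source_node_uuid": "node-a", "target_node_uuid": "node-b",
    "source_handle_key": "targets_out", "source_handle_io": "source",
    "target_handle_key": "sources", "target_handle_io": "target",
}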

View File

@@ -222,7 +222,7 @@ class Registry:
        abs_path = Path(path).absolute()
        resource_path = abs_path / "resources"
        files = list(resource_path.glob("*/*.yaml"))
        logger.trace(f"[UniLab Registry] load resources? {resource_path.exists()}, total: {len(files)}")
        current_resource_number = len(self.resource_type_registry) + 1
        for i, file in enumerate(files):
            with open(file, encoding="utf-8", mode="r") as f:

View File

@@ -42,7 +42,7 @@ def canonicalize_nodes_data(
    Returns:
        ResourceTreeSet: 标准化后的资源树集合
    """
    print_status(f"{len(nodes)} Resources loaded", "info")
    # 第一步基本预处理处理graphml的label字段
    outer_host_node_id = None

View File

@@ -582,7 +582,7 @@ class BaseROS2DeviceNode(Node, Generic[T]):
        except Exception as e:
            self.lab_logger().error(f"更新资源uuid失败: {e}")
            self.lab_logger().error(traceback.format_exc())
        self.lab_logger().trace(f"资源更新结果: {response}")

    async def get_resource(self, resources_uuid: List[str], with_children: bool = True) -> ResourceTreeSet:
        """
@@ -1164,7 +1164,6 @@ class BaseROS2DeviceNode(Node, Generic[T]):
                        execution_error = traceback.format_exc()
                        break

                time_start = time.time()
                time_overall = 100
                future = None
@@ -1172,35 +1171,36 @@ class BaseROS2DeviceNode(Node, Generic[T]):
                # 将阻塞操作放入线程池执行
                if asyncio.iscoroutinefunction(ACTION):
                    try:
                        self.lab_logger().trace(f"异步执行动作 {ACTION}")

                        def _handle_future_exception(fut: Future):
                            nonlocal execution_error, execution_success, action_return_value
                            try:
                                action_return_value = fut.result()
                                if isinstance(action_return_value, BaseException):
                                    raise action_return_value
                                execution_success = True
                            except Exception as _:
                                execution_error = traceback.format_exc()
                                error(
                                    f"异步任务 {ACTION.__name__} 报错了\n{traceback.format_exc()}\n原始输入:{action_kwargs}"
                                )

                        future = ROS2DeviceNode.run_async_func(ACTION, trace_error=False, **action_kwargs)
                        future.add_done_callback(_handle_future_exception)
                    except Exception as e:
                        execution_error = traceback.format_exc()
                        execution_success = False
                        self.lab_logger().error(f"创建异步任务失败: {traceback.format_exc()}")
                else:
                    self.lab_logger().trace(f"同步执行动作 {ACTION}")
                    future = self._executor.submit(ACTION, **action_kwargs)

                    def _handle_future_exception(fut: Future):
                        nonlocal execution_error, execution_success, action_return_value
                        try:
                            action_return_value = fut.result()
                            execution_success = True
                        except Exception as _:
                            execution_error = traceback.format_exc()
                            error(
                                f"同步任务 {ACTION.__name__} 报错了\n{traceback.format_exc()}\n原始输入:{action_kwargs}"
@@ -1305,7 +1305,7 @@ class BaseROS2DeviceNode(Node, Generic[T]):
                    get_result_info_str(execution_error, execution_success, action_return_value),
                )
                self.lab_logger().trace(f"动作 {action_name} 完成并返回结果")
                return result_msg

            return execute_callback
@@ -1540,17 +1540,29 @@ class ROS2DeviceNode:
    这个类封装了设备类实例和ROS2节点的功能提供ROS2接口。
    它不继承设备类,而是通过代理模式访问设备类的属性和方法。
    """

    @staticmethod
    async def safe_task_wrapper(trace_callback, func, **kwargs):
        try:
            # 只 await 一次,避免同一个动作被执行两遍
            result = await func(**kwargs)
            if callable(trace_callback):
                trace_callback(result)
            return result
        except Exception as e:
            if callable(trace_callback):
                trace_callback(e)
            return e

    @classmethod
    def run_async_func(cls, func, trace_error=True, inner_trace_callback=None, **kwargs) -> Task:
        def _handle_future_exception(fut: Future):
            try:
                ret = fut.result()
                if isinstance(ret, BaseException):
                    raise ret
            except Exception as e:
                error(f"异步任务 {func.__name__} 获取结果失败")
                error(traceback.format_exc())

        future = rclpy.get_global_executor().create_task(
            ROS2DeviceNode.safe_task_wrapper(inner_trace_callback, func, **kwargs)
        )
        if trace_error:
            future.add_done_callback(_handle_future_exception)
        return future

View File

@@ -66,8 +66,8 @@ class ResourceDict(BaseModel):
    klass: str = Field(alias="class", description="Resource class name")
    pose: ResourceDictPosition = Field(description="Resource position", default_factory=ResourceDictPosition)
    config: Dict[str, Any] = Field(description="Resource configuration")
    data: Dict[str, Any] = Field(description="Resource data, eg: container liquid data")
    extra: Dict[str, Any] = Field(description="Extra data, eg: slot index")

    @field_serializer("parent_uuid")
    def _serialize_parent(self, parent_uuid: Optional["ResourceDict"]):

View File

@@ -0,0 +1,34 @@
{
"nodes": [
{
"id": "ChinWeStation",
"name": "分液工作站",
"children": [],
"parent": null,
"type": "device",
"class": "separator.chinwe",
"position": {
"x": 0,
"y": 0,
"z": 0
},
"config": {
"port": "192.168.31.13:8899",
"baudrate": 9600,
"pump_ids": [
1,
2,
3
],
"motor_ids": [
4,
5
],
"sensor_id": 6,
"sensor_threshold": 300
},
"data": {}
}
],
"links": []
}

View File

@@ -1,94 +0,0 @@
import json
import sys
from datetime import datetime
from pathlib import Path
ROOT_DIR = Path(__file__).resolve().parents[2]
if str(ROOT_DIR) not in sys.path:
sys.path.insert(0, str(ROOT_DIR))
import pytest
from scripts.workflow import build_protocol_graph, draw_protocol_graph, draw_protocol_graph_with_ports
ROOT_DIR = Path(__file__).resolve().parents[2]
if str(ROOT_DIR) not in sys.path:
sys.path.insert(0, str(ROOT_DIR))
def _normalize_steps(data):
normalized = []
for step in data:
action = step.get("action") or step.get("operation")
if not action:
continue
raw_params = step.get("parameters") or step.get("action_args") or {}
params = dict(raw_params)
if "source" in raw_params and "sources" not in raw_params:
params["sources"] = raw_params["source"]
if "target" in raw_params and "targets" not in raw_params:
params["targets"] = raw_params["target"]
description = step.get("description") or step.get("purpose")
step_dict = {"action": action, "parameters": params}
if description:
step_dict["description"] = description
normalized.append(step_dict)
return normalized
def _normalize_labware(data):
labware = {}
for item in data:
reagent_name = item.get("reagent_name")
key = reagent_name or item.get("material_name") or item.get("name")
if not key:
continue
key = str(key)
idx = 1
original_key = key
while key in labware:
idx += 1
key = f"{original_key}_{idx}"
labware[key] = {
"slot": item.get("positions") or item.get("slot"),
"labware": item.get("material_name") or item.get("labware"),
"well": item.get("well", []),
"type": item.get("type", "reagent"),
"role": item.get("role", ""),
"name": key,
}
return labware
@pytest.mark.parametrize("protocol_name", [
"example_bio",
# "bioyond_materials_liquidhandling_1",
"example_prcxi",
])
def test_build_protocol_graph(protocol_name):
data_path = Path(__file__).with_name(f"{protocol_name}.json")
with data_path.open("r", encoding="utf-8") as fp:
d = json.load(fp)
if "workflow" in d and "reagent" in d:
protocol_steps = d["workflow"]
labware_info = d["reagent"]
elif "steps_info" in d and "labware_info" in d:
protocol_steps = _normalize_steps(d["steps_info"])
labware_info = _normalize_labware(d["labware_info"])
else:
raise ValueError("Unsupported protocol format")
graph = build_protocol_graph(
labware_info=labware_info,
protocol_steps=protocol_steps,
workstation_name="PRCXi",
)
timestamp = datetime.now().strftime("%Y%m%d_%H%M")
output_path = data_path.with_name(f"{protocol_name}_graph_{timestamp}.png")
draw_protocol_graph_with_ports(graph, str(output_path))
print(graph)

View File

547
unilabos/workflow/common.py Normal file
View File

@@ -0,0 +1,547 @@
import re
import uuid
import networkx as nx
from networkx.drawing.nx_agraph import to_agraph
import matplotlib.pyplot as plt
from typing import Dict, List, Any, Tuple, Optional
Json = Dict[str, Any]
# ---------------- Graph ----------------
class WorkflowGraph:
"""简单的有向图实现:使用 params 单层参数inputs 内含连线;支持 node-link 导出"""
def __init__(self):
self.nodes: Dict[str, Dict[str, Any]] = {}
self.edges: List[Dict[str, Any]] = []
def add_node(self, node_id: str, **attrs):
self.nodes[node_id] = attrs
def add_edge(self, source: str, target: str, **attrs):
# 将 source_port/target_port 映射为服务端期望的 source_handle_key/target_handle_key
source_handle_key = attrs.pop("source_port", "") or attrs.pop("source_handle_key", "")
target_handle_key = attrs.pop("target_port", "") or attrs.pop("target_handle_key", "")
edge = {
"source": source,
"target": target,
"source_node_uuid": source,
"target_node_uuid": target,
"source_handle_key": source_handle_key,
"source_handle_io": attrs.pop("source_handle_io", "source"),
"target_handle_key": target_handle_key,
"target_handle_io": attrs.pop("target_handle_io", "target"),
**attrs,
}
self.edges.append(edge)
def _materialize_wiring_into_inputs(
self,
obj: Any,
inputs: Dict[str, Any],
variable_sources: Dict[str, Dict[str, Any]],
target_node_id: str,
base_path: List[str],
):
has_var = False
def walk(node: Any, path: List[str]):
nonlocal has_var
if isinstance(node, dict):
if "__var__" in node:
has_var = True
varname = node["__var__"]
placeholder = f"${{{varname}}}"
src = variable_sources.get(varname)
if src:
key = ".".join(path) # e.g. "params.foo.bar.0"
inputs[key] = {"node": src["node_id"], "output": src.get("output_name", "result")}
self.add_edge(
str(src["node_id"]),
target_node_id,
source_handle_io=src.get("output_name", "result"),
target_handle_io=key,
)
return placeholder
return {k: walk(v, path + [k]) for k, v in node.items()}
if isinstance(node, list):
return [walk(v, path + [str(i)]) for i, v in enumerate(node)]
return node
replaced = walk(obj, base_path[:])
return replaced, has_var
def add_workflow_node(
self,
node_id: int,
*,
device_key: Optional[str] = None, # 实例名,如 "ser"
resource_name: Optional[str] = None, # registry key原 device_class
module: Optional[str] = None,
template_name: Optional[str] = None, # 动作/模板名(原 action_key
params: Dict[str, Any],
variable_sources: Dict[str, Dict[str, Any]],
add_ready_if_no_vars: bool = True,
prev_node_id: Optional[int] = None,
**extra_attrs,
) -> None:
"""添加工作流节点params 单层;自动变量连线与 ready 串联;支持附加属性"""
node_id_str = str(node_id)
inputs: Dict[str, Any] = {}
params, has_var = self._materialize_wiring_into_inputs(
params, inputs, variable_sources, node_id_str, base_path=["params"]
)
if add_ready_if_no_vars and not has_var:
last_id = str(prev_node_id) if prev_node_id is not None else "-1"
inputs["ready"] = {"node": int(last_id), "output": "ready"}
self.add_edge(last_id, node_id_str, source_handle_io="ready", target_handle_io="ready")
node_obj = {
"device_key": device_key,
"resource_name": resource_name, # ✅ 新名字
"module": module,
"template_name": template_name, # ✅ 新名字
"params": params,
"inputs": inputs,
}
node_obj.update(extra_attrs or {})
self.add_node(node_id_str, parameters=node_obj)
# 顺序工作流导出(连线在 inputs不返回 edges
def to_dict(self) -> List[Dict[str, Any]]:
result = []
for node_id, attrs in self.nodes.items():
node = {"uuid": node_id}
params = dict(attrs.get("parameters", {}) or {})
flat = {k: v for k, v in attrs.items() if k != "parameters"}
flat.update(params)
node.update(flat)
result.append(node)
return sorted(result, key=lambda n: int(n["uuid"]) if str(n["uuid"]).isdigit() else n["uuid"])
# node-link 导出(含 edges
def to_node_link_dict(self) -> Dict[str, Any]:
nodes_list = []
for node_id, attrs in self.nodes.items():
node_attrs = attrs.copy()
params = node_attrs.pop("parameters", {}) or {}
node_attrs.update(params)
nodes_list.append({"uuid": node_id, **node_attrs})
return {
"directed": True,
"multigraph": False,
"graph": {},
"nodes": nodes_list,
"edges": self.edges,
"links": self.edges,
}
def refactor_data(
data: List[Dict[str, Any]],
action_resource_mapping: Optional[Dict[str, str]] = None,
) -> List[Dict[str, Any]]:
"""统一的数据重构函数,根据操作类型自动选择模板
Args:
data: 原始步骤数据列表
action_resource_mapping: action 到 resource_name 的映射字典,可选
"""
refactored_data = []
# 定义操作映射,包含生物实验和有机化学的所有操作
OPERATION_MAPPING = {
# 生物实验操作
"transfer_liquid": "transfer_liquid",
"transfer": "transfer",
"incubation": "incubation",
"move_labware": "move_labware",
"oscillation": "oscillation",
# 有机化学操作
"HeatChillToTemp": "HeatChillProtocol",
"StopHeatChill": "HeatChillStopProtocol",
"StartHeatChill": "HeatChillStartProtocol",
"HeatChill": "HeatChillProtocol",
"Dissolve": "DissolveProtocol",
"Transfer": "TransferProtocol",
"Evaporate": "EvaporateProtocol",
"Recrystallize": "RecrystallizeProtocol",
"Filter": "FilterProtocol",
"Dry": "DryProtocol",
"Add": "AddProtocol",
}
UNSUPPORTED_OPERATIONS = ["Purge", "Wait", "Stir", "ResetHandling"]
for step in data:
operation = step.get("action")
if not operation or operation in UNSUPPORTED_OPERATIONS:
continue
# 处理重复操作
if operation == "Repeat":
times = step.get("times", step.get("parameters", {}).get("times", 1))
sub_steps = step.get("steps", step.get("parameters", {}).get("steps", []))
for i in range(int(times)):
sub_data = refactor_data(sub_steps, action_resource_mapping)
refactored_data.extend(sub_data)
continue
# 获取模板名称
template_name = OPERATION_MAPPING.get(operation)
if not template_name:
# 自动推断模板类型
if operation.lower() in ["transfer", "incubation", "move_labware", "oscillation"]:
template_name = f"biomek-{operation}"
else:
template_name = f"{operation}Protocol"
# 获取 resource_name
resource_name = f"device.{operation.lower()}"
if action_resource_mapping:
resource_name = action_resource_mapping.get(operation, resource_name)
# 获取步骤编号,生成 name 字段
step_number = step.get("step_number")
name = f"Step {step_number}" if step_number is not None else None
# 创建步骤数据
step_data = {
"template_name": template_name,
"resource_name": resource_name,
"description": step.get("description", step.get("purpose", f"{operation} operation")),
"lab_node_type": "Device",
"param": step.get("parameters", step.get("action_args", {})),
"footer": f"{template_name}-{resource_name}",
}
if name:
step_data["name"] = name
refactored_data.append(step_data)
return refactored_data
def build_protocol_graph(
labware_info: Dict[str, Dict[str, Any]],
protocol_steps: List[Dict[str, Any]],
workstation_name: str,
action_resource_mapping: Optional[Dict[str, str]] = None,
) -> WorkflowGraph:
"""统一的协议图构建函数,根据设备类型自动选择构建逻辑
Args:
labware_info: labware 信息字典
protocol_steps: 协议步骤列表
workstation_name: 工作站名称
action_resource_mapping: action 到 resource_name 的映射字典,可选
"""
G = WorkflowGraph()
resource_last_writer = {}
protocol_steps = refactor_data(protocol_steps, action_resource_mapping)
# 有机化学&移液站协议图构建
WORKSTATION_ID = workstation_name
# 为所有labware创建资源节点
res_index = 0
for labware_id, item in labware_info.items():
# item_id = item.get("id") or item.get("name", f"item_{uuid.uuid4()}")
node_id = str(uuid.uuid4())
# 判断节点类型
if "Rack" in str(labware_id) or "Tip" in str(labware_id):
lab_node_type = "Labware"
description = f"Prepare Labware: {labware_id}"
liquid_type = []
liquid_volume = []
elif item.get("type") == "hardware" or "reactor" in str(labware_id).lower():
if "reactor" not in str(labware_id).lower():
continue
lab_node_type = "Sample"
description = f"Prepare Reactor: {labware_id}"
liquid_type = []
liquid_volume = []
else:
lab_node_type = "Reagent"
description = f"Add Reagent to Flask: {labware_id}"
liquid_type = [labware_id]
liquid_volume = [1e5]
res_index += 1
G.add_node(
node_id,
template_name="create_resource",
resource_name="host_node",
name=f"Res {res_index}",
description=description,
lab_node_type=lab_node_type,
footer="create_resource-host_node",
param={
"res_id": labware_id,
"device_id": WORKSTATION_ID,
"class_name": "container",
"parent": WORKSTATION_ID,
"bind_locations": {"x": 0.0, "y": 0.0, "z": 0.0},
"liquid_input_slot": [-1],
"liquid_type": liquid_type,
"liquid_volume": liquid_volume,
"slot_on_deck": "",
},
)
resource_last_writer[labware_id] = f"{node_id}:labware"
last_control_node_id = None
# 处理协议步骤
for step in protocol_steps:
node_id = str(uuid.uuid4())
G.add_node(node_id, **step)
# 控制流
if last_control_node_id is not None:
G.add_edge(last_control_node_id, node_id, source_port="ready", target_port="ready")
last_control_node_id = node_id
# 物料流
params = step.get("param", {})
input_resources_possible_names = [
"vessel",
"to_vessel",
"from_vessel",
"reagent",
"solvent",
"compound",
"sources",
"targets",
]
for target_port in input_resources_possible_names:
resource_name = params.get(target_port)
if resource_name and resource_name in resource_last_writer:
source_node, source_port = resource_last_writer[resource_name].split(":")
G.add_edge(source_node, node_id, source_port=source_port, target_port=target_port)
output_resources = {
"vessel_out": params.get("vessel"),
"from_vessel_out": params.get("from_vessel"),
"to_vessel_out": params.get("to_vessel"),
"filtrate_out": params.get("filtrate_vessel"),
"reagent": params.get("reagent"),
"solvent": params.get("solvent"),
"compound": params.get("compound"),
"sources_out": params.get("sources"),
"targets_out": params.get("targets"),
}
for source_port, resource_name in output_resources.items():
if resource_name:
resource_last_writer[resource_name] = f"{node_id}:{source_port}"
return G
def draw_protocol_graph(protocol_graph: WorkflowGraph, output_path: str):
"""
(辅助功能) 使用 networkx 和 matplotlib 绘制协议工作流图,用于可视化。
"""
if not protocol_graph:
print("Cannot draw graph: Graph object is empty.")
return
G = nx.DiGraph()
for node_id, attrs in protocol_graph.nodes.items():
label = attrs.get("description", attrs.get("template_name", node_id[:8]))
G.add_node(node_id, label=label, **attrs)
for edge in protocol_graph.edges:
G.add_edge(edge["source"], edge["target"])
plt.figure(figsize=(20, 15))
try:
pos = nx.nx_agraph.graphviz_layout(G, prog="dot")
except Exception:
pos = nx.shell_layout(G) # Fallback layout
node_labels = {node: data["label"] for node, data in G.nodes(data=True)}
nx.draw(
G,
pos,
with_labels=False,
node_size=2500,
node_color="skyblue",
node_shape="o",
edge_color="gray",
width=1.5,
arrowsize=15,
)
nx.draw_networkx_labels(G, pos, labels=node_labels, font_size=8, font_weight="bold")
plt.title("Chemical Protocol Workflow Graph", size=15)
plt.savefig(output_path, dpi=300, bbox_inches="tight")
plt.close()
print(f" - Visualization saved to '{output_path}'")
COMPASS = {"n", "e", "s", "w", "ne", "nw", "se", "sw", "c"}
def _is_compass(port: str) -> bool:
return isinstance(port, str) and port.lower() in COMPASS
def draw_protocol_graph_with_ports(protocol_graph, output_path: str, rankdir: str = "LR"):
"""
使用 Graphviz 端口语法绘制协议工作流图。
- 若边上的 source_port/target_port 是 compassn/e/s/w/...),直接用 compass。
- 否则自动为节点创建 record 形状并定义命名端口 <portname>。
最终由 PyGraphviz 渲染并输出到 output_path后缀决定格式如 .png/.svg/.pdf
"""
if not protocol_graph:
print("Cannot draw graph: Graph object is empty.")
return
# 1) 先用 networkx 搭建有向图,保留端口属性
G = nx.DiGraph()
for node_id, attrs in protocol_graph.nodes.items():
label = attrs.get("description", attrs.get("template_name", node_id[:8]))
# 保留一个干净的“中心标签”,用于放在 record 的中间槽
G.add_node(node_id, _core_label=str(label), **{k: v for k, v in attrs.items() if k not in ("label",)})
edges_data = []
in_ports_by_node = {} # 收集命名输入端口
out_ports_by_node = {} # 收集命名输出端口
for edge in protocol_graph.edges:
u = edge["source"]
v = edge["target"]
sp = edge.get("source_handle_key") or edge.get("source_port")
tp = edge.get("target_handle_key") or edge.get("target_port")
# 记录到图里(保留原始端口信息)
G.add_edge(u, v, source_handle_key=sp, target_handle_key=tp)
edges_data.append((u, v, sp, tp))
# 如果不是 compass就按“命名端口”先归类等会儿给节点造 record
if sp and not _is_compass(sp):
out_ports_by_node.setdefault(u, set()).add(str(sp))
if tp and not _is_compass(tp):
in_ports_by_node.setdefault(v, set()).add(str(tp))
# 2) 转为 AGraph使用 Graphviz 渲染
A = to_agraph(G)
A.graph_attr.update(rankdir=rankdir, splines="true", concentrate="false", fontsize="10")
A.node_attr.update(
shape="box", style="rounded,filled", fillcolor="lightyellow", color="#999999", fontname="Helvetica"
)
A.edge_attr.update(arrowsize="0.8", color="#666666")
# 3) 为需要命名端口的节点设置 record 形状与 label
# 左列 = 输入端口;中间 = 核心标签;右列 = 输出端口
for n in A.nodes():
node = A.get_node(n)
core = G.nodes[n].get("_core_label", n)
in_ports = sorted(in_ports_by_node.get(n, []))
out_ports = sorted(out_ports_by_node.get(n, []))
# 如果该节点涉及命名端口,则用 record否则保留原 box
if in_ports or out_ports:
def port_fields(ports):
if not ports:
return " " # 必须留一个空槽占位
# 每个端口一个小格子,<p> name
return "|".join(f"<{re.sub(r'[^A-Za-z0-9_:.|-]', '_', p)}> {p}" for p in ports)
left = port_fields(in_ports)
right = port_fields(out_ports)
# 三栏:左(入) | 中(节点名) | 右(出)
record_label = f"{{ {left} | {core} | {right} }}"
node.attr.update(shape="record", label=record_label)
else:
# 没有命名端口:普通盒子,显示核心标签
node.attr.update(label=str(core))
# 4) 给边设置 headport / tailport
# - 若端口为 compass直接用 compasse.g., headport="e"
# - 若端口为命名端口:使用在 record 中定义的 <port> 名(同名即可)
for u, v, sp, tp in edges_data:
e = A.get_edge(u, v)
# Graphviz 属性tail 是源head 是目标
if sp:
if _is_compass(sp):
e.attr["tailport"] = sp.lower()
else:
# 与 record label 中 <port> 名一致;特殊字符已在 label 中做了清洗
e.attr["tailport"] = re.sub(r"[^A-Za-z0-9_:.|-]", "_", str(sp))
if tp:
if _is_compass(tp):
e.attr["headport"] = tp.lower()
else:
e.attr["headport"] = re.sub(r"[^A-Za-z0-9_:.|-]", "_", str(tp))
# 可选:若想让边更贴边缘,可设置 constraint/spline 等
# e.attr["arrowhead"] = "vee"
# 5) 输出
A.draw(output_path, prog="dot")
print(f" - Port-aware workflow rendered to '{output_path}'")
# ---------------- Registry Adapter ----------------
class RegistryAdapter:
"""根据 module 的类名(冒号右侧)反查 registry 的 resource_name原 device_class并抽取参数顺序"""
def __init__(self, device_registry: Dict[str, Any]):
self.device_registry = device_registry or {}
self.module_class_to_resource = self._build_module_class_index()
def _build_module_class_index(self) -> Dict[str, str]:
idx = {}
for resource_name, info in self.device_registry.items():
module = info.get("module")
if isinstance(module, str) and ":" in module:
cls = module.split(":")[-1]
idx[cls] = resource_name
idx[cls.lower()] = resource_name
return idx
def resolve_resource_by_classname(self, class_name: str) -> Optional[str]:
if not class_name:
return None
return self.module_class_to_resource.get(class_name) or self.module_class_to_resource.get(class_name.lower())
def get_device_module(self, resource_name: Optional[str]) -> Optional[str]:
if not resource_name:
return None
return self.device_registry.get(resource_name, {}).get("module")
def get_actions(self, resource_name: Optional[str]) -> Dict[str, Any]:
if not resource_name:
return {}
return (self.device_registry.get(resource_name, {}).get("class", {}).get("action_value_mappings", {})) or {}
def get_action_schema(self, resource_name: Optional[str], template_name: str) -> Optional[Json]:
return (self.get_actions(resource_name).get(template_name) or {}).get("schema")
def get_action_goal_default(self, resource_name: Optional[str], template_name: str) -> Json:
return (self.get_actions(resource_name).get(template_name) or {}).get("goal_default", {}) or {}
def get_action_input_keys(self, resource_name: Optional[str], template_name: str) -> List[str]:
schema = self.get_action_schema(resource_name, template_name) or {}
goal = (schema.get("properties") or {}).get("goal") or {}
props = goal.get("properties") or {}
required = goal.get("required") or []
return list(dict.fromkeys(required + list(props.keys())))
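A minimal end-to-end sketch of these helpers; the labware/step values are illustrative, and rendering assumes Graphviz plus pygraphviz are installed (to_agraph requires them):

# Sketch: build a graph from one reagent and one transfer step, then render it.
labware_info = {"Water": {"slot": 1, "labware": "reservoir", "well": [], "type": "reagent", "role": "", "name": "Water"}}
steps = [{"action": "transfer_liquid", "parameters": {"sources": "Water", "targets": "PlateA", "volume": 50}, "step_number": 1}]
graph = build_protocol_graph(labware_info=labware_info, protocol_steps=steps, workstation_name="PRCXi")
draw_protocol_graph_with_ports(graph, "protocol_graph.png")
print(len(graph.nodes), len(graph.edges))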

View File

@@ -0,0 +1,356 @@
"""
JSON 工作流转换模块
提供从多种 JSON 格式转换为统一工作流格式的功能。
支持的格式:
1. workflow/reagent 格式
2. steps_info/labware_info 格式
"""
import json
from os import PathLike
from pathlib import Path
from typing import Any, Dict, List, Optional, Set, Tuple, Union
from unilabos.workflow.common import WorkflowGraph, build_protocol_graph
from unilabos.registry.registry import lab_registry
def get_action_handles(resource_name: str, template_name: str) -> Dict[str, List[str]]:
"""
从 registry 获取指定设备和动作的 handles 配置
Args:
resource_name: 设备资源名称,如 "liquid_handler.prcxi"
template_name: 动作模板名称,如 "transfer_liquid"
Returns:
包含 source 和 target handler_keys 的字典:
{"source": ["sources_out", "targets_out", ...], "target": ["sources", "targets", ...]}
"""
result = {"source": [], "target": []}
device_info = lab_registry.device_type_registry.get(resource_name, {})
if not device_info:
return result
action_mappings = device_info.get("class", {}).get("action_value_mappings", {})
action_config = action_mappings.get(template_name, {})
handles = action_config.get("handles", {})
if isinstance(handles, dict):
# 处理 input handles (作为 target)
for handle in handles.get("input", []):
handler_key = handle.get("handler_key", "")
if handler_key:
result["source"].append(handler_key)
# 处理 output handles (作为 source)
for handle in handles.get("output", []):
handler_key = handle.get("handler_key", "")
if handler_key:
result["target"].append(handler_key)
return result
def validate_workflow_handles(graph: WorkflowGraph) -> Tuple[bool, List[str]]:
"""
校验工作流图中所有边的句柄配置是否正确
Args:
graph: 工作流图对象
Returns:
(is_valid, errors): 是否有效,错误信息列表
"""
errors = []
nodes = graph.nodes
for edge in graph.edges:
left_uuid = edge.get("source")
right_uuid = edge.get("target")
# target_handle_key是target, right的输入节点入节点
# source_handle_key是source, left的输出节点出节点
right_source_conn_key = edge.get("target_handle_key", "")
left_target_conn_key = edge.get("source_handle_key", "")
# 获取源节点和目标节点信息
left_node = nodes.get(left_uuid, {})
right_node = nodes.get(right_uuid, {})
left_res_name = left_node.get("resource_name", "")
left_template_name = left_node.get("template_name", "")
right_res_name = right_node.get("resource_name", "")
right_template_name = right_node.get("template_name", "")
# 获取源节点的 output handles
left_node_handles = get_action_handles(left_res_name, left_template_name)
target_valid_keys = left_node_handles.get("target", [])
target_valid_keys.append("ready")
# 获取目标节点的 input handles
right_node_handles = get_action_handles(right_res_name, right_template_name)
source_valid_keys = right_node_handles.get("source", [])
source_valid_keys.append("ready")
# 如果节点配置了 output handles则 source_port 必须有效
if not right_source_conn_key:
node_name = left_node.get("name", left_uuid[:8])
errors.append(f"源节点 '{node_name}' 的 source_handle_key 为空," f"应设置为: {source_valid_keys}")
elif right_source_conn_key not in source_valid_keys:
node_name = left_node.get("name", left_uuid[:8])
errors.append(
f"源节点 '{node_name}' 的 source 端点 '{right_source_conn_key}' 不存在," f"支持的端点: {source_valid_keys}"
)
# 如果节点配置了 input handles则 target_port 必须有效
if not left_target_conn_key:
node_name = right_node.get("name", right_uuid[:8])
errors.append(f"目标节点 '{node_name}' 的 target_handle_key 为空," f"应设置为: {target_valid_keys}")
elif left_target_conn_key not in target_valid_keys:
node_name = right_node.get("name", right_uuid[:8])
errors.append(
f"目标节点 '{node_name}' 的 target 端点 '{left_target_conn_key}' 不存在,"
f"支持的端点: {target_valid_keys}"
)
return len(errors) == 0, errors
# action 到 resource_name 的映射
ACTION_RESOURCE_MAPPING: Dict[str, str] = {
# 生物实验操作
"transfer_liquid": "liquid_handler.prcxi",
"transfer": "liquid_handler.prcxi",
"incubation": "incubator.prcxi",
"move_labware": "labware_mover.prcxi",
"oscillation": "shaker.prcxi",
# 有机化学操作
"HeatChillToTemp": "heatchill.chemputer",
"StopHeatChill": "heatchill.chemputer",
"StartHeatChill": "heatchill.chemputer",
"HeatChill": "heatchill.chemputer",
"Dissolve": "stirrer.chemputer",
"Transfer": "liquid_handler.chemputer",
"Evaporate": "rotavap.chemputer",
"Recrystallize": "reactor.chemputer",
"Filter": "filter.chemputer",
"Dry": "dryer.chemputer",
"Add": "liquid_handler.chemputer",
}
def normalize_steps(data: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
"""
将不同格式的步骤数据规范化为统一格式
支持的输入格式:
- action + parameters
- action + action_args
- operation + parameters
Args:
data: 原始步骤数据列表
Returns:
规范化后的步骤列表,格式为 [{"action": str, "parameters": dict, "description": str?, "step_number": int?}, ...]
"""
normalized = []
for idx, step in enumerate(data):
# 获取动作名称(支持 action 或 operation 字段)
action = step.get("action") or step.get("operation")
if not action:
continue
# 获取参数(支持 parameters 或 action_args 字段)
raw_params = step.get("parameters") or step.get("action_args") or {}
params = dict(raw_params)
# 规范化 source/target -> sources/targets
if "source" in raw_params and "sources" not in raw_params:
params["sources"] = raw_params["source"]
if "target" in raw_params and "targets" not in raw_params:
params["targets"] = raw_params["target"]
# 获取描述(支持 description 或 purpose 字段)
description = step.get("description") or step.get("purpose")
# 获取步骤编号(优先使用原始数据中的 step_number否则使用索引+1
step_number = step.get("step_number", idx + 1)
step_dict = {"action": action, "parameters": params, "step_number": step_number}
if description:
step_dict["description"] = description
normalized.append(step_dict)
return normalized
def normalize_labware(data: List[Dict[str, Any]]) -> Dict[str, Dict[str, Any]]:
"""
将不同格式的 labware 数据规范化为统一的字典格式
支持的输入格式:
- reagent_name + material_name + positions
- name + labware + slot
Args:
data: 原始 labware 数据列表
Returns:
规范化后的 labware 字典,格式为 {name: {"slot": int, "labware": str, "well": list, "type": str, "role": str, "name": str}, ...}
"""
labware = {}
for item in data:
# 获取 key 名称(优先使用 reagent_name其次是 material_name 或 name
reagent_name = item.get("reagent_name")
key = reagent_name or item.get("material_name") or item.get("name")
if not key:
continue
key = str(key)
# 处理重复 key自动添加后缀
idx = 1
original_key = key
while key in labware:
idx += 1
key = f"{original_key}_{idx}"
labware[key] = {
"slot": item.get("positions") or item.get("slot"),
"labware": item.get("material_name") or item.get("labware"),
"well": item.get("well", []),
"type": item.get("type", "reagent"),
"role": item.get("role", ""),
"name": key,
}
return labware
def convert_from_json(
data: Union[str, PathLike, Dict[str, Any]],
workstation_name: str = "PRCXi",
validate: bool = True,
) -> WorkflowGraph:
"""
从 JSON 数据或文件转换为 WorkflowGraph
支持的 JSON 格式:
1. {"workflow": [...], "reagent": {...}} - 直接格式
2. {"steps_info": [...], "labware_info": [...]} - 需要规范化的格式
Args:
data: JSON 文件路径、字典数据、或 JSON 字符串
workstation_name: 工作站名称,默认 "PRCXi"
validate: 是否校验句柄配置,默认 True
Returns:
WorkflowGraph: 构建好的工作流图
Raises:
ValueError: 不支持的 JSON 格式 或 句柄校验失败
FileNotFoundError: 文件不存在
json.JSONDecodeError: JSON 解析失败
"""
# 处理输入数据
if isinstance(data, (str, PathLike)):
path = Path(data)
if path.exists():
with path.open("r", encoding="utf-8") as fp:
json_data = json.load(fp)
elif isinstance(data, str):
# 尝试作为 JSON 字符串解析
json_data = json.loads(data)
else:
raise FileNotFoundError(f"文件不存在: {data}")
elif isinstance(data, dict):
json_data = data
else:
raise TypeError(f"不支持的数据类型: {type(data)}")
# 根据格式解析数据
if "workflow" in json_data and "reagent" in json_data:
# 格式1: workflow/reagent已经是规范格式
protocol_steps = json_data["workflow"]
labware_info = json_data["reagent"]
elif "steps_info" in json_data and "labware_info" in json_data:
# 格式2: steps_info/labware_info需要规范化
protocol_steps = normalize_steps(json_data["steps_info"])
labware_info = normalize_labware(json_data["labware_info"])
elif "steps" in json_data and "labware" in json_data:
# 格式3: steps/labware另一种常见格式
protocol_steps = normalize_steps(json_data["steps"])
if isinstance(json_data["labware"], list):
labware_info = normalize_labware(json_data["labware"])
else:
labware_info = json_data["labware"]
else:
raise ValueError(
"不支持的 JSON 格式。支持的格式:\n"
"1. {'workflow': [...], 'reagent': {...}}\n"
"2. {'steps_info': [...], 'labware_info': [...]}\n"
"3. {'steps': [...], 'labware': [...]}"
)
# 构建工作流图
graph = build_protocol_graph(
labware_info=labware_info,
protocol_steps=protocol_steps,
workstation_name=workstation_name,
action_resource_mapping=ACTION_RESOURCE_MAPPING,
)
# 校验句柄配置
if validate:
is_valid, errors = validate_workflow_handles(graph)
if not is_valid:
import warnings
for error in errors:
warnings.warn(f"句柄校验警告: {error}")
return graph
def convert_json_to_node_link(
data: Union[str, PathLike, Dict[str, Any]],
workstation_name: str = "PRCXi",
) -> Dict[str, Any]:
"""
将 JSON 数据转换为 node-link 格式的字典
Args:
data: JSON 文件路径、字典数据、或 JSON 字符串
workstation_name: 工作站名称,默认 "PRCXi"
Returns:
Dict: node-link 格式的工作流数据
"""
graph = convert_from_json(data, workstation_name)
return graph.to_node_link_dict()
def convert_json_to_workflow_list(
data: Union[str, PathLike, Dict[str, Any]],
workstation_name: str = "PRCXi",
) -> List[Dict[str, Any]]:
"""
将 JSON 数据转换为工作流列表格式
Args:
data: JSON 文件路径、字典数据、或 JSON 字符串
workstation_name: 工作站名称,默认 "PRCXi"
Returns:
List: 工作流节点列表
"""
graph = convert_from_json(data, workstation_name)
return graph.to_dict()
# 为了向后兼容,保留下划线前缀的别名
_normalize_steps = normalize_steps
_normalize_labware = normalize_labware
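A usage sketch, assuming the UniLab registry is importable in the current environment; the payload values are illustrative:

# Sketch: convert a steps_info/labware_info style protocol into node-link form.
payload = {
    "steps_info": [{"action": "transfer_liquid", "parameters": {"source": "Water", "target": "PlateA", "volume": 50}}],
    "labware_info": [{"reagent_name": "Water", "material_name": "reservoir", "positions": 1}],
}
node_link = convert_json_to_node_link(payload, workstation_name="PRCXi")
print(len(node_link["nodes"]), len(node_link["edges"]))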

View File

@@ -0,0 +1,241 @@
import ast
import json
from typing import Dict, List, Any, Tuple, Optional
from .common import WorkflowGraph, RegistryAdapter
Json = Dict[str, Any]
# ---------------- Converter ----------------
class DeviceMethodConverter:
"""
- 字段统一resource_name原 device_class、template_name原 action_key
- params 单层inputs 使用 'params.' 前缀
- SimpleGraph.add_workflow_node 负责变量连线与边
"""
def __init__(self, device_registry: Optional[Dict[str, Any]] = None):
self.graph = WorkflowGraph()
self.variable_sources: Dict[str, Dict[str, Any]] = {} # var -> {node_id, output_name}
self.instance_to_resource: Dict[str, Optional[str]] = {} # 实例名 -> resource_name
self.node_id_counter: int = 0
self.registry = RegistryAdapter(device_registry or {})
# ---- helpers ----
def _new_node_id(self) -> int:
nid = self.node_id_counter
self.node_id_counter += 1
return nid
def _assign_targets(self, targets) -> List[str]:
names: List[str] = []
import ast
if isinstance(targets, ast.Tuple):
for elt in targets.elts:
if isinstance(elt, ast.Name):
names.append(elt.id)
elif isinstance(targets, ast.Name):
names.append(targets.id)
return names
def _extract_device_instantiation(self, node) -> Optional[Tuple[str, str]]:
import ast
if not isinstance(node.value, ast.Call):
return None
callee = node.value.func
if isinstance(callee, ast.Name):
class_name = callee.id
elif isinstance(callee, ast.Attribute) and isinstance(callee.value, ast.Name):
class_name = callee.attr
else:
return None
if isinstance(node.targets[0], ast.Name):
instance = node.targets[0].id
return instance, class_name
return None
def _extract_call(self, call) -> Tuple[str, str, Dict[str, Any], str]:
import ast
owner_name, method_name, call_kind = "", "", "func"
if isinstance(call.func, ast.Attribute):
method_name = call.func.attr
if isinstance(call.func.value, ast.Name):
owner_name = call.func.value.id
call_kind = "instance" if owner_name in self.instance_to_resource else "class_or_module"
elif isinstance(call.func.value, ast.Attribute) and isinstance(call.func.value.value, ast.Name):
owner_name = call.func.value.attr
call_kind = "class_or_module"
elif isinstance(call.func, ast.Name):
method_name = call.func.id
call_kind = "func"
def pack(node):
if isinstance(node, ast.Name):
return {"type": "variable", "value": node.id}
if isinstance(node, ast.Constant):
return {"type": "constant", "value": node.value}
if isinstance(node, ast.Dict):
return {"type": "dict", "value": self._parse_dict(node)}
if isinstance(node, ast.List):
return {"type": "list", "value": self._parse_list(node)}
return {"type": "raw", "value": ast.unparse(node) if hasattr(ast, "unparse") else str(node)}
args: Dict[str, Any] = {}
pos: List[Any] = []
for a in call.args:
pos.append(pack(a))
for kw in call.keywords:
args[kw.arg] = pack(kw.value)
if pos:
args["_positional"] = pos
return owner_name, method_name, args, call_kind
def _parse_dict(self, node) -> Dict[str, Any]:
import ast
out: Dict[str, Any] = {}
for k, v in zip(node.keys, node.values):
if isinstance(k, ast.Constant):
key = str(k.value)
if isinstance(v, ast.Name):
out[key] = f"var:{v.id}"
elif isinstance(v, ast.Constant):
out[key] = v.value
elif isinstance(v, ast.Dict):
out[key] = self._parse_dict(v)
elif isinstance(v, ast.List):
out[key] = self._parse_list(v)
return out
def _parse_list(self, node) -> List[Any]:
import ast
out: List[Any] = []
for elt in node.elts:
if isinstance(elt, ast.Name):
out.append(f"var:{elt.id}")
elif isinstance(elt, ast.Constant):
out.append(elt.value)
elif isinstance(elt, ast.Dict):
out.append(self._parse_dict(elt))
elif isinstance(elt, ast.List):
out.append(self._parse_list(elt))
return out
def _normalize_var_tokens(self, x: Any) -> Any:
if isinstance(x, str) and x.startswith("var:"):
return {"__var__": x[4:]}
if isinstance(x, list):
return [self._normalize_var_tokens(i) for i in x]
if isinstance(x, dict):
return {k: self._normalize_var_tokens(v) for k, v in x.items()}
return x
def _make_params_payload(self, resource_name: Optional[str], template_name: str, call_args: Dict[str, Any]) -> Dict[str, Any]:
input_keys = self.registry.get_action_input_keys(resource_name, template_name) if resource_name else []
defaults = self.registry.get_action_goal_default(resource_name, template_name) if resource_name else {}
params: Dict[str, Any] = dict(defaults)
def unpack(p):
t, v = p.get("type"), p.get("value")
if t == "variable":
return {"__var__": v}
if t == "dict":
return self._normalize_var_tokens(v)
if t == "list":
return self._normalize_var_tokens(v)
return v
for k, p in call_args.items():
if k == "_positional":
continue
params[k] = unpack(p)
pos = call_args.get("_positional", [])
if pos:
if input_keys:
for i, p in enumerate(pos):
if i >= len(input_keys):
break
name = input_keys[i]
if name in params:
continue
params[name] = unpack(p)
else:
for i, p in enumerate(pos):
params[f"arg_{i}"] = unpack(p)
return params
# ---- handlers ----
def _on_assign(self, stmt):
import ast
inst = self._extract_device_instantiation(stmt)
if inst:
instance, code_class = inst
resource_name = self.registry.resolve_resource_by_classname(code_class)
self.instance_to_resource[instance] = resource_name
return
if isinstance(stmt.value, ast.Call):
owner, method, call_args, kind = self._extract_call(stmt.value)
if kind == "instance":
device_key = owner
resource_name = self.instance_to_resource.get(owner)
else:
device_key = owner
resource_name = self.registry.resolve_resource_by_classname(owner)
module = self.registry.get_device_module(resource_name)
params = self._make_params_payload(resource_name, method, call_args)
nid = self._new_node_id()
self.graph.add_workflow_node(
nid,
device_key=device_key,
resource_name=resource_name, # ✅
module=module,
template_name=method, # ✅
params=params,
variable_sources=self.variable_sources,
add_ready_if_no_vars=True,
prev_node_id=(nid - 1) if nid > 0 else None,
)
out_vars = self._assign_targets(stmt.targets[0])
for var in out_vars:
self.variable_sources[var] = {"node_id": nid, "output_name": "result"}
def _on_expr(self, stmt):
import ast
if not isinstance(stmt.value, ast.Call):
return
owner, method, call_args, kind = self._extract_call(stmt.value)
if kind == "instance":
device_key = owner
resource_name = self.instance_to_resource.get(owner)
else:
device_key = owner
resource_name = self.registry.resolve_resource_by_classname(owner)
module = self.registry.get_device_module(resource_name)
params = self._make_params_payload(resource_name, method, call_args)
nid = self._new_node_id()
self.graph.add_workflow_node(
nid,
device_key=device_key,
resource_name=resource_name, # ✅
module=module,
template_name=method, # ✅
params=params,
variable_sources=self.variable_sources,
add_ready_if_no_vars=True,
prev_node_id=(nid - 1) if nid > 0 else None,
)
def convert(self, python_code: str):
tree = ast.parse(python_code)
for stmt in tree.body:
if isinstance(stmt, ast.Assign):
self._on_assign(stmt)
elif isinstance(stmt, ast.Expr):
self._on_expr(stmt)
return self
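A sketch of converting a short device script into a workflow graph; the registry dict below is a hand-written stand-in for the real device registry, so its contents are assumptions:

registry = {
    "separator.chinwe": {
        "module": "unilabos.devices.separator.chinwe:ChinweDevice",
        "class": {"action_value_mappings": {"pump_dispense": {"goal_default": {}, "schema": {}}}},
    }
}
code = '''
dev = ChinweDevice(port="192.168.31.201:8899")
dev.pump_dispense(pump_id=1, volume=1000, valve_port=2)
'''
converter = DeviceMethodConverter(device_registry=registry).convert(code)
print(converter.graph.to_node_link_dict()["nodes"])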

View File

@@ -0,0 +1,131 @@
from typing import List, Any, Dict
import xml.etree.ElementTree as ET
def convert_to_type(val: str) -> Any:
"""将字符串值转换为适当的数据类型"""
if val == "True":
return True
if val == "False":
return False
if val == "?":
return None
if val.endswith(" g"):
return float(val.split(" ")[0])
if val.endswith("mg"):
return float(val.split("mg")[0])
elif val.endswith("mmol"):
return float(val.split("mmol")[0]) / 1000
elif val.endswith("mol"):
return float(val.split("mol")[0])
elif val.endswith("ml"):
return float(val.split("ml")[0])
elif val.endswith("RPM"):
return float(val.split("RPM")[0])
elif val.endswith(" °C"):
return float(val.split(" ")[0])
elif val.endswith(" %"):
return float(val.split(" ")[0])
return val
def flatten_xdl_procedure(procedure_elem: ET.Element) -> List[ET.Element]:
"""展平嵌套的XDL程序结构"""
flattened_operations = []
TEMP_UNSUPPORTED_PROTOCOL = ["Purge", "Wait", "Stir", "ResetHandling"]
def extract_operations(element: ET.Element):
if element.tag not in ["Prep", "Reaction", "Workup", "Purification", "Procedure"]:
if element.tag not in TEMP_UNSUPPORTED_PROTOCOL:
flattened_operations.append(element)
for child in element:
extract_operations(child)
for child in procedure_elem:
extract_operations(child)
return flattened_operations
def parse_xdl_content(xdl_content: str) -> tuple:
"""解析XDL内容"""
try:
xdl_content_cleaned = "".join(c for c in xdl_content if c.isprintable())
root = ET.fromstring(xdl_content_cleaned)
synthesis_elem = root.find("Synthesis")
if synthesis_elem is None:
return None, None, None
# 解析硬件组件
hardware_elem = synthesis_elem.find("Hardware")
hardware = []
if hardware_elem is not None:
hardware = [{"id": c.get("id"), "type": c.get("type")} for c in hardware_elem.findall("Component")]
# 解析试剂
reagents_elem = synthesis_elem.find("Reagents")
reagents = []
if reagents_elem is not None:
reagents = [{"name": r.get("name"), "role": r.get("role", "")} for r in reagents_elem.findall("Reagent")]
# 解析程序
procedure_elem = synthesis_elem.find("Procedure")
if procedure_elem is None:
return None, None, None
flattened_operations = flatten_xdl_procedure(procedure_elem)
return hardware, reagents, flattened_operations
except ET.ParseError as e:
raise ValueError(f"Invalid XDL format: {e}")
def convert_xdl_to_dict(xdl_content: str) -> Dict[str, Any]:
"""
将XDL XML格式转换为标准的字典格式
Args:
xdl_content: XDL XML内容
Returns:
转换结果,包含步骤和器材信息
"""
try:
hardware, reagents, flattened_operations = parse_xdl_content(xdl_content)
if hardware is None:
return {"error": "Failed to parse XDL content", "success": False}
# 将XDL元素转换为字典格式
steps_data = []
for elem in flattened_operations:
# 转换参数类型
parameters = {}
for key, val in elem.attrib.items():
converted_val = convert_to_type(val)
if converted_val is not None:
parameters[key] = converted_val
step_dict = {
"operation": elem.tag,
"parameters": parameters,
"description": elem.get("purpose", f"Operation: {elem.tag}"),
}
steps_data.append(step_dict)
# 合并硬件和试剂为统一的labware_info格式
labware_data = []
labware_data.extend({"id": hw["id"], "type": "hardware", **hw} for hw in hardware)
labware_data.extend({"name": reagent["name"], "type": "reagent", **reagent} for reagent in reagents)
return {
"success": True,
"steps": steps_data,
"labware": labware_data,
"message": f"Successfully converted XDL to dict format. Found {len(steps_data)} steps and {len(labware_data)} labware items.",
}
except Exception as e:
error_msg = f"XDL conversion failed: {str(e)}"
return {"error": error_msg, "success": False}

View File

@@ -0,0 +1,138 @@
"""
工作流工具模块
提供工作流上传等功能
"""
import json
import os
import uuid
from typing import Any, Dict, List, Optional
from unilabos.utils.banner_print import print_status
def _is_node_link_format(data: Dict[str, Any]) -> bool:
"""检查数据是否为 node-link 格式"""
return "nodes" in data and "edges" in data
def _convert_to_node_link(workflow_file: str, workflow_data: Dict[str, Any]) -> Dict[str, Any]:
"""
将非 node-link 格式的工作流数据转换为 node-link 格式
Args:
workflow_file: 工作流文件路径(用于日志)
workflow_data: 原始工作流数据
Returns:
node-link 格式的工作流数据
"""
from unilabos.workflow.convert_from_json import convert_json_to_node_link
print_status(f"检测到非 node-link 格式,正在转换...", "info")
node_link_data = convert_json_to_node_link(workflow_data)
print_status(f"转换完成", "success")
return node_link_data
def upload_workflow(
workflow_file: str,
workflow_name: Optional[str] = None,
tags: Optional[List[str]] = None,
published: bool = False,
) -> Dict[str, Any]:
"""
上传工作流到服务器
支持的输入格式:
1. node-link 格式: {"nodes": [...], "edges": [...]}
2. workflow/reagent 格式: {"workflow": [...], "reagent": {...}}
3. steps_info/labware_info 格式: {"steps_info": [...], "labware_info": [...]}
4. steps/labware 格式: {"steps": [...], "labware": [...]}
Args:
workflow_file: 工作流文件路径JSON格式
workflow_name: 工作流名称,如果不提供则从文件中读取或使用文件名
tags: 工作流标签列表,默认为空列表
published: 是否发布工作流默认为False
Returns:
Dict: API响应数据
"""
# 延迟导入,避免在配置文件加载之前初始化 http_client
from unilabos.app.web import http_client
if not os.path.exists(workflow_file):
print_status(f"工作流文件不存在: {workflow_file}", "error")
return {"code": -1, "message": f"文件不存在: {workflow_file}"}
# 读取工作流文件
try:
with open(workflow_file, "r", encoding="utf-8") as f:
workflow_data = json.load(f)
except json.JSONDecodeError as e:
print_status(f"工作流文件JSON解析失败: {e}", "error")
return {"code": -1, "message": f"JSON解析失败: {e}"}
# 自动检测并转换格式
if not _is_node_link_format(workflow_data):
try:
workflow_data = _convert_to_node_link(workflow_file, workflow_data)
except Exception as e:
print_status(f"工作流格式转换失败: {e}", "error")
return {"code": -1, "message": f"格式转换失败: {e}"}
# 提取工作流数据
nodes = workflow_data.get("nodes", [])
edges = workflow_data.get("edges", [])
workflow_uuid_val = workflow_data.get("workflow_uuid", str(uuid.uuid4()))
wf_name_from_file = workflow_data.get("workflow_name", os.path.basename(workflow_file).replace(".json", ""))
# 确定工作流名称
final_name = workflow_name or wf_name_from_file
print_status(f"正在上传工作流: {final_name}", "info")
print_status(f" - 节点数量: {len(nodes)}", "info")
print_status(f" - 边数量: {len(edges)}", "info")
print_status(f" - 标签: {tags or []}", "info")
print_status(f" - 发布状态: {published}", "info")
# 调用 http_client 上传
result = http_client.workflow_import(
name=final_name,
workflow_uuid=workflow_uuid_val,
workflow_name=final_name,
nodes=nodes,
edges=edges,
tags=tags,
published=published,
)
if result.get("code") == 0:
data = result.get("data", {})
print_status("工作流上传成功!", "success")
print_status(f" - UUID: {data.get('uuid', 'N/A')}", "info")
print_status(f" - 名称: {data.get('name', 'N/A')}", "info")
else:
print_status(f"工作流上传失败: {result.get('message', '未知错误')}", "error")
return result
def handle_workflow_upload_command(args_dict: Dict[str, Any]) -> None:
"""
处理 workflow_upload 子命令
Args:
args_dict: 命令行参数字典
"""
workflow_file = args_dict.get("workflow_file")
workflow_name = args_dict.get("workflow_name")
tags = args_dict.get("tags", [])
published = args_dict.get("published", False)
if workflow_file:
upload_workflow(workflow_file, workflow_name, tags, published)
else:
print_status("未指定工作流文件路径,请使用 -f/--workflow_file 参数", "error")

View File

@@ -2,7 +2,7 @@
<?xml-model href="http://download.ros.org/schema/package_format3.xsd" schematypens="http://www.w3.org/2001/XMLSchema"?>
<package format="3">
  <name>unilabos_msgs</name>
  <version>0.10.12</version>
  <description>ROS2 Messages package for unilabos devices</description>
  <maintainer email="changjh@pku.edu.cn">Junhan Chang</maintainer>
  <maintainer email="18435084+Xuwznln@users.noreply.github.com">Xuwznln</maintainer>