Mirror of https://github.com/dptech-corp/Uni-Lab-OS.git (synced 2026-02-08 07:55:12 +00:00)

Compare commits: 6f143b068b ... v0.10.14 (66 commits)
Commits in this range:

266366cc25, 121c3985cc, 6ca5c72fc6, bc8c49ddda, 28f93737ac, 5dc81ec9be,
13a6795657, 53219d8b04, b1cdef9185, 9854ed8c9c, 52544a2c69, 5ce433e235,
c7c14d2332, 6fdd482649, d390236318, ed8ee29732, ffc583e9d5, f1ad0c9c96,
8fa3407649, d3282822fc, 554bcade24, a662c75de1, 931614fe64, d39662f65f,
acf5fdebf8, 7f7b1c13c0, 75f09034ff, 549a50220b, 4189a2cfbe, 48895a9bb1,
891f126ed6, 4d3475a849, b475db66df, a625a86e3e, 37e0f1037c, a242253145,
448e0074b7, 304827fc8d, 872b3d781f, 813400f2b4, b6dfe2b944, 8807865649,
5fc7eb7586, 9bd72b48e1, 42b78ab4c1, 9645609a05, a2a827d7ac, bb3ca645a4,
37ee43d19a, bc30f23e34, 166d84afe1, 1b43c53015, d4415f5a35, 0260cbbedb,
7c440d10ab, c85c49817d, c70eafa5f0, b64466d443, ef3f24ed48, 2a8e8d014b,
e0da1c7217, 51d3e61723, 6b5765bbf3, eb1f3fbe1c, fb93b1cd94, 9aeffebde1
@@ -1,6 +1,6 @@
 package:
   name: unilabos
-  version: 0.10.15
+  version: 0.10.14
 
 source:
   path: ../unilabos
.github/workflows/conda-pack-build.yml (2 changes, vendored)
@@ -24,7 +24,7 @@ jobs:
           platform: linux-64
           env_file: unilabos-linux-64.yaml
           script_ext: sh
-        - os: macos-15 # Intel (via Rosetta)
+        - os: macos-13 # Intel
           platform: osx-64
           env_file: unilabos-osx-64.yaml
           script_ext: sh
.github/workflows/multi-platform-build.yml (2 changes, vendored)
@@ -27,7 +27,7 @@ jobs:
        - os: ubuntu-latest
          platform: linux-64
          env_file: unilabos-linux-64.yaml
-        - os: macos-15 # Intel (via Rosetta)
+        - os: macos-13 # Intel
          platform: osx-64
          env_file: unilabos-osx-64.yaml
        - os: macos-latest # ARM64
.github/workflows/unilabos-conda-build.yml (2 changes, vendored)
@@ -26,7 +26,7 @@ jobs:
       include:
        - os: ubuntu-latest
          platform: linux-64
-        - os: macos-15 # Intel (via Rosetta)
+        - os: macos-13 # Intel
          platform: osx-64
        - os: macos-latest # ARM64
          platform: osx-arm64
README.md (14 changes)
@@ -31,9 +31,7 @@ Detailed documentation can be found at:
 
 ## Quick Start
 
-1. Setup Conda Environment
-
-Uni-Lab-OS recommends using `mamba` for environment management:
+Uni-Lab-OS recommends using `mamba` for environment management. Choose the appropriate environment file for your operating system:
 
 ```bash
 # Create new environment
@@ -42,7 +40,7 @@ mamba activate unilab
 mamba install -n unilab uni-lab::unilabos -c robostack-staging -c conda-forge
 ```
 
-2. Install Dev Uni-Lab-OS
+## Install Dev Uni-Lab-OS
 
 ```bash
 # Clone the repository
@@ -53,21 +51,17 @@ cd Uni-Lab-OS
 pip install .
 ```
 
-3. Start Uni-Lab System
+3. Start Uni-Lab System:
 
 Please refer to [Documentation - Boot Examples](https://deepmodeling.github.io/Uni-Lab-OS/boot_examples/index.html)
 
-4. Best Practice
-
-See [Best Practice Guide](https://deepmodeling.github.io/Uni-Lab-OS/user_guide/best_practice.html)
-
 ## Message Format
 
 Uni-Lab-OS uses pre-built `unilabos_msgs` for system communication. You can find the built versions on the [GitHub Releases](https://github.com/deepmodeling/Uni-Lab-OS/releases) page.
 
 ## Citation
 
-If you use [Uni-Lab-OS](https://arxiv.org/abs/2512.21766) in academic research, please cite:
+If you use Uni-Lab-OS in academic research, please cite:
 
 ```bibtex
 @article{gao2025unilabos,
@@ -53,21 +53,17 @@ cd Uni-Lab-OS
 pip install .
 ```
 
-3. 启动 Uni-Lab 系统
+3. 启动 Uni-Lab 系统:
 
 请见[文档-启动样例](https://deepmodeling.github.io/Uni-Lab-OS/boot_examples/index.html)
 
-4. 最佳实践
-
-请见[最佳实践指南](https://deepmodeling.github.io/Uni-Lab-OS/user_guide/best_practice.html)
-
 ## 消息格式
 
 Uni-Lab-OS 使用预构建的 `unilabos_msgs` 进行系统通信。您可以在 [GitHub Releases](https://github.com/deepmodeling/Uni-Lab-OS/releases) 页面找到已构建的版本。
 
 ## 引用
 
-如果您在学术研究中使用 [Uni-Lab-OS](https://arxiv.org/abs/2512.21766),请引用:
+如果您在学术研究中使用 Uni-Lab-OS,请引用:
 
 ```bibtex
 @article{gao2025unilabos,
@@ -24,7 +24,7 @@ extensions = [
     "sphinx.ext.autodoc",
     "sphinx.ext.napoleon",  # 如果您使用 Google 或 NumPy 风格的 docstrings
     "sphinx_rtd_theme",
-    "sphinxcontrib.mermaid",
+    "sphinxcontrib.mermaid"
 ]
 
 source_suffix = {
@@ -58,7 +58,7 @@ html_theme = "sphinx_rtd_theme"
 
 # sphinx-book-theme 主题选项
 html_theme_options = {
-    "repository_url": "https://github.com/deepmodeling/Uni-Lab-OS",
+    "repository_url": "https://github.com/用户名/Uni-Lab",
     "use_repository_button": True,
     "use_issues_button": True,
     "use_edit_page_button": True,
(One file diff suppressed because it is too large.)
@@ -12,7 +12,3 @@ sphinx-copybutton>=0.5.0
 
 # 用于自动摘要生成
 sphinx-autobuild>=2024.2.4
 
-# 用于PDF导出 (rinohtype方案,纯Python无需LaTeX)
-rinohtype>=0.5.4
-sphinx-simplepdf>=1.6.0
@@ -463,7 +463,7 @@ Uni-Lab 使用 `ResourceDictInstance.get_resource_instance_from_dict()` 方法
 ### 使用示例
 
 ```python
-from unilabos.resources.resource_tracker import ResourceDictInstance
+from unilabos.ros.nodes.resource_tracker import ResourceDictInstance
 
 # 旧格式节点
 old_format_node = {
@@ -477,10 +477,10 @@ old_format_node = {
 instance = ResourceDictInstance.get_resource_instance_from_dict(old_format_node)
 
 # 访问标准化后的数据
 print(instance.res_content.id)      # "pump_1"
 print(instance.res_content.uuid)    # 自动生成的 UUID
 print(instance.res_content.config)  # {}
 print(instance.res_content.data)    # {}
 ```
 
 ### 格式迁移建议
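Note that the two sides of this compare import `ResourceDictInstance` from different modules (`unilabos.resources.resource_tracker` versus `unilabos.ros.nodes.resource_tracker`). A hypothetical compatibility shim, not part of either side of the diff, could fall back between the two paths:

```python
# Hypothetical shim, assuming only the module path (not the class API) differs
# between the two layouts shown in this compare.
try:
    from unilabos.resources.resource_tracker import ResourceDictInstance
except ImportError:
    # path used on the v0.10.14 side of this compare
    from unilabos.ros.nodes.resource_tracker import ResourceDictInstance
```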
@@ -1,6 +1,6 @@
 package:
   name: ros-humble-unilabos-msgs
-  version: 0.10.15
+  version: 0.10.14
 source:
   path: ../../unilabos_msgs
   target_directory: src
@@ -1,6 +1,6 @@
 package:
   name: unilabos
-  version: "0.10.15"
+  version: "0.10.14"
 
 source:
   path: ../..
@@ -2,6 +2,7 @@ import json
 import logging
 import traceback
 import uuid
+import xml.etree.ElementTree as ET
 from typing import Any, Dict, List
 
 import networkx as nx
@@ -24,15 +25,7 @@ class SimpleGraph:
 
     def add_edge(self, source, target, **attrs):
         """添加边"""
-        # edge = {"source": source, "target": target, **attrs}
-        edge = {
-            "source": source, "target": target,
-            "source_node_uuid": source,
-            "target_node_uuid": target,
-            "source_handle_io": "source",
-            "target_handle_io": "target",
-            **attrs
-        }
+        edge = {"source": source, "target": target, **attrs}
         self.edges.append(edge)
 
     def to_dict(self):
@@ -49,7 +42,6 @@ class SimpleGraph:
             "multigraph": False,
             "graph": {},
             "nodes": nodes_list,
-            "edges": self.edges,
             "links": self.edges,
         }
 
@@ -66,8 +58,495 @@ def extract_json_from_markdown(text: str) -> str:
     return text
 
 
+def convert_to_type(val: str) -> Any:
+    """将字符串值转换为适当的数据类型"""
+    if val == "True":
+        return True
+    if val == "False":
+        return False
+    if val == "?":
+        return None
+    if val.endswith(" g"):
+        return float(val.split(" ")[0])
+    if val.endswith("mg"):
+        return float(val.split("mg")[0])
+    elif val.endswith("mmol"):
+        return float(val.split("mmol")[0]) / 1000
+    elif val.endswith("mol"):
+        return float(val.split("mol")[0])
+    elif val.endswith("ml"):
+        return float(val.split("ml")[0])
+    elif val.endswith("RPM"):
+        return float(val.split("RPM")[0])
+    elif val.endswith(" °C"):
+        return float(val.split(" ")[0])
+    elif val.endswith(" %"):
+        return float(val.split(" ")[0])
+    return val
+
+
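A quick illustration of how the `convert_to_type` helper added above parses attribute strings (a sketch, not part of the diff; note that gram/milligram values pass through unscaled while millimoles are converted to moles):

```python
# Expected values follow convert_to_type as added above; illustrative only.
assert convert_to_type("True") is True
assert convert_to_type("?") is None
assert convert_to_type("25 °C") == 25.0
assert convert_to_type("300RPM") == 300.0
assert convert_to_type("50mg") == 50.0          # milligrams are not rescaled
assert convert_to_type("2mmol") == 0.002        # millimoles are converted to mol
assert convert_to_type("acetone") == "acetone"  # unrecognised strings pass through
```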
+def refactor_data(data: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
+    """统一的数据重构函数,根据操作类型自动选择模板"""
+    refactored_data = []
+
+    # 定义操作映射,包含生物实验和有机化学的所有操作
+    OPERATION_MAPPING = {
+        # 生物实验操作
+        "transfer_liquid": "SynBioFactory-liquid_handler.prcxi-transfer_liquid",
+        "transfer": "SynBioFactory-liquid_handler.biomek-transfer",
+        "incubation": "SynBioFactory-liquid_handler.biomek-incubation",
+        "move_labware": "SynBioFactory-liquid_handler.biomek-move_labware",
+        "oscillation": "SynBioFactory-liquid_handler.biomek-oscillation",
+        # 有机化学操作
+        "HeatChillToTemp": "SynBioFactory-workstation-HeatChillProtocol",
+        "StopHeatChill": "SynBioFactory-workstation-HeatChillStopProtocol",
+        "StartHeatChill": "SynBioFactory-workstation-HeatChillStartProtocol",
+        "HeatChill": "SynBioFactory-workstation-HeatChillProtocol",
+        "Dissolve": "SynBioFactory-workstation-DissolveProtocol",
+        "Transfer": "SynBioFactory-workstation-TransferProtocol",
+        "Evaporate": "SynBioFactory-workstation-EvaporateProtocol",
+        "Recrystallize": "SynBioFactory-workstation-RecrystallizeProtocol",
+        "Filter": "SynBioFactory-workstation-FilterProtocol",
+        "Dry": "SynBioFactory-workstation-DryProtocol",
+        "Add": "SynBioFactory-workstation-AddProtocol",
+    }
+
+    UNSUPPORTED_OPERATIONS = ["Purge", "Wait", "Stir", "ResetHandling"]
+
+    for step in data:
+        operation = step.get("action")
+        if not operation or operation in UNSUPPORTED_OPERATIONS:
+            continue
+
+        # 处理重复操作
+        if operation == "Repeat":
+            times = step.get("times", step.get("parameters", {}).get("times", 1))
+            sub_steps = step.get("steps", step.get("parameters", {}).get("steps", []))
+            for i in range(int(times)):
+                sub_data = refactor_data(sub_steps)
+                refactored_data.extend(sub_data)
+            continue
+
+        # 获取模板名称
+        template = OPERATION_MAPPING.get(operation)
+        if not template:
+            # 自动推断模板类型
+            if operation.lower() in ["transfer", "incubation", "move_labware", "oscillation"]:
+                template = f"SynBioFactory-liquid_handler.biomek-{operation}"
+            else:
+                template = f"SynBioFactory-workstation-{operation}Protocol"
+
+        # 创建步骤数据
+        step_data = {
+            "template": template,
+            "description": step.get("description", step.get("purpose", f"{operation} operation")),
+            "lab_node_type": "Device",
+            "parameters": step.get("parameters", step.get("action_args", {})),
+        }
+        refactored_data.append(step_data)
+
+    return refactored_data
+
+
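For reference, a sketch of what `refactor_data` does to a single step; the input keys and values here are made up for the example:

```python
# Illustrative input/output for refactor_data as added above.
steps = [
    {"action": "Add", "description": "Add solvent", "action_args": {"vessel": "reactor_1", "reagent": "THF"}},
    {"action": "Wait", "parameters": {"time": 60}},  # in UNSUPPORTED_OPERATIONS -> dropped
]
print(refactor_data(steps))
# [{'template': 'SynBioFactory-workstation-AddProtocol',
#   'description': 'Add solvent',
#   'lab_node_type': 'Device',
#   'parameters': {'vessel': 'reactor_1', 'reagent': 'THF'}}]
```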
+def build_protocol_graph(
+    labware_info: List[Dict[str, Any]], protocol_steps: List[Dict[str, Any]], workstation_name: str
+) -> SimpleGraph:
+    """统一的协议图构建函数,根据设备类型自动选择构建逻辑"""
+    G = SimpleGraph()
+    resource_last_writer = {}
+    LAB_NAME = "SynBioFactory"
+
+    protocol_steps = refactor_data(protocol_steps)
+
+    # 检查协议步骤中的模板来判断协议类型
+    has_biomek_template = any(
+        ("biomek" in step.get("template", "")) or ("prcxi" in step.get("template", ""))
+        for step in protocol_steps
+    )
+
+    if has_biomek_template:
+        # 生物实验协议图构建
+        for labware_id, labware in labware_info.items():
+            node_id = str(uuid.uuid4())
+
+            labware_attrs = labware.copy()
+            labware_id = labware_attrs.pop("id", labware_attrs.get("name", f"labware_{uuid.uuid4()}"))
+            labware_attrs["description"] = labware_id
+            labware_attrs["lab_node_type"] = (
+                "Reagent" if "Plate" in str(labware_id) else "Labware" if "Rack" in str(labware_id) else "Sample"
+            )
+            labware_attrs["device_id"] = workstation_name
+
+            G.add_node(node_id, template=f"{LAB_NAME}-host_node-create_resource", **labware_attrs)
+            resource_last_writer[labware_id] = f"{node_id}:labware"
+
+        # 处理协议步骤
+        prev_node = None
+        for i, step in enumerate(protocol_steps):
+            node_id = str(uuid.uuid4())
+            G.add_node(node_id, **step)
+
+            # 添加控制流边
+            if prev_node is not None:
+                G.add_edge(prev_node, node_id, source_port="ready", target_port="ready")
+            prev_node = node_id
+
+            # 处理物料流
+            params = step.get("parameters", {})
+            if "sources" in params and params["sources"] in resource_last_writer:
+                source_node, source_port = resource_last_writer[params["sources"]].split(":")
+                G.add_edge(source_node, node_id, source_port=source_port, target_port="labware")
+
+            if "targets" in params:
+                resource_last_writer[params["targets"]] = f"{node_id}:labware"
+
+        # 添加协议结束节点
+        end_id = str(uuid.uuid4())
+        G.add_node(end_id, template=f"{LAB_NAME}-liquid_handler.biomek-run_protocol")
+        if prev_node is not None:
+            G.add_edge(prev_node, end_id, source_port="ready", target_port="ready")
+
+    else:
+        # 有机化学协议图构建
+        WORKSTATION_ID = workstation_name
+
+        # 为所有labware创建资源节点
+        for item_id, item in labware_info.items():
+            # item_id = item.get("id") or item.get("name", f"item_{uuid.uuid4()}")
+            node_id = str(uuid.uuid4())
+
+            # 判断节点类型
+            if item.get("type") == "hardware" or "reactor" in str(item_id).lower():
+                if "reactor" not in str(item_id).lower():
+                    continue
+                lab_node_type = "Sample"
+                description = f"Prepare Reactor: {item_id}"
+                liquid_type = []
+                liquid_volume = []
+            else:
+                lab_node_type = "Reagent"
+                description = f"Add Reagent to Flask: {item_id}"
+                liquid_type = [item_id]
+                liquid_volume = [1e5]
+
+            G.add_node(
+                node_id,
+                template=f"{LAB_NAME}-host_node-create_resource",
+                description=description,
+                lab_node_type=lab_node_type,
+                res_id=item_id,
+                device_id=WORKSTATION_ID,
+                class_name="container",
+                parent=WORKSTATION_ID,
+                bind_locations={"x": 0.0, "y": 0.0, "z": 0.0},
+                liquid_input_slot=[-1],
+                liquid_type=liquid_type,
+                liquid_volume=liquid_volume,
+                slot_on_deck="",
+                role=item.get("role", ""),
+            )
+            resource_last_writer[item_id] = f"{node_id}:labware"
+
+        last_control_node_id = None
+
+        # 处理协议步骤
+        for step in protocol_steps:
+            node_id = str(uuid.uuid4())
+            G.add_node(node_id, **step)
+
+            # 控制流
+            if last_control_node_id is not None:
+                G.add_edge(last_control_node_id, node_id, source_port="ready", target_port="ready")
+            last_control_node_id = node_id
+
+            # 物料流
+            params = step.get("parameters", {})
+            input_resources = {
+                "Vessel": params.get("vessel"),
+                "ToVessel": params.get("to_vessel"),
+                "FromVessel": params.get("from_vessel"),
+                "reagent": params.get("reagent"),
+                "solvent": params.get("solvent"),
+                "compound": params.get("compound"),
+                "sources": params.get("sources"),
+                "targets": params.get("targets"),
+            }
+
+            for target_port, resource_name in input_resources.items():
+                if resource_name and resource_name in resource_last_writer:
+                    source_node, source_port = resource_last_writer[resource_name].split(":")
+                    G.add_edge(source_node, node_id, source_port=source_port, target_port=target_port)
+
+            output_resources = {
+                "VesselOut": params.get("vessel"),
+                "FromVesselOut": params.get("from_vessel"),
+                "ToVesselOut": params.get("to_vessel"),
+                "FiltrateOut": params.get("filtrate_vessel"),
+                "reagent": params.get("reagent"),
+                "solvent": params.get("solvent"),
+                "compound": params.get("compound"),
+                "sources_out": params.get("sources"),
+                "targets_out": params.get("targets"),
+            }
+
+            for source_port, resource_name in output_resources.items():
+                if resource_name:
+                    resource_last_writer[resource_name] = f"{node_id}:{source_port}"
+
+    return G
+
+
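A minimal end-to-end sketch of the graph builder added above (names are illustrative; note that although the signature annotates `labware_info` as a list, both branches iterate it with `.items()`, so a dict keyed by labware/reagent id is what actually works):

```python
# Minimal sketch, assuming the functions added in this hunk are importable as-is.
labware = {
    "reactor_1": {"id": "reactor_1", "type": "hardware"},
    "THF": {"name": "THF", "type": "reagent", "role": "solvent"},
}
steps = [{"action": "Add", "parameters": {"vessel": "reactor_1", "reagent": "THF"}}]

graph = build_protocol_graph(labware, steps, workstation_name="workstation")
d = graph.to_dict()
print(len(d["nodes"]), "nodes,", len(d["links"]), "links")
```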
+def draw_protocol_graph(protocol_graph: SimpleGraph, output_path: str):
+    """
+    (辅助功能) 使用 networkx 和 matplotlib 绘制协议工作流图,用于可视化。
+    """
+    if not protocol_graph:
+        print("Cannot draw graph: Graph object is empty.")
+        return
+
+    G = nx.DiGraph()
+
+    for node_id, attrs in protocol_graph.nodes.items():
+        label = attrs.get("description", attrs.get("template", node_id[:8]))
+        G.add_node(node_id, label=label, **attrs)
+
+    for edge in protocol_graph.edges:
+        G.add_edge(edge["source"], edge["target"])
+
+    plt.figure(figsize=(20, 15))
+    try:
+        pos = nx.nx_agraph.graphviz_layout(G, prog="dot")
+    except Exception:
+        pos = nx.shell_layout(G)  # Fallback layout
+
+    node_labels = {node: data["label"] for node, data in G.nodes(data=True)}
+    nx.draw(
+        G,
+        pos,
+        with_labels=False,
+        node_size=2500,
+        node_color="skyblue",
+        node_shape="o",
+        edge_color="gray",
+        width=1.5,
+        arrowsize=15,
+    )
+    nx.draw_networkx_labels(G, pos, labels=node_labels, font_size=8, font_weight="bold")
+
+    plt.title("Chemical Protocol Workflow Graph", size=15)
+    plt.savefig(output_path, dpi=300, bbox_inches="tight")
+    plt.close()
+    print(f" - Visualization saved to '{output_path}'")
+
+
+from networkx.drawing.nx_agraph import to_agraph
+import re
+
+COMPASS = {"n","e","s","w","ne","nw","se","sw","c"}
+
+def _is_compass(port: str) -> bool:
+    return isinstance(port, str) and port.lower() in COMPASS
+
+def draw_protocol_graph_with_ports(protocol_graph, output_path: str, rankdir: str = "LR"):
+    """
+    使用 Graphviz 端口语法绘制协议工作流图。
+    - 若边上的 source_port/target_port 是 compass(n/e/s/w/...),直接用 compass。
+    - 否则自动为节点创建 record 形状并定义命名端口 <portname>。
+    最终由 PyGraphviz 渲染并输出到 output_path(后缀决定格式,如 .png/.svg/.pdf)。
+    """
+    if not protocol_graph:
+        print("Cannot draw graph: Graph object is empty.")
+        return
+
+    # 1) 先用 networkx 搭建有向图,保留端口属性
+    G = nx.DiGraph()
+    for node_id, attrs in protocol_graph.nodes.items():
+        label = attrs.get("description", attrs.get("template", node_id[:8]))
+        # 保留一个干净的“中心标签”,用于放在 record 的中间槽
+        G.add_node(node_id, _core_label=str(label), **{k:v for k,v in attrs.items() if k not in ("label",)})
+
+    edges_data = []
+    in_ports_by_node = {}  # 收集命名输入端口
+    out_ports_by_node = {}  # 收集命名输出端口
+
+    for edge in protocol_graph.edges:
+        u = edge["source"]
+        v = edge["target"]
+        sp = edge.get("source_port")
+        tp = edge.get("target_port")
+
+        # 记录到图里(保留原始端口信息)
+        G.add_edge(u, v, source_port=sp, target_port=tp)
+        edges_data.append((u, v, sp, tp))
+
+        # 如果不是 compass,就按“命名端口”先归类,等会儿给节点造 record
+        if sp and not _is_compass(sp):
+            out_ports_by_node.setdefault(u, set()).add(str(sp))
+        if tp and not _is_compass(tp):
+            in_ports_by_node.setdefault(v, set()).add(str(tp))
+
+    # 2) 转为 AGraph,使用 Graphviz 渲染
+    A = to_agraph(G)
+    A.graph_attr.update(rankdir=rankdir, splines="true", concentrate="false", fontsize="10")
+    A.node_attr.update(shape="box", style="rounded,filled", fillcolor="lightyellow", color="#999999", fontname="Helvetica")
+    A.edge_attr.update(arrowsize="0.8", color="#666666")
+
+    # 3) 为需要命名端口的节点设置 record 形状与 label
+    # 左列 = 输入端口;中间 = 核心标签;右列 = 输出端口
+    for n in A.nodes():
+        node = A.get_node(n)
+        core = G.nodes[n].get("_core_label", n)
+
+        in_ports = sorted(in_ports_by_node.get(n, []))
+        out_ports = sorted(out_ports_by_node.get(n, []))
+
+        # 如果该节点涉及命名端口,则用 record;否则保留原 box
+        if in_ports or out_ports:
+            def port_fields(ports):
+                if not ports:
+                    return " "  # 必须留一个空槽占位
+                # 每个端口一个小格子,<p> name
+                return "|".join(f"<{re.sub(r'[^A-Za-z0-9_:.|-]', '_', p)}> {p}" for p in ports)
+
+            left = port_fields(in_ports)
+            right = port_fields(out_ports)
+
+            # 三栏:左(入) | 中(节点名) | 右(出)
+            record_label = f"{{ {left} | {core} | {right} }}"
+            node.attr.update(shape="record", label=record_label)
+        else:
+            # 没有命名端口:普通盒子,显示核心标签
+            node.attr.update(label=str(core))
+
+    # 4) 给边设置 headport / tailport
+    # - 若端口为 compass:直接用 compass(e.g., headport="e")
+    # - 若端口为命名端口:使用在 record 中定义的 <port> 名(同名即可)
+    for (u, v, sp, tp) in edges_data:
+        e = A.get_edge(u, v)
+
+        # Graphviz 属性:tail 是源,head 是目标
+        if sp:
+            if _is_compass(sp):
+                e.attr["tailport"] = sp.lower()
+            else:
+                # 与 record label 中 <port> 名一致;特殊字符已在 label 中做了清洗
+                e.attr["tailport"] = re.sub(r'[^A-Za-z0-9_:.|-]', '_', str(sp))
+
+        if tp:
+            if _is_compass(tp):
+                e.attr["headport"] = tp.lower()
+            else:
+                e.attr["headport"] = re.sub(r'[^A-Za-z0-9_:.|-]', '_', str(tp))
+
+        # 可选:若想让边更贴边缘,可设置 constraint/spline 等
+        # e.attr["arrowhead"] = "vee"
+
+    # 5) 输出
+    A.draw(output_path, prog="dot")
+    print(f" - Port-aware workflow rendered to '{output_path}'")
+
+
+def flatten_xdl_procedure(procedure_elem: ET.Element) -> List[ET.Element]:
+    """展平嵌套的XDL程序结构"""
+    flattened_operations = []
+    TEMP_UNSUPPORTED_PROTOCOL = ["Purge", "Wait", "Stir", "ResetHandling"]
+
+    def extract_operations(element: ET.Element):
+        if element.tag not in ["Prep", "Reaction", "Workup", "Purification", "Procedure"]:
+            if element.tag not in TEMP_UNSUPPORTED_PROTOCOL:
+                flattened_operations.append(element)
+
+        for child in element:
+            extract_operations(child)
+
+    for child in procedure_elem:
+        extract_operations(child)
+
+    return flattened_operations
+
+
+def parse_xdl_content(xdl_content: str) -> tuple:
+    """解析XDL内容"""
+    try:
+        xdl_content_cleaned = "".join(c for c in xdl_content if c.isprintable())
+        root = ET.fromstring(xdl_content_cleaned)
+
+        synthesis_elem = root.find("Synthesis")
+        if synthesis_elem is None:
+            return None, None, None
+
+        # 解析硬件组件
+        hardware_elem = synthesis_elem.find("Hardware")
+        hardware = []
+        if hardware_elem is not None:
+            hardware = [{"id": c.get("id"), "type": c.get("type")} for c in hardware_elem.findall("Component")]
+
+        # 解析试剂
+        reagents_elem = synthesis_elem.find("Reagents")
+        reagents = []
+        if reagents_elem is not None:
+            reagents = [{"name": r.get("name"), "role": r.get("role", "")} for r in reagents_elem.findall("Reagent")]
+
+        # 解析程序
+        procedure_elem = synthesis_elem.find("Procedure")
+        if procedure_elem is None:
+            return None, None, None
+
+        flattened_operations = flatten_xdl_procedure(procedure_elem)
+        return hardware, reagents, flattened_operations
+
+    except ET.ParseError as e:
+        raise ValueError(f"Invalid XDL format: {e}")
+
+
+def convert_xdl_to_dict(xdl_content: str) -> Dict[str, Any]:
+    """
+    将XDL XML格式转换为标准的字典格式
+
+    Args:
+        xdl_content: XDL XML内容
+
+    Returns:
+        转换结果,包含步骤和器材信息
+    """
+    try:
+        hardware, reagents, flattened_operations = parse_xdl_content(xdl_content)
+        if hardware is None:
+            return {"error": "Failed to parse XDL content", "success": False}
+
+        # 将XDL元素转换为字典格式
+        steps_data = []
+        for elem in flattened_operations:
+            # 转换参数类型
+            parameters = {}
+            for key, val in elem.attrib.items():
+                converted_val = convert_to_type(val)
+                if converted_val is not None:
+                    parameters[key] = converted_val
+
+            step_dict = {
+                "operation": elem.tag,
+                "parameters": parameters,
+                "description": elem.get("purpose", f"Operation: {elem.tag}"),
+            }
+            steps_data.append(step_dict)
+
+        # 合并硬件和试剂为统一的labware_info格式
+        labware_data = []
+        labware_data.extend({"id": hw["id"], "type": "hardware", **hw} for hw in hardware)
+        labware_data.extend({"name": reagent["name"], "type": "reagent", **reagent} for reagent in reagents)
+
+        return {
+            "success": True,
+            "steps": steps_data,
+            "labware": labware_data,
+            "message": f"Successfully converted XDL to dict format. Found {len(steps_data)} steps and {len(labware_data)} labware items.",
+        }
+
+    except Exception as e:
+        error_msg = f"XDL conversion failed: {str(e)}"
+        logger.error(error_msg)
+        return {"error": error_msg, "success": False}
+
+
 def create_workflow(
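To see the XDL path end to end, here is a sketch of `convert_xdl_to_dict` on a tiny hand-written XDL document; the element and attribute values below are chosen for illustration, and only the Synthesis/Hardware/Reagents/Procedure structure required by the parser above is assumed:

```python
# Illustrative only; values follow the parsing and type-conversion logic added above.
xdl = """
<XDL>
  <Synthesis>
    <Hardware>
      <Component id="reactor_1" type="reactor"/>
    </Hardware>
    <Reagents>
      <Reagent name="THF" role="solvent"/>
    </Reagents>
    <Procedure>
      <Add vessel="reactor_1" reagent="THF" volume="20 ml" purpose="Add solvent"/>
    </Procedure>
  </Synthesis>
</XDL>
"""
result = convert_xdl_to_dict(xdl)
# result["steps"][0] ->
# {"operation": "Add",
#  "parameters": {"vessel": "reactor_1", "reagent": "THF", "volume": 20.0, "purpose": "Add solvent"},
#  "description": "Add solvent"}
```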
setup.py (2 changes)
@@ -4,7 +4,7 @@ package_name = 'unilabos'
 
 setup(
     name=package_name,
-    version='0.10.15',
+    version='0.10.14',
     packages=find_packages(),
     include_package_data=True,
     install_requires=['setuptools'],
@@ -2,8 +2,9 @@ import pytest
 import json
 import os
 
+from pylabrobot.resources import Resource as ResourcePLR
 from unilabos.resources.graphio import resource_bioyond_to_plr
-from unilabos.resources.resource_tracker import ResourceTreeSet
+from unilabos.ros.nodes.resource_tracker import ResourceTreeSet
 from unilabos.registry.registry import lab_registry
 
 from unilabos.resources.bioyond.decks import BIOYOND_PolymerReactionStation_Deck
@@ -1 +1 @@
-__version__ = "0.10.15"
+__version__ = "0.10.14"
@@ -1,6 +1,6 @@
 import threading
 
-from unilabos.resources.resource_tracker import ResourceTreeSet
+from unilabos.ros.nodes.resource_tracker import ResourceTreeSet
 from unilabos.utils import logger
 
 
@@ -19,12 +19,6 @@ if unilabos_dir not in sys.path:
 
 from unilabos.utils.banner_print import print_status, print_unilab_banner
 from unilabos.config.config import load_config, BasicConfig, HTTPConfig
-from unilabos.app.utils import cleanup_for_restart
-
-# Global restart flags (used by ws_client and web/server)
-_restart_requested: bool = False
-_restart_reason: str = ""
 
 
 def load_config_from_file(config_path):
     if config_path is None:
@@ -47,7 +41,7 @@ def convert_argv_dashes_to_underscores(args: argparse.ArgumentParser):
     for i, arg in enumerate(sys.argv):
         for option_string in option_strings:
             if arg.startswith(option_string):
-                new_arg = arg[:2] + arg[2 : len(option_string)].replace("-", "_") + arg[len(option_string) :]
+                new_arg = arg[:2] + arg[2:len(option_string)].replace("-", "_") + arg[len(option_string):]
                 sys.argv[i] = new_arg
                 break
 
@@ -55,8 +49,6 @@ def convert_argv_dashes_to_underscores(args: argparse.ArgumentParser):
 def parse_args():
     """解析命令行参数"""
     parser = argparse.ArgumentParser(description="Start Uni-Lab Edge server.")
-    subparsers = parser.add_subparsers(title="Valid subcommands", dest="command")
-
     parser.add_argument("-g", "--graph", help="Physical setup graph file path.")
     parser.add_argument("-c", "--controllers", default=None, help="Controllers config file path.")
     parser.add_argument(
@@ -161,44 +153,6 @@ def parse_args():
         default=False,
         help="Complete registry information",
     )
-    parser.add_argument(
-        "--no_update_feedback",
-        action="store_true",
-        help="Disable sending update feedback to server",
-    )
-    # workflow upload subcommand
-    workflow_parser = subparsers.add_parser(
-        "workflow_upload",
-        aliases=["wf"],
-        help="Upload workflow from xdl/json/python files",
-    )
-    workflow_parser.add_argument(
-        "-f",
-        "--workflow_file",
-        type=str,
-        required=True,
-        help="Path to the workflow file (JSON format)",
-    )
-    workflow_parser.add_argument(
-        "-n",
-        "--workflow_name",
-        type=str,
-        default=None,
-        help="Workflow name, if not provided will use the name from file or filename",
-    )
-    workflow_parser.add_argument(
-        "--tags",
-        type=str,
-        nargs="*",
-        default=[],
-        help="Tags for the workflow (space-separated)",
-    )
-    workflow_parser.add_argument(
-        "--published",
-        action="store_true",
-        default=False,
-        help="Whether to publish the workflow (default: False)",
-    )
     return parser
 
 
@@ -214,6 +168,7 @@ def main():
     if not args_dict.get("skip_env_check", False):
         from unilabos.utils.environment_check import check_environment
 
+        print_status("正在进行环境依赖检查...", "info")
         if not check_environment(auto_install=True):
             print_status("环境检查失败,程序退出", "error")
             os._exit(1)
@@ -286,12 +241,9 @@ def main():
     if args_dict.get("sk", ""):
         BasicConfig.sk = args_dict.get("sk", "")
         print_status("传入了sk参数,优先采用传入参数!", "info")
-    BasicConfig.working_dir = working_dir
 
-    workflow_upload = args_dict.get("command") in ("workflow_upload", "wf")
-
     # 使用远程资源启动
-    if not workflow_upload and args_dict["use_remote_resource"]:
+    if args_dict["use_remote_resource"]:
         print_status("使用远程资源启动", "info")
         from unilabos.app.web import http_client
 
@@ -304,10 +256,10 @@ def main():
 
     BasicConfig.port = args_dict["port"] if args_dict["port"] else BasicConfig.port
     BasicConfig.disable_browser = args_dict["disable_browser"] or BasicConfig.disable_browser
+    BasicConfig.working_dir = working_dir
     BasicConfig.is_host_mode = not args_dict.get("is_slave", False)
     BasicConfig.slave_no_host = args_dict.get("slave_no_host", False)
     BasicConfig.upload_registry = args_dict.get("upload_registry", False)
-    BasicConfig.no_update_feedback = args_dict.get("no_update_feedback", False)
     BasicConfig.communication_protocol = "websocket"
     machine_name = os.popen("hostname").read().strip()
     machine_name = "".join([c if c.isalnum() or c == "_" else "_" for c in machine_name])
@@ -326,38 +278,16 @@ def main():
     from unilabos.app.web import start_server
     from unilabos.app.register import register_devices_and_resources
     from unilabos.resources.graphio import modify_to_backend_format
-    from unilabos.resources.resource_tracker import ResourceTreeSet, ResourceDict
+    from unilabos.ros.nodes.resource_tracker import ResourceTreeSet, ResourceDict
 
     # 显示启动横幅
     print_unilab_banner(args_dict)
 
     # 注册表
     lab_registry = build_registry(
-        args_dict["registry_path"], args_dict.get("complete_registry", False), BasicConfig.upload_registry
+        args_dict["registry_path"], args_dict.get("complete_registry", False), args_dict["upload_registry"]
     )
 
-    if BasicConfig.upload_registry:
-        # 设备注册到服务端 - 需要 ak 和 sk
-        if BasicConfig.ak and BasicConfig.sk:
-            print_status("开始注册设备到服务端...", "info")
-            try:
-                register_devices_and_resources(lab_registry)
-                print_status("设备注册完成", "info")
-            except Exception as e:
-                print_status(f"设备注册失败: {e}", "error")
-        else:
-            print_status("未提供 ak 和 sk,跳过设备注册", "info")
-    else:
-        print_status("本次启动注册表不报送云端,如果您需要联网调试,请在启动命令增加--upload_registry", "warning")
-
-    # 处理 workflow_upload 子命令
-    if workflow_upload:
-        from unilabos.workflow.wf_utils import handle_workflow_upload_command
-
-        handle_workflow_upload_command(args_dict)
-        print_status("工作流上传完成,程序退出", "info")
-        os._exit(0)
-
     if not BasicConfig.ak or not BasicConfig.sk:
         print_status("后续运行必须拥有一个实验室,请前往 https://uni-lab.bohrium.com 注册实验室!", "warning")
         os._exit(1)
@@ -438,6 +368,20 @@ def main():
     args_dict["devices_config"] = resource_tree_set
     args_dict["graph"] = graph_res.physical_setup_graph
 
+    if BasicConfig.upload_registry:
+        # 设备注册到服务端 - 需要 ak 和 sk
+        if BasicConfig.ak and BasicConfig.sk:
+            print_status("开始注册设备到服务端...", "info")
+            try:
+                register_devices_and_resources(lab_registry)
+                print_status("设备注册完成", "info")
+            except Exception as e:
+                print_status(f"设备注册失败: {e}", "error")
+        else:
+            print_status("未提供 ak 和 sk,跳过设备注册", "info")
+    else:
+        print_status("本次启动注册表不报送云端,如果您需要联网调试,请在启动命令增加--upload_registry", "warning")
+
     if args_dict["controllers"] is not None:
         args_dict["controllers_config"] = yaml.safe_load(open(args_dict["controllers"], encoding="utf-8"))
     else:
@@ -452,7 +396,6 @@ def main():
     comm_client = get_communication_client()
     if "websocket" in args_dict["app_bridges"]:
         args_dict["bridges"].append(comm_client)
 
     def _exit(signum, frame):
         comm_client.stop()
         sys.exit(0)
@@ -494,13 +437,16 @@ def main():
                 resource_visualization.start()
             except OSError as e:
                 if "AMENT_PREFIX_PATH" in str(e):
-                    print_status(f"ROS 2环境未正确设置,跳过3D可视化启动。错误详情: {e}", "warning")
+                    print_status(
+                        f"ROS 2环境未正确设置,跳过3D可视化启动。错误详情: {e}",
+                        "warning"
+                    )
                     print_status(
                         "建议解决方案:\n"
                         "1. 激活Conda环境: conda activate unilab\n"
                         "2. 或使用 --backend simple 参数\n"
                         "3. 或使用 --visual disable 参数禁用可视化",
-                        "info",
+                        "info"
                     )
                 else:
                     raise
@@ -508,19 +454,13 @@ def main():
             time.sleep(1)
     else:
         start_backend(**args_dict)
-        restart_requested = start_server(
+        start_server(
             open_browser=not args_dict["disable_browser"],
             port=BasicConfig.port,
         )
-        if restart_requested:
-            print_status("[Main] Restart requested, cleaning up...", "info")
-            cleanup_for_restart()
-            return
     else:
         start_backend(**args_dict)
-        # 启动服务器(默认支持WebSocket触发重启)
-        restart_requested = start_server(
+        start_server(
            open_browser=not args_dict["disable_browser"],
            port=BasicConfig.port,
        )
@@ -1,144 +0,0 @@
-"""
-UniLabOS 应用工具函数
-
-提供清理、重启等工具函数
-"""
-
-import gc
-import os
-import threading
-import time
-
-from unilabos.utils.banner_print import print_status
-
-
-def cleanup_for_restart() -> bool:
-    """
-    Clean up all resources for restart without exiting the process.
-
-    This function prepares the system for re-initialization by:
-    1. Stopping all communication clients
-    2. Destroying ROS nodes
-    3. Resetting singletons
-    4. Waiting for threads to finish
-
-    Returns:
-        bool: True if cleanup was successful, False otherwise
-    """
-    print_status("[Restart] Starting cleanup for restart...", "info")
-
-    # Step 1: Stop WebSocket communication client
-    print_status("[Restart] Step 1: Stopping WebSocket client...", "info")
-    try:
-        from unilabos.app.communication import get_communication_client
-
-        comm_client = get_communication_client()
-        if comm_client is not None:
-            comm_client.stop()
-            print_status("[Restart] WebSocket client stopped", "info")
-    except Exception as e:
-        print_status(f"[Restart] Error stopping WebSocket: {e}", "warning")
-
-    # Step 2: Get HostNode and cleanup ROS
-    print_status("[Restart] Step 2: Cleaning up ROS nodes...", "info")
-    try:
-        from unilabos.ros.nodes.presets.host_node import HostNode
-        import rclpy
-        from rclpy.timer import Timer
-
-        host_instance = HostNode.get_instance(timeout=5)
-        if host_instance is not None:
-            print_status(f"[Restart] Found HostNode: {host_instance.device_id}", "info")
-
-            # Gracefully shutdown background threads
-            print_status("[Restart] Shutting down background threads...", "info")
-            HostNode.shutdown_background_threads(timeout=5.0)
-            print_status("[Restart] Background threads shutdown complete", "info")
-
-            # Stop discovery timer
-            if hasattr(host_instance, "_discovery_timer") and isinstance(host_instance._discovery_timer, Timer):
-                host_instance._discovery_timer.cancel()
-                print_status("[Restart] Discovery timer cancelled", "info")
-
-            # Destroy device nodes
-            device_count = len(host_instance.devices_instances)
-            print_status(f"[Restart] Destroying {device_count} device instances...", "info")
-            for device_id, device_node in list(host_instance.devices_instances.items()):
-                try:
-                    if hasattr(device_node, "ros_node_instance") and device_node.ros_node_instance is not None:
-                        device_node.ros_node_instance.destroy_node()
-                        print_status(f"[Restart] Device {device_id} destroyed", "info")
-                except Exception as e:
-                    print_status(f"[Restart] Error destroying device {device_id}: {e}", "warning")
-
-            # Clear devices instances
-            host_instance.devices_instances.clear()
-            host_instance.devices_names.clear()
-
-            # Destroy host node
-            try:
-                host_instance.destroy_node()
-                print_status("[Restart] HostNode destroyed", "info")
-            except Exception as e:
-                print_status(f"[Restart] Error destroying HostNode: {e}", "warning")
-
-            # Reset HostNode state
-            HostNode.reset_state()
-            print_status("[Restart] HostNode state reset", "info")
-
-        # Shutdown executor first (to stop executor.spin() gracefully)
-        if hasattr(rclpy, "__executor") and rclpy.__executor is not None:
-            try:
-                rclpy.__executor.shutdown()
-                rclpy.__executor = None  # Clear for restart
-                print_status("[Restart] ROS executor shutdown complete", "info")
-            except Exception as e:
-                print_status(f"[Restart] Error shutting down executor: {e}", "warning")
-
-        # Shutdown rclpy
-        if rclpy.ok():
-            rclpy.shutdown()
-            print_status("[Restart] rclpy shutdown complete", "info")
-
-    except ImportError as e:
-        print_status(f"[Restart] ROS modules not available: {e}", "warning")
-    except Exception as e:
-        print_status(f"[Restart] Error in ROS cleanup: {e}", "warning")
-        return False
-
-    # Step 3: Reset communication client singleton
-    print_status("[Restart] Step 3: Resetting singletons...", "info")
-    try:
-        from unilabos.app import communication
-
-        if hasattr(communication, "_communication_client"):
-            communication._communication_client = None
-            print_status("[Restart] Communication client singleton reset", "info")
-    except Exception as e:
-        print_status(f"[Restart] Error resetting communication singleton: {e}", "warning")
-
-    # Step 4: Wait for threads to finish
-    print_status("[Restart] Step 4: Waiting for threads to finish...", "info")
-    time.sleep(3)  # Give threads time to finish
-
-    # Check remaining threads
-    remaining_threads = []
-    for t in threading.enumerate():
-        if t.name != "MainThread" and t.is_alive():
-            remaining_threads.append(t.name)
-
-    if remaining_threads:
-        print_status(
-            f"[Restart] Warning: {len(remaining_threads)} threads still running: {remaining_threads}", "warning"
-        )
-    else:
-        print_status("[Restart] All threads stopped", "info")
-
-    # Step 5: Force garbage collection
-    print_status("[Restart] Step 5: Running garbage collection...", "info")
-    gc.collect()
-    gc.collect()  # Run twice for weak references
-    print_status("[Restart] Garbage collection complete", "info")
-
-    print_status("[Restart] Cleanup complete. Ready for re-initialization.", "info")
-    return True
@@ -6,10 +6,12 @@ HTTP客户端模块
 
 import json
 import os
+import time
+from threading import Thread
 from typing import List, Dict, Any, Optional
 
 import requests
-from unilabos.resources.resource_tracker import ResourceTreeSet
+from unilabos.ros.nodes.resource_tracker import ResourceTreeSet
 from unilabos.utils.log import info
 from unilabos.config.config import HTTPConfig, BasicConfig
 from unilabos.utils import logger
@@ -74,8 +76,7 @@ class HTTPClient:
             Dict[str, str]: 旧UUID到新UUID的映射关系 {old_uuid: new_uuid}
         """
         with open(os.path.join(BasicConfig.working_dir, "req_resource_tree_add.json"), "w", encoding="utf-8") as f:
-            payload = {"nodes": [x for xs in resources.dump() for x in xs], "mount_uuid": mount_uuid}
-            f.write(json.dumps(payload, indent=4))
+            f.write(json.dumps({"nodes": [x for xs in resources.dump() for x in xs], "mount_uuid": mount_uuid}, indent=4))
         # 从序列化数据中提取所有节点的UUID(保存旧UUID)
         old_uuids = {n.res_content.uuid: n for n in resources.all_nodes}
         if not self.initialized or first_add:
@@ -334,67 +335,6 @@ class HTTPClient:
             logger.error(f"响应内容: {response.text}")
         return None
 
-    def workflow_import(
-        self,
-        name: str,
-        workflow_uuid: str,
-        workflow_name: str,
-        nodes: List[Dict[str, Any]],
-        edges: List[Dict[str, Any]],
-        tags: Optional[List[str]] = None,
-        published: bool = False,
-    ) -> Dict[str, Any]:
-        """
-        导入工作流到服务器
-
-        Args:
-            name: 工作流名称(顶层)
-            workflow_uuid: 工作流UUID
-            workflow_name: 工作流名称(data内部)
-            nodes: 工作流节点列表
-            edges: 工作流边列表
-            tags: 工作流标签列表,默认为空列表
-            published: 是否发布工作流,默认为False
-
-        Returns:
-            Dict: API响应数据,包含 code 和 data (uuid, name)
-        """
-        # target_lab_uuid 暂时使用默认值,后续由后端根据 ak/sk 获取
-        payload = {
-            "target_lab_uuid": "28c38bb0-63f6-4352-b0d8-b5b8eb1766d5",
-            "name": name,
-            "data": {
-                "workflow_uuid": workflow_uuid,
-                "workflow_name": workflow_name,
-                "nodes": nodes,
-                "edges": edges,
-                "tags": tags if tags is not None else [],
-                "published": published,
-            },
-        }
-        # 保存请求到文件
-        with open(os.path.join(BasicConfig.working_dir, "req_workflow_upload.json"), "w", encoding="utf-8") as f:
-            f.write(json.dumps(payload, indent=4, ensure_ascii=False))
-
-        response = requests.post(
-            f"{self.remote_addr}/lab/workflow/owner/import",
-            json=payload,
-            headers={"Authorization": f"Lab {self.auth}"},
-            timeout=60,
-        )
-        # 保存响应到文件
-        with open(os.path.join(BasicConfig.working_dir, "res_workflow_upload.json"), "w", encoding="utf-8") as f:
-            f.write(f"{response.status_code}" + "\n" + response.text)
-
-        if response.status_code == 200:
-            res = response.json()
-            if "code" in res and res["code"] != 0:
-                logger.error(f"导入工作流失败: {response.text}")
-            return res
-        else:
-            logger.error(f"导入工作流失败: {response.status_code}, {response.text}")
-            return {"code": response.status_code, "message": response.text}
-
-
 # 创建默认客户端实例
 http_client = HTTPClient()
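For context, a sketch of how the `workflow_import` method (present only on the base side of this compare) was invoked; the payloads below are placeholders, and the `target_lab_uuid` default is handled inside the method itself:

```python
# Illustrative call against the removed method, using the module-level http_client instance.
import uuid

res = http_client.workflow_import(
    name="demo workflow",
    workflow_uuid=str(uuid.uuid4()),
    workflow_name="demo workflow",
    nodes=[],   # placeholder node list
    edges=[],   # placeholder edge list
    tags=["demo"],
    published=False,
)
if res.get("code") == 0:
    print("uploaded:", res.get("data"))
```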
@@ -6,6 +6,7 @@ Web服务器模块
 
 import webbrowser
 
+import uvicorn
 from fastapi import FastAPI, Request
 from fastapi.middleware.cors import CORSMiddleware
 from starlette.responses import Response
@@ -95,7 +96,7 @@ def setup_server() -> FastAPI:
     return app
 
 
-def start_server(host: str = "0.0.0.0", port: int = 8002, open_browser: bool = True) -> bool:
+def start_server(host: str = "0.0.0.0", port: int = 8002, open_browser: bool = True) -> None:
     """
     启动服务器
 
@@ -103,14 +104,7 @@ def start_server(host: str = "0.0.0.0", port: int = 8002, open_browser: bool = True)
         host: 服务器主机
         port: 服务器端口
         open_browser: 是否自动打开浏览器
-
-    Returns:
-        bool: True if restart was requested, False otherwise
     """
-    import threading
-    import time
-    from uvicorn import Config, Server
 
     # 设置服务器
     setup_server()
 
@@ -129,37 +123,7 @@ def start_server(host: str = "0.0.0.0", port: int = 8002, open_browser: bool = T
|
|||||||
|
|
||||||
# 启动服务器
|
# 启动服务器
|
||||||
info(f"[Web] 启动FastAPI服务器: {host}:{port}")
|
info(f"[Web] 启动FastAPI服务器: {host}:{port}")
|
||||||
|
uvicorn.run(app, host=host, port=port, log_config=log_config)
|
||||||
# 使用支持重启的模式
|
|
||||||
config = Config(app=app, host=host, port=port, log_config=log_config)
|
|
||||||
server = Server(config)
|
|
||||||
|
|
||||||
# 启动服务器线程
|
|
||||||
server_thread = threading.Thread(target=server.run, daemon=True, name="uvicorn_server")
|
|
||||||
server_thread.start()
|
|
||||||
|
|
||||||
info("[Web] Server started, monitoring for restart requests...")
|
|
||||||
|
|
||||||
# 监控重启标志
|
|
||||||
import unilabos.app.main as main_module
|
|
||||||
|
|
||||||
while server_thread.is_alive():
|
|
||||||
if hasattr(main_module, "_restart_requested") and main_module._restart_requested:
|
|
||||||
info(
|
|
||||||
f"[Web] Restart requested via WebSocket, reason: {getattr(main_module, '_restart_reason', 'unknown')}"
|
|
||||||
)
|
|
||||||
main_module._restart_requested = False
|
|
||||||
|
|
||||||
# 停止服务器
|
|
||||||
server.should_exit = True
|
|
||||||
server_thread.join(timeout=5)
|
|
||||||
|
|
||||||
info("[Web] Server stopped, ready for restart")
|
|
||||||
return True
|
|
||||||
|
|
||||||
time.sleep(1)
|
|
||||||
|
|
||||||
return False
|
|
||||||
|
|
||||||
|
|
||||||
# 当脚本直接运行时启动服务器
|
# 当脚本直接运行时启动服务器
|
||||||
|
|||||||
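The removed branch above keeps uvicorn running in a worker thread so the process can restart itself. A minimal standalone sketch of that stop-via-`should_exit` pattern (host, port and the fixed sleep are illustrative stand-ins for the real restart-flag polling, not project code):

```python
# Sketch only: run uvicorn.Server in a thread and stop it by flipping should_exit.
import threading
import time

from fastapi import FastAPI
from uvicorn import Config, Server

app = FastAPI()
server = Server(Config(app=app, host="127.0.0.1", port=8002, log_level="warning"))

thread = threading.Thread(target=server.run, daemon=True, name="uvicorn_server")
thread.start()

time.sleep(3)              # stand-in for "wait until a restart is requested"
server.should_exit = True  # uvicorn polls this flag and shuts down gracefully
thread.join(timeout=5)
```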
@@ -359,7 +359,7 @@ class MessageProcessor:
         self.device_manager = device_manager
         self.queue_processor = None  # 延迟设置
         self.websocket_client = None  # 延迟设置
-        self.session_id = str(uuid.uuid4())[:6]  # 产生一个随机的session_id
+        self.session_id = ""
 
         # WebSocket连接
         self.websocket = None
@@ -438,7 +438,7 @@ class MessageProcessor:
             self.connected = True
             self.reconnect_count = 0
 
-            logger.trace(f"[MessageProcessor] Connected to {self.websocket_url}")
+            logger.info(f"[MessageProcessor] Connected to {self.websocket_url}")
 
             # 启动发送协程
             send_task = asyncio.create_task(self._send_handler())
@@ -488,16 +488,7 @@ class MessageProcessor:
             async for message in self.websocket:
                 try:
                     data = json.loads(message)
-                    message_type = data.get("action", "")
-                    message_data = data.get("data")
-                    if self.session_id and self.session_id == data.get("edge_session"):
-                        await self._process_message(message_type, message_data)
-                    else:
-                        if message_type.endswith("_material"):
-                            logger.trace(f"[MessageProcessor] 收到一条归属 {data.get('edge_session')} 的旧消息:{data}")
-                            logger.debug(f"[MessageProcessor] 跳过了一条归属 {data.get('edge_session')} 的旧消息: {data.get('action')}")
-                        else:
-                            await self._process_message(message_type, message_data)
+                    await self._process_message(data)
                 except json.JSONDecodeError:
                     logger.error(f"[MessageProcessor] Invalid JSON received: {message}")
                 except Exception as e:
@@ -512,7 +503,7 @@ class MessageProcessor:
 
     async def _send_handler(self):
         """处理发送队列中的消息"""
-        logger.trace("[MessageProcessor] Send handler started")
+        logger.debug("[MessageProcessor] Send handler started")
 
         try:
             while self.connected and self.websocket:
@@ -563,8 +554,11 @@ class MessageProcessor:
         finally:
             logger.debug("[MessageProcessor] Send handler stopped")
 
-    async def _process_message(self, message_type: str, message_data: Dict[str, Any]):
+    async def _process_message(self, data: Dict[str, Any]):
         """处理收到的消息"""
+        message_type = data.get("action", "")
+        message_data = data.get("data")
 
         logger.debug(f"[MessageProcessor] Processing message: {message_type}")
 
         try:
@@ -577,19 +571,16 @@ class MessageProcessor:
             elif message_type == "cancel_action" or message_type == "cancel_task":
                 await self._handle_cancel_action(message_data)
             elif message_type == "add_material":
-                # noinspection PyTypeChecker
                 await self._handle_resource_tree_update(message_data, "add")
             elif message_type == "update_material":
-                # noinspection PyTypeChecker
                 await self._handle_resource_tree_update(message_data, "update")
            elif message_type == "remove_material":
-                # noinspection PyTypeChecker
                 await self._handle_resource_tree_update(message_data, "remove")
-            # elif message_type == "session_id":
-            #     self.session_id = message_data.get("session_id")
-            #     logger.info(f"[MessageProcessor] Session ID: {self.session_id}")
-            elif message_type == "request_restart":
-                await self._handle_request_restart(message_data)
+            elif message_type == "session_id":
+                self.session_id = message_data.get("session_id")
+                logger.info(f"[MessageProcessor] Session ID: {self.session_id}")
+            elif message_type == "request_reload":
+                await self._handle_request_reload(message_data)
             else:
                 logger.debug(f"[MessageProcessor] Unknown message type: {message_type}")
 
@@ -864,7 +855,6 @@ class MessageProcessor:
                 device_action_groups[key].append(item["uuid"])
 
         logger.info(f"触发物料更新 {action} 分组数量: {len(device_action_groups)}, 总数量: {len(resource_uuid_list)}")
-        logger.trace(f"触发物料更新 {action} 分组数量: {len(device_action_groups)}, {resource_uuid_list}")
 
         # 为每个(device_id, action)创建独立的更新线程
         for (device_id, actual_action), items in device_action_groups.items():
@@ -900,48 +890,19 @@ class MessageProcessor:
             )
             thread.start()
 
-    async def _handle_request_restart(self, data: Dict[str, Any]):
+    async def _handle_request_reload(self, data: Dict[str, Any]):
         """
-        处理重启请求
+        处理重载请求
 
-        当LabGo发送request_restart时,执行清理并触发重启
+        当LabGo发送request_reload时,重新发送设备注册信息
         """
         reason = data.get("reason", "unknown")
-        delay = data.get("delay", 2)  # 默认延迟2秒
-        logger.info(f"[MessageProcessor] Received restart request, reason: {reason}, delay: {delay}s")
+        logger.info(f"[MessageProcessor] Received reload request, reason: {reason}")
 
-        # 发送确认消息
+        # 重新发送host_node_ready信息
         if self.websocket_client:
-            await self.websocket_client.send_message({
-                "action": "restart_acknowledged",
-                "data": {"reason": reason, "delay": delay}
-            })
-
-        # 设置全局重启标志
-        import unilabos.app.main as main_module
-        main_module._restart_requested = True
-        main_module._restart_reason = reason
-
-        # 延迟后执行清理
-        await asyncio.sleep(delay)
-
-        # 在新线程中执行清理,避免阻塞当前事件循环
-        def do_cleanup():
-            import time
-            time.sleep(0.5)  # 给当前消息处理完成的时间
-            logger.info(f"[MessageProcessor] Starting cleanup for restart, reason: {reason}")
-            try:
-                from unilabos.app.utils import cleanup_for_restart
-                if cleanup_for_restart():
-                    logger.info("[MessageProcessor] Cleanup successful, main() will restart")
-                else:
-                    logger.error("[MessageProcessor] Cleanup failed")
-            except Exception as e:
-                logger.error(f"[MessageProcessor] Error during cleanup: {e}")
-
-        cleanup_thread = threading.Thread(target=do_cleanup, name="RestartCleanupThread", daemon=True)
-        cleanup_thread.start()
-        logger.info(f"[MessageProcessor] Restart cleanup scheduled")
+            self.websocket_client.publish_host_ready()
+            logger.info("[MessageProcessor] Re-sent host_node_ready after reload request")
 
     async def _send_action_state_response(
         self, device_id: str, action_name: str, task_id: str, job_id: str, typ: str, free: bool, need_more: int
@@ -1020,7 +981,7 @@ class QueueProcessor:
 
     def _run(self):
         """运行队列处理主循环"""
-        logger.trace("[QueueProcessor] Queue processor started")
+        logger.debug("[QueueProcessor] Queue processor started")
 
         while self.is_running:
             try:
@@ -1230,6 +1191,7 @@ class WebSocketClient(BaseCommunicationClient):
         else:
             url = f"{scheme}://{parsed.netloc}/api/v1/ws/schedule"
 
+        logger.debug(f"[WebSocketClient] URL: {url}")
         return url
 
     def start(self) -> None:
@@ -1242,11 +1204,13 @@ class WebSocketClient(BaseCommunicationClient):
             logger.error("[WebSocketClient] WebSocket URL not configured")
             return
 
+        logger.info(f"[WebSocketClient] Starting connection to {self.websocket_url}")
+
         # 启动两个核心线程
         self.message_processor.start()
         self.queue_processor.start()
 
-        logger.trace("[WebSocketClient] All threads started")
+        logger.info("[WebSocketClient] All threads started")
 
     def stop(self) -> None:
         """停止WebSocket客户端"""
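The session filter that the left-hand branch applies before dispatching can be summarised as a pure function; this is a hedged sketch (helper name and return convention are illustrative, not part of the module):

```python
from typing import Any, Dict

def should_process(data: Dict[str, Any], session_id: str) -> bool:
    """Process a message unless it belongs to another edge_session and is a *_material action."""
    action = data.get("action", "")
    if session_id and session_id == data.get("edge_session"):
        return True
    return not action.endswith("_material")

print(should_process({"action": "add_material", "edge_session": "abc123"}, "ffffff"))  # False: stale material update
print(should_process({"action": "ping", "edge_session": "abc123"}, "ffffff"))          # True: still handled
```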
@@ -16,14 +16,12 @@ class BasicConfig:
     upload_registry = False
     machine_name = "undefined"
     vis_2d_enable = False
-    no_update_feedback = False
     enable_resource_load = True
     communication_protocol = "websocket"
     startup_json_path = None  # 填写绝对路径
     disable_browser = False  # 禁止浏览器自动打开
     port = 8002  # 本地HTTP服务
-    # 'TRACE', 'DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'
-    log_level: Literal["TRACE", "DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"] = "DEBUG"
+    log_level: Literal['TRACE', 'DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'] = "DEBUG"  # 'TRACE', 'DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'
 
     @classmethod
     def auth_secret(cls):
@@ -67,14 +65,13 @@ def _update_config_from_module(module):
             if not attr.startswith("_"):
                 setattr(obj, attr, getattr(getattr(module, name), attr))
 
 
 def _update_config_from_env():
     prefix = "UNILABOS_"
     for env_key, env_value in os.environ.items():
         if not env_key.startswith(prefix):
             continue
         try:
-            key_path = env_key[len(prefix) :]  # Remove UNILAB_ prefix
+            key_path = env_key[len(prefix):]  # Remove UNILAB_ prefix
             class_field = key_path.upper().split("_", 1)
             if len(class_field) != 2:
                 logger.warning(f"[ENV] 环境变量格式不正确:{env_key}")
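`_update_config_from_env` above maps `UNILABOS_<CLASS>_<FIELD>` environment variables onto config classes by stripping the prefix and splitting once on `_`. A standalone sketch of that parsing (the helper is an assumption for illustration, not the project function):

```python
import os

PREFIX = "UNILABOS_"

def parse_override(env_key: str, env_value: str):
    """Return (class_name, field_name, value) for a UNILABOS_-prefixed variable, else None."""
    if not env_key.startswith(PREFIX):
        return None
    key_path = env_key[len(PREFIX):]             # e.g. "BASICCONFIG_PORT"
    class_field = key_path.upper().split("_", 1)
    if len(class_field) != 2:
        return None                               # the real code logs a warning here
    return class_field[0], class_field[1], env_value

os.environ["UNILABOS_BASICCONFIG_PORT"] = "8003"
print(parse_override("UNILABOS_BASICCONFIG_PORT", os.environ["UNILABOS_BASICCONFIG_PORT"]))
# ('BASICCONFIG', 'PORT', '8003')
```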
@@ -6,7 +6,7 @@ Coin Cell Assembly Workstation
 """
 from typing import Dict, Any, List, Optional, Union
 
-from unilabos.resources.resource_tracker import DeviceNodeResourceTracker
+from unilabos.ros.nodes.resource_tracker import DeviceNodeResourceTracker
 from unilabos.device_comms.workstation_base import WorkstationBase, WorkflowInfo
 from unilabos.device_comms.workstation_communication import (
     WorkstationCommunicationBase, CommunicationConfig, CommunicationProtocol, CoinCellCommunication
@@ -61,7 +61,7 @@ class CoinCellAssemblyWorkstation(WorkstationBase):
 
         # 创建资源跟踪器(如果没有提供)
         if resource_tracker is None:
-            from unilabos.resources.resource_tracker import DeviceNodeResourceTracker
+            from unilabos.ros.nodes.resource_tracker import DeviceNodeResourceTracker
             resource_tracker = DeviceNodeResourceTracker()
 
         # 初始化基类
File diff suppressed because it is too large
@@ -43,7 +43,7 @@ class Base(ABC):
         self._type = typ
         self._data_type = data_type
         self._node: Optional[Node] = None
 
     def _get_node(self) -> Node:
         if self._node is None:
             try:
@@ -66,7 +66,7 @@ class Base(ABC):
                 # 直接以字符串形式处理
                 if isinstance(nid, str):
                     nid = nid.strip()
 
                     # 处理包含类名的格式,如 'StringNodeId(ns=4;s=...)' 或 'NumericNodeId(ns=2;i=...)'
                     # 提取括号内的内容
                     match_wrapped = re.match(r'(String|Numeric|Byte|Guid|TwoByteNode|FourByteNode)NodeId\((.*)\)', nid)
@@ -116,16 +116,16 @@ class Base(ABC):
     def read(self) -> Tuple[Any, bool]:
         """读取节点值,返回(值, 是否出错)"""
         pass
 
     @abstractmethod
     def write(self, value: Any) -> bool:
         """写入节点值,返回是否出错"""
         pass
 
     @property
     def type(self) -> NodeType:
         return self._type
 
     @property
     def node_id(self) -> str:
         return self._node_id
@@ -210,15 +210,15 @@ class Method(Base):
         super().__init__(client, name, node_id, NodeType.METHOD, data_type)
         self._parent_node_id = parent_node_id
         self._parent_node = None
 
     def _get_parent_node(self) -> Node:
         if self._parent_node is None:
             try:
                 # 处理父节点ID,使用与_get_node相同的解析逻辑
                 import re
 
                 nid = self._parent_node_id
 
                 # 如果已经是 NodeId 对象,直接使用
                 try:
                     from opcua.ua import NodeId as UaNodeId
@@ -227,16 +227,16 @@ class Method(Base):
                         return self._parent_node
                 except Exception:
                     pass
 
                 # 字符串处理
                 if isinstance(nid, str):
                     nid = nid.strip()
 
                     # 处理包含类名的格式
                     match_wrapped = re.match(r'(String|Numeric|Byte|Guid|TwoByteNode|FourByteNode)NodeId\((.*)\)', nid)
                     if match_wrapped:
                         nid = match_wrapped.group(2).strip()
 
                     # 常见短格式
                     if re.match(r'^ns=\d+;[is]=', nid):
                         self._parent_node = self._client.get_node(nid)
@@ -271,7 +271,7 @@ class Method(Base):
     def write(self, value: Any) -> bool:
         """方法节点不支持写入操作"""
         return True
 
     def call(self, *args) -> Tuple[Any, bool]:
         """调用方法,返回(返回值, 是否出错)"""
         try:
@@ -285,7 +285,7 @@ class Method(Base):
 class Object(Base):
     def __init__(self, client: Client, name: str, node_id: str):
         super().__init__(client, name, node_id, NodeType.OBJECT, None)
 
     def read(self) -> Tuple[Any, bool]:
         """对象节点不支持直接读取操作"""
         return None, True
@@ -293,7 +293,7 @@ class Object(Base):
     def write(self, value: Any) -> bool:
         """对象节点不支持直接写入操作"""
         return True
 
     def get_children(self) -> Tuple[List[Node], bool]:
         """获取子节点列表,返回(子节点列表, 是否出错)"""
         try:
@@ -301,4 +301,4 @@ class Object(Base):
             return children, False
         except Exception as e:
             print(f"获取对象 {self._name} 的子节点失败: {e}")
             return [], True
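Both `_get_node` and `_get_parent_node` normalise node-id strings the same way: unwrap reprs such as `StringNodeId(ns=4;s=...)`, then accept the short `ns=<n>;s=...` / `ns=<n>;i=...` forms. A standalone sketch with the same regexes (inputs are assumed examples):

```python
import re

def normalise_node_id(nid: str) -> str:
    """Reduce a NodeId repr or short form to the plain string passed to client.get_node()."""
    nid = nid.strip()
    wrapped = re.match(r'(String|Numeric|Byte|Guid|TwoByteNode|FourByteNode)NodeId\((.*)\)', nid)
    if wrapped:
        nid = wrapped.group(2).strip()
    if re.match(r'^ns=\d+;[is]=', nid):
        return nid
    raise ValueError(f"unrecognised node id: {nid}")

print(normalise_node_id("StringNodeId(ns=4;s=|var|PLC.Application.GVL.Start)"))  # ns=4;s=|var|PLC.Application.GVL.Start
print(normalise_node_id("ns=2;i=1001"))                                          # ns=2;i=1001
```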
@@ -13,7 +13,7 @@ from pylabrobot.resources (
 import copy
 from unilabos_msgs.msg import Resource
 
-from unilabos.resources.resource_tracker import DeviceNodeResourceTracker  # type: ignore
+from unilabos.ros.nodes.resource_tracker import DeviceNodeResourceTracker  # type: ignore
 
 
 class LiquidHandlerBiomek:
@@ -176,40 +176,7 @@ class BioyondV1RPC(BaseRequest):
             return {}
 
         print(f"add material data: {response['data']}")
-        # 自动更新缓存
-        data = response.get("data", {})
-        if data:
-            if isinstance(data, str):
-                # 如果返回的是字符串,通常是ID
-                mat_id = data
-                name = params.get("name")
-            else:
-                # 如果返回的是字典,尝试获取name和id
-                name = data.get("name") or params.get("name")
-                mat_id = data.get("id")
-
-            if name and mat_id:
-                self.material_cache[name] = mat_id
-                print(f"已自动更新缓存: {name} -> {mat_id}")
-
-            # 处理返回数据中的 details (如果有)
-            # 有些 API 返回结构可能直接包含 details,或者在 data 字段中
-            details = data.get("details", []) if isinstance(data, dict) else []
-            if not details and isinstance(data, dict):
-                details = data.get("detail", [])
-
-            if details:
-                for detail in details:
-                    d_name = detail.get("name")
-                    # 尝试从不同字段获取 ID
-                    d_id = detail.get("id") or detail.get("detailMaterialId")
-
-                    if d_name and d_id:
-                        self.material_cache[d_name] = d_id
-                        print(f"已自动更新 detail 缓存: {d_name} -> {d_id}")
-
-        return data
+        return response.get("data", {})
 
     def query_matial_type_id(self, data) -> list:
         """查找物料typeid"""
@@ -236,7 +203,7 @@ class BioyondV1RPC(BaseRequest):
             params={
                 "apiKey": self.api_key,
                 "requestTime": self.get_current_time_iso8601(),
-                "data": 0,
+                "data": {},
             })
         if not response or response['code'] != 1:
             return []
@@ -306,14 +273,6 @@ class BioyondV1RPC(BaseRequest):
 
         if not response or response['code'] != 1:
             return {}
 
-        # 自动更新缓存 - 移除被删除的物料
-        for name, mid in list(self.material_cache.items()):
-            if mid == material_id:
-                del self.material_cache[name]
-                print(f"已从缓存移除物料: {name}")
-                break
-
         return response.get("data", {})
 
     def material_outbound(self, material_id: str, location_name: str, quantity: int) -> dict:
@@ -1144,10 +1103,6 @@ class BioyondV1RPC(BaseRequest):
             for detail_material in detail_materials:
                 detail_name = detail_material.get("name")
                 detail_id = detail_material.get("detailMaterialId")
-                if not detail_id:
-                    # 尝试其他可能的字段
-                    detail_id = detail_material.get("id")
-
                 if detail_name and detail_id:
                     self.material_cache[detail_name] = detail_id
                     print(f"加载detail材料: {detail_name} -> ID: {detail_id}")
@@ -1168,14 +1123,6 @@ class BioyondV1RPC(BaseRequest):
             print(f"从缓存找到材料: {material_name_or_id} -> ID: {material_id}")
             return material_id
 
-        # 如果缓存中没有,尝试刷新缓存
-        print(f"缓存中未找到材料 '{material_name_or_id}',尝试刷新缓存...")
-        self.refresh_material_cache()
-        if material_name_or_id in self.material_cache:
-            material_id = self.material_cache[material_name_or_id]
-            print(f"刷新缓存后找到材料: {material_name_or_id} -> ID: {material_id}")
-            return material_id
-
         print(f"警告: 未在缓存中找到材料名称 '{material_name_or_id}',将使用原值")
         return material_name_or_id
 
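The removed block above gives the name-to-id material cache a refresh-on-miss fallback. A hedged sketch of that lookup pattern, written against a stand-in `refresh` callable rather than the real RPC client:

```python
from typing import Callable, Dict

def resolve_material_id(name_or_id: str, cache: Dict[str, str], refresh: Callable[[], Dict[str, str]]) -> str:
    """Return the cached id, refreshing once on a miss; fall back to the raw value if still unknown."""
    if name_or_id in cache:
        return cache[name_or_id]
    cache.update(refresh())                      # one refresh attempt before giving up
    return cache.get(name_or_id, name_or_id)

cache = {"NMP": "a1b2"}
print(resolve_material_id("NMP", cache, dict))   # a1b2 (cache hit)
print(resolve_material_id("PMDA", cache, dict))  # PMDA (unresolved, original value returned)
```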
@@ -4,7 +4,6 @@ import time
 from typing import Optional, Dict, Any, List
 from typing_extensions import TypedDict
 import requests
-import pint
 from unilabos.devices.workstation.bioyond_studio.config import API_CONFIG
 
 from unilabos.devices.workstation.bioyond_studio.bioyond_rpc import BioyondException
@@ -44,41 +43,6 @@ class BioyondDispensingStation(BioyondWorkstation):
         # 用于跟踪任务完成状态的字典: {orderCode: {status, order_id, timestamp}}
         self.order_completion_status = {}
 
-        # 初始化 pint 单位注册表
-        self.ureg = pint.UnitRegistry()
-
-        # 化合物信息
-        self.compound_info = {
-            "MolWt": {
-                "MDA": 108.14 * self.ureg.g / self.ureg.mol,
-                "TDA": 122.16 * self.ureg.g / self.ureg.mol,
-                "PAPP": 521.62 * self.ureg.g / self.ureg.mol,
-                "BTDA": 322.23 * self.ureg.g / self.ureg.mol,
-                "BPDA": 294.22 * self.ureg.g / self.ureg.mol,
-                "6FAP": 366.26 * self.ureg.g / self.ureg.mol,
-                "PMDA": 218.12 * self.ureg.g / self.ureg.mol,
-                "MPDA": 108.14 * self.ureg.g / self.ureg.mol,
-                "SIDA": 248.51 * self.ureg.g / self.ureg.mol,
-                "ODA": 200.236 * self.ureg.g / self.ureg.mol,
-                "4,4'-ODA": 200.236 * self.ureg.g / self.ureg.mol,
-                "134": 292.34 * self.ureg.g / self.ureg.mol,
-            },
-            "FuncGroup": {
-                "MDA": "Amine",
-                "TDA": "Amine",
-                "PAPP": "Amine",
-                "BTDA": "Anhydride",
-                "BPDA": "Anhydride",
-                "6FAP": "Amine",
-                "MPDA": "Amine",
-                "SIDA": "Amine",
-                "PMDA": "Anhydride",
-                "ODA": "Amine",
-                "4,4'-ODA": "Amine",
-                "134": "Amine",
-            }
-        }
 
     def _post_project_api(self, endpoint: str, data: Any) -> Dict[str, Any]:
         """项目接口通用POST调用
 
@@ -154,22 +118,20 @@ class BioyondDispensingStation(BioyondWorkstation):
                     ratio = json.loads(ratio)
                 except Exception:
                     ratio = {}
+            root = str(Path(__file__).resolve().parents[3])
+            if root not in sys.path:
+                sys.path.append(root)
+            try:
+                mod = importlib.import_module("tem.compute")
+            except Exception as e:
+                raise BioyondException(f"无法导入计算模块: {e}")
             try:
                 wp = float(wt_percent) if isinstance(wt_percent, str) else wt_percent
                 mt = float(m_tot) if isinstance(m_tot, str) else m_tot
                 tp = float(titration_percent) if isinstance(titration_percent, str) else titration_percent
             except Exception as e:
                 raise BioyondException(f"参数解析失败: {e}")
-            # 2. 调用内部计算方法
-            res = self._generate_experiment_design(
-                ratio=ratio,
-                wt_percent=wp,
-                m_tot=mt,
-                titration_percent=tp
-            )
-
-            # 3. 构造返回结果
+            res = mod.generate_experiment_design(ratio=ratio, wt_percent=wp, m_tot=mt, titration_percent=tp)
             out = {
                 "solutions": res.get("solutions", []),
                 "titration": res.get("titration", {}),
@@ -178,248 +140,11 @@ class BioyondDispensingStation(BioyondWorkstation):
                 "return_info": json.dumps(res, ensure_ascii=False)
             }
             return out
 
         except BioyondException:
             raise
         except Exception as e:
             raise BioyondException(str(e))
 
-    def _generate_experiment_design(
-        self,
-        ratio: dict,
-        wt_percent: float = 0.25,
-        m_tot: float = 70,
-        titration_percent: float = 0.03,
-    ) -> dict:
-        """内部方法:生成实验设计
-
-        根据FuncGroup自动区分二胺和二酐,每种二胺单独配溶液,严格按照ratio顺序投料。
-
-        参数:
-            ratio: 化合物配比字典,格式: {"compound_name": ratio_value}
-            wt_percent: 固体重量百分比
-            m_tot: 反应混合物总质量(g)
-            titration_percent: 滴定溶液百分比
-
-        返回:
-            包含实验设计详细参数的字典
-        """
-        # 溶剂密度
-        ρ_solvent = 1.03 * self.ureg.g / self.ureg.ml
-        # 二酐溶解度
-        solubility = 0.02 * self.ureg.g / self.ureg.ml
-        # 投入固体时最小溶剂体积
-        V_min = 30 * self.ureg.ml
-        m_tot = m_tot * self.ureg.g
-
-        # 保持ratio中的顺序
-        compound_names = list(ratio.keys())
-        compound_ratios = list(ratio.values())
-
-        # 验证所有化合物是否在 compound_info 中定义
-        undefined_compounds = [name for name in compound_names if name not in self.compound_info["MolWt"]]
-        if undefined_compounds:
-            available = list(self.compound_info["MolWt"].keys())
-            raise ValueError(
-                f"以下化合物未在 compound_info 中定义: {undefined_compounds}。"
-                f"可用的化合物: {available}"
-            )
-
-        # 获取各化合物的分子量和官能团类型
-        molecular_weights = [self.compound_info["MolWt"][name] for name in compound_names]
-        func_groups = [self.compound_info["FuncGroup"][name] for name in compound_names]
-
-        # 记录化合物信息用于调试
-        self.hardware_interface._logger.info(f"化合物名称: {compound_names}")
-        self.hardware_interface._logger.info(f"官能团类型: {func_groups}")
-
-        # 按原始顺序分离二胺和二酐
-        ordered_compounds = list(zip(compound_names, compound_ratios, molecular_weights, func_groups))
-        diamine_compounds = [(name, ratio_val, mw, i) for i, (name, ratio_val, mw, fg) in enumerate(ordered_compounds) if fg == "Amine"]
-        anhydride_compounds = [(name, ratio_val, mw, i) for i, (name, ratio_val, mw, fg) in enumerate(ordered_compounds) if fg == "Anhydride"]
-
-        if not diamine_compounds or not anhydride_compounds:
-            raise ValueError(
-                f"需要同时包含二胺(Amine)和二酐(Anhydride)化合物。"
-                f"当前二胺: {[c[0] for c in diamine_compounds]}, "
-                f"当前二酐: {[c[0] for c in anhydride_compounds]}"
-            )
-
-        # 计算加权平均分子量 (基于摩尔比)
-        total_molar_ratio = sum(compound_ratios)
-        weighted_molecular_weight = sum(ratio_val * mw for ratio_val, mw in zip(compound_ratios, molecular_weights))
-
-        # 取最后一个二酐用于滴定
-        titration_anhydride = anhydride_compounds[-1]
-        solid_anhydrides = anhydride_compounds[:-1] if len(anhydride_compounds) > 1 else []
-
-        # 二胺溶液配制参数 - 每种二胺单独配制
-        diamine_solutions = []
-        total_diamine_volume = 0 * self.ureg.ml
-
-        # 计算反应物的总摩尔量
-        n_reactant = m_tot * wt_percent / weighted_molecular_weight
-
-        for name, ratio_val, mw, order_index in diamine_compounds:
-            # 跳过 SIDA
-            if name == "SIDA":
-                continue
-
-            # 计算该二胺需要的摩尔数
-            n_diamine_needed = n_reactant * ratio_val
-
-            # 二胺溶液配制参数 (每种二胺固定配制参数)
-            m_diamine_solid = 5.0 * self.ureg.g  # 每种二胺固体质量
-            V_solvent_for_this = 20 * self.ureg.ml  # 每种二胺溶剂体积
-            m_solvent_for_this = ρ_solvent * V_solvent_for_this
-
-            # 计算该二胺溶液的浓度
-            c_diamine = (m_diamine_solid / mw) / V_solvent_for_this
-
-            # 计算需要移取的溶液体积
-            V_diamine_needed = n_diamine_needed / c_diamine
-
-            diamine_solutions.append({
-                "name": name,
-                "order": order_index,
-                "solid_mass": m_diamine_solid.magnitude,
-                "solvent_volume": V_solvent_for_this.magnitude,
-                "concentration": c_diamine.magnitude,
-                "volume_needed": V_diamine_needed.magnitude,
-                "molar_ratio": ratio_val
-            })
-
-            total_diamine_volume += V_diamine_needed
-
-        # 按原始顺序排序
-        diamine_solutions.sort(key=lambda x: x["order"])
-
-        # 计算滴定二酐的质量
-        titration_name, titration_ratio, titration_mw, _ = titration_anhydride
-        m_titration_anhydride = n_reactant * titration_ratio * titration_mw
-        m_titration_90 = m_titration_anhydride * (1 - titration_percent)
-        m_titration_10 = m_titration_anhydride * titration_percent
-
-        # 计算其他固体二酐的质量 (按顺序)
-        solid_anhydride_masses = []
-        for name, ratio_val, mw, order_index in solid_anhydrides:
-            mass = n_reactant * ratio_val * mw
-            solid_anhydride_masses.append({
-                "name": name,
-                "order": order_index,
-                "mass": mass.magnitude,
-                "molar_ratio": ratio_val
-            })
-
-        # 按原始顺序排序
-        solid_anhydride_masses.sort(key=lambda x: x["order"])
-
-        # 计算溶剂用量
-        total_diamine_solution_mass = sum(
-            sol["volume_needed"] * ρ_solvent for sol in diamine_solutions
-        ) * self.ureg.ml
-
-        # 预估滴定溶剂量、计算补加溶剂量
-        m_solvent_titration = m_titration_10 / solubility * ρ_solvent
-        m_solvent_add = m_tot * (1 - wt_percent) - total_diamine_solution_mass - m_solvent_titration
-
-        # 检查最小溶剂体积要求
-        total_liquid_volume = (total_diamine_solution_mass + m_solvent_add) / ρ_solvent
-        m_tot_min = V_min / total_liquid_volume * m_tot
-
-        # 如果需要,按比例放大
-        scale_factor = 1.0
-        if m_tot_min > m_tot:
-            scale_factor = (m_tot_min / m_tot).magnitude
-            m_titration_90 *= scale_factor
-            m_titration_10 *= scale_factor
-            m_solvent_add *= scale_factor
-            m_solvent_titration *= scale_factor
-
-            # 更新二胺溶液用量
-            for sol in diamine_solutions:
-                sol["volume_needed"] *= scale_factor
-
-            # 更新固体二酐用量
-            for anhydride in solid_anhydride_masses:
-                anhydride["mass"] *= scale_factor
-
-            m_tot = m_tot_min
-
-        # 生成投料顺序
-        feeding_order = []
-
-        # 1. 固体二酐 (按顺序)
-        for anhydride in solid_anhydride_masses:
-            feeding_order.append({
-                "step": len(feeding_order) + 1,
-                "type": "solid_anhydride",
-                "name": anhydride["name"],
-                "amount": anhydride["mass"],
-                "order": anhydride["order"]
-            })
-
-        # 2. 二胺溶液 (按顺序)
-        for sol in diamine_solutions:
-            feeding_order.append({
-                "step": len(feeding_order) + 1,
-                "type": "diamine_solution",
-                "name": sol["name"],
-                "amount": sol["volume_needed"],
-                "order": sol["order"]
-            })
-
-        # 3. 主要二酐粉末
-        feeding_order.append({
-            "step": len(feeding_order) + 1,
-            "type": "main_anhydride",
-            "name": titration_name,
-            "amount": m_titration_90.magnitude,
-            "order": titration_anhydride[3]
-        })
-
-        # 4. 补加溶剂
-        if m_solvent_add > 0:
-            feeding_order.append({
-                "step": len(feeding_order) + 1,
-                "type": "additional_solvent",
-                "name": "溶剂",
-                "amount": m_solvent_add.magnitude,
-                "order": 999
-            })
-
-        # 5. 滴定二酐溶液
-        feeding_order.append({
-            "step": len(feeding_order) + 1,
-            "type": "titration_anhydride",
-            "name": f"{titration_name} 滴定液",
-            "amount": m_titration_10.magnitude,
-            "titration_solvent": m_solvent_titration.magnitude,
-            "order": titration_anhydride[3]
-        })
-
-        # 返回实验设计结果
-        results = {
-            "total_mass": m_tot.magnitude,
-            "scale_factor": scale_factor,
-            "solutions": diamine_solutions,
-            "solids": solid_anhydride_masses,
-            "titration": {
-                "name": titration_name,
-                "main_portion": m_titration_90.magnitude,
-                "titration_portion": m_titration_10.magnitude,
-                "titration_solvent": m_solvent_titration.magnitude,
-            },
-            "solvents": {
-                "additional_solvent": m_solvent_add.magnitude,
-                "total_liquid_volume": total_liquid_volume.magnitude
-            },
-            "feeding_order": feeding_order,
-            "minimum_required_mass": m_tot_min.magnitude
-        }
-
-        return results
-
     # 90%10%小瓶投料任务创建方法
     def create_90_10_vial_feeding_task(self,
                                        order_name: str = None,
@@ -1236,108 +961,6 @@ class BioyondDispensingStation(BioyondWorkstation):
                 'actualVolume': actual_volume
             }
 
-    def _simplify_report(self, report) -> Dict[str, Any]:
-        """简化实验报告,只保留关键信息,去除冗余的工作流参数"""
-        if not isinstance(report, dict):
-            return report
-
-        data = report.get('data', {})
-        if not isinstance(data, dict):
-            return report
-
-        # 提取关键信息
-        simplified = {
-            'name': data.get('name'),
-            'code': data.get('code'),
-            'requester': data.get('requester'),
-            'workflowName': data.get('workflowName'),
-            'workflowStep': data.get('workflowStep'),
-            'requestTime': data.get('requestTime'),
-            'startPreparationTime': data.get('startPreparationTime'),
-            'completeTime': data.get('completeTime'),
-            'useTime': data.get('useTime'),
-            'status': data.get('status'),
-            'statusName': data.get('statusName'),
-        }
-
-        # 提取物料信息(简化版)
-        pre_intakes = data.get('preIntakes', [])
-        if pre_intakes and isinstance(pre_intakes, list):
-            first_intake = pre_intakes[0]
-            sample_materials = first_intake.get('sampleMaterials', [])
-
-            # 简化物料信息
-            simplified_materials = []
-            for material in sample_materials:
-                if isinstance(material, dict):
-                    mat_info = {
-                        'materialName': material.get('materialName'),
-                        'materialTypeName': material.get('materialTypeName'),
-                        'materialCode': material.get('materialCode'),
-                        'materialLocation': material.get('materialLocation'),
-                    }
-
-                    # 解析parameters中的关键信息(如密度、加料历史等)
-                    params_str = material.get('parameters', '{}')
-                    try:
-                        params = json.loads(params_str) if isinstance(params_str, str) else params_str
-                        if isinstance(params, dict):
-                            # 只保留关键参数
-                            if 'density' in params:
-                                mat_info['density'] = params['density']
-                            if 'feedingHistory' in params:
-                                mat_info['feedingHistory'] = params['feedingHistory']
-                            if 'liquidVolume' in params:
-                                mat_info['liquidVolume'] = params['liquidVolume']
-                            if 'm_diamine_tot' in params:
-                                mat_info['m_diamine_tot'] = params['m_diamine_tot']
-                            if 'wt_diamine' in params:
-                                mat_info['wt_diamine'] = params['wt_diamine']
-                    except:
-                        pass
-
-                    simplified_materials.append(mat_info)
-
-            simplified['sampleMaterials'] = simplified_materials
-
-            # 提取extraProperties中的实际值
-            extra_props = first_intake.get('extraProperties', {})
-            if isinstance(extra_props, dict):
-                simplified_extra = {}
-                for key, value in extra_props.items():
-                    try:
-                        parsed_value = json.loads(value) if isinstance(value, str) else value
-                        simplified_extra[key] = parsed_value
-                    except:
-                        simplified_extra[key] = value
-                simplified['extraProperties'] = simplified_extra
-
-        return {
-            'data': simplified,
-            'code': report.get('code'),
-            'message': report.get('message'),
-            'timestamp': report.get('timestamp')
-        }
-
-    def scheduler_start(self) -> dict:
-        """启动调度器 - 启动Bioyond工作站的任务调度器,开始执行队列中的任务
-
-        Returns:
-            dict: 包含return_info的字典,return_info为整型(1=成功)
-
-        Raises:
-            BioyondException: 调度器启动失败时抛出异常
-        """
-        result = self.hardware_interface.scheduler_start()
-        self.hardware_interface._logger.info(f"调度器启动结果: {result}")
-
-        if result != 1:
-            error_msg = "启动调度器失败: 有未处理错误,调度无法启动。请检查Bioyond系统状态。"
-            self.hardware_interface._logger.error(error_msg)
-            raise BioyondException(error_msg)
-
-        return {"return_info": result}
-
     # 等待多个任务完成并获取实验报告
     def wait_for_multiple_orders_and_get_reports(self,
                                                  batch_create_result: str = None,
@@ -1379,12 +1002,7 @@ class BioyondDispensingStation(BioyondWorkstation):
 
         # 验证batch_create_result参数
         if not batch_create_result or batch_create_result == "":
-            raise BioyondException(
-                "batch_create_result参数为空,请确保:\n"
-                "1. batch_create节点与wait节点之间正确连接了handle\n"
-                "2. batch_create节点成功执行并返回了结果\n"
-                "3. 检查上游batch_create任务是否成功创建了订单"
-            )
+            raise BioyondException("batch_create_result参数为空,请确保从batch_create节点正确连接handle")
 
         # 解析batch_create_result JSON对象
         try:
@@ -1413,17 +1031,7 @@ class BioyondDispensingStation(BioyondWorkstation):
 
         # 验证提取的数据
         if not order_codes:
-            self.hardware_interface._logger.error(
-                f"batch_create任务未生成任何订单。batch_create_result内容: {batch_create_result}"
-            )
-            raise BioyondException(
-                "batch_create_result中未找到order_codes或为空。\n"
-                "可能的原因:\n"
-                "1. batch_create任务执行失败(检查任务是否报错)\n"
-                "2. 物料配置问题(如'物料样品板分配失败')\n"
-                "3. Bioyond系统状态异常\n"
-                f"请检查batch_create任务的执行结果"
-            )
+            raise BioyondException("batch_create_result中未找到order_codes字段或为空")
         if not order_ids:
             raise BioyondException("batch_create_result中未找到order_ids字段或为空")
 
@@ -1506,8 +1114,6 @@ class BioyondDispensingStation(BioyondWorkstation):
                 self.hardware_interface._logger.info(
                     f"成功获取任务 {order_code} 的实验报告"
                 )
-                # 简化报告,去除冗余信息
-                report = self._simplify_report(report)
 
                 reports.append({
                     "order_code": order_code,
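The removed `_generate_experiment_design` reduces to simple stoichiometry: total reactant moles n = m_tot * wt_percent / M, where M is the ratio-weighted molar mass; each diamine stock of concentration c = (m_solid / M_i) / V_solvent is pipetted at V = n * r_i / c. A plain-float sketch of that arithmetic (compound data here is illustrative, no pint units):

```python
mol_wt = {"ODA": 200.236, "PMDA": 218.12}   # g/mol
ratio = {"ODA": 1.0, "PMDA": 1.0}           # molar ratio
wt_percent, m_tot = 0.25, 70.0              # solid mass fraction, total batch mass in g

weighted_mw = sum(r * mol_wt[name] for name, r in ratio.items())  # g/mol per formula unit
n_reactant = m_tot * wt_percent / weighted_mw                      # mol of each "unit"

# diamine stock: 5 g solid dissolved in 20 mL solvent -> concentration in mol/mL
c_oda = (5.0 / mol_wt["ODA"]) / 20.0
v_oda = n_reactant * ratio["ODA"] / c_oda                          # mL to pipette

print(f"n = {n_reactant:.4f} mol, V(ODA) = {v_oda:.1f} mL")
```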
File diff suppressed because it is too large
@@ -6,7 +6,6 @@ Bioyond Workstation Implementation
 """
 import time
 import traceback
-import threading
 from datetime import datetime
 from typing import Dict, Any, List, Optional, Union
 import json
@@ -30,90 +29,6 @@ from unilabos.devices.workstation.bioyond_studio.config import (
 from unilabos.devices.workstation.workstation_http_service import WorkstationHTTPService
 
 
-class ConnectionMonitor:
-    """Bioyond连接监控器"""
-    def __init__(self, workstation, check_interval=30):
-        self.workstation = workstation
-        self.check_interval = check_interval
-        self._running = False
-        self._thread = None
-        self._last_status = "unknown"
-
-    def start(self):
-        if self._running:
-            return
-        self._running = True
-        self._thread = threading.Thread(target=self._monitor_loop, daemon=True, name="BioyondConnectionMonitor")
-        self._thread.start()
-        logger.info("Bioyond连接监控器已启动")
-
-    def stop(self):
-        self._running = False
-        if self._thread:
-            self._thread.join(timeout=2)
-        logger.info("Bioyond连接监控器已停止")
-
-    def _monitor_loop(self):
-        while self._running:
-            try:
-                # 使用 lightweight API 检查连接
-                # query_matial_type_list 是比较快的查询
-                start_time = time.time()
-                result = self.workstation.hardware_interface.material_type_list()
-
-                status = "online" if result else "offline"
-                msg = "Connection established" if status == "online" else "Failed to get material type list"
-
-                if status != self._last_status:
-                    logger.info(f"Bioyond连接状态变更: {self._last_status} -> {status}")
-                    self._publish_event(status, msg)
-                    self._last_status = status
-
-                # 发布心跳 (可选,或者只在状态变更时发布)
-                # self._publish_event(status, msg)
-
-            except Exception as e:
-                logger.error(f"Bioyond连接检查异常: {e}")
-                if self._last_status != "error":
-                    self._publish_event("error", str(e))
-                    self._last_status = "error"
-
-            time.sleep(self.check_interval)
-
-    def _publish_event(self, status, message):
-        try:
-            if hasattr(self.workstation, "_ros_node") and self.workstation._ros_node:
-                event_data = {
-                    "status": status,
-                    "message": message,
-                    "timestamp": datetime.now().isoformat()
-                }
-
-                # 动态发布消息,需要在 ROS2DeviceNode 中有对应支持
-                # 这里假设通用事件发布机制,使用 String 类型的 topic
-                # 话题: /<namespace>/events/device_status
-                ns = self.workstation._ros_node.namespace
-                topic = f"{ns}/events/device_status"
-
-                # 使用 ROS2DeviceNode 的发布功能
-                # 如果没有预定义的 publisher,需要动态创建
-                # 注意:workstation base node 可能没有自动创建 arbitrary publishers 的机制
-                # 这里我们先尝试用 String json 发布
-
-                # 在 ROS2DeviceNode 中通常需要先 create_publisher
-                # 为了简单起见,我们检查是否已有 publisher,没有则创建
-                if not hasattr(self.workstation, "_device_status_pub"):
-                    self.workstation._device_status_pub = self.workstation._ros_node.create_publisher(
-                        String, topic, 10
-                    )
-
-                self.workstation._device_status_pub.publish(
-                    convert_to_ros_msg(String, json.dumps(event_data, ensure_ascii=False))
-                )
-        except Exception as e:
-            logger.error(f"发布设备状态事件失败: {e}")
-
-
 class BioyondResourceSynchronizer(ResourceSynchronizer):
     """Bioyond资源同步器
 
@@ -324,18 +239,13 @@ class BioyondResourceSynchronizer(ResourceSynchronizer):
             logger.info(f"[同步→Bioyond] 🔄 转换物料为 Bioyond 格式...")
 
             # 导入物料默认参数配置
-            from .config import MATERIAL_DEFAULT_PARAMETERS, MATERIAL_TYPE_PARAMETERS
+            from .config import MATERIAL_DEFAULT_PARAMETERS
 
-            # 合并参数配置:物料名称参数 + typeId参数(转换为 type:<uuid> 格式)
-            merged_params = MATERIAL_DEFAULT_PARAMETERS.copy()
-            for type_id, params in MATERIAL_TYPE_PARAMETERS.items():
-                merged_params[f"type:{type_id}"] = params
-
             bioyond_material = resource_plr_to_bioyond(
                 [resource],
                 type_mapping=self.workstation.bioyond_config["material_type_mappings"],
                 warehouse_mapping=self.workstation.bioyond_config["warehouse_mapping"],
-                material_params=merged_params
+                material_params=MATERIAL_DEFAULT_PARAMETERS
             )[0]
 
             logger.info(f"[同步→Bioyond] 🔧 准备覆盖locations字段,目标仓库: {parent_name}, 库位: {update_site}, UUID: {target_location_uuid[:8]}...")
@@ -558,18 +468,13 @@ class BioyondResourceSynchronizer(ResourceSynchronizer):
             return material_bioyond_id
 
         # 转换为 Bioyond 格式
-        from .config import MATERIAL_DEFAULT_PARAMETERS, MATERIAL_TYPE_PARAMETERS
+        from .config import MATERIAL_DEFAULT_PARAMETERS
 
-        # 合并参数配置:物料名称参数 + typeId参数(转换为 type:<uuid> 格式)
-        merged_params = MATERIAL_DEFAULT_PARAMETERS.copy()
-        for type_id, params in MATERIAL_TYPE_PARAMETERS.items():
-            merged_params[f"type:{type_id}"] = params
-
         bioyond_material = resource_plr_to_bioyond(
             [resource],
             type_mapping=self.workstation.bioyond_config["material_type_mappings"],
             warehouse_mapping=self.workstation.bioyond_config["warehouse_mapping"],
-            material_params=merged_params
+            material_params=MATERIAL_DEFAULT_PARAMETERS
         )[0]
 
         # ⚠️ 关键:创建物料时不设置 locations,让 Bioyond 系统暂不分配库位
@@ -679,44 +584,6 @@ class BioyondWorkstation(WorkstationBase):
     集成Bioyond物料管理的工作站实现
     """
 
-    def _publish_task_status(
-        self,
-        task_id: str,
-        task_type: str,
-        status: str,
-        result: dict = None,
-        progress: float = 0.0,
-        task_code: str = None
-    ):
-        """发布任务状态事件"""
-        try:
-            if not getattr(self, "_ros_node", None):
-                return
-
-            event_data = {
-                "task_id": task_id,
-                "task_code": task_code,
-                "task_type": task_type,
-                "status": status,
-                "progress": progress,
-                "timestamp": datetime.now().isoformat()
-            }
-            if result:
-                event_data["result"] = result
-
-            topic = f"{self._ros_node.namespace}/events/task_status"
-
-            if not hasattr(self, "_task_status_pub"):
-                self._task_status_pub = self._ros_node.create_publisher(
-                    String, topic, 10
-                )
-
-            self._task_status_pub.publish(
-                convert_to_ros_msg(String, json.dumps(event_data, ensure_ascii=False))
-            )
-        except Exception as e:
-            logger.error(f"发布任务状态事件失败: {e}")
-
     def __init__(
         self,
         bioyond_config: Optional[Dict[str, Any]] = None,
@@ -765,16 +632,13 @@ class BioyondWorkstation(WorkstationBase):
                 "host": bioyond_config.get("http_service_host", HTTP_SERVICE_CONFIG["http_service_host"]),
                 "port": bioyond_config.get("http_service_port", HTTP_SERVICE_CONFIG["http_service_port"])
             }
-        self.http_service = None  # 将在 post_init 启动
-        self.connection_monitor = None  # 将在 post_init 启动
+        self.http_service = None  # 将在 post_init 中启动
 
         logger.info(f"Bioyond工作站初始化完成")
 
     def __del__(self):
         """析构函数:清理资源,停止 HTTP 服务"""
         try:
-            if hasattr(self, 'connection_monitor') and self.connection_monitor:
-                self.connection_monitor.stop()
             if hasattr(self, 'http_service') and self.http_service is not None:
                 logger.info("正在停止 HTTP 报送服务...")
                 self.http_service.stop()
@@ -784,13 +648,6 @@ class BioyondWorkstation(WorkstationBase):
     def post_init(self, ros_node: ROS2WorkstationNode):
         self._ros_node = ros_node
 
-        # 启动连接监控
-        try:
-            self.connection_monitor = ConnectionMonitor(self)
-            self.connection_monitor.start()
-        except Exception as e:
-            logger.error(f"启动连接监控失败: {e}")
-
         # 启动 HTTP 报送接收服务(现在 device_id 已可用)
         if hasattr(self, '_http_service_config'):
             try:
@@ -1157,15 +1014,7 @@ class BioyondWorkstation(WorkstationBase):
 
         workflow_id = self._get_workflow(actual_workflow_name)
         if workflow_id:
-            # 兼容 BioyondReactionStation 中 workflow_sequence 被重写为 property 的情况
-            if isinstance(self.workflow_sequence, list):
-                self.workflow_sequence.append(workflow_id)
-            elif hasattr(self, "_cached_workflow_sequence") and isinstance(self._cached_workflow_sequence, list):
-                self._cached_workflow_sequence.append(workflow_id)
-            else:
-                print(f"❌ 无法添加工作流: workflow_sequence 类型错误 {type(self.workflow_sequence)}")
-                return False
-
+            self.workflow_sequence.append(workflow_id)
             print(f"添加工作流到执行顺序: {actual_workflow_name} -> {workflow_id}")
             return True
         return False
@@ -1366,22 +1215,6 @@ class BioyondWorkstation(WorkstationBase):
         # TODO: 根据实际业务需求处理步骤完成逻辑
         # 例如:更新数据库、触发后续流程等
 
-        # 发布任务状态事件 (running/progress update)
-        self._publish_task_status(
-            task_id=data.get('orderCode'),  # 使用 OrderCode 作为关联 ID
-            task_code=data.get('orderCode'),
-            task_type="bioyond_step",
-            status="running",
-            progress=0.5,  # 步骤完成视为任务进行中
-            result={"step_name": data.get('stepName'), "step_id": data.get('stepId')}
-        )
-
-        # 更新物料信息
-        # 步骤完成后,物料状态可能发生变化(如位置、用量等),触发同步
-        logger.info(f"[步骤完成报送] 触发物料同步...")
-        self.resource_synchronizer.sync_from_external()
-
         return {
             "processed": True,
             "step_id": data.get('stepId'),
@@ -1416,17 +1249,6 @@ class BioyondWorkstation(WorkstationBase):
 
         # TODO: 根据实际业务需求处理通量完成逻辑
 
-        # 发布任务状态事件
-        self._publish_task_status(
-            task_id=data.get('orderCode'),
-            task_code=data.get('orderCode'),
-            task_type="bioyond_sample",
-            status="running",
-            progress=0.7,
-            result={"sample_id": data.get('sampleId'), "status": status_desc}
-        )
-
         return {
             "processed": True,
             "sample_id": data.get('sampleId'),
@@ -1466,32 +1288,6 @@ class BioyondWorkstation(WorkstationBase):
         # TODO: 根据实际业务需求处理任务完成逻辑
         # 例如:更新物料库存、生成报表等
 
-        # 映射状态到事件状态
-        event_status = "completed"
-        if str(data.get('status')) in ["-11", "-12"]:
-            event_status = "error"
-        elif str(data.get('status')) == "30":
-            event_status = "completed"
-        else:
-            event_status = "running"  # 其他状态视为运行中(或根据实际定义)
-
-        # 发布任务状态事件
-        self._publish_task_status(
-            task_id=data.get('orderCode'),
-            task_code=data.get('orderCode'),
-            task_type="bioyond_order",
-            status=event_status,
-            progress=1.0 if event_status in ["completed", "error"] else 0.9,
-            result={"order_name": data.get('orderName'), "status": status_desc, "materials_count": len(used_materials)}
-        )
-
-        # 更新物料信息
-        # 任务完成后,且状态为完成时,触发同步以更新最终物料状态
-        if event_status == "completed":
-            logger.info(f"[任务完成报送] 触发物料同步...")
-            self.resource_synchronizer.sync_from_external()
-
         return {
             "processed": True,
             "order_code": data.get('orderCode'),
@@ -459,12 +459,12 @@ class WorkstationHTTPHandler(BaseHTTPRequestHandler):
 # 验证必需字段
 if 'brand' in request_data:
 if request_data['brand'] == "bioyond": # 奔曜
-material_data = request_data["text"]
+error_msg = request_data["text"]
-logger.info(f"收到奔曜物料变更报送: {material_data}")
+logger.info(f"收到奔曜错误处理报送: {error_msg}")
 return HttpResponse(
 success=True,
-message=f"物料变更报送已收到: {material_data}",
+message=f"错误处理报送已收到: {error_msg}",
-acknowledgment_id=f"MATERIAL_{int(time.time() * 1000)}_{material_data.get('id', 'unknown')}",
+acknowledgment_id=f"ERROR_{int(time.time() * 1000)}_{error_msg.get('action_id', 'unknown')}",
 data=None
 )
 else:
@@ -5,6 +5,229 @@ bioyond_dispensing_station:
 - bioyond_dispensing_station
 class:
 action_value_mappings:
+auto-brief_step_parameters:
+feedback: {}
+goal: {}
+goal_default:
+data: null
+handles: {}
+placeholder_keys: {}
+result: {}
+schema:
+description: ''
+properties:
+feedback: {}
+goal:
+properties:
+data:
+type: object
+required:
+- data
+type: object
+result: {}
+required:
+- goal
+title: brief_step_parameters参数
+type: object
+type: UniLabJsonCommand
+auto-compute_experiment_design:
+feedback: {}
+goal: {}
+goal_default:
+m_tot: '70'
+ratio: null
+titration_percent: '0.03'
+wt_percent: '0.25'
+handles: {}
+placeholder_keys: {}
+result: {}
+schema:
+description: ''
+properties:
+feedback: {}
+goal:
+properties:
+m_tot:
+default: '70'
+type: string
+ratio:
+type: object
+titration_percent:
+default: '0.03'
+type: string
+wt_percent:
+default: '0.25'
+type: string
+required:
+- ratio
+type: object
+result:
+properties:
+feeding_order:
+items: {}
+title: Feeding Order
+type: array
+return_info:
+title: Return Info
+type: string
+solutions:
+items: {}
+title: Solutions
+type: array
+solvents:
+additionalProperties: true
+title: Solvents
+type: object
+titration:
+additionalProperties: true
+title: Titration
+type: object
+required:
+- solutions
+- titration
+- solvents
+- feeding_order
+- return_info
+title: ComputeExperimentDesignReturn
+type: object
+required:
+- goal
+title: compute_experiment_design参数
+type: object
+type: UniLabJsonCommand
+auto-process_order_finish_report:
+feedback: {}
+goal: {}
+goal_default:
+report_request: null
+used_materials: null
+handles: {}
+placeholder_keys: {}
+result: {}
+schema:
+description: ''
+properties:
+feedback: {}
+goal:
+properties:
+report_request:
+type: string
+used_materials:
+type: string
+required:
+- report_request
+- used_materials
+type: object
+result: {}
+required:
+- goal
+title: process_order_finish_report参数
+type: object
+type: UniLabJsonCommand
+auto-project_order_report:
+feedback: {}
+goal: {}
+goal_default:
+order_id: null
+handles: {}
+placeholder_keys: {}
+result: {}
+schema:
+description: ''
+properties:
+feedback: {}
+goal:
+properties:
+order_id:
+type: string
+required:
+- order_id
+type: object
+result: {}
+required:
+- goal
+title: project_order_report参数
+type: object
+type: UniLabJsonCommand
+auto-query_resource_by_name:
+feedback: {}
+goal: {}
+goal_default:
+material_name: null
+handles: {}
+placeholder_keys: {}
+result: {}
+schema:
+description: ''
+properties:
+feedback: {}
+goal:
+properties:
+material_name:
+type: string
+required:
+- material_name
+type: object
+result: {}
+required:
+- goal
+title: query_resource_by_name参数
+type: object
+type: UniLabJsonCommand
+auto-transfer_materials_to_reaction_station:
+feedback: {}
+goal: {}
+goal_default:
+target_device_id: null
+transfer_groups: null
+handles: {}
+placeholder_keys: {}
+result: {}
+schema:
+description: ''
+properties:
+feedback: {}
+goal:
+properties:
+target_device_id:
+type: string
+transfer_groups:
+type: array
+required:
+- target_device_id
+- transfer_groups
+type: object
+result: {}
+required:
+- goal
+title: transfer_materials_to_reaction_station参数
+type: object
+type: UniLabJsonCommand
+auto-workflow_sample_locations:
+feedback: {}
+goal: {}
+goal_default:
+workflow_id: null
+handles: {}
+placeholder_keys: {}
+result: {}
+schema:
+description: ''
+properties:
+feedback: {}
+goal:
+properties:
+workflow_id:
+type: string
+required:
+- workflow_id
+type: object
+result: {}
+required:
+- goal
+title: workflow_sample_locations参数
+type: object
+type: UniLabJsonCommand
 batch_create_90_10_vial_feeding_tasks:
 feedback: {}
 goal:
@@ -171,99 +394,6 @@ bioyond_dispensing_station:
 title: BatchCreateDiamineSolutionTasks
 type: object
 type: UniLabJsonCommand
-compute_experiment_design:
-feedback: {}
-goal:
-m_tot: m_tot
-ratio: ratio
-titration_percent: titration_percent
-wt_percent: wt_percent
-goal_default:
-m_tot: '70'
-ratio: ''
-titration_percent: '0.03'
-wt_percent: '0.25'
-handles:
-output:
-- data_key: solutions
-data_source: executor
-data_type: array
-handler_key: solutions
-io_type: sink
-label: Solution Data From Python
-- data_key: titration
-data_source: executor
-data_type: object
-handler_key: titration
-io_type: sink
-label: Titration Data From Calculation Node
-- data_key: solvents
-data_source: executor
-data_type: object
-handler_key: solvents
-io_type: sink
-label: Solvents Data From Calculation Node
-- data_key: feeding_order
-data_source: executor
-data_type: array
-handler_key: feeding_order
-io_type: sink
-label: Feeding Order Data From Calculation Node
-result:
-feeding_order: feeding_order
-return_info: return_info
-solutions: solutions
-solvents: solvents
-titration: titration
-schema:
-description: 计算实验设计,输出solutions/titration/solvents/feeding_order用于后续节点。
-properties:
-feedback: {}
-goal:
-properties:
-m_tot:
-default: '70'
-description: 总质量(g)
-type: string
-ratio:
-description: 组分摩尔比的对象,保持输入顺序,如{"MDA":1,"BTDA":1}
-type: string
-titration_percent:
-default: '0.03'
-description: 滴定比例(10%部分)
-type: string
-wt_percent:
-default: '0.25'
-description: 目标固含质量分数
-type: string
-required:
-- ratio
-type: object
-result:
-properties:
-feeding_order:
-type: array
-return_info:
-type: string
-solutions:
-type: array
-solvents:
-type: object
-titration:
-type: object
-required:
-- solutions
-- titration
-- solvents
-- feeding_order
-- return_info
-title: ComputeExperimentDesign_Result
-type: object
-required:
-- goal
-title: ComputeExperimentDesign
-type: object
-type: UniLabJsonCommand
 create_90_10_vial_feeding_task:
 feedback: {}
 goal:
@@ -490,89 +620,6 @@ bioyond_dispensing_station:
 title: DispenStationSolnPrep
 type: object
 type: DispenStationSolnPrep
-scheduler_start:
-feedback: {}
-goal: {}
-goal_default: {}
-handles: {}
-result:
-return_info: return_info
-schema:
-description: 启动调度器 - 启动Bioyond配液站的任务调度器,开始执行队列中的任务
-properties:
-feedback: {}
-goal:
-properties: {}
-required: []
-type: object
-result:
-properties:
-return_info:
-description: 调度器启动结果,成功返回1,失败返回0
-type: integer
-required:
-- return_info
-title: scheduler_start结果
-type: object
-required:
-- goal
-title: scheduler_start参数
-type: object
-type: UniLabJsonCommand
-transfer_materials_to_reaction_station:
-feedback: {}
-goal:
-target_device_id: target_device_id
-transfer_groups: transfer_groups
-goal_default:
-target_device_id: ''
-transfer_groups: ''
-handles: {}
-placeholder_keys:
-target_device_id: unilabos_devices
-result: {}
-schema:
-description: 将配液站完成的物料(溶液、样品等)转移到指定反应站的堆栈库位。支持配置多组转移任务,每组包含物料名称、目标堆栈和目标库位。
-properties:
-feedback: {}
-goal:
-properties:
-target_device_id:
-description: 目标反应站设备ID(从设备列表中选择,所有转移组都使用同一个目标设备)
-type: string
-transfer_groups:
-description: 转移任务组列表,每组包含物料名称、目标堆栈和目标库位,可以添加多组
-items:
-properties:
-materials:
-description: 物料名称(手动输入,系统将通过RPC查询验证)
-type: string
-target_sites:
-description: 目标库位(手动输入,如"A01")
-type: string
-target_stack:
-description: 目标堆栈名称(从列表选择)
-enum:
-- 堆栈1左
-- 堆栈1右
-- 站内试剂存放堆栈
-type: string
-required:
-- materials
-- target_stack
-- target_sites
-type: object
-type: array
-required:
-- target_device_id
-- transfer_groups
-type: object
-result: {}
-required:
-- goal
-title: transfer_materials_to_reaction_station参数
-type: object
-type: UniLabJsonCommand
 wait_for_multiple_orders_and_get_reports:
 feedback: {}
 goal:
@@ -9737,34 +9737,7 @@ liquid_handler.prcxi:
 touch_tip: false
 use_channels:
 - 0
-handles:
+handles: {}
-input:
-- data_key: liquid
-data_source: handle
-data_type: resource
-handler_key: sources
-label: sources
-- data_key: liquid
-data_source: executor
-data_type: resource
-handler_key: targets
-label: targets
-- data_key: liquid
-data_source: executor
-data_type: resource
-handler_key: tip_rack
-label: tip_rack
-output:
-- data_key: liquid
-data_source: handle
-data_type: resource
-handler_key: sources_out
-label: sources
-- data_key: liquid
-data_source: executor
-data_type: resource
-handler_key: targets_out
-label: targets
 placeholder_keys:
 sources: unilabos_resources
 targets: unilabos_resources
@@ -4,88 +4,213 @@ reaction_station.bioyond:
 - reaction_station_bioyond
 class:
 action_value_mappings:
-add_time_constraint:
+auto-create_order:
 feedback: {}
-goal:
+goal: {}
-duration: duration
-end_point: end_point
-end_step_key: end_step_key
-start_point: start_point
-start_step_key: start_step_key
 goal_default:
-duration: 0
+json_str: null
-end_point: 0
-end_step_key: ''
-start_point: 0
-start_step_key: ''
 handles: {}
+placeholder_keys: {}
 result: {}
 schema:
-description: 添加时间约束 - 在两个工作流之间添加时间约束
+description: ''
 properties:
 feedback: {}
 goal:
 properties:
-duration:
+json_str:
-description: 时间(秒)
-type: integer
-end_point:
-default: Start
-description: 终点计时点 (Start=开始前, End=结束后)
-enum:
-- Start
-- End
-type: string
-end_step_key:
-description: 终点步骤Key (可选, 默认为空则自动选择)
-type: string
-start_point:
-default: Start
-description: 起点计时点 (Start=开始前, End=结束后)
-enum:
-- Start
-- End
-type: string
-start_step_key:
-description: 起点步骤Key (例如 "feeding", "liquid", 可选, 默认为空则自动选择)
 type: string
 required:
-- duration
+- json_str
 type: object
 result: {}
 required:
 - goal
-title: add_time_constraint参数
+title: create_order参数
 type: object
 type: UniLabJsonCommand
-clean_all_server_workflows:
+auto-hard_delete_merged_workflows:
 feedback: {}
 goal: {}
-goal_default: {}
+goal_default:
+workflow_ids: null
 handles: {}
-result:
+placeholder_keys: {}
-code: code
+result: {}
-message: message
 schema:
-description: 清空服务端所有非核心工作流 (保留核心流程)
+description: ''
 properties:
 feedback: {}
 goal:
-properties: {}
-required: []
-type: object
-result:
 properties:
-code:
+workflow_ids:
-description: 操作结果代码(1表示成功)
+items:
-type: integer
+type: string
-message:
+type: array
-description: 结果描述
+required:
-type: string
+- workflow_ids
 type: object
+result: {}
 required:
 - goal
-title: clean_all_server_workflows参数
+title: hard_delete_merged_workflows参数
+type: object
+type: UniLabJsonCommand
+auto-merge_workflow_with_parameters:
+feedback: {}
+goal: {}
+goal_default:
+json_str: null
+handles: {}
+placeholder_keys: {}
+result: {}
+schema:
+description: ''
+properties:
+feedback: {}
+goal:
+properties:
+json_str:
+type: string
+required:
+- json_str
+type: object
+result: {}
+required:
+- goal
+title: merge_workflow_with_parameters参数
+type: object
+type: UniLabJsonCommand
+auto-process_temperature_cutoff_report:
+feedback: {}
+goal: {}
+goal_default:
+report_request: null
+handles: {}
+placeholder_keys: {}
+result: {}
+schema:
+description: ''
+properties:
+feedback: {}
+goal:
+properties:
+report_request:
+type: string
+required:
+- report_request
+type: object
+result: {}
+required:
+- goal
+title: process_temperature_cutoff_report参数
+type: object
+type: UniLabJsonCommand
+auto-process_web_workflows:
+feedback: {}
+goal: {}
+goal_default:
+web_workflow_json: null
+handles: {}
+placeholder_keys: {}
+result: {}
+schema:
+description: ''
+properties:
+feedback: {}
+goal:
+properties:
+web_workflow_json:
+type: string
+required:
+- web_workflow_json
+type: object
+result: {}
+required:
+- goal
+title: process_web_workflows参数
+type: object
+type: UniLabJsonCommand
+auto-skip_titration_steps:
+feedback: {}
+goal: {}
+goal_default:
+preintake_id: null
+handles: {}
+placeholder_keys: {}
+result: {}
+schema:
+description: ''
+properties:
+feedback: {}
+goal:
+properties:
+preintake_id:
+type: string
+required:
+- preintake_id
+type: object
+result: {}
+required:
+- goal
+title: skip_titration_steps参数
+type: object
+type: UniLabJsonCommand
+auto-wait_for_multiple_orders_and_get_reports:
+feedback: {}
+goal: {}
+goal_default:
+batch_create_result: null
+check_interval: 10
+timeout: 7200
+handles: {}
+placeholder_keys: {}
+result: {}
+schema:
+description: ''
+properties:
+feedback: {}
+goal:
+properties:
+batch_create_result:
+type: string
+check_interval:
+default: 10
+type: integer
+timeout:
+default: 7200
+type: integer
+required: []
+type: object
+result: {}
+required:
+- goal
+title: wait_for_multiple_orders_and_get_reports参数
+type: object
+type: UniLabJsonCommand
+auto-workflow_step_query:
+feedback: {}
+goal: {}
+goal_default:
+workflow_id: null
+handles: {}
+placeholder_keys: {}
+result: {}
+schema:
+description: ''
+properties:
+feedback: {}
+goal:
+properties:
+workflow_id:
+type: string
+required:
+- workflow_id
+type: object
+result: {}
+required:
+- goal
+title: workflow_step_query参数
 type: object
 type: UniLabJsonCommand
 drip_back:
@@ -122,19 +247,13 @@ reaction_station.bioyond:
 description: 观察时间(分钟)
 type: string
 titration_type:
-description: 是否滴定(NO=否, YES=是)
+description: 是否滴定(1=否, 2=是)
-enum:
-- 'NO'
-- 'YES'
 type: string
 torque_variation:
-description: 是否观察 (NO=否, YES=是)
+description: 是否观察 (1=否, 2=是)
-enum:
-- 'NO'
-- 'YES'
 type: string
 volume:
-description: 分液公式(mL)
+description: 分液公式(μL)
 type: string
 required:
 - volume
@@ -234,19 +353,13 @@ reaction_station.bioyond:
 description: 观察时间(分钟)
 type: string
 titration_type:
-description: 是否滴定(NO=否, YES=是)
+description: 是否滴定(1=否, 2=是)
-enum:
-- 'NO'
-- 'YES'
 type: string
 torque_variation:
-description: 是否观察 (NO=否, YES=是)
+description: 是否观察 (1=否, 2=是)
-enum:
-- 'NO'
-- 'YES'
 type: string
 volume:
-description: 分液公式(mL)
+description: 分液公式(μL)
 type: string
 required:
 - volume
@@ -290,7 +403,7 @@ reaction_station.bioyond:
 label: Solvents Data From Calculation Node
 result: {}
 schema:
-description: 液体投料-溶剂。可以直接提供volume(mL),或通过solvents对象自动从additional_solvent(mL)计算volume。
+description: 液体投料-溶剂。可以直接提供volume(μL),或通过solvents对象自动从additional_solvent(mL)计算volume。
 properties:
 feedback: {}
 goal:
@@ -310,21 +423,15 @@ reaction_station.bioyond:
 description: 观察时间(分钟),默认360
 type: string
 titration_type:
-default: 'NO'
+default: '1'
-description: 是否滴定(NO=否, YES=是),默认NO
+description: 是否滴定(1=否, 2=是),默认1
-enum:
-- 'NO'
-- 'YES'
 type: string
 torque_variation:
-default: 'YES'
+default: '2'
-description: 是否观察 (NO=否, YES=是),默认YES
+description: 是否观察 (1=否, 2=是),默认2
-enum:
-- 'NO'
-- 'YES'
 type: string
 volume:
-description: 分液量(mL)。可直接提供,或通过solvents参数自动计算
+description: 分液量(μL)。可直接提供,或通过solvents参数自动计算
 type: string
 required:
 - assign_material_name
@@ -397,21 +504,15 @@ reaction_station.bioyond:
 description: 观察时间(分钟),默认90
 type: string
 titration_type:
-default: 'YES'
+default: '2'
-description: 是否滴定(NO=否, YES=是),默认YES
+description: 是否滴定(1=否, 2=是),默认2
-enum:
-- 'NO'
-- 'YES'
 type: string
 torque_variation:
-default: 'YES'
+default: '2'
-description: 是否观察 (NO=否, YES=是),默认YES
+description: 是否观察 (1=否, 2=是),默认2
-enum:
-- 'NO'
-- 'YES'
 type: string
 volume_formula:
-description: 分液公式(mL)。可直接提供固定公式,或留空由系统根据x_value、feeding_order_data、extracted_actuals自动生成
+description: 分液公式(μL)。可直接提供固定公式,或留空由系统根据x_value、feeding_order_data、extracted_actuals自动生成
 type: string
 x_value:
 description: 公式中的x值,手工输入,格式为"{{1-2-3}}"(包含双花括号)。用于自动公式计算
@@ -459,19 +560,13 @@ reaction_station.bioyond:
 description: 观察时间(分钟)
 type: string
 titration_type:
-description: 是否滴定(NO=否, YES=是)
+description: 是否滴定(1=否, 2=是)
-enum:
-- 'NO'
-- 'YES'
 type: string
 torque_variation:
-description: 是否观察 (NO=否, YES=是)
+description: 是否观察 (1=否, 2=是)
-enum:
-- 'NO'
-- 'YES'
 type: string
 volume_formula:
-description: 分液公式(mL)
+description: 分液公式(μL)
 type: string
 required:
 - volume_formula
@@ -585,35 +680,6 @@ reaction_station.bioyond:
 title: reactor_taken_out参数
 type: object
 type: UniLabJsonCommand
-scheduler_start:
-feedback: {}
-goal: {}
-goal_default: {}
-handles: {}
-result:
-return_info: return_info
-schema:
-description: 启动调度器 - 启动Bioyond工作站的任务调度器,开始执行队列中的任务
-properties:
-feedback: {}
-goal:
-properties: {}
-required: []
-type: object
-result:
-properties:
-return_info:
-description: 调度器启动结果,成功返回1,失败返回0
-type: integer
-required:
-- return_info
-title: scheduler_start结果
-type: object
-required:
-- goal
-title: scheduler_start参数
-type: object
-type: UniLabJsonCommand
 solid_feeding_vials:
 feedback: {}
 goal:
@@ -640,11 +706,7 @@ reaction_station.bioyond:
 description: 物料名称(用于获取试剂瓶位ID)
 type: string
 material_id:
-description: 粉末类型ID,Salt=盐(21分钟),Flour=面粉(27分钟),BTDA=BTDA(38分钟)
+description: 粉末类型ID,1=盐(21分钟),2=面粉(27分钟),3=BTDA(38分钟)
-enum:
-- Salt
-- Flour
-- BTDA
 type: string
 temperature:
 description: 温度设定(°C)
@@ -653,10 +715,7 @@ reaction_station.bioyond:
 description: 观察时间(分钟)
 type: string
 torque_variation:
-description: 是否观察 (NO=否, YES=是)
+description: 是否观察 (1=否, 2=是)
-enum:
-- 'NO'
-- 'YES'
 type: string
 required:
 - assign_material_name
@@ -674,16 +733,6 @@ reaction_station.bioyond:
 module: unilabos.devices.workstation.bioyond_studio.reaction_station:BioyondReactionStation
 protocol_type: []
 status_types:
-average_viscosity: Float64
-force: Float64
-in_temperature: Float64
-out_temperature: Float64
-pt100_temperature: Float64
-sensor_average_temperature: Float64
-setting_temperature: Float64
-speed: Float64
-target_temperature: Float64
-viscosity: Float64
 workflow_sequence: String
 type: python
 config_info: []
@@ -716,19 +765,34 @@ reaction_station.reactor:
 - reactor
 - reaction_station_bioyond
 class:
-action_value_mappings: {}
+action_value_mappings:
+auto-update_metrics:
+feedback: {}
+goal: {}
+goal_default:
+payload: null
+handles: {}
+placeholder_keys: {}
+result: {}
+schema:
+description: ''
+properties:
+feedback: {}
+goal:
+properties:
+payload:
+type: object
+required:
+- payload
+type: object
+result: {}
+required:
+- goal
+title: update_metrics参数
+type: object
+type: UniLabJsonCommand
 module: unilabos.devices.workstation.bioyond_studio.reaction_station:BioyondReactor
-status_types:
+status_types: {}
-average_viscosity: Float64
-force: Float64
-in_temperature: Float64
-out_temperature: Float64
-pt100_temperature: Float64
-sensor_average_temperature: Float64
-setting_temperature: Float64
-speed: Float64
-target_temperature: Float64
-viscosity: Float64
 type: python
 config_info: []
 description: 反应站子设备-反应器
@@ -222,7 +222,7 @@ class Registry:
 abs_path = Path(path).absolute()
 resource_path = abs_path / "resources"
 files = list(resource_path.glob("*/*.yaml"))
-logger.trace(f"[UniLab Registry] load resources? {resource_path.exists()}, total: {len(files)}")
+logger.debug(f"[UniLab Registry] resources: {resource_path.exists()}, total: {len(files)}")
 current_resource_number = len(self.resource_type_registry) + 1
 for i, file in enumerate(files):
 with open(file, encoding="utf-8", mode="r") as f:
@@ -20,17 +20,6 @@ BIOYOND_PolymerStation_Liquid_Vial:
 icon: ''
 init_param_schema: {}
 version: 1.0.0
-BIOYOND_PolymerStation_Measurement_Vial:
-category:
-- bottles
-class:
-module: unilabos.resources.bioyond.bottles:BIOYOND_PolymerStation_Measurement_Vial
-type: pylabrobot
-description: 聚合站-测量小瓶(测密度)
-handles: []
-icon: ''
-init_param_schema: {}
-version: 1.0.0
 BIOYOND_PolymerStation_Reactor:
 category:
 - bottles
@@ -193,20 +193,3 @@ def BIOYOND_PolymerStation_Flask(
 barcode=barcode,
 model="BIOYOND_PolymerStation_Flask",
 )

-def BIOYOND_PolymerStation_Measurement_Vial(
-name: str,
-diameter: float = 25.0,
-height: float = 60.0,
-max_volume: float = 20000.0,  # 20mL
-barcode: str = None,
-) -> Bottle:
-"""创建测量小瓶"""
-return Bottle(
-name=name,
-diameter=diameter,
-height=height,
-max_volume=max_volume,
-barcode=barcode,
-model="BIOYOND_PolymerStation_Measurement_Vial",
-)
@@ -49,17 +49,20 @@ class BIOYOND_PolymerReactionStation_Deck(Deck):
 "测量小瓶仓库(测密度)": bioyond_warehouse_density_vial("测量小瓶仓库(测密度)"),  # A01~B03
 }
 self.warehouse_locations = {
-"堆栈1左": Coordinate(-200.0, 450.0, 0.0),  # 左侧位置
+"堆栈1左": Coordinate(0.0, 430.0, 0.0),  # 左侧位置
-"堆栈1右": Coordinate(2350.0, 450.0, 0.0),  # 右侧位置
+"堆栈1右": Coordinate(2500.0, 430.0, 0.0),  # 右侧位置
-"站内试剂存放堆栈": Coordinate(730.0, 390.0, 0.0),
+"站内试剂存放堆栈": Coordinate(640.0, 480.0, 0.0),
 # "移液站内10%分装液体准备仓库": Coordinate(1200.0, 600.0, 0.0),
 "站内Tip盒堆栈": Coordinate(300.0, 150.0, 0.0),
-"测量小瓶仓库(测密度)": Coordinate(940.0, 530.0, 0.0),
+"测量小瓶仓库(测密度)": Coordinate(922.0, 552.0, 0.0),
 }
+self.warehouses["站内试剂存放堆栈"].rotation = Rotation(z=90)
+self.warehouses["测量小瓶仓库(测密度)"].rotation = Rotation(z=270)

 for warehouse_name, warehouse in self.warehouses.items():
 self.assign_child_resource(warehouse, location=self.warehouse_locations[warehouse_name])


 class BIOYOND_PolymerPreparationStation_Deck(Deck):
 def __init__(
 self,
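The hunk above replaces the special vertical warehouse layout with plain PyLabRobot rotations applied before the warehouses are anchored to the deck. A minimal, hypothetical sketch of that pattern with generic PyLabRobot resources (the deck, rack, and dimensions below are placeholders, not the classes from this diff; it assumes `Deck`, `Resource`, `Coordinate`, and `Rotation` are importable from `pylabrobot.resources` as in the surrounding code):

```python
from pylabrobot.resources import Coordinate, Deck, Resource, Rotation

deck = Deck(name="demo_deck", size_x=3000, size_y=1000, size_z=500)
rack = Resource(name="vertical_rack", size_x=86, size_y=127, size_z=25)

# Rotate the rack 90° around z before anchoring it, mirroring how the deck
# above orients its vertically mounted warehouses.
rack.rotation = Rotation(z=90)
deck.assign_child_resource(rack, location=Coordinate(640.0, 480.0, 0.0))
```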
@@ -141,7 +144,6 @@ class BIOYOND_YB_Deck(Deck):

 for warehouse_name, warehouse in self.warehouses.items():
 self.assign_child_resource(warehouse, location=self.warehouse_locations[warehouse_name])

 def YB_Deck(name: str) -> Deck:
 by=BIOYOND_YB_Deck(name=name)
 by.setup()
@@ -46,55 +46,41 @@ def bioyond_warehouse_1x4x4_right(name: str) -> WareHouse:
 )

 def bioyond_warehouse_density_vial(name: str) -> WareHouse:
-"""创建测量小瓶仓库(测密度) - 竖向排列2列3行
+"""创建测量小瓶仓库(测密度) A01~B03"""
-布局(从下到上,从左到右):
-| A03 | B03 | ← 顶部
-| A02 | B02 | ← 中部
-| A01 | B01 | ← 底部
-"""
 return warehouse_factory(
 name=name,
-num_items_x=2,  # 2列(A, B)
+num_items_x=3,  # 3列(01-03)
-num_items_y=3,  # 3行(01-03,从下到上)
+num_items_y=2,  # 2行(A-B)
 num_items_z=1,  # 1层
 dx=10.0,
 dy=10.0,
 dz=10.0,
-item_dx=40.0,  # 列间距(A到B的横向距离)
+item_dx=40.0,
-item_dy=40.0,  # 行间距(01到02到03的竖向距离)
+item_dy=40.0,
 item_dz=50.0,
-# ⭐ 竖向warehouse:槽位尺寸也是竖向的(小瓶已经是正方形,无需调整)
+# 用更小的 resource_size 来表现 "小点的孔位"
 resource_size_x=30.0,
 resource_size_y=30.0,
 resource_size_z=12.0,
 category="warehouse",
 col_offset=0,
-layout="vertical-col-major",  # ⭐ 竖向warehouse专用布局
+layout="row-major",
 )

 def bioyond_warehouse_reagent_storage(name: str) -> WareHouse:
-"""创建BioYond站内试剂存放堆栈 - 竖向排列1列2行
+"""创建BioYond站内试剂存放堆栈(A01~A02, 1行×2列)"""
-布局(竖向,从下到上):
-| A02 | ← 顶部
-| A01 | ← 底部
-"""
 return warehouse_factory(
 name=name,
-num_items_x=1,  # 1列
+num_items_x=2,  # 2列(01-02)
-num_items_y=2,  # 2行(01-02,从下到上)
+num_items_y=1,  # 1行(A)
 num_items_z=1,  # 1层
 dx=10.0,
 dy=10.0,
 dz=10.0,
-item_dx=96.0,  # 列间距(这里只有1列,不重要)
+item_dx=137.0,
-item_dy=137.0,  # 行间距(A01到A02的竖向距离)
+item_dy=96.0,
 item_dz=120.0,
-# ⭐ 竖向warehouse:交换槽位尺寸,使槽位框也是竖向的
-resource_size_x=86.0,  # 原来的 resource_size_y
-resource_size_y=127.0,  # 原来的 resource_size_x
-resource_size_z=25.0,
 category="warehouse",
-layout="vertical-col-major",  # ⭐ 竖向warehouse专用布局
 )

 def bioyond_warehouse_tipbox_storage(name: str) -> WareHouse:
@@ -13,7 +13,7 @@ from unilabos.config.config import BasicConfig
 from unilabos.resources.container import RegularContainer
 from unilabos.resources.itemized_carrier import ItemizedCarrier, BottleCarrier
 from unilabos.ros.msgs.message_converter import convert_to_ros_msg
-from unilabos.resources.resource_tracker import (
+from unilabos.ros.nodes.resource_tracker import (
 ResourceDictInstance,
 ResourceTreeSet,
 )
@@ -42,7 +42,7 @@ def canonicalize_nodes_data(
 Returns:
 ResourceTreeSet: 标准化后的资源树集合
 """
-print_status(f"{len(nodes)} Resources loaded", "info")
+print_status(f"{len(nodes)} Resources loaded:", "info")

 # 第一步:基本预处理(处理graphml的label字段)
 outer_host_node_id = None
@@ -779,22 +779,6 @@ def resource_bioyond_to_plr(bioyond_materials: list[dict], type_mapping: Dict[st
 if not locations:
 logger.debug(f"[物料位置] {unique_name} 没有location信息,跳过warehouse放置")

-# ⭐ 预先检查:如果物料的任何location在竖向warehouse中,提前交换尺寸
-# 这样可以避免多个location时尺寸不一致的问题
-needs_size_swap = False
-for loc in locations:
-wh_name_check = loc.get("whName")
-if wh_name_check in ["站内试剂存放堆栈", "测量小瓶仓库(测密度)"]:
-needs_size_swap = True
-break

-if needs_size_swap and hasattr(plr_material, 'size_x') and hasattr(plr_material, 'size_y'):
-original_x = plr_material.size_x
-original_y = plr_material.size_y
-plr_material.size_x = original_y
-plr_material.size_y = original_x
-logger.debug(f" 物料 {unique_name} 将放入竖向warehouse,预先交换尺寸: {original_x}×{original_y} → {plr_material.size_x}×{plr_material.size_y}")

 for loc in locations:
 wh_name = loc.get("whName")
 logger.debug(f"[物料位置] {unique_name} 尝试放置到 warehouse: {wh_name} (Bioyond坐标: x={loc.get('x')}, y={loc.get('y')}, z={loc.get('z')})")
@@ -816,6 +800,7 @@ def resource_bioyond_to_plr(bioyond_materials: list[dict], type_mapping: Dict[st
 logger.debug(f"[Warehouse匹配] 找到warehouse: {wh_name} (容量: {warehouse.capacity}, 行×列: {warehouse.num_items_x}×{warehouse.num_items_y})")

 # Bioyond坐标映射 (重要!): x→行(1=A,2=B...), y→列(1=01,2=02...), z→层(通常=1)
+# PyLabRobot warehouse是列优先存储: A01,B01,C01,D01, A02,B02,C02,D02, ...
 x = loc.get("x", 1)  # 行号 (1-based: 1=A, 2=B, 3=C, 4=D)
 y = loc.get("y", 1)  # 列号 (1-based: 1=01, 2=02, 3=03...)
 z = loc.get("z", 1)  # 层号 (1-based, 通常为1)
@@ -824,23 +809,12 @@ def resource_bioyond_to_plr(bioyond_materials: list[dict], type_mapping: Dict[st
 if wh_name == "堆栈1右":
 y = y - 4  # 将5-8映射到1-4

-# 特殊处理竖向warehouse(站内试剂存放堆栈、测量小瓶仓库)
+# 特殊处理:对于1行×N列的横向warehouse(如站内试剂存放堆栈)
-# 这些warehouse使用 vertical-col-major 布局
+# Bioyond的y坐标表示线性位置序号,而不是列号
-if wh_name in ["站内试剂存放堆栈", "测量小瓶仓库(测密度)"]:
+if warehouse.num_items_y == 1:
-# vertical-col-major 布局的坐标映射:
+# 1行warehouse: 直接用y作为线性索引
-# - Bioyond的x(1=A,2=B)对应warehouse的列(col, x方向)
+idx = y - 1
-# - Bioyond的y(1=01,2=02,3=03)对应warehouse的行(row, y方向),从下到上
+logger.debug(f"1行warehouse {wh_name}: y={y} → idx={idx}")
-# vertical-col-major 中: row=0 对应底部,row=n-1 对应顶部
-# Bioyond y=1(01) 对应底部 → row=0, y=2(02) 对应中间 → row=1
-# 索引计算: idx = row * num_cols + col
-col_idx = x - 1  # Bioyond的x(A,B) → col索引(0,1)
-row_idx = y - 1  # Bioyond的y(01,02,03) → row索引(0,1,2)
-layer_idx = z - 1

-idx = layer_idx * (warehouse.num_items_x * warehouse.num_items_y) + row_idx * warehouse.num_items_x + col_idx
-logger.debug(f"🔍 竖向warehouse {wh_name}: Bioyond(x={x},y={y},z={z}) → warehouse(col={col_idx},row={row_idx},layer={layer_idx}) → idx={idx}, capacity={warehouse.capacity}")

-# 普通横向warehouse的处理
 else:
 # 多行warehouse: 根据 layout 使用不同的索引计算
 row_idx = x - 1  # x表示行: 转为0-based
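For reference, the new branch above treats Bioyond's y coordinate as a linear slot number whenever the warehouse has a single row, while multi-row warehouses fall through to a row/column calculation. A small illustrative sketch of that mapping, assuming row-major slot ordering; the helper name and the exact multi-row formula are assumptions, since the full multi-row branch is not shown in this hunk:

```python
def bioyond_loc_to_index(x: int, y: int, num_cols: int, num_rows: int) -> int:
    """Map a Bioyond slot (x = 1-based row, y = 1-based column) to a linear index."""
    if num_rows == 1:
        # Single-row stack: Bioyond's y already is the linear position number.
        return y - 1
    row_idx, col_idx = x - 1, y - 1
    return row_idx * num_cols + col_idx  # assumes row-major slot ordering

# Example: slot B03 (x=2, y=3) in a 2-row x 4-column warehouse -> index 6
assert bioyond_loc_to_index(2, 3, num_cols=4, num_rows=2) == 6
```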
@@ -864,7 +838,6 @@ def resource_bioyond_to_plr(bioyond_materials: list[dict], type_mapping: Dict[st

 if 0 <= idx < warehouse.capacity:
 if warehouse[idx] is None or isinstance(warehouse[idx], ResourceHolder):
-# 物料尺寸已在放入warehouse前根据需要进行了交换
 warehouse[idx] = plr_material
 logger.debug(f"✅ 物料 {unique_name} 放置到 {wh_name}[{idx}] (Bioyond坐标: x={loc.get('x')}, y={loc.get('y')})")
 else:
@@ -1038,24 +1011,11 @@ def resource_plr_to_bioyond(plr_resources: list[ResourcePLR], type_mapping: dict
 logger.debug(f" 📭 [单瓶物料] {resource.name} 无液体,使用资源名: {material_name}")

 # 🎯 处理物料默认参数和单位
-# 优先级: typeId参数 > 物料名称参数 > 默认值
+# 检查是否有该物料名称的默认参数配置
 default_unit = "个"  # 默认单位
 material_parameters = {}

-# 1️⃣ 首先检查是否有 typeId 对应的参数配置(从 material_params 中获取,key 格式为 "type:<typeId>")
+if material_name in material_params:
-type_params_key = f"type:{type_id}"
-if type_params_key in material_params:
-params_config = material_params[type_params_key].copy()

-# 提取 unit 字段(如果有)
-if "unit" in params_config:
-default_unit = params_config.pop("unit")  # 从参数中移除,放到外层

-# 剩余的字段放入 Parameters
-material_parameters = params_config
-logger.debug(f" 🔧 [物料参数-按typeId] 为 typeId={type_id[:8]}... 应用配置: unit={default_unit}, parameters={material_parameters}")
-# 2️⃣ 其次检查是否有该物料名称的默认参数配置
-elif material_name in material_params:
 params_config = material_params[material_name].copy()

 # 提取 unit 字段(如果有)
@@ -1064,7 +1024,7 @@ def resource_plr_to_bioyond(plr_resources: list[ResourcePLR], type_mapping: dict

 # 剩余的字段放入 Parameters
 material_parameters = params_config
-logger.debug(f" 🔧 [物料参数-按名称] 为 {material_name} 应用配置: unit={default_unit}, parameters={material_parameters}")
+logger.debug(f" 🔧 [物料参数] 为 {material_name} 应用配置: unit={default_unit}, parameters={material_parameters}")

 # 转换为 JSON 字符串
 parameters_json = json.dumps(material_parameters) if material_parameters else "{}"
@@ -50,46 +50,13 @@ class Bottle(Well):
 self.barcode = barcode

 def serialize(self) -> dict:
-# Pylabrobot expects barcode to be an object with serialize(), but here it is a str.
-# We temporarily unset it to avoid AttributeError in super().serialize().
-_barcode = self.barcode
-self.barcode = None
-try:
-data = super().serialize()
-finally:
-self.barcode = _barcode

 return {
-**data,
+**super().serialize(),
 "diameter": self.diameter,
 "height": self.height,
 "barcode": self.barcode,
 }

-@classmethod
-def deserialize(cls, data: dict, allow_marshal: bool = False):
-# Extract barcode before calling parent deserialize to avoid type error
-barcode_data = data.pop("barcode", None)

-# Call parent deserialize
-instance = super(Bottle, cls).deserialize(data, allow_marshal=allow_marshal)

-# Set barcode as string (not as Barcode object)
-if barcode_data:
-if isinstance(barcode_data, str):
-instance.barcode = barcode_data
-elif isinstance(barcode_data, dict):
-# If it's a dict (Barcode serialized format), extract the data field
-instance.barcode = barcode_data.get("data", "")
-else:
-instance.barcode = ""

-# Set additional attributes
-instance.diameter = data.get("diameter", instance._size_x)
-instance.height = data.get("height", instance._size_z)

-return instance

 T = TypeVar("T", bound=ResourceHolder)

 S = TypeVar("S", bound=ResourceHolder)
@@ -182,7 +149,6 @@ class ItemizedCarrier(ResourcePLR):

 if not reassign and self.sites[idx] is not None:
 raise ValueError(f"a site with index {idx} already exists")
-location = list(self.child_locations.values())[idx]
 super().assign_child_resource(resource, location=location, reassign=reassign)
 self.sites[idx] = resource

@@ -42,10 +42,6 @@ def warehouse_factory(
 if layout == "row-major":
 # 行优先:row=0(A行) 应该显示在上方,需要较小的 y 值
 y = dy + row * item_dy
-elif layout == "vertical-col-major":
-# 竖向warehouse: row=0 对应顶部(y小),row=n-1 对应底部(y大)
-# 但标签 01 应该在底部,所以使用反向映射
-y = dy + (num_items_y - row - 1) * item_dy
 else:
 # 列优先:保持原逻辑(row=0 对应较大的 y)
 y = dy + (num_items_y - row - 1) * item_dy
@@ -70,14 +66,6 @@ def warehouse_factory(
 # 行优先顺序: A01,A02,A03,A04, B01,B02,B03,B04
 # locations[0] 对应 row=0, y最大(前端顶部)→ 应该是 A01
 keys = [f"{LETTERS[j]}{i + 1 + col_offset:02d}" for j in range(len_y) for i in range(len_x)]
-elif layout == "vertical-col-major":
-# ⭐ 竖向warehouse专用布局:
-# 字母(A,B,C...)对应列(横向, x方向),数字(01,02,03...)对应行(竖向, y方向,从下到上)
-# locations 生成顺序: row→col (row=0,col=0 → row=0,col=1 → row=1,col=0 → ...)
-# 其中 row=0 对应底部(y大),row=n-1 对应顶部(y小)
-# 标签中 01 对应底部(row=0),02 对应中间(row=1),03 对应顶部(row=2)
-# 标签顺序: A01,B01,A02,B02,A03,B03
-keys = [f"{LETTERS[col]}{row + 1 + col_offset:02d}" for row in range(len_y) for col in range(len_x)]
 else:
 # 列优先顺序: A01,B01,C01,D01, A02,B02,C02,D02
 keys = [f"{LETTERS[j]}{i + 1 + col_offset:02d}" for i in range(len_x) for j in range(len_y)]
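With the vertical-col-major branch removed, warehouse_factory keeps only the two slot-label orderings shown in the hunk above. A self-contained sketch of what those two comprehensions produce; `LETTERS` and the helper name are defined locally here for illustration and stand in for the module-level constant:

```python
import string

LETTERS = string.ascii_uppercase  # assumption: mirrors the module-level constant

def slot_keys(len_x: int, len_y: int, layout: str, col_offset: int = 0) -> list:
    if layout == "row-major":
        # Row-major: A01, A02, ..., then B01, B02, ...
        return [f"{LETTERS[j]}{i + 1 + col_offset:02d}" for j in range(len_y) for i in range(len_x)]
    # Column-major (default branch): A01, B01, ..., then A02, B02, ...
    return [f"{LETTERS[j]}{i + 1 + col_offset:02d}" for i in range(len_x) for j in range(len_y)]

print(slot_keys(2, 2, "row-major"))  # ['A01', 'A02', 'B01', 'B02']
print(slot_keys(2, 2, "col-major"))  # ['A01', 'B01', 'A02', 'B02']
```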
@@ -5,7 +5,7 @@ from unilabos.ros.msgs.message_converter import (
 get_action_type,
 )
 from unilabos.ros.nodes.base_device_node import init_wrapper, ROS2DeviceNode
-from unilabos.resources.resource_tracker import ResourceDictInstance
+from unilabos.ros.nodes.resource_tracker import ResourceDictInstance

 # 定义泛型类型变量
 T = TypeVar("T")
@@ -1,9 +1,10 @@
+import copy
 from typing import Optional

 from unilabos.registry.registry import lab_registry
 from unilabos.ros.device_node_wrapper import ros2_device_node
 from unilabos.ros.nodes.base_device_node import ROS2DeviceNode, DeviceInitError
-from unilabos.resources.resource_tracker import ResourceDictInstance
+from unilabos.ros.nodes.resource_tracker import ResourceDictInstance
 from unilabos.utils import logger
 from unilabos.utils.exception import DeviceClassInvalid
 from unilabos.utils.import_manager import default_manager
@@ -1,5 +1,4 @@
 import json

 # from nt import device_encoding
 import threading
 import time
@@ -11,7 +10,7 @@ from unilabos_msgs.srv._serial_command import SerialCommand_Response

 from unilabos.app.register import register_devices_and_resources
 from unilabos.ros.nodes.presets.resource_mesh_manager import ResourceMeshManager
-from unilabos.resources.resource_tracker import DeviceNodeResourceTracker, ResourceTreeSet
+from unilabos.ros.nodes.resource_tracker import DeviceNodeResourceTracker, ResourceTreeSet
 from unilabos.devices.ros_dev.liquid_handler_joint_publisher import LiquidHandlerJointPublisher
 from unilabos_msgs.srv import SerialCommand  # type: ignore
 from rclpy.executors import MultiThreadedExecutor
@@ -56,11 +55,7 @@ def main(
 ) -> None:
 """主函数"""

-# Support restart - check if rclpy is already initialized
+rclpy.init(args=rclpy_init_args)
-if not rclpy.ok():
-rclpy.init(args=rclpy_init_args)
-else:
-logger.info("[ROS] rclpy already initialized, reusing context")
 executor = rclpy.__executor = MultiThreadedExecutor()
 # 创建主机节点
 host_node = HostNode(
@@ -93,7 +88,7 @@ def main(
 joint_republisher = JointRepublisher("joint_republisher", host_node.resource_tracker)
 # lh_joint_pub = LiquidHandlerJointPublisher(
 #     resources_config=resources_list, resource_tracker=host_node.resource_tracker
 # )
 executor.add_node(resource_mesh_manager)
 executor.add_node(joint_republisher)
 # executor.add_node(lh_joint_pub)
@@ -1,3 +1,4 @@
+import copy
 import inspect
 import io
 import json
@@ -12,6 +13,7 @@ import asyncio
 
 import rclpy
 import yaml
+from msgcenterpy import ROS2MessageInstance
 from rclpy.node import Node
 from rclpy.action import ActionServer, ActionClient
 from rclpy.action.server import ServerGoalHandle
@@ -20,13 +22,15 @@ from rclpy.callback_groups import ReentrantCallbackGroup
 from rclpy.service import Service
 from unilabos_msgs.action import SendCmd
 from unilabos_msgs.srv._serial_command import SerialCommand_Request, SerialCommand_Response
 
-from unilabos.config.config import BasicConfig
 from unilabos.utils.decorator import get_topic_config, get_all_subscriptions
-
 from unilabos.resources.container import RegularContainer
 from unilabos.resources.graphio import (
+    resource_ulab_to_plr,
     initialize_resources,
+    dict_to_tree,
+    resource_plr_to_ulab,
+    tree_to_list,
 )
 from unilabos.resources.plr_additional_res_reg import register
 from unilabos.ros.msgs.message_converter import (
@@ -43,7 +47,7 @@ from unilabos_msgs.srv import (
 )  # type: ignore
 from unilabos_msgs.msg import Resource  # type: ignore
 
-from unilabos.resources.resource_tracker import (
+from unilabos.ros.nodes.resource_tracker import (
     DeviceNodeResourceTracker,
     ResourceTreeSet,
     ResourceTreeInstance,
@@ -359,6 +363,7 @@ class BaseROS2DeviceNode(Node, Generic[T]):
             return res
 
         async def append_resource(req: SerialCommand_Request, res: SerialCommand_Response):
+            from pylabrobot.resources.resource import Resource as ResourcePLR
             from pylabrobot.resources.deck import Deck
             from pylabrobot.resources import Coordinate
             from pylabrobot.resources import Plate
@@ -619,7 +624,7 @@ class BaseROS2DeviceNode(Node, Generic[T]):
         )  # type: ignore
         raw_nodes = json.loads(response.response)
         tree_set = ResourceTreeSet.from_raw_dict_list(raw_nodes)
-        self.lab_logger().trace(f"获取资源结果: {len(tree_set.trees)} 个资源树 {tree_set.root_nodes}")
+        self.lab_logger().debug(f"获取资源结果: {len(tree_set.trees)} 个资源树")
         return tree_set
 
     async def get_resource_with_dir(self, resource_id: str, with_children: bool = True) -> "ResourcePLR":
@@ -792,7 +797,7 @@ class BaseROS2DeviceNode(Node, Generic[T]):
 
         def _handle_update(
             plr_resources: List[Union[ResourcePLR, ResourceDictInstance]], tree_set: ResourceTreeSet, additional_add_params: Dict[str, Any]
-        ) -> Tuple[Dict[str, Any], List[ResourcePLR]]:
+        ) -> Dict[str, Any]:
             """
             处理资源更新操作的内部函数
 
@@ -804,7 +809,6 @@ class BaseROS2DeviceNode(Node, Generic[T]):
             Returns:
                 操作结果字典
             """
-            original_instances = []
             for plr_resource, tree in zip(plr_resources, tree_set.trees):
                 if isinstance(plr_resource, ResourceDictInstance):
                     self._lab_logger.info(f"跳过 非资源{plr_resource.res_content.name} 的更新")
@@ -847,16 +851,6 @@ class BaseROS2DeviceNode(Node, Generic[T]):
                     and original_parent_resource is not None
                 ):
                     self.transfer_to_new_resource(original_instance, tree, additional_add_params)
-                else:
-                    # 判断是否变更了resource_site
-                    target_site = original_instance.unilabos_extra.get("update_resource_site")
-                    sites = original_instance.parent.sites if original_instance.parent is not None and hasattr(original_instance.parent, "sites") else None
-                    site_names = list(original_instance.parent._ordering.keys()) if original_instance.parent is not None and hasattr(original_instance.parent, "sites") else []
-                    if target_site is not None and sites is not None and site_names is not None:
-                        site_index = sites.index(original_instance)
-                        site_name = site_names[site_index]
-                        if site_name != target_site:
-                            self.transfer_to_new_resource(original_instance, tree, additional_add_params)
 
                 # 加载状态
                 original_instance.load_all_state(states)
@@ -864,14 +858,13 @@ class BaseROS2DeviceNode(Node, Generic[T]):
                 self.lab_logger().info(
                     f"更新了资源属性 {plr_resource}[{tree.root_node.res_content.uuid}] " f"及其子节点 {child_count} 个"
                 )
-                original_instances.append(original_instance)
 
             # 调用driver的update回调
            func = getattr(self.driver_instance, "resource_tree_update", None)
            if callable(func):
-                func(original_instances)
+                func(plr_resources)
 
-            return {"success": True, "action": "update"}, original_instances
+            return {"success": True, "action": "update"}
 
         try:
             data = json.loads(req.command)
@@ -895,13 +888,6 @@ class BaseROS2DeviceNode(Node, Generic[T]):
                         raise ValueError("tree_set不能为None")
                     plr_resources = tree_set.to_plr_resources()
                     result = _handle_add(plr_resources, tree_set, additional_add_params)
-                    new_tree_set = ResourceTreeSet.from_plr_resources(plr_resources)
-                    r = SerialCommand.Request()
-                    r.command = json.dumps(
-                        {"data": {"data": new_tree_set.dump()}, "action": "update"})  # 和Update Resource一致
-                    response: SerialCommand_Response = await self._resource_clients[
-                        "c2s_update_resource_tree"].call_async(r)  # type: ignore
-                    self.lab_logger().info(f"确认资源云端 Add 结果: {response.response}")
                     results.append(result)
                 elif action == "update":
                     if tree_set is None:
@@ -912,15 +898,7 @@ class BaseROS2DeviceNode(Node, Generic[T]):
                             plr_resources.append(tree.root_node)
                         else:
                             plr_resources.append(ResourceTreeSet([tree]).to_plr_resources()[0])
-                    result, original_instances = _handle_update(plr_resources, tree_set, additional_add_params)
-                    if not BasicConfig.no_update_feedback:
-                        new_tree_set = ResourceTreeSet.from_plr_resources(original_instances)
-                        r = SerialCommand.Request()
-                        r.command = json.dumps(
-                            {"data": {"data": new_tree_set.dump()}, "action": "update"})  # 和Update Resource一致
-                        response: SerialCommand_Response = await self._resource_clients[
-                            "c2s_update_resource_tree"].call_async(r)  # type: ignore
-                        self.lab_logger().info(f"确认资源云端 Update 结果: {response.response}")
+                    result = _handle_update(plr_resources, tree_set, additional_add_params)
                     results.append(result)
                 elif action == "remove":
                     result = _handle_remove(resources_uuid)
@@ -1787,7 +1765,6 @@ class ROS2DeviceNode:
             or driver_class.__name__ == "LiquidHandlerBiomek"
             or driver_class.__name__ == "PRCXI9300Handler"
             or driver_class.__name__ == "TransformXYZHandler"
-            or driver_class.__name__ == "OpcUaClient"
         )
 
         # 创建设备类实例
@@ -10,6 +10,7 @@ from typing import TYPE_CHECKING, Optional, Dict, Any, List, ClassVar, Set, Type
 from action_msgs.msg import GoalStatus
 from geometry_msgs.msg import Point
 from rclpy.action import ActionClient, get_action_server_names_and_types_by_node
+from rclpy.callback_groups import ReentrantCallbackGroup
 from rclpy.service import Service
 from unilabos_msgs.msg import Resource  # type: ignore
 from unilabos_msgs.srv import (
@@ -18,6 +19,7 @@ from unilabos_msgs.srv import (
     ResourceUpdate,
     ResourceList,
     SerialCommand,
+    ResourceGet,
 )  # type: ignore
 from unilabos_msgs.srv._serial_command import SerialCommand_Request, SerialCommand_Response
 from unique_identifier_msgs.msg import UUID
@@ -35,7 +37,7 @@ from unilabos.ros.msgs.message_converter import (
 )
 from unilabos.ros.nodes.base_device_node import BaseROS2DeviceNode, ROS2DeviceNode, DeviceNodeResourceTracker
 from unilabos.ros.nodes.presets.controller_node import ControllerNode
-from unilabos.resources.resource_tracker import (
+from unilabos.ros.nodes.resource_tracker import (
     ResourceDict,
     ResourceDictInstance,
     ResourceTreeSet,
@@ -70,8 +72,6 @@ class HostNode(BaseROS2DeviceNode):
 
     _instance: ClassVar[Optional["HostNode"]] = None
     _ready_event: ClassVar[threading.Event] = threading.Event()
-    _shutting_down: ClassVar[bool] = False  # Flag to signal shutdown to background threads
-    _background_threads: ClassVar[List[threading.Thread]] = []  # Track all background threads for cleanup
     _device_action_status: ClassVar[collections.defaultdict[str, DeviceActionStatus]] = collections.defaultdict(
         DeviceActionStatus
     )
@@ -83,48 +83,6 @@ class HostNode(BaseROS2DeviceNode):
             return cls._instance
         return None
 
-    @classmethod
-    def shutdown_background_threads(cls, timeout: float = 5.0) -> None:
-        """
-        Gracefully shutdown all background threads for clean exit or restart.
-
-        This method:
-        1. Sets shutdown flag to stop background operations
-        2. Waits for background threads to finish with timeout
-        3. Cleans up finished threads from tracking list
-
-        Args:
-            timeout: Maximum time to wait for each thread (seconds)
-        """
-        cls._shutting_down = True
-
-        # Wait for background threads to finish
-        active_threads = []
-        for t in cls._background_threads:
-            if t.is_alive():
-                t.join(timeout=timeout)
-                if t.is_alive():
-                    active_threads.append(t.name)
-
-        if active_threads:
-            logger.warning(f"[Host Node] Some background threads still running: {active_threads}")
-
-        # Clear the thread list
-        cls._background_threads.clear()
-        logger.info(f"[Host Node] Background threads shutdown complete")
-
-    @classmethod
-    def reset_state(cls) -> None:
-        """
-        Reset the HostNode singleton state for restart or clean exit.
-        Call this after destroying the instance.
-        """
-        cls._instance = None
-        cls._ready_event.clear()
-        cls._shutting_down = False
-        cls._background_threads.clear()
-        logger.info("[Host Node] State reset complete")
-
     def __init__(
         self,
         device_id: str,
@@ -338,37 +296,12 @@ class HostNode(BaseROS2DeviceNode):
                 bridge.publish_host_ready()
                 self.lab_logger().debug(f"Host ready signal sent via {bridge.__class__.__name__}")
 
-    def _send_re_register(self, sclient, device_namespace: str):
-        """
-        Send re-register command to a device. This is a one-time operation.
-
-        Args:
-            sclient: The service client
-            device_namespace: The device namespace for logging
-        """
-        try:
-            # Use timeout to prevent indefinite blocking
-            if not sclient.wait_for_service(timeout_sec=10.0):
-                self.lab_logger().debug(f"[Host Node] Re-register timeout for {device_namespace}")
-                return
-
-            # Check shutdown flag after wait
-            if self._shutting_down:
-                self.lab_logger().debug(f"[Host Node] Re-register aborted for {device_namespace} (shutdown)")
-                return
-
-            request = SerialCommand.Request()
-            request.command = ""
-            future = sclient.call_async(request)
-            # Use timeout for result as well
-            future.result(timeout_sec=5.0)
-            self.lab_logger().debug(f"[Host Node] Re-register completed for {device_namespace}")
-        except Exception as e:
-            # Gracefully handle destruction during shutdown
-            if "destruction was requested" in str(e) or self._shutting_down:
-                self.lab_logger().debug(f"[Host Node] Re-register aborted for {device_namespace} (cleanup)")
-            else:
-                self.lab_logger().warning(f"[Host Node] Re-register failed for {device_namespace}: {e}")
+    def _send_re_register(self, sclient):
+        sclient.wait_for_service()
+        request = SerialCommand.Request()
+        request.command = ""
+        future = sclient.call_async(request)
+        response = future.result()
 
     def _discover_devices(self) -> None:
         """
@@ -400,27 +333,23 @@ class HostNode(BaseROS2DeviceNode):
                     self._create_action_clients_for_device(device_id, namespace)
                     self._online_devices.add(device_key)
                     sclient = self.create_client(SerialCommand, f"/srv{namespace}/re_register_device")
-                    t = threading.Thread(
+                    threading.Thread(
                         target=self._send_re_register,
-                        args=(sclient, namespace),
+                        args=(sclient,),
                         daemon=True,
                         name=f"ROSDevice{self.device_id}_re_register_device_{namespace}",
-                    )
-                    self._background_threads.append(t)
-                    t.start()
+                    ).start()
                 elif device_key not in self._online_devices:
                     # 设备重新上线
                     self.lab_logger().info(f"[Host Node] Device reconnected: {device_key}")
                     self._online_devices.add(device_key)
                     sclient = self.create_client(SerialCommand, f"/srv{namespace}/re_register_device")
-                    t = threading.Thread(
+                    threading.Thread(
                         target=self._send_re_register,
-                        args=(sclient, namespace),
+                        args=(sclient,),
                         daemon=True,
                         name=f"ROSDevice{self.device_id}_re_register_device_{namespace}",
-                    )
-                    self._background_threads.append(t)
-                    t.start()
+                    ).start()
 
             # 检测离线设备
             offline_devices = self._online_devices - current_devices
@@ -778,14 +707,13 @@ class HostNode(BaseROS2DeviceNode):
             raise ValueError(f"ActionClient {action_id} not found.")
 
         action_client: ActionClient = self._action_clients[action_id]
-
         # 遍历action_kwargs下的所有子dict,将"sample_uuid"的值赋给"sample_id"
         def assign_sample_id(obj):
             if isinstance(obj, dict):
                 if "sample_uuid" in obj:
                     obj["sample_id"] = obj["sample_uuid"]
                     obj.pop("sample_uuid")
-                for k, v in obj.items():
+                for k,v in obj.items():
                     if k != "unilabos_extra":
                         assign_sample_id(v)
             elif isinstance(obj, list):
@@ -816,7 +744,9 @@ class HostNode(BaseROS2DeviceNode):
         self.lab_logger().info(f"[Host Node] Goal {action_id} ({item.job_id}) accepted")
         self._goals[item.job_id] = goal_handle
         goal_future = goal_handle.get_result_async()
-        goal_future.add_done_callback(lambda f: self.get_result_callback(item, action_id, f))
+        goal_future.add_done_callback(
+            lambda f: self.get_result_callback(item, action_id, f)
+        )
         goal_future.result()
 
     def feedback_callback(self, item: "QueueItem", action_id: str, feedback_msg) -> None:
@@ -1238,13 +1168,11 @@ class HostNode(BaseROS2DeviceNode):
             响应对象,包含查询到的资源
         """
         try:
-            from unilabos.app.web import http_client
-
             data = json.loads(request.command)
             if "uuid" in data and data["uuid"] is not None:
-                http_req = http_client.resource_tree_get([data["uuid"]], data["with_children"])
+                http_req = self.bridges[-1].resource_tree_get([data["uuid"]], data["with_children"])
            elif "id" in data and data["id"].startswith("/"):
-                http_req = http_client.resource_get(data["id"], data["with_children"])
+                http_req = self.bridges[-1].resource_get(data["id"], data["with_children"])
             else:
                 raise ValueError("没有使用正确的物料 id 或 uuid")
             response.response = json.dumps(http_req["data"])
@@ -12,10 +12,11 @@ from unilabos_msgs.srv import ResourceUpdate
 from unilabos.messages import *  # type: ignore # protocol names
 from rclpy.action import ActionServer, ActionClient
 from rclpy.action.server import ServerGoalHandle
+from rclpy.callback_groups import ReentrantCallbackGroup
 from unilabos_msgs.srv._serial_command import SerialCommand_Request, SerialCommand_Response
 
 from unilabos.compile import action_protocol_generators
-from unilabos.resources.graphio import nested_dict_to_list
+from unilabos.resources.graphio import list_to_nested_dict, nested_dict_to_list
 from unilabos.ros.initialize_device import initialize_device_from_dict
 from unilabos.ros.msgs.message_converter import (
     get_action_type,
@@ -23,7 +24,7 @@ from unilabos.ros.msgs.message_converter import (
     convert_from_ros_msg_with_mapping,
 )
 from unilabos.ros.nodes.base_device_node import BaseROS2DeviceNode, DeviceNodeResourceTracker, ROS2DeviceNode
-from unilabos.resources.resource_tracker import ResourceTreeSet, ResourceDictInstance
+from unilabos.ros.nodes.resource_tracker import ResourceTreeSet, ResourceDictInstance
 from unilabos.utils.type_check import get_result_info_str
 
 if TYPE_CHECKING:
@@ -14,9 +14,9 @@ if TYPE_CHECKING:
 
 
 class ResourceDictPositionSize(BaseModel):
-    depth: float = Field(description="Depth", default=0.0)  # z
-    width: float = Field(description="Width", default=0.0)  # x
-    height: float = Field(description="Height", default=0.0)  # y
+    depth: float = Field(description="Depth", default=0.0)
+    width: float = Field(description="Width", default=0.0)
+    height: float = Field(description="Height", default=0.0)
 
 
 class ResourceDictPositionScale(BaseModel):
@@ -66,8 +66,8 @@ class ResourceDict(BaseModel):
     klass: str = Field(alias="class", description="Resource class name")
     pose: ResourceDictPosition = Field(description="Resource position", default_factory=ResourceDictPosition)
     config: Dict[str, Any] = Field(description="Resource configuration")
-    data: Dict[str, Any] = Field(description="Resource data, eg: container liquid data")
-    extra: Dict[str, Any] = Field(description="Extra data, eg: slot index")
+    data: Dict[str, Any] = Field(description="Resource data")
+    extra: Dict[str, Any] = Field(description="Extra data")
 
     @field_serializer("parent_uuid")
     def _serialize_parent(self, parent_uuid: Optional["ResourceDict"]):
@@ -469,9 +469,9 @@ class ResourceTreeSet(object):
                 **res.config,
                 "name": res.name,
                 "type": res.config.get("type", plr_type),
-                "size_x": res.pose.size.width,
-                "size_y": res.pose.size.height,
-                "size_z": res.pose.size.depth,
+                "size_x": res.config.get("size_x", 0),
+                "size_y": res.config.get("size_y", 0),
+                "size_z": res.config.get("size_z", 0),
                 "location": {
                     "x": res.pose.position.x,
                     "y": res.pose.position.y,
@@ -608,16 +608,6 @@ class ResourceTreeSet(object):
         """
         return [tree.root_node for tree in self.trees]
 
-    @property
-    def root_nodes_uuid(self) -> List[ResourceDictInstance]:
-        """
-        获取所有树的根节点
-
-        Returns:
-            所有根节点的资源实例列表
-        """
-        return [tree.root_node.res_content.uuid for tree in self.trees]
-
     @property
     def all_nodes(self) -> List[ResourceDictInstance]:
         """
@@ -11,9 +11,10 @@ import traceback
 from abc import abstractmethod
 from typing import Type, Any, Dict, Optional, TypeVar, Generic, List
 
-from unilabos.resources.resource_tracker import DeviceNodeResourceTracker, ResourceTreeSet, ResourceDictInstance, \
+from unilabos.resources.graphio import nested_dict_to_list, resource_ulab_to_plr
+from unilabos.ros.nodes.resource_tracker import DeviceNodeResourceTracker, ResourceTreeSet, ResourceDictInstance, \
     ResourceTreeInstance
-from unilabos.utils import logger
+from unilabos.utils import logger, import_manager
 from unilabos.utils.cls_creator import create_instance_from_config
 
 # 定义泛型类型变量
@@ -134,7 +135,7 @@ class PyLabRobotCreator(DeviceClassCreator[T]):
         Returns:
             处理后的数据
         """
-        from pylabrobot.resources import Resource
+        from pylabrobot.resources import Deck, Resource
 
         if states is None:
             states = {}
@@ -14,11 +14,7 @@
         ],
         "type": "device",
         "class": "reaction_station.bioyond",
-        "position": {
-            "x": 0,
-            "y": 1100,
-            "z": 0
-        },
+        "position": {"x": 0, "y": 3800, "z": 0},
         "config": {
             "config": {
                 "api_key": "DE9BDDA0",
@@ -61,10 +57,6 @@
                     "BIOYOND_PolymerStation_TipBox": [
                         "枪头盒",
                         "3a143890-9d51-60ac-6d6f-6edb43c12041"
-                    ],
-                    "BIOYOND_PolymerStation_Measurement_Vial": [
-                        "测量小瓶",
-                        "b1fc79c9-5864-4f05-8052-6ed3abc18a97"
                     ]
                 }
             },
@@ -74,9 +66,6 @@
                 "_resource_type": "unilabos.resources.bioyond.decks:BIOYOND_PolymerReactionStation_Deck"
             }
         },
-            "size_x": 2700.0,
-            "size_y": 1080.0,
-            "size_z": 2000.0,
             "protocol_type": []
         },
         "data": {}
@@ -88,11 +77,7 @@
         "parent": "reaction_station_bioyond",
         "type": "device",
         "class": "reaction_station.reactor",
-        "position": {
-            "x": 1150,
-            "y": 380,
-            "z": 0
-        },
+        "position": {"x": 1150, "y": 380, "z": 0},
         "config": {},
         "data": {}
     },
@@ -103,11 +88,7 @@
         "parent": "reaction_station_bioyond",
         "type": "device",
         "class": "reaction_station.reactor",
-        "position": {
-            "x": 1365,
-            "y": 380,
-            "z": 0
-        },
+        "position": {"x": 1365, "y": 380, "z": 0},
         "config": {},
         "data": {}
     },
@@ -118,11 +99,7 @@
         "parent": "reaction_station_bioyond",
         "type": "device",
         "class": "reaction_station.reactor",
-        "position": {
-            "x": 1580,
-            "y": 380,
-            "z": 0
-        },
+        "position": {"x": 1580, "y": 380, "z": 0},
         "config": {},
         "data": {}
     },
@@ -133,11 +110,7 @@
         "parent": "reaction_station_bioyond",
         "type": "device",
         "class": "reaction_station.reactor",
-        "position": {
-            "x": 1790,
-            "y": 380,
-            "z": 0
-        },
+        "position": {"x": 1790, "y": 380, "z": 0},
         "config": {},
         "data": {}
     },
@@ -148,11 +121,7 @@
         "parent": "reaction_station_bioyond",
         "type": "device",
         "class": "reaction_station.reactor",
-        "position": {
-            "x": 2010,
-            "y": 380,
-            "z": 0
-        },
+        "position": {"x": 2010, "y": 380, "z": 0},
         "config": {},
         "data": {}
     },
@@ -165,7 +134,7 @@
         "class": "BIOYOND_PolymerReactionStation_Deck",
         "position": {
             "x": 0,
-            "y": 1100,
+            "y": 0,
             "z": 0
         },
         "config": {
@@ -1,547 +0,0 @@
-import re
-import uuid
-
-import networkx as nx
-from networkx.drawing.nx_agraph import to_agraph
-import matplotlib.pyplot as plt
-from typing import Dict, List, Any, Tuple, Optional
-
-Json = Dict[str, Any]
-
-# ---------------- Graph ----------------
-
-
-class WorkflowGraph:
-    """简单的有向图实现:使用 params 单层参数;inputs 内含连线;支持 node-link 导出"""
-
-    def __init__(self):
-        self.nodes: Dict[str, Dict[str, Any]] = {}
-        self.edges: List[Dict[str, Any]] = []
-
-    def add_node(self, node_id: str, **attrs):
-        self.nodes[node_id] = attrs
-
-    def add_edge(self, source: str, target: str, **attrs):
-        # 将 source_port/target_port 映射为服务端期望的 source_handle_key/target_handle_key
-        source_handle_key = attrs.pop("source_port", "") or attrs.pop("source_handle_key", "")
-        target_handle_key = attrs.pop("target_port", "") or attrs.pop("target_handle_key", "")
-
-        edge = {
-            "source": source,
-            "target": target,
-            "source_node_uuid": source,
-            "target_node_uuid": target,
-            "source_handle_key": source_handle_key,
-            "source_handle_io": attrs.pop("source_handle_io", "source"),
-            "target_handle_key": target_handle_key,
-            "target_handle_io": attrs.pop("target_handle_io", "target"),
-            **attrs,
-        }
-        self.edges.append(edge)
-
-    def _materialize_wiring_into_inputs(
-        self,
-        obj: Any,
-        inputs: Dict[str, Any],
-        variable_sources: Dict[str, Dict[str, Any]],
-        target_node_id: str,
-        base_path: List[str],
-    ):
-        has_var = False
-
-        def walk(node: Any, path: List[str]):
-            nonlocal has_var
-            if isinstance(node, dict):
-                if "__var__" in node:
-                    has_var = True
-                    varname = node["__var__"]
-                    placeholder = f"${{{varname}}}"
-                    src = variable_sources.get(varname)
-                    if src:
-                        key = ".".join(path)  # e.g. "params.foo.bar.0"
-                        inputs[key] = {"node": src["node_id"], "output": src.get("output_name", "result")}
-                        self.add_edge(
-                            str(src["node_id"]),
-                            target_node_id,
-                            source_handle_io=src.get("output_name", "result"),
-                            target_handle_io=key,
-                        )
-                    return placeholder
-                return {k: walk(v, path + [k]) for k, v in node.items()}
-            if isinstance(node, list):
-                return [walk(v, path + [str(i)]) for i, v in enumerate(node)]
-            return node
-
-        replaced = walk(obj, base_path[:])
-        return replaced, has_var
-
-    def add_workflow_node(
-        self,
-        node_id: int,
-        *,
-        device_key: Optional[str] = None,  # 实例名,如 "ser"
-        resource_name: Optional[str] = None,  # registry key(原 device_class)
-        module: Optional[str] = None,
-        template_name: Optional[str] = None,  # 动作/模板名(原 action_key)
-        params: Dict[str, Any],
-        variable_sources: Dict[str, Dict[str, Any]],
-        add_ready_if_no_vars: bool = True,
-        prev_node_id: Optional[int] = None,
-        **extra_attrs,
-    ) -> None:
-        """添加工作流节点:params 单层;自动变量连线与 ready 串联;支持附加属性"""
-        node_id_str = str(node_id)
-        inputs: Dict[str, Any] = {}
-
-        params, has_var = self._materialize_wiring_into_inputs(
-            params, inputs, variable_sources, node_id_str, base_path=["params"]
-        )
-
-        if add_ready_if_no_vars and not has_var:
-            last_id = str(prev_node_id) if prev_node_id is not None else "-1"
-            inputs["ready"] = {"node": int(last_id), "output": "ready"}
-            self.add_edge(last_id, node_id_str, source_handle_io="ready", target_handle_io="ready")
-
-        node_obj = {
-            "device_key": device_key,
-            "resource_name": resource_name,  # ✅ 新名字
-            "module": module,
-            "template_name": template_name,  # ✅ 新名字
-            "params": params,
-            "inputs": inputs,
-        }
-        node_obj.update(extra_attrs or {})
-        self.add_node(node_id_str, parameters=node_obj)
-
-    # 顺序工作流导出(连线在 inputs,不返回 edges)
-    def to_dict(self) -> List[Dict[str, Any]]:
-        result = []
-        for node_id, attrs in self.nodes.items():
-            node = {"uuid": node_id}
-            params = dict(attrs.get("parameters", {}) or {})
-            flat = {k: v for k, v in attrs.items() if k != "parameters"}
-            flat.update(params)
-            node.update(flat)
-            result.append(node)
-        return sorted(result, key=lambda n: int(n["uuid"]) if str(n["uuid"]).isdigit() else n["uuid"])
-
-    # node-link 导出(含 edges)
-    def to_node_link_dict(self) -> Dict[str, Any]:
-        nodes_list = []
-        for node_id, attrs in self.nodes.items():
-            node_attrs = attrs.copy()
-            params = node_attrs.pop("parameters", {}) or {}
-            node_attrs.update(params)
-            nodes_list.append({"uuid": node_id, **node_attrs})
-        return {
-            "directed": True,
-            "multigraph": False,
-            "graph": {},
-            "nodes": nodes_list,
-            "edges": self.edges,
-            "links": self.edges,
-        }
-
-
-def refactor_data(
-    data: List[Dict[str, Any]],
-    action_resource_mapping: Optional[Dict[str, str]] = None,
-) -> List[Dict[str, Any]]:
-    """统一的数据重构函数,根据操作类型自动选择模板
-
-    Args:
-        data: 原始步骤数据列表
-        action_resource_mapping: action 到 resource_name 的映射字典,可选
-    """
-    refactored_data = []
-
-    # 定义操作映射,包含生物实验和有机化学的所有操作
-    OPERATION_MAPPING = {
-        # 生物实验操作
-        "transfer_liquid": "transfer_liquid",
-        "transfer": "transfer",
-        "incubation": "incubation",
-        "move_labware": "move_labware",
-        "oscillation": "oscillation",
-        # 有机化学操作
-        "HeatChillToTemp": "HeatChillProtocol",
-        "StopHeatChill": "HeatChillStopProtocol",
-        "StartHeatChill": "HeatChillStartProtocol",
-        "HeatChill": "HeatChillProtocol",
-        "Dissolve": "DissolveProtocol",
-        "Transfer": "TransferProtocol",
-        "Evaporate": "EvaporateProtocol",
-        "Recrystallize": "RecrystallizeProtocol",
-        "Filter": "FilterProtocol",
-        "Dry": "DryProtocol",
-        "Add": "AddProtocol",
-    }
-
-    UNSUPPORTED_OPERATIONS = ["Purge", "Wait", "Stir", "ResetHandling"]
-
-    for step in data:
-        operation = step.get("action")
-        if not operation or operation in UNSUPPORTED_OPERATIONS:
-            continue
-
-        # 处理重复操作
-        if operation == "Repeat":
-            times = step.get("times", step.get("parameters", {}).get("times", 1))
-            sub_steps = step.get("steps", step.get("parameters", {}).get("steps", []))
-            for i in range(int(times)):
-                sub_data = refactor_data(sub_steps, action_resource_mapping)
-                refactored_data.extend(sub_data)
-            continue
-
-        # 获取模板名称
-        template_name = OPERATION_MAPPING.get(operation)
-        if not template_name:
-            # 自动推断模板类型
-            if operation.lower() in ["transfer", "incubation", "move_labware", "oscillation"]:
-                template_name = f"biomek-{operation}"
-            else:
-                template_name = f"{operation}Protocol"
-
-        # 获取 resource_name
-        resource_name = f"device.{operation.lower()}"
-        if action_resource_mapping:
-            resource_name = action_resource_mapping.get(operation, resource_name)
-
-        # 获取步骤编号,生成 name 字段
-        step_number = step.get("step_number")
-        name = f"Step {step_number}" if step_number is not None else None
-
-        # 创建步骤数据
-        step_data = {
-            "template_name": template_name,
-            "resource_name": resource_name,
-            "description": step.get("description", step.get("purpose", f"{operation} operation")),
-            "lab_node_type": "Device",
-            "param": step.get("parameters", step.get("action_args", {})),
-            "footer": f"{template_name}-{resource_name}",
-        }
-        if name:
-            step_data["name"] = name
-        refactored_data.append(step_data)
-
-    return refactored_data
-
-
-def build_protocol_graph(
-    labware_info: List[Dict[str, Any]],
-    protocol_steps: List[Dict[str, Any]],
-    workstation_name: str,
-    action_resource_mapping: Optional[Dict[str, str]] = None,
-) -> WorkflowGraph:
-    """统一的协议图构建函数,根据设备类型自动选择构建逻辑
-
-    Args:
-        labware_info: labware 信息字典
-        protocol_steps: 协议步骤列表
-        workstation_name: 工作站名称
-        action_resource_mapping: action 到 resource_name 的映射字典,可选
-    """
-    G = WorkflowGraph()
-    resource_last_writer = {}
-
-    protocol_steps = refactor_data(protocol_steps, action_resource_mapping)
-    # 有机化学&移液站协议图构建
-    WORKSTATION_ID = workstation_name
-
-    # 为所有labware创建资源节点
-    res_index = 0
-    for labware_id, item in labware_info.items():
-        # item_id = item.get("id") or item.get("name", f"item_{uuid.uuid4()}")
-        node_id = str(uuid.uuid4())
-
-        # 判断节点类型
-        if "Rack" in str(labware_id) or "Tip" in str(labware_id):
-            lab_node_type = "Labware"
-            description = f"Prepare Labware: {labware_id}"
-            liquid_type = []
-            liquid_volume = []
-        elif item.get("type") == "hardware" or "reactor" in str(labware_id).lower():
-            if "reactor" not in str(labware_id).lower():
-                continue
-            lab_node_type = "Sample"
-            description = f"Prepare Reactor: {labware_id}"
-            liquid_type = []
-            liquid_volume = []
-        else:
-            lab_node_type = "Reagent"
-            description = f"Add Reagent to Flask: {labware_id}"
-            liquid_type = [labware_id]
-            liquid_volume = [1e5]
-
-        res_index += 1
-        G.add_node(
-            node_id,
-            template_name="create_resource",
-            resource_name="host_node",
-            name=f"Res {res_index}",
-            description=description,
-            lab_node_type=lab_node_type,
-            footer="create_resource-host_node",
-            param={
-                "res_id": labware_id,
-                "device_id": WORKSTATION_ID,
-                "class_name": "container",
-                "parent": WORKSTATION_ID,
-                "bind_locations": {"x": 0.0, "y": 0.0, "z": 0.0},
-                "liquid_input_slot": [-1],
-                "liquid_type": liquid_type,
-                "liquid_volume": liquid_volume,
-                "slot_on_deck": "",
-            },
-        )
-        resource_last_writer[labware_id] = f"{node_id}:labware"
-
-    last_control_node_id = None
-
-    # 处理协议步骤
-    for step in protocol_steps:
-        node_id = str(uuid.uuid4())
-        G.add_node(node_id, **step)
-
-        # 控制流
-        if last_control_node_id is not None:
-            G.add_edge(last_control_node_id, node_id, source_port="ready", target_port="ready")
-        last_control_node_id = node_id
-
-        # 物料流
-        params = step.get("param", {})
-        input_resources_possible_names = [
-            "vessel",
-            "to_vessel",
-            "from_vessel",
-            "reagent",
-            "solvent",
-            "compound",
-            "sources",
-            "targets",
-        ]
-
-        for target_port in input_resources_possible_names:
-            resource_name = params.get(target_port)
-            if resource_name and resource_name in resource_last_writer:
-                source_node, source_port = resource_last_writer[resource_name].split(":")
-                G.add_edge(source_node, node_id, source_port=source_port, target_port=target_port)
-
-        output_resources = {
-            "vessel_out": params.get("vessel"),
-            "from_vessel_out": params.get("from_vessel"),
-            "to_vessel_out": params.get("to_vessel"),
-            "filtrate_out": params.get("filtrate_vessel"),
-            "reagent": params.get("reagent"),
-            "solvent": params.get("solvent"),
-            "compound": params.get("compound"),
-            "sources_out": params.get("sources"),
-            "targets_out": params.get("targets"),
-        }
-
-        for source_port, resource_name in output_resources.items():
-            if resource_name:
-                resource_last_writer[resource_name] = f"{node_id}:{source_port}"
-
-    return G
-
-
-def draw_protocol_graph(protocol_graph: WorkflowGraph, output_path: str):
-    """
-    (辅助功能) 使用 networkx 和 matplotlib 绘制协议工作流图,用于可视化。
-    """
-    if not protocol_graph:
-        print("Cannot draw graph: Graph object is empty.")
-        return
-
-    G = nx.DiGraph()
-
-    for node_id, attrs in protocol_graph.nodes.items():
-        label = attrs.get("description", attrs.get("template_name", node_id[:8]))
-        G.add_node(node_id, label=label, **attrs)
-
-    for edge in protocol_graph.edges:
-        G.add_edge(edge["source"], edge["target"])
-
-    plt.figure(figsize=(20, 15))
-    try:
-        pos = nx.nx_agraph.graphviz_layout(G, prog="dot")
-    except Exception:
-        pos = nx.shell_layout(G)  # Fallback layout
-
-    node_labels = {node: data["label"] for node, data in G.nodes(data=True)}
-    nx.draw(
-        G,
-        pos,
-        with_labels=False,
-        node_size=2500,
-        node_color="skyblue",
-        node_shape="o",
-        edge_color="gray",
-        width=1.5,
-        arrowsize=15,
-    )
-    nx.draw_networkx_labels(G, pos, labels=node_labels, font_size=8, font_weight="bold")
-
-    plt.title("Chemical Protocol Workflow Graph", size=15)
-    plt.savefig(output_path, dpi=300, bbox_inches="tight")
-    plt.close()
-    print(f" - Visualization saved to '{output_path}'")
-
-
-COMPASS = {"n", "e", "s", "w", "ne", "nw", "se", "sw", "c"}
-
-
-def _is_compass(port: str) -> bool:
-    return isinstance(port, str) and port.lower() in COMPASS
-
-
-def draw_protocol_graph_with_ports(protocol_graph, output_path: str, rankdir: str = "LR"):
-    """
-    使用 Graphviz 端口语法绘制协议工作流图。
-    - 若边上的 source_port/target_port 是 compass(n/e/s/w/...),直接用 compass。
-    - 否则自动为节点创建 record 形状并定义命名端口 <portname>。
-    最终由 PyGraphviz 渲染并输出到 output_path(后缀决定格式,如 .png/.svg/.pdf)。
-    """
-    if not protocol_graph:
-        print("Cannot draw graph: Graph object is empty.")
-        return
-
-    # 1) 先用 networkx 搭建有向图,保留端口属性
-    G = nx.DiGraph()
-    for node_id, attrs in protocol_graph.nodes.items():
-        label = attrs.get("description", attrs.get("template_name", node_id[:8]))
-        # 保留一个干净的“中心标签”,用于放在 record 的中间槽
-        G.add_node(node_id, _core_label=str(label), **{k: v for k, v in attrs.items() if k not in ("label",)})
-
-    edges_data = []
-    in_ports_by_node = {}  # 收集命名输入端口
-    out_ports_by_node = {}  # 收集命名输出端口
-
-    for edge in protocol_graph.edges:
-        u = edge["source"]
-        v = edge["target"]
-        sp = edge.get("source_handle_key") or edge.get("source_port")
-        tp = edge.get("target_handle_key") or edge.get("target_port")
-
-        # 记录到图里(保留原始端口信息)
-        G.add_edge(u, v, source_handle_key=sp, target_handle_key=tp)
-        edges_data.append((u, v, sp, tp))
-
-        # 如果不是 compass,就按“命名端口”先归类,等会儿给节点造 record
-        if sp and not _is_compass(sp):
-            out_ports_by_node.setdefault(u, set()).add(str(sp))
-        if tp and not _is_compass(tp):
-            in_ports_by_node.setdefault(v, set()).add(str(tp))
-
-    # 2) 转为 AGraph,使用 Graphviz 渲染
-    A = to_agraph(G)
-    A.graph_attr.update(rankdir=rankdir, splines="true", concentrate="false", fontsize="10")
-    A.node_attr.update(
-        shape="box", style="rounded,filled", fillcolor="lightyellow", color="#999999", fontname="Helvetica"
-    )
-    A.edge_attr.update(arrowsize="0.8", color="#666666")
-
-    # 3) 为需要命名端口的节点设置 record 形状与 label
-    # 左列 = 输入端口;中间 = 核心标签;右列 = 输出端口
-    for n in A.nodes():
-        node = A.get_node(n)
-        core = G.nodes[n].get("_core_label", n)
-
-        in_ports = sorted(in_ports_by_node.get(n, []))
-        out_ports = sorted(out_ports_by_node.get(n, []))
-
-        # 如果该节点涉及命名端口,则用 record;否则保留原 box
-        if in_ports or out_ports:
-
-            def port_fields(ports):
-                if not ports:
-                    return " "  # 必须留一个空槽占位
-                # 每个端口一个小格子,<p> name
-                return "|".join(f"<{re.sub(r'[^A-Za-z0-9_:.|-]', '_', p)}> {p}" for p in ports)
-
-            left = port_fields(in_ports)
-            right = port_fields(out_ports)
-
-            # 三栏:左(入) | 中(节点名) | 右(出)
-            record_label = f"{{ {left} | {core} | {right} }}"
-            node.attr.update(shape="record", label=record_label)
-        else:
-            # 没有命名端口:普通盒子,显示核心标签
-            node.attr.update(label=str(core))
-
-    # 4) 给边设置 headport / tailport
-    # - 若端口为 compass:直接用 compass(e.g., headport="e")
-    # - 若端口为命名端口:使用在 record 中定义的 <port> 名(同名即可)
-    for u, v, sp, tp in edges_data:
-        e = A.get_edge(u, v)
-
-        # Graphviz 属性:tail 是源,head 是目标
-        if sp:
-            if _is_compass(sp):
-                e.attr["tailport"] = sp.lower()
-            else:
-                # 与 record label 中 <port> 名一致;特殊字符已在 label 中做了清洗
-                e.attr["tailport"] = re.sub(r"[^A-Za-z0-9_:.|-]", "_", str(sp))
-
-        if tp:
-            if _is_compass(tp):
-                e.attr["headport"] = tp.lower()
-            else:
-                e.attr["headport"] = re.sub(r"[^A-Za-z0-9_:.|-]", "_", str(tp))
-
-        # 可选:若想让边更贴边缘,可设置 constraint/spline 等
-        # e.attr["arrowhead"] = "vee"
-
-    # 5) 输出
-    A.draw(output_path, prog="dot")
-    print(f" - Port-aware workflow rendered to '{output_path}'")
-
-
-# ---------------- Registry Adapter ----------------
-
-
-class RegistryAdapter:
-    """根据 module 的类名(冒号右侧)反查 registry 的 resource_name(原 device_class),并抽取参数顺序"""
-
-    def __init__(self, device_registry: Dict[str, Any]):
-        self.device_registry = device_registry or {}
-        self.module_class_to_resource = self._build_module_class_index()
-
-    def _build_module_class_index(self) -> Dict[str, str]:
-        idx = {}
-        for resource_name, info in self.device_registry.items():
-            module = info.get("module")
-            if isinstance(module, str) and ":" in module:
-                cls = module.split(":")[-1]
-                idx[cls] = resource_name
-                idx[cls.lower()] = resource_name
-        return idx
-
-    def resolve_resource_by_classname(self, class_name: str) -> Optional[str]:
-        if not class_name:
-            return None
-        return self.module_class_to_resource.get(class_name) or self.module_class_to_resource.get(class_name.lower())
-
-    def get_device_module(self, resource_name: Optional[str]) -> Optional[str]:
-        if not resource_name:
-            return None
-        return self.device_registry.get(resource_name, {}).get("module")
-
-    def get_actions(self, resource_name: Optional[str]) -> Dict[str, Any]:
-        if not resource_name:
-            return {}
-        return (self.device_registry.get(resource_name, {}).get("class", {}).get("action_value_mappings", {})) or {}
-
-    def get_action_schema(self, resource_name: Optional[str], template_name: str) -> Optional[Json]:
-        return (self.get_actions(resource_name).get(template_name) or {}).get("schema")
-
-    def get_action_goal_default(self, resource_name: Optional[str], template_name: str) -> Json:
-        return (self.get_actions(resource_name).get(template_name) or {}).get("goal_default", {}) or {}
-
-    def get_action_input_keys(self, resource_name: Optional[str], template_name: str) -> List[str]:
-        schema = self.get_action_schema(resource_name, template_name) or {}
-        goal = (schema.get("properties") or {}).get("goal") or {}
-        props = goal.get("properties") or {}
-        required = goal.get("required") or []
-        return list(dict.fromkeys(required + list(props.keys())))
@@ -1,356 +0,0 @@
|
|||||||
"""
|
|
||||||
JSON 工作流转换模块
|
|
||||||
|
|
||||||
提供从多种 JSON 格式转换为统一工作流格式的功能。
|
|
||||||
支持的格式:
|
|
||||||
1. workflow/reagent 格式
|
|
||||||
2. steps_info/labware_info 格式
|
|
||||||
"""
|
|
||||||
|
|
||||||
import json
|
|
||||||
from os import PathLike
|
|
||||||
from pathlib import Path
|
|
||||||
from typing import Any, Dict, List, Optional, Set, Tuple, Union
|
|
||||||
|
|
||||||
from unilabos.workflow.common import WorkflowGraph, build_protocol_graph
|
|
||||||
from unilabos.registry.registry import lab_registry
|
|
||||||
|
|
||||||
|
|
||||||
def get_action_handles(resource_name: str, template_name: str) -> Dict[str, List[str]]:
|
|
||||||
"""
|
|
||||||
从 registry 获取指定设备和动作的 handles 配置
|
|
||||||
|
|
||||||
Args:
|
|
||||||
resource_name: 设备资源名称,如 "liquid_handler.prcxi"
|
|
||||||
template_name: 动作模板名称,如 "transfer_liquid"
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
包含 source 和 target handler_keys 的字典:
|
|
||||||
{"source": ["sources_out", "targets_out", ...], "target": ["sources", "targets", ...]}
|
|
||||||
"""
|
|
||||||
result = {"source": [], "target": []}
|
|
||||||
|
|
||||||
device_info = lab_registry.device_type_registry.get(resource_name, {})
|
|
||||||
if not device_info:
|
|
||||||
return result
|
|
||||||
|
|
||||||
action_mappings = device_info.get("class", {}).get("action_value_mappings", {})
|
|
||||||
action_config = action_mappings.get(template_name, {})
|
|
||||||
handles = action_config.get("handles", {})
|
|
||||||
|
|
||||||
if isinstance(handles, dict):
|
|
||||||
# 处理 input handles (作为 target)
|
|
||||||
for handle in handles.get("input", []):
|
|
||||||
handler_key = handle.get("handler_key", "")
|
|
||||||
if handler_key:
|
|
||||||
result["source"].append(handler_key)
|
|
||||||
# 处理 output handles (作为 source)
|
|
||||||
for handle in handles.get("output", []):
|
|
||||||
handler_key = handle.get("handler_key", "")
|
|
||||||
if handler_key:
|
|
||||||
result["target"].append(handler_key)
|
|
||||||
|
|
||||||
return result
|
|
||||||
|
|
||||||
|
|
||||||
def validate_workflow_handles(graph: WorkflowGraph) -> Tuple[bool, List[str]]:
    """
    Validate that the handle configuration of every edge in the workflow graph is correct.

    Args:
        graph: the workflow graph object

    Returns:
        (is_valid, errors): whether the graph is valid, plus the list of error messages
    """
    errors = []
    nodes = graph.nodes

    for edge in graph.edges:
        left_uuid = edge.get("source")
        right_uuid = edge.get("target")
        # target_handle_key belongs to the target side: the input (incoming) port of the right node
        # source_handle_key belongs to the source side: the output (outgoing) port of the left node
        right_source_conn_key = edge.get("target_handle_key", "")
        left_target_conn_key = edge.get("source_handle_key", "")

        # Look up the source and target node information
        left_node = nodes.get(left_uuid, {})
        right_node = nodes.get(right_uuid, {})

        left_res_name = left_node.get("resource_name", "")
        left_template_name = left_node.get("template_name", "")
        right_res_name = right_node.get("resource_name", "")
        right_template_name = right_node.get("template_name", "")

        # Output handles of the source node
        left_node_handles = get_action_handles(left_res_name, left_template_name)
        target_valid_keys = left_node_handles.get("target", [])
        target_valid_keys.append("ready")

        # Input handles of the target node
        right_node_handles = get_action_handles(right_res_name, right_template_name)
        source_valid_keys = right_node_handles.get("source", [])
        source_valid_keys.append("ready")

        # If the node declares output handles, the source port must be valid
        if not right_source_conn_key:
            node_name = left_node.get("name", left_uuid[:8])
            errors.append(f"Source node '{node_name}' has an empty source_handle_key, expected one of: {source_valid_keys}")
        elif right_source_conn_key not in source_valid_keys:
            node_name = left_node.get("name", left_uuid[:8])
            errors.append(
                f"Source node '{node_name}': source handle '{right_source_conn_key}' does not exist, "
                f"supported handles: {source_valid_keys}"
            )

        # If the node declares input handles, the target port must be valid
        if not left_target_conn_key:
            node_name = right_node.get("name", right_uuid[:8])
            errors.append(f"Target node '{node_name}' has an empty target_handle_key, expected one of: {target_valid_keys}")
        elif left_target_conn_key not in target_valid_keys:
            node_name = right_node.get("name", right_uuid[:8])
            errors.append(
                f"Target node '{node_name}': target handle '{left_target_conn_key}' does not exist, "
                f"supported handles: {target_valid_keys}"
            )

    return len(errors) == 0, errors


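For illustration (not part of the original module), the tuple returned above can be consumed as follows; `graph` is assumed to be a WorkflowGraph produced by `build_protocol_graph` or `convert_from_json` below:

```python
# Illustrative sketch only: report handle problems without raising.
is_valid, errors = validate_workflow_handles(graph)
if not is_valid:
    for err in errors:
        print(f"handle check: {err}")
```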
# Mapping from action name to resource_name
ACTION_RESOURCE_MAPPING: Dict[str, str] = {
    # Biology operations
    "transfer_liquid": "liquid_handler.prcxi",
    "transfer": "liquid_handler.prcxi",
    "incubation": "incubator.prcxi",
    "move_labware": "labware_mover.prcxi",
    "oscillation": "shaker.prcxi",
    # Organic chemistry operations
    "HeatChillToTemp": "heatchill.chemputer",
    "StopHeatChill": "heatchill.chemputer",
    "StartHeatChill": "heatchill.chemputer",
    "HeatChill": "heatchill.chemputer",
    "Dissolve": "stirrer.chemputer",
    "Transfer": "liquid_handler.chemputer",
    "Evaporate": "rotavap.chemputer",
    "Recrystallize": "reactor.chemputer",
    "Filter": "filter.chemputer",
    "Dry": "dryer.chemputer",
    "Add": "liquid_handler.chemputer",
}


def normalize_steps(data: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
    """
    Normalize step data from different formats into a unified format.

    Supported input formats:
    - action + parameters
    - action + action_args
    - operation + parameters

    Args:
        data: list of raw step data

    Returns:
        Normalized step list of the form
        [{"action": str, "parameters": dict, "description": str?, "step_number": int?}, ...]
    """
    normalized = []
    for idx, step in enumerate(data):
        # Action name (either an "action" or an "operation" field)
        action = step.get("action") or step.get("operation")
        if not action:
            continue

        # Parameters (either a "parameters" or an "action_args" field)
        raw_params = step.get("parameters") or step.get("action_args") or {}
        params = dict(raw_params)

        # Normalize source/target -> sources/targets
        if "source" in raw_params and "sources" not in raw_params:
            params["sources"] = raw_params["source"]
        if "target" in raw_params and "targets" not in raw_params:
            params["targets"] = raw_params["target"]

        # Description (either a "description" or a "purpose" field)
        description = step.get("description") or step.get("purpose")

        # Step number (prefer the step_number from the raw data, otherwise use index + 1)
        step_number = step.get("step_number", idx + 1)

        step_dict = {"action": action, "parameters": params, "step_number": step_number}
        if description:
            step_dict["description"] = description

        normalized.append(step_dict)

    return normalized


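A small illustration (not from the original file) of the field aliasing performed by `normalize_steps`:

```python
# Illustrative only: "operation"/"action_args"/"source" are mapped onto the canonical fields.
steps = [{"operation": "transfer_liquid", "action_args": {"source": "A1", "volume": 100}}]
print(normalize_steps(steps))
# [{'action': 'transfer_liquid',
#   'parameters': {'source': 'A1', 'volume': 100, 'sources': 'A1'},
#   'step_number': 1}]
```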
def normalize_labware(data: List[Dict[str, Any]]) -> Dict[str, Dict[str, Any]]:
    """
    Normalize labware data from different formats into a unified dict format.

    Supported input formats:
    - reagent_name + material_name + positions
    - name + labware + slot

    Args:
        data: list of raw labware data

    Returns:
        Normalized labware dict of the form
        {name: {"slot": int, "labware": str, "well": list, "type": str, "role": str, "name": str}, ...}
    """
    labware = {}
    for item in data:
        # Key name (prefer reagent_name, then material_name or name)
        reagent_name = item.get("reagent_name")
        key = reagent_name or item.get("material_name") or item.get("name")
        if not key:
            continue

        key = str(key)

        # Handle duplicate keys by appending a numeric suffix
        idx = 1
        original_key = key
        while key in labware:
            idx += 1
            key = f"{original_key}_{idx}"

        labware[key] = {
            "slot": item.get("positions") or item.get("slot"),
            "labware": item.get("material_name") or item.get("labware"),
            "well": item.get("well", []),
            "type": item.get("type", "reagent"),
            "role": item.get("role", ""),
            "name": key,
        }

    return labware


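Similarly, a sketch (not from the original file) showing how duplicate reagent names are de-duplicated with a numeric suffix:

```python
# Illustrative only: the second "buffer" entry is stored under "buffer_2".
items = [
    {"reagent_name": "buffer", "material_name": "corning_96_wellplate", "positions": 1},
    {"reagent_name": "buffer", "material_name": "corning_96_wellplate", "positions": 2},
]
print(list(normalize_labware(items)))  # ['buffer', 'buffer_2']
```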
def convert_from_json(
    data: Union[str, PathLike, Dict[str, Any]],
    workstation_name: str = "PRCXi",
    validate: bool = True,
) -> WorkflowGraph:
    """
    Convert JSON data or a JSON file into a WorkflowGraph.

    Supported JSON formats:
    1. {"workflow": [...], "reagent": {...}} - direct format
    2. {"steps_info": [...], "labware_info": [...]} - format that needs normalization
    3. {"steps": [...], "labware": [...]} - another common format

    Args:
        data: JSON file path, dict data, or JSON string
        workstation_name: workstation name, defaults to "PRCXi"
        validate: whether to validate the handle configuration, defaults to True

    Returns:
        WorkflowGraph: the constructed workflow graph

    Raises:
        ValueError: unsupported JSON format or handle validation failure
        FileNotFoundError: file does not exist
        json.JSONDecodeError: JSON parsing failure
    """
    # Handle the input data
    if isinstance(data, (str, PathLike)):
        path = Path(data)
        if path.exists():
            with path.open("r", encoding="utf-8") as fp:
                json_data = json.load(fp)
        elif isinstance(data, str):
            # Try to parse it as a JSON string
            json_data = json.loads(data)
        else:
            raise FileNotFoundError(f"File does not exist: {data}")
    elif isinstance(data, dict):
        json_data = data
    else:
        raise TypeError(f"Unsupported data type: {type(data)}")

    # Parse the data according to its format
    if "workflow" in json_data and "reagent" in json_data:
        # Format 1: workflow/reagent (already in canonical form)
        protocol_steps = json_data["workflow"]
        labware_info = json_data["reagent"]
    elif "steps_info" in json_data and "labware_info" in json_data:
        # Format 2: steps_info/labware_info (needs normalization)
        protocol_steps = normalize_steps(json_data["steps_info"])
        labware_info = normalize_labware(json_data["labware_info"])
    elif "steps" in json_data and "labware" in json_data:
        # Format 3: steps/labware (another common format)
        protocol_steps = normalize_steps(json_data["steps"])
        if isinstance(json_data["labware"], list):
            labware_info = normalize_labware(json_data["labware"])
        else:
            labware_info = json_data["labware"]
    else:
        raise ValueError(
            "Unsupported JSON format. Supported formats:\n"
            "1. {'workflow': [...], 'reagent': {...}}\n"
            "2. {'steps_info': [...], 'labware_info': [...]}\n"
            "3. {'steps': [...], 'labware': [...]}"
        )

    # Build the workflow graph
    graph = build_protocol_graph(
        labware_info=labware_info,
        protocol_steps=protocol_steps,
        workstation_name=workstation_name,
        action_resource_mapping=ACTION_RESOURCE_MAPPING,
    )

    # Validate the handle configuration
    if validate:
        is_valid, errors = validate_workflow_handles(graph)
        if not is_valid:
            import warnings

            for error in errors:
                warnings.warn(f"Handle validation warning: {error}")

    return graph


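A hedged usage sketch: the payload below is invented for illustration and assumes `build_protocol_graph` can resolve `transfer_liquid` through ACTION_RESOURCE_MAPPING:

```python
# Hypothetical payload in format 3 ("steps"/"labware"); not taken from the repository.
payload = {
    "steps": [{"action": "transfer_liquid", "parameters": {"source": "A1", "target": "B1", "volume": 50}}],
    "labware": [{"name": "plate_1", "labware": "corning_96_wellplate", "slot": 1}],
}
graph = convert_from_json(payload, workstation_name="PRCXi")
print(graph.to_node_link_dict())
```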
def convert_json_to_node_link(
    data: Union[str, PathLike, Dict[str, Any]],
    workstation_name: str = "PRCXi",
) -> Dict[str, Any]:
    """
    Convert JSON data into a node-link style dict.

    Args:
        data: JSON file path, dict data, or JSON string
        workstation_name: workstation name, defaults to "PRCXi"

    Returns:
        Dict: the workflow data in node-link format
    """
    graph = convert_from_json(data, workstation_name)
    return graph.to_node_link_dict()


def convert_json_to_workflow_list(
    data: Union[str, PathLike, Dict[str, Any]],
    workstation_name: str = "PRCXi",
) -> List[Dict[str, Any]]:
    """
    Convert JSON data into the workflow list format.

    Args:
        data: JSON file path, dict data, or JSON string
        workstation_name: workstation name, defaults to "PRCXi"

    Returns:
        List: the list of workflow nodes
    """
    graph = convert_from_json(data, workstation_name)
    return graph.to_dict()


# Aliases with a leading underscore, kept for backward compatibility
_normalize_steps = normalize_steps
_normalize_labware = normalize_labware
@@ -1,241 +0,0 @@
import ast
import json
from typing import Dict, List, Any, Tuple, Optional

from .common import WorkflowGraph, RegistryAdapter

Json = Dict[str, Any]

# ---------------- Converter ----------------


class DeviceMethodConverter:
    """
    - Unified fields: resource_name (formerly device_class) and template_name (formerly action_key)
    - params is a single flat level; inputs use the 'params.' prefix
    - SimpleGraph.add_workflow_node takes care of variable wiring and edges
    """
    def __init__(self, device_registry: Optional[Dict[str, Any]] = None):
        self.graph = WorkflowGraph()
        self.variable_sources: Dict[str, Dict[str, Any]] = {}  # var -> {node_id, output_name}
        self.instance_to_resource: Dict[str, Optional[str]] = {}  # instance name -> resource_name
        self.node_id_counter: int = 0
        self.registry = RegistryAdapter(device_registry or {})

    # ---- helpers ----
    def _new_node_id(self) -> int:
        nid = self.node_id_counter
        self.node_id_counter += 1
        return nid

    def _assign_targets(self, targets) -> List[str]:
        names: List[str] = []
        if isinstance(targets, ast.Tuple):
            for elt in targets.elts:
                if isinstance(elt, ast.Name):
                    names.append(elt.id)
        elif isinstance(targets, ast.Name):
            names.append(targets.id)
        return names

    def _extract_device_instantiation(self, node) -> Optional[Tuple[str, str]]:
        if not isinstance(node.value, ast.Call):
            return None
        callee = node.value.func
        if isinstance(callee, ast.Name):
            class_name = callee.id
        elif isinstance(callee, ast.Attribute) and isinstance(callee.value, ast.Name):
            class_name = callee.attr
        else:
            return None
        if isinstance(node.targets[0], ast.Name):
            instance = node.targets[0].id
            return instance, class_name
        return None

    def _extract_call(self, call) -> Tuple[str, str, Dict[str, Any], str]:
        owner_name, method_name, call_kind = "", "", "func"
        if isinstance(call.func, ast.Attribute):
            method_name = call.func.attr
            if isinstance(call.func.value, ast.Name):
                owner_name = call.func.value.id
                call_kind = "instance" if owner_name in self.instance_to_resource else "class_or_module"
            elif isinstance(call.func.value, ast.Attribute) and isinstance(call.func.value.value, ast.Name):
                owner_name = call.func.value.attr
                call_kind = "class_or_module"
        elif isinstance(call.func, ast.Name):
            method_name = call.func.id
            call_kind = "func"

        def pack(node):
            if isinstance(node, ast.Name):
                return {"type": "variable", "value": node.id}
            if isinstance(node, ast.Constant):
                return {"type": "constant", "value": node.value}
            if isinstance(node, ast.Dict):
                return {"type": "dict", "value": self._parse_dict(node)}
            if isinstance(node, ast.List):
                return {"type": "list", "value": self._parse_list(node)}
            return {"type": "raw", "value": ast.unparse(node) if hasattr(ast, "unparse") else str(node)}

        args: Dict[str, Any] = {}
        pos: List[Any] = []
        for a in call.args:
            pos.append(pack(a))
        for kw in call.keywords:
            args[kw.arg] = pack(kw.value)
        if pos:
            args["_positional"] = pos
        return owner_name, method_name, args, call_kind

    def _parse_dict(self, node) -> Dict[str, Any]:
        out: Dict[str, Any] = {}
        for k, v in zip(node.keys, node.values):
            if isinstance(k, ast.Constant):
                key = str(k.value)
                if isinstance(v, ast.Name):
                    out[key] = f"var:{v.id}"
                elif isinstance(v, ast.Constant):
                    out[key] = v.value
                elif isinstance(v, ast.Dict):
                    out[key] = self._parse_dict(v)
                elif isinstance(v, ast.List):
                    out[key] = self._parse_list(v)
        return out

    def _parse_list(self, node) -> List[Any]:
        out: List[Any] = []
        for elt in node.elts:
            if isinstance(elt, ast.Name):
                out.append(f"var:{elt.id}")
            elif isinstance(elt, ast.Constant):
                out.append(elt.value)
            elif isinstance(elt, ast.Dict):
                out.append(self._parse_dict(elt))
            elif isinstance(elt, ast.List):
                out.append(self._parse_list(elt))
        return out

    def _normalize_var_tokens(self, x: Any) -> Any:
        if isinstance(x, str) and x.startswith("var:"):
            return {"__var__": x[4:]}
        if isinstance(x, list):
            return [self._normalize_var_tokens(i) for i in x]
        if isinstance(x, dict):
            return {k: self._normalize_var_tokens(v) for k, v in x.items()}
        return x

    def _make_params_payload(self, resource_name: Optional[str], template_name: str, call_args: Dict[str, Any]) -> Dict[str, Any]:
        input_keys = self.registry.get_action_input_keys(resource_name, template_name) if resource_name else []
        defaults = self.registry.get_action_goal_default(resource_name, template_name) if resource_name else {}
        params: Dict[str, Any] = dict(defaults)

        def unpack(p):
            t, v = p.get("type"), p.get("value")
            if t == "variable":
                return {"__var__": v}
            if t in ("dict", "list"):
                return self._normalize_var_tokens(v)
            return v

        for k, p in call_args.items():
            if k == "_positional":
                continue
            params[k] = unpack(p)

        pos = call_args.get("_positional", [])
        if pos:
            if input_keys:
                for i, p in enumerate(pos):
                    if i >= len(input_keys):
                        break
                    name = input_keys[i]
                    if name in params:
                        continue
                    params[name] = unpack(p)
            else:
                for i, p in enumerate(pos):
                    params[f"arg_{i}"] = unpack(p)
        return params

    # ---- handlers ----
    def _on_assign(self, stmt):
        inst = self._extract_device_instantiation(stmt)
        if inst:
            instance, code_class = inst
            resource_name = self.registry.resolve_resource_by_classname(code_class)
            self.instance_to_resource[instance] = resource_name
            return

        if isinstance(stmt.value, ast.Call):
            owner, method, call_args, kind = self._extract_call(stmt.value)
            if kind == "instance":
                device_key = owner
                resource_name = self.instance_to_resource.get(owner)
            else:
                device_key = owner
                resource_name = self.registry.resolve_resource_by_classname(owner)

            module = self.registry.get_device_module(resource_name)
            params = self._make_params_payload(resource_name, method, call_args)

            nid = self._new_node_id()
            self.graph.add_workflow_node(
                nid,
                device_key=device_key,
                resource_name=resource_name,
                module=module,
                template_name=method,
                params=params,
                variable_sources=self.variable_sources,
                add_ready_if_no_vars=True,
                prev_node_id=(nid - 1) if nid > 0 else None,
            )

            out_vars = self._assign_targets(stmt.targets[0])
            for var in out_vars:
                self.variable_sources[var] = {"node_id": nid, "output_name": "result"}

    def _on_expr(self, stmt):
        if not isinstance(stmt.value, ast.Call):
            return
        owner, method, call_args, kind = self._extract_call(stmt.value)
        if kind == "instance":
            device_key = owner
            resource_name = self.instance_to_resource.get(owner)
        else:
            device_key = owner
            resource_name = self.registry.resolve_resource_by_classname(owner)

        module = self.registry.get_device_module(resource_name)
        params = self._make_params_payload(resource_name, method, call_args)

        nid = self._new_node_id()
        self.graph.add_workflow_node(
            nid,
            device_key=device_key,
            resource_name=resource_name,
            module=module,
            template_name=method,
            params=params,
            variable_sources=self.variable_sources,
            add_ready_if_no_vars=True,
            prev_node_id=(nid - 1) if nid > 0 else None,
        )

    def convert(self, python_code: str):
        tree = ast.parse(python_code)
        for stmt in tree.body:
            if isinstance(stmt, ast.Assign):
                self._on_assign(stmt)
            elif isinstance(stmt, ast.Expr):
                self._on_expr(stmt)
        return self
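A sketch of how the converter might be driven; the scripted pump API and the empty registry are assumptions for the example, not part of this file:

```python
# Illustrative only: convert a small scripted protocol into a workflow graph.
script = """
pump = SyringePump()
pump.aspirate(volume=1.0)
pump.dispense(volume=1.0)
"""
converter = DeviceMethodConverter(device_registry={})
graph = converter.convert(script).graph
```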
@@ -1,131 +0,0 @@
from typing import List, Any, Dict
import xml.etree.ElementTree as ET


def convert_to_type(val: str) -> Any:
    """Convert a string value to an appropriate data type."""
    if val == "True":
        return True
    if val == "False":
        return False
    if val == "?":
        return None
    if val.endswith(" g"):
        return float(val.split(" ")[0])
    if val.endswith("mg"):
        return float(val.split("mg")[0])
    elif val.endswith("mmol"):
        return float(val.split("mmol")[0]) / 1000
    elif val.endswith("mol"):
        return float(val.split("mol")[0])
    elif val.endswith("ml"):
        return float(val.split("ml")[0])
    elif val.endswith("RPM"):
        return float(val.split("RPM")[0])
    elif val.endswith(" °C"):
        return float(val.split(" ")[0])
    elif val.endswith(" %"):
        return float(val.split(" ")[0])
    return val


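For illustration (values invented), the unit-suffix handling above behaves as follows:

```python
# Illustrative only: typical conversions performed by convert_to_type.
print(convert_to_type("25 °C"))    # 25.0
print(convert_to_type("2 mmol"))   # 0.002 (converted to mol)
print(convert_to_type("300RPM"))   # 300.0
print(convert_to_type("?"))        # None
print(convert_to_type("ethanol"))  # "ethanol" (returned unchanged)
```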
def flatten_xdl_procedure(procedure_elem: ET.Element) -> List[ET.Element]:
    """Flatten a nested XDL procedure structure."""
    flattened_operations = []
    TEMP_UNSUPPORTED_PROTOCOL = ["Purge", "Wait", "Stir", "ResetHandling"]

    def extract_operations(element: ET.Element):
        if element.tag not in ["Prep", "Reaction", "Workup", "Purification", "Procedure"]:
            if element.tag not in TEMP_UNSUPPORTED_PROTOCOL:
                flattened_operations.append(element)

        for child in element:
            extract_operations(child)

    for child in procedure_elem:
        extract_operations(child)

    return flattened_operations


def parse_xdl_content(xdl_content: str) -> tuple:
    """Parse XDL content."""
    try:
        xdl_content_cleaned = "".join(c for c in xdl_content if c.isprintable())
        root = ET.fromstring(xdl_content_cleaned)

        synthesis_elem = root.find("Synthesis")
        if synthesis_elem is None:
            return None, None, None

        # Parse hardware components
        hardware_elem = synthesis_elem.find("Hardware")
        hardware = []
        if hardware_elem is not None:
            hardware = [{"id": c.get("id"), "type": c.get("type")} for c in hardware_elem.findall("Component")]

        # Parse reagents
        reagents_elem = synthesis_elem.find("Reagents")
        reagents = []
        if reagents_elem is not None:
            reagents = [{"name": r.get("name"), "role": r.get("role", "")} for r in reagents_elem.findall("Reagent")]

        # Parse the procedure
        procedure_elem = synthesis_elem.find("Procedure")
        if procedure_elem is None:
            return None, None, None

        flattened_operations = flatten_xdl_procedure(procedure_elem)
        return hardware, reagents, flattened_operations

    except ET.ParseError as e:
        raise ValueError(f"Invalid XDL format: {e}")


def convert_xdl_to_dict(xdl_content: str) -> Dict[str, Any]:
    """
    Convert XDL XML content into the standard dict format.

    Args:
        xdl_content: XDL XML content

    Returns:
        The conversion result, containing step and labware information.
    """
    try:
        hardware, reagents, flattened_operations = parse_xdl_content(xdl_content)
        if hardware is None:
            return {"error": "Failed to parse XDL content", "success": False}

        # Convert XDL elements into dict format
        steps_data = []
        for elem in flattened_operations:
            # Convert parameter types
            parameters = {}
            for key, val in elem.attrib.items():
                converted_val = convert_to_type(val)
                if converted_val is not None:
                    parameters[key] = converted_val

            step_dict = {
                "operation": elem.tag,
                "parameters": parameters,
                "description": elem.get("purpose", f"Operation: {elem.tag}"),
            }
            steps_data.append(step_dict)

        # Merge hardware and reagents into the unified labware_info format
        labware_data = []
        labware_data.extend({"id": hw["id"], "type": "hardware", **hw} for hw in hardware)
        labware_data.extend({"name": reagent["name"], "type": "reagent", **reagent} for reagent in reagents)

        return {
            "success": True,
            "steps": steps_data,
            "labware": labware_data,
            "message": f"Successfully converted XDL to dict format. Found {len(steps_data)} steps and {len(labware_data)} labware items.",
        }

    except Exception as e:
        error_msg = f"XDL conversion failed: {str(e)}"
        return {"error": error_msg, "success": False}
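A minimal usage sketch with a made-up XDL snippet; the tags follow common XDL conventions but the document is invented for the example:

```python
# Illustrative only: a tiny XDL document converted with convert_xdl_to_dict.
xdl = """
<XDL>
  <Synthesis>
    <Hardware><Component id="reactor" type="reactor"/></Hardware>
    <Reagents><Reagent name="ethanol" role="solvent"/></Reagents>
    <Procedure>
      <Add vessel="reactor" reagent="ethanol" volume="10 ml"/>
    </Procedure>
  </Synthesis>
</XDL>
"""
result = convert_xdl_to_dict(xdl)
print(result["success"], len(result["steps"]), len(result["labware"]))  # True 1 2
```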
@@ -2,7 +2,7 @@
 <?xml-model href="http://download.ros.org/schema/package_format3.xsd" schematypens="http://www.w3.org/2001/XMLSchema"?>
 <package format="3">
   <name>unilabos_msgs</name>
-  <version>0.10.15</version>
+  <version>0.10.14</version>
   <description>ROS2 Messages package for unilabos devices</description>
   <maintainer email="changjh@pku.edu.cn">Junhan Chang</maintainer>
   <maintainer email="18435084+Xuwznln@users.noreply.github.com">Xuwznln</maintainer>