mirror of
https://github.com/dptech-corp/Uni-Lab-OS.git
synced 2026-02-07 23:45:10 +00:00
Compare commits
184 Commits
66c18c080a
...
prcix9320
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
f6d46e669d | ||
|
|
abf5555e37 | ||
|
|
4f7d431c0b | ||
|
|
341a1b537c | ||
|
|
957fb41a6f | ||
|
|
26271bcab8 | ||
|
|
e4d915c59c | ||
|
|
11a38d4558 | ||
|
|
84a8223173 | ||
|
|
e8d1263488 | ||
|
|
380b39100d | ||
|
|
56eb7e2ab4 | ||
|
|
23ce145f74 | ||
|
|
b0da149252 | ||
|
|
07c9e6f0fe | ||
|
|
ccec6b9d77 | ||
|
|
dadfdf3d8d | ||
|
|
aeeb36d075 | ||
|
|
3478bfd7ed | ||
|
|
400bb073d4 | ||
|
|
3f63c36505 | ||
|
|
0ae94f7f3c | ||
|
|
7eacae6442 | ||
|
|
f7d2cb4b9e | ||
|
|
bf980d7248 | ||
|
|
27c0544bfc | ||
|
|
d48e77c9ae | ||
|
|
e70a5bea66 | ||
|
|
467d75dc03 | ||
|
|
9feeb0c430 | ||
|
|
b2f26ffb28 | ||
|
|
4b0d1553e9 | ||
|
|
67ddee2ab2 | ||
|
|
1bcdad9448 | ||
|
|
039c96fe01 | ||
|
|
e1555d10a0 | ||
|
|
f2a96b2041 | ||
|
|
329349639e | ||
|
|
e4cc111523 | ||
|
|
d245ceef1b | ||
|
|
6db7fbd721 | ||
|
|
ab05b858e1 | ||
|
|
43e4c71a8e | ||
|
|
d6910da57d | ||
|
|
2cf58ca452 | ||
|
|
fd73bb7dcb | ||
|
|
a02cecfd18 | ||
|
|
d6accc3f1c | ||
|
|
39dc443399 | ||
|
|
37b1fca962 | ||
|
|
216f19fb62 | ||
|
|
d5b4f07406 | ||
|
|
470d7283e4 | ||
|
|
03f7f44c77 | ||
|
|
ec7ca6a1fe | ||
|
|
4c8022ee95 | ||
|
|
6f600b4fc7 | ||
|
|
269ce440d1 | ||
|
|
be054589b5 | ||
|
|
ad21644db0 | ||
|
|
b045ab4e0a | ||
|
|
4595f86725 | ||
|
|
44a4c2362d | ||
|
|
9dfd58e9af | ||
|
|
31c9f9a172 | ||
|
|
1340bae838 | ||
|
|
ae75f07c8e | ||
|
|
02cd8de4c5 | ||
|
|
a66603ec1c | ||
|
|
ec015e16cd | ||
|
|
18d0ba7a46 | ||
|
|
de7fbe7ac8 | ||
|
|
965bf36e8d | ||
|
|
aacf3497e0 | ||
|
|
657f952e7a | ||
|
|
0165590290 | ||
|
|
daea1ab54d | ||
|
|
31e8d065c4 | ||
|
|
93cb307396 | ||
|
|
1c312772ae | ||
|
|
bad1db5094 | ||
|
|
f26eb69eca | ||
|
|
12c0770c92 | ||
|
|
3d2d428a96 | ||
|
|
78bf57f590 | ||
|
|
e227cddab3 | ||
|
|
f2b993643f | ||
|
|
2e14bf197c | ||
|
|
219a480c08 | ||
|
|
e9f1a7bb44 | ||
|
|
ead43b2bc1 | ||
|
|
cef86fd98d | ||
|
|
6993e97ae9 | ||
|
|
db396bcab3 | ||
|
|
1fed8de57d | ||
|
|
63eb0c0a4c | ||
|
|
888c6cf542 | ||
|
|
cc248fc32c | ||
|
|
cfe64b023b | ||
|
|
ad1312cf26 | ||
|
|
799813f85b | ||
|
|
19c9d655d0 | ||
|
|
f9a9e35269 | ||
|
|
8cd306cd32 | ||
|
|
816a0d747b | ||
|
|
b0cff1a7a8 | ||
|
|
71d57c5631 | ||
|
|
546fb633ec | ||
|
|
a3c7fa9385 | ||
|
|
c6cf84def0 | ||
|
|
86512a0482 | ||
|
|
3ddbc1c9b7 | ||
|
|
abf1005241 | ||
|
|
c475eabb60 | ||
|
|
3ad20c85a5 | ||
|
|
44fc80c70f | ||
|
|
8ba911bb55 | ||
|
|
896f287d92 | ||
|
|
0d150f7acd | ||
|
|
c27f7e42d6 | ||
|
|
cc56a68bc6 | ||
|
|
d7302c3b35 | ||
|
|
b46a51c40e | ||
|
|
c6780087b8 | ||
|
|
1ef698dde6 | ||
|
|
91aadba4ef | ||
|
|
b1cdef9185 | ||
|
|
9854ed8c9c | ||
|
|
52544a2c69 | ||
|
|
5ce433e235 | ||
|
|
c7c14d2332 | ||
|
|
6fdd482649 | ||
|
|
d390236318 | ||
|
|
ed8ee29732 | ||
|
|
ffc583e9d5 | ||
|
|
f1ad0c9c96 | ||
|
|
8fa3407649 | ||
|
|
d3282822fc | ||
|
|
554bcade24 | ||
|
|
a662c75de1 | ||
|
|
931614fe64 | ||
|
|
d39662f65f | ||
|
|
acf5fdebf8 | ||
|
|
7f7b1c13c0 | ||
|
|
75f09034ff | ||
|
|
549a50220b | ||
|
|
4189a2cfbe | ||
|
|
48895a9bb1 | ||
|
|
891f126ed6 | ||
|
|
4d3475a849 | ||
|
|
b475db66df | ||
|
|
a625a86e3e | ||
|
|
37e0f1037c | ||
|
|
a242253145 | ||
|
|
448e0074b7 | ||
|
|
304827fc8d | ||
|
|
872b3d781f | ||
|
|
813400f2b4 | ||
|
|
b6dfe2b944 | ||
|
|
8807865649 | ||
|
|
5fc7eb7586 | ||
|
|
9bd72b48e1 | ||
|
|
42b78ab4c1 | ||
|
|
9645609a05 | ||
|
|
a2a827d7ac | ||
|
|
bb3ca645a4 | ||
|
|
37ee43d19a | ||
|
|
bc30f23e34 | ||
|
|
166d84afe1 | ||
|
|
1b43c53015 | ||
|
|
d4415f5a35 | ||
|
|
0260cbbedb | ||
|
|
7c440d10ab | ||
|
|
c85c49817d | ||
|
|
c70eafa5f0 | ||
|
|
b64466d443 | ||
|
|
ef3f24ed48 | ||
|
|
2a8e8d014b | ||
|
|
e0da1c7217 | ||
|
|
51d3e61723 | ||
|
|
6b5765bbf3 | ||
|
|
eb1f3fbe1c | ||
|
|
fb93b1cd94 | ||
|
|
9aeffebde1 |
60
.conda/base/recipe.yaml
Normal file
60
.conda/base/recipe.yaml
Normal file
@@ -0,0 +1,60 @@
|
|||||||
|
# unilabos: Production package (depends on unilabos-env + pip unilabos)
|
||||||
|
# For production deployment
|
||||||
|
|
||||||
|
package:
|
||||||
|
name: unilabos
|
||||||
|
version: 0.10.17
|
||||||
|
|
||||||
|
source:
|
||||||
|
path: ../../unilabos
|
||||||
|
target_directory: unilabos
|
||||||
|
|
||||||
|
build:
|
||||||
|
python:
|
||||||
|
entry_points:
|
||||||
|
- unilab = unilabos.app.main:main
|
||||||
|
script:
|
||||||
|
- set PIP_NO_INDEX=
|
||||||
|
- if: win
|
||||||
|
then:
|
||||||
|
- copy %RECIPE_DIR%\..\..\MANIFEST.in %SRC_DIR%
|
||||||
|
- copy %RECIPE_DIR%\..\..\setup.cfg %SRC_DIR%
|
||||||
|
- copy %RECIPE_DIR%\..\..\setup.py %SRC_DIR%
|
||||||
|
- pip install %SRC_DIR%
|
||||||
|
- if: unix
|
||||||
|
then:
|
||||||
|
- cp $RECIPE_DIR/../../MANIFEST.in $SRC_DIR
|
||||||
|
- cp $RECIPE_DIR/../../setup.cfg $SRC_DIR
|
||||||
|
- cp $RECIPE_DIR/../../setup.py $SRC_DIR
|
||||||
|
- pip install $SRC_DIR
|
||||||
|
|
||||||
|
requirements:
|
||||||
|
host:
|
||||||
|
- python ==3.11.14
|
||||||
|
- pip
|
||||||
|
- setuptools
|
||||||
|
- zstd
|
||||||
|
- zstandard
|
||||||
|
run:
|
||||||
|
- zstd
|
||||||
|
- zstandard
|
||||||
|
- networkx
|
||||||
|
- typing_extensions
|
||||||
|
- websockets
|
||||||
|
- pint
|
||||||
|
- fastapi
|
||||||
|
- jinja2
|
||||||
|
- requests
|
||||||
|
- uvicorn
|
||||||
|
- opcua # [not osx]
|
||||||
|
- pyserial
|
||||||
|
- pandas
|
||||||
|
- pymodbus
|
||||||
|
- matplotlib
|
||||||
|
- pylibftdi
|
||||||
|
- uni-lab::unilabos-env ==0.10.17
|
||||||
|
|
||||||
|
about:
|
||||||
|
repository: https://github.com/deepmodeling/Uni-Lab-OS
|
||||||
|
license: GPL-3.0-only
|
||||||
|
description: "UniLabOS - Production package with minimal ROS2 dependencies"
|
||||||
39
.conda/environment/recipe.yaml
Normal file
39
.conda/environment/recipe.yaml
Normal file
@@ -0,0 +1,39 @@
|
|||||||
|
# unilabos-env: conda environment dependencies (ROS2 + conda packages)
|
||||||
|
|
||||||
|
package:
|
||||||
|
name: unilabos-env
|
||||||
|
version: 0.10.17
|
||||||
|
|
||||||
|
build:
|
||||||
|
noarch: generic
|
||||||
|
|
||||||
|
requirements:
|
||||||
|
run:
|
||||||
|
# Python
|
||||||
|
- zstd
|
||||||
|
- zstandard
|
||||||
|
- conda-forge::python ==3.11.14
|
||||||
|
- conda-forge::opencv
|
||||||
|
# ROS2 dependencies (from ci-check.yml)
|
||||||
|
- robostack-staging::ros-humble-ros-core
|
||||||
|
- robostack-staging::ros-humble-action-msgs
|
||||||
|
- robostack-staging::ros-humble-std-msgs
|
||||||
|
- robostack-staging::ros-humble-geometry-msgs
|
||||||
|
- robostack-staging::ros-humble-control-msgs
|
||||||
|
- robostack-staging::ros-humble-nav2-msgs
|
||||||
|
- robostack-staging::ros-humble-cv-bridge
|
||||||
|
- robostack-staging::ros-humble-vision-opencv
|
||||||
|
- robostack-staging::ros-humble-tf-transformations
|
||||||
|
- robostack-staging::ros-humble-moveit-msgs
|
||||||
|
- robostack-staging::ros-humble-tf2-ros
|
||||||
|
- robostack-staging::ros-humble-tf2-ros-py
|
||||||
|
- conda-forge::transforms3d
|
||||||
|
- conda-forge::uv
|
||||||
|
|
||||||
|
# UniLabOS custom messages
|
||||||
|
- uni-lab::ros-humble-unilabos-msgs
|
||||||
|
|
||||||
|
about:
|
||||||
|
repository: https://github.com/deepmodeling/Uni-Lab-OS
|
||||||
|
license: GPL-3.0-only
|
||||||
|
description: "UniLabOS Environment - ROS2 and conda dependencies"
|
||||||
42
.conda/full/recipe.yaml
Normal file
42
.conda/full/recipe.yaml
Normal file
@@ -0,0 +1,42 @@
|
|||||||
|
# unilabos-full: Full package with all features
|
||||||
|
# Depends on unilabos + complete ROS2 desktop + dev tools
|
||||||
|
|
||||||
|
package:
|
||||||
|
name: unilabos-full
|
||||||
|
version: 0.10.17
|
||||||
|
|
||||||
|
build:
|
||||||
|
noarch: generic
|
||||||
|
|
||||||
|
requirements:
|
||||||
|
run:
|
||||||
|
# Base unilabos package (includes unilabos-env)
|
||||||
|
- uni-lab::unilabos ==0.10.17
|
||||||
|
# Documentation tools
|
||||||
|
- sphinx
|
||||||
|
- sphinx_rtd_theme
|
||||||
|
# Web UI
|
||||||
|
- gradio
|
||||||
|
- flask
|
||||||
|
# Interactive development
|
||||||
|
- ipython
|
||||||
|
- jupyter
|
||||||
|
- jupyros
|
||||||
|
- colcon-common-extensions
|
||||||
|
# ROS2 full desktop (includes rviz2, gazebo, etc.)
|
||||||
|
- robostack-staging::ros-humble-desktop-full
|
||||||
|
# Navigation and motion control
|
||||||
|
- ros-humble-navigation2
|
||||||
|
- ros-humble-ros2-control
|
||||||
|
- ros-humble-robot-state-publisher
|
||||||
|
- ros-humble-joint-state-publisher
|
||||||
|
# MoveIt motion planning
|
||||||
|
- ros-humble-moveit
|
||||||
|
- ros-humble-moveit-servo
|
||||||
|
# Simulation
|
||||||
|
- ros-humble-simulation
|
||||||
|
|
||||||
|
about:
|
||||||
|
repository: https://github.com/deepmodeling/Uni-Lab-OS
|
||||||
|
license: GPL-3.0-only
|
||||||
|
description: "UniLabOS Full - Complete package with ROS2 Desktop, MoveIt, Navigation2, Gazebo, Jupyter"
|
||||||
@@ -1,92 +0,0 @@
|
|||||||
package:
|
|
||||||
name: unilabos
|
|
||||||
version: 0.10.13
|
|
||||||
|
|
||||||
source:
|
|
||||||
path: ../unilabos
|
|
||||||
target_directory: unilabos
|
|
||||||
|
|
||||||
build:
|
|
||||||
python:
|
|
||||||
entry_points:
|
|
||||||
- unilab = unilabos.app.main:main
|
|
||||||
script:
|
|
||||||
- set PIP_NO_INDEX=
|
|
||||||
- if: win
|
|
||||||
then:
|
|
||||||
- copy %RECIPE_DIR%\..\MANIFEST.in %SRC_DIR%
|
|
||||||
- copy %RECIPE_DIR%\..\setup.cfg %SRC_DIR%
|
|
||||||
- copy %RECIPE_DIR%\..\setup.py %SRC_DIR%
|
|
||||||
- call %PYTHON% -m pip install %SRC_DIR%
|
|
||||||
- if: unix
|
|
||||||
then:
|
|
||||||
- cp $RECIPE_DIR/../MANIFEST.in $SRC_DIR
|
|
||||||
- cp $RECIPE_DIR/../setup.cfg $SRC_DIR
|
|
||||||
- cp $RECIPE_DIR/../setup.py $SRC_DIR
|
|
||||||
- $PYTHON -m pip install $SRC_DIR
|
|
||||||
|
|
||||||
|
|
||||||
requirements:
|
|
||||||
host:
|
|
||||||
- python ==3.11.11
|
|
||||||
- pip
|
|
||||||
- setuptools
|
|
||||||
- zstd
|
|
||||||
- zstandard
|
|
||||||
run:
|
|
||||||
- conda-forge::python ==3.11.11
|
|
||||||
- compilers
|
|
||||||
- cmake
|
|
||||||
- zstd
|
|
||||||
- zstandard
|
|
||||||
- ninja
|
|
||||||
- if: unix
|
|
||||||
then:
|
|
||||||
- make
|
|
||||||
- sphinx
|
|
||||||
- sphinx_rtd_theme
|
|
||||||
- numpy
|
|
||||||
- scipy
|
|
||||||
- pandas
|
|
||||||
- networkx
|
|
||||||
- matplotlib
|
|
||||||
- pint
|
|
||||||
- pyserial
|
|
||||||
- pyusb
|
|
||||||
- pylibftdi
|
|
||||||
- pymodbus
|
|
||||||
- python-can
|
|
||||||
- pyvisa
|
|
||||||
- opencv
|
|
||||||
- pydantic
|
|
||||||
- fastapi
|
|
||||||
- uvicorn
|
|
||||||
- gradio
|
|
||||||
- flask
|
|
||||||
- websockets
|
|
||||||
- ipython
|
|
||||||
- jupyter
|
|
||||||
- jupyros
|
|
||||||
- colcon-common-extensions
|
|
||||||
- robostack-staging::ros-humble-desktop-full
|
|
||||||
- robostack-staging::ros-humble-control-msgs
|
|
||||||
- robostack-staging::ros-humble-sensor-msgs
|
|
||||||
- robostack-staging::ros-humble-trajectory-msgs
|
|
||||||
- ros-humble-navigation2
|
|
||||||
- ros-humble-ros2-control
|
|
||||||
- ros-humble-robot-state-publisher
|
|
||||||
- ros-humble-joint-state-publisher
|
|
||||||
- ros-humble-rosbridge-server
|
|
||||||
- ros-humble-cv-bridge
|
|
||||||
- ros-humble-tf2
|
|
||||||
- ros-humble-moveit
|
|
||||||
- ros-humble-moveit-servo
|
|
||||||
- ros-humble-simulation
|
|
||||||
- ros-humble-tf-transformations
|
|
||||||
- transforms3d
|
|
||||||
- uni-lab::ros-humble-unilabos-msgs
|
|
||||||
|
|
||||||
about:
|
|
||||||
repository: https://github.com/dptech-corp/Uni-Lab-OS
|
|
||||||
license: GPL-3.0-only
|
|
||||||
description: "Uni-Lab-OS"
|
|
||||||
26
.cursorignore
Normal file
26
.cursorignore
Normal file
@@ -0,0 +1,26 @@
|
|||||||
|
.conda
|
||||||
|
# .github
|
||||||
|
.idea
|
||||||
|
# .vscode
|
||||||
|
output
|
||||||
|
pylabrobot_repo
|
||||||
|
recipes
|
||||||
|
scripts
|
||||||
|
service
|
||||||
|
temp
|
||||||
|
# unilabos/test
|
||||||
|
# unilabos/app/web
|
||||||
|
unilabos/device_mesh
|
||||||
|
unilabos_data
|
||||||
|
unilabos_msgs
|
||||||
|
unilabos.egg-info
|
||||||
|
CONTRIBUTORS
|
||||||
|
# LICENSE
|
||||||
|
MANIFEST.in
|
||||||
|
pyrightconfig.json
|
||||||
|
# README.md
|
||||||
|
# README_zh.md
|
||||||
|
setup.py
|
||||||
|
setup.cfg
|
||||||
|
.gitattrubutes
|
||||||
|
**/__pycache__
|
||||||
67
.github/workflows/ci-check.yml
vendored
Normal file
67
.github/workflows/ci-check.yml
vendored
Normal file
@@ -0,0 +1,67 @@
|
|||||||
|
name: CI Check
|
||||||
|
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches: [main, dev]
|
||||||
|
pull_request:
|
||||||
|
branches: [main, dev]
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
registry-check:
|
||||||
|
runs-on: windows-latest
|
||||||
|
|
||||||
|
env:
|
||||||
|
# Fix Unicode encoding issue on Windows runner (cp1252 -> utf-8)
|
||||||
|
PYTHONIOENCODING: utf-8
|
||||||
|
PYTHONUTF8: 1
|
||||||
|
|
||||||
|
defaults:
|
||||||
|
run:
|
||||||
|
shell: cmd
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v6
|
||||||
|
with:
|
||||||
|
fetch-depth: 0
|
||||||
|
|
||||||
|
- name: Setup Miniforge
|
||||||
|
uses: conda-incubator/setup-miniconda@v3
|
||||||
|
with:
|
||||||
|
miniforge-version: latest
|
||||||
|
use-mamba: true
|
||||||
|
channels: robostack-staging,conda-forge,uni-lab
|
||||||
|
channel-priority: flexible
|
||||||
|
activate-environment: check-env
|
||||||
|
auto-update-conda: false
|
||||||
|
show-channel-urls: true
|
||||||
|
|
||||||
|
- name: Install ROS dependencies, uv and unilabos-msgs
|
||||||
|
run: |
|
||||||
|
echo Installing ROS dependencies...
|
||||||
|
mamba install -n check-env conda-forge::uv conda-forge::opencv robostack-staging::ros-humble-ros-core robostack-staging::ros-humble-action-msgs robostack-staging::ros-humble-std-msgs robostack-staging::ros-humble-geometry-msgs robostack-staging::ros-humble-control-msgs robostack-staging::ros-humble-nav2-msgs uni-lab::ros-humble-unilabos-msgs robostack-staging::ros-humble-cv-bridge robostack-staging::ros-humble-vision-opencv robostack-staging::ros-humble-tf-transformations robostack-staging::ros-humble-moveit-msgs robostack-staging::ros-humble-tf2-ros robostack-staging::ros-humble-tf2-ros-py conda-forge::transforms3d -c robostack-staging -c conda-forge -c uni-lab -y
|
||||||
|
|
||||||
|
- name: Install pip dependencies and unilabos
|
||||||
|
run: |
|
||||||
|
call conda activate check-env
|
||||||
|
echo Installing pip dependencies...
|
||||||
|
uv pip install -r unilabos/utils/requirements.txt
|
||||||
|
uv pip install pywinauto git+https://github.com/Xuwznln/pylabrobot.git
|
||||||
|
uv pip uninstall enum34 || echo enum34 not installed, skipping
|
||||||
|
uv pip install .
|
||||||
|
|
||||||
|
- name: Run check mode (complete_registry)
|
||||||
|
run: |
|
||||||
|
call conda activate check-env
|
||||||
|
echo Running check mode...
|
||||||
|
python -m unilabos --check_mode --skip_env_check
|
||||||
|
|
||||||
|
- name: Check for uncommitted changes
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
if ! git diff --exit-code; then
|
||||||
|
echo "::error::检测到文件变化!请先在本地运行 'python -m unilabos --complete_registry' 并提交变更"
|
||||||
|
echo "变化的文件:"
|
||||||
|
git diff --name-only
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
echo "检查通过:无文件变化"
|
||||||
45
.github/workflows/conda-pack-build.yml
vendored
45
.github/workflows/conda-pack-build.yml
vendored
@@ -13,6 +13,11 @@ on:
|
|||||||
required: false
|
required: false
|
||||||
default: 'win-64'
|
default: 'win-64'
|
||||||
type: string
|
type: string
|
||||||
|
build_full:
|
||||||
|
description: '是否构建完整版 unilabos-full (默认构建轻量版 unilabos)'
|
||||||
|
required: false
|
||||||
|
default: false
|
||||||
|
type: boolean
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
build-conda-pack:
|
build-conda-pack:
|
||||||
@@ -24,7 +29,7 @@ jobs:
|
|||||||
platform: linux-64
|
platform: linux-64
|
||||||
env_file: unilabos-linux-64.yaml
|
env_file: unilabos-linux-64.yaml
|
||||||
script_ext: sh
|
script_ext: sh
|
||||||
- os: macos-13 # Intel
|
- os: macos-15 # Intel (via Rosetta)
|
||||||
platform: osx-64
|
platform: osx-64
|
||||||
env_file: unilabos-osx-64.yaml
|
env_file: unilabos-osx-64.yaml
|
||||||
script_ext: sh
|
script_ext: sh
|
||||||
@@ -57,7 +62,7 @@ jobs:
|
|||||||
echo "should_build=false" >> $GITHUB_OUTPUT
|
echo "should_build=false" >> $GITHUB_OUTPUT
|
||||||
fi
|
fi
|
||||||
|
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v6
|
||||||
if: steps.should_build.outputs.should_build == 'true'
|
if: steps.should_build.outputs.should_build == 'true'
|
||||||
with:
|
with:
|
||||||
ref: ${{ github.event.inputs.branch }}
|
ref: ${{ github.event.inputs.branch }}
|
||||||
@@ -69,7 +74,7 @@ jobs:
|
|||||||
with:
|
with:
|
||||||
miniforge-version: latest
|
miniforge-version: latest
|
||||||
use-mamba: true
|
use-mamba: true
|
||||||
python-version: '3.11.11'
|
python-version: '3.11.14'
|
||||||
channels: conda-forge,robostack-staging,uni-lab,defaults
|
channels: conda-forge,robostack-staging,uni-lab,defaults
|
||||||
channel-priority: flexible
|
channel-priority: flexible
|
||||||
activate-environment: unilab
|
activate-environment: unilab
|
||||||
@@ -81,7 +86,14 @@ jobs:
|
|||||||
run: |
|
run: |
|
||||||
echo Installing unilabos and dependencies to unilab environment...
|
echo Installing unilabos and dependencies to unilab environment...
|
||||||
echo Using mamba for faster and more reliable dependency resolution...
|
echo Using mamba for faster and more reliable dependency resolution...
|
||||||
mamba install -n unilab uni-lab::unilabos conda-pack -c uni-lab -c robostack-staging -c conda-forge -y
|
echo Build full: ${{ github.event.inputs.build_full }}
|
||||||
|
if "${{ github.event.inputs.build_full }}"=="true" (
|
||||||
|
echo Installing unilabos-full ^(complete package^)...
|
||||||
|
mamba install -n unilab uni-lab::unilabos-full conda-pack -c uni-lab -c robostack-staging -c conda-forge -y
|
||||||
|
) else (
|
||||||
|
echo Installing unilabos ^(minimal package^)...
|
||||||
|
mamba install -n unilab uni-lab::unilabos conda-pack -c uni-lab -c robostack-staging -c conda-forge -y
|
||||||
|
)
|
||||||
|
|
||||||
- name: Install conda-pack, unilabos and dependencies (Unix)
|
- name: Install conda-pack, unilabos and dependencies (Unix)
|
||||||
if: steps.should_build.outputs.should_build == 'true' && matrix.platform != 'win-64'
|
if: steps.should_build.outputs.should_build == 'true' && matrix.platform != 'win-64'
|
||||||
@@ -89,7 +101,14 @@ jobs:
|
|||||||
run: |
|
run: |
|
||||||
echo "Installing unilabos and dependencies to unilab environment..."
|
echo "Installing unilabos and dependencies to unilab environment..."
|
||||||
echo "Using mamba for faster and more reliable dependency resolution..."
|
echo "Using mamba for faster and more reliable dependency resolution..."
|
||||||
mamba install -n unilab uni-lab::unilabos conda-pack -c uni-lab -c robostack-staging -c conda-forge -y
|
echo "Build full: ${{ github.event.inputs.build_full }}"
|
||||||
|
if [[ "${{ github.event.inputs.build_full }}" == "true" ]]; then
|
||||||
|
echo "Installing unilabos-full (complete package)..."
|
||||||
|
mamba install -n unilab uni-lab::unilabos-full conda-pack -c uni-lab -c robostack-staging -c conda-forge -y
|
||||||
|
else
|
||||||
|
echo "Installing unilabos (minimal package)..."
|
||||||
|
mamba install -n unilab uni-lab::unilabos conda-pack -c uni-lab -c robostack-staging -c conda-forge -y
|
||||||
|
fi
|
||||||
|
|
||||||
- name: Get latest ros-humble-unilabos-msgs version (Windows)
|
- name: Get latest ros-humble-unilabos-msgs version (Windows)
|
||||||
if: steps.should_build.outputs.should_build == 'true' && matrix.platform == 'win-64'
|
if: steps.should_build.outputs.should_build == 'true' && matrix.platform == 'win-64'
|
||||||
@@ -293,7 +312,7 @@ jobs:
|
|||||||
|
|
||||||
- name: Upload distribution package
|
- name: Upload distribution package
|
||||||
if: steps.should_build.outputs.should_build == 'true'
|
if: steps.should_build.outputs.should_build == 'true'
|
||||||
uses: actions/upload-artifact@v4
|
uses: actions/upload-artifact@v6
|
||||||
with:
|
with:
|
||||||
name: unilab-pack-${{ matrix.platform }}-${{ github.event.inputs.branch }}
|
name: unilab-pack-${{ matrix.platform }}-${{ github.event.inputs.branch }}
|
||||||
path: dist-package/
|
path: dist-package/
|
||||||
@@ -308,7 +327,12 @@ jobs:
|
|||||||
echo ==========================================
|
echo ==========================================
|
||||||
echo Platform: ${{ matrix.platform }}
|
echo Platform: ${{ matrix.platform }}
|
||||||
echo Branch: ${{ github.event.inputs.branch }}
|
echo Branch: ${{ github.event.inputs.branch }}
|
||||||
echo Python version: 3.11.11
|
echo Python version: 3.11.14
|
||||||
|
if "${{ github.event.inputs.build_full }}"=="true" (
|
||||||
|
echo Package: unilabos-full ^(complete^)
|
||||||
|
) else (
|
||||||
|
echo Package: unilabos ^(minimal^)
|
||||||
|
)
|
||||||
echo.
|
echo.
|
||||||
echo Distribution package contents:
|
echo Distribution package contents:
|
||||||
dir dist-package
|
dir dist-package
|
||||||
@@ -328,7 +352,12 @@ jobs:
|
|||||||
echo "=========================================="
|
echo "=========================================="
|
||||||
echo "Platform: ${{ matrix.platform }}"
|
echo "Platform: ${{ matrix.platform }}"
|
||||||
echo "Branch: ${{ github.event.inputs.branch }}"
|
echo "Branch: ${{ github.event.inputs.branch }}"
|
||||||
echo "Python version: 3.11.11"
|
echo "Python version: 3.11.14"
|
||||||
|
if [[ "${{ github.event.inputs.build_full }}" == "true" ]]; then
|
||||||
|
echo "Package: unilabos-full (complete)"
|
||||||
|
else
|
||||||
|
echo "Package: unilabos (minimal)"
|
||||||
|
fi
|
||||||
echo ""
|
echo ""
|
||||||
echo "Distribution package contents:"
|
echo "Distribution package contents:"
|
||||||
ls -lh dist-package/
|
ls -lh dist-package/
|
||||||
|
|||||||
37
.github/workflows/deploy-docs.yml
vendored
37
.github/workflows/deploy-docs.yml
vendored
@@ -1,10 +1,12 @@
|
|||||||
name: Deploy Docs
|
name: Deploy Docs
|
||||||
|
|
||||||
on:
|
on:
|
||||||
push:
|
# 在 CI Check 成功后自动触发(仅 main 分支)
|
||||||
branches: [main]
|
workflow_run:
|
||||||
pull_request:
|
workflows: ["CI Check"]
|
||||||
|
types: [completed]
|
||||||
branches: [main]
|
branches: [main]
|
||||||
|
# 手动触发
|
||||||
workflow_dispatch:
|
workflow_dispatch:
|
||||||
inputs:
|
inputs:
|
||||||
branch:
|
branch:
|
||||||
@@ -33,12 +35,19 @@ concurrency:
|
|||||||
jobs:
|
jobs:
|
||||||
# Build documentation
|
# Build documentation
|
||||||
build:
|
build:
|
||||||
|
# 只在以下情况运行:
|
||||||
|
# 1. workflow_run 触发且 CI Check 成功
|
||||||
|
# 2. 手动触发
|
||||||
|
if: |
|
||||||
|
github.event_name == 'workflow_dispatch' ||
|
||||||
|
(github.event_name == 'workflow_run' && github.event.workflow_run.conclusion == 'success')
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout code
|
- name: Checkout code
|
||||||
uses: actions/checkout@v4
|
uses: actions/checkout@v6
|
||||||
with:
|
with:
|
||||||
ref: ${{ github.event.inputs.branch || github.ref }}
|
# workflow_run 时使用触发工作流的分支,手动触发时使用输入的分支
|
||||||
|
ref: ${{ github.event.workflow_run.head_branch || github.event.inputs.branch || github.ref }}
|
||||||
fetch-depth: 0
|
fetch-depth: 0
|
||||||
|
|
||||||
- name: Setup Miniforge (with mamba)
|
- name: Setup Miniforge (with mamba)
|
||||||
@@ -46,7 +55,7 @@ jobs:
|
|||||||
with:
|
with:
|
||||||
miniforge-version: latest
|
miniforge-version: latest
|
||||||
use-mamba: true
|
use-mamba: true
|
||||||
python-version: '3.11.11'
|
python-version: '3.11.14'
|
||||||
channels: conda-forge,robostack-staging,uni-lab,defaults
|
channels: conda-forge,robostack-staging,uni-lab,defaults
|
||||||
channel-priority: flexible
|
channel-priority: flexible
|
||||||
activate-environment: unilab
|
activate-environment: unilab
|
||||||
@@ -75,8 +84,10 @@ jobs:
|
|||||||
|
|
||||||
- name: Setup Pages
|
- name: Setup Pages
|
||||||
id: pages
|
id: pages
|
||||||
uses: actions/configure-pages@v4
|
uses: actions/configure-pages@v5
|
||||||
if: github.ref == 'refs/heads/main' || (github.event_name == 'workflow_dispatch' && github.event.inputs.deploy_to_pages == 'true')
|
if: |
|
||||||
|
github.event.workflow_run.head_branch == 'main' ||
|
||||||
|
(github.event_name == 'workflow_dispatch' && github.event.inputs.deploy_to_pages == 'true')
|
||||||
|
|
||||||
- name: Build Sphinx documentation
|
- name: Build Sphinx documentation
|
||||||
run: |
|
run: |
|
||||||
@@ -94,14 +105,18 @@ jobs:
|
|||||||
test -f docs/_build/html/index.html && echo "✓ index.html exists" || echo "✗ index.html missing"
|
test -f docs/_build/html/index.html && echo "✓ index.html exists" || echo "✗ index.html missing"
|
||||||
|
|
||||||
- name: Upload build artifacts
|
- name: Upload build artifacts
|
||||||
uses: actions/upload-pages-artifact@v3
|
uses: actions/upload-pages-artifact@v4
|
||||||
if: github.ref == 'refs/heads/main' || (github.event_name == 'workflow_dispatch' && github.event.inputs.deploy_to_pages == 'true')
|
if: |
|
||||||
|
github.event.workflow_run.head_branch == 'main' ||
|
||||||
|
(github.event_name == 'workflow_dispatch' && github.event.inputs.deploy_to_pages == 'true')
|
||||||
with:
|
with:
|
||||||
path: docs/_build/html
|
path: docs/_build/html
|
||||||
|
|
||||||
# Deploy to GitHub Pages
|
# Deploy to GitHub Pages
|
||||||
deploy:
|
deploy:
|
||||||
if: github.ref == 'refs/heads/main' || (github.event_name == 'workflow_dispatch' && github.event.inputs.deploy_to_pages == 'true')
|
if: |
|
||||||
|
github.event.workflow_run.head_branch == 'main' ||
|
||||||
|
(github.event_name == 'workflow_dispatch' && github.event.inputs.deploy_to_pages == 'true')
|
||||||
environment:
|
environment:
|
||||||
name: github-pages
|
name: github-pages
|
||||||
url: ${{ steps.deployment.outputs.page_url }}
|
url: ${{ steps.deployment.outputs.page_url }}
|
||||||
|
|||||||
48
.github/workflows/multi-platform-build.yml
vendored
48
.github/workflows/multi-platform-build.yml
vendored
@@ -1,11 +1,16 @@
|
|||||||
name: Multi-Platform Conda Build
|
name: Multi-Platform Conda Build
|
||||||
|
|
||||||
on:
|
on:
|
||||||
|
# 在 CI Check 工作流完成后触发(仅限 main/dev 分支)
|
||||||
|
workflow_run:
|
||||||
|
workflows: ["CI Check"]
|
||||||
|
types:
|
||||||
|
- completed
|
||||||
|
branches: [main, dev]
|
||||||
|
# 支持 tag 推送(不依赖 CI Check)
|
||||||
push:
|
push:
|
||||||
branches: [main, dev]
|
|
||||||
tags: ['v*']
|
tags: ['v*']
|
||||||
pull_request:
|
# 手动触发
|
||||||
branches: [main, dev]
|
|
||||||
workflow_dispatch:
|
workflow_dispatch:
|
||||||
inputs:
|
inputs:
|
||||||
platforms:
|
platforms:
|
||||||
@@ -17,9 +22,37 @@ on:
|
|||||||
required: false
|
required: false
|
||||||
default: false
|
default: false
|
||||||
type: boolean
|
type: boolean
|
||||||
|
skip_ci_check:
|
||||||
|
description: '跳过等待 CI Check (手动触发时可选)'
|
||||||
|
required: false
|
||||||
|
default: false
|
||||||
|
type: boolean
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
|
# 等待 CI Check 完成的 job (仅用于 workflow_run 触发)
|
||||||
|
wait-for-ci:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
if: github.event_name == 'workflow_run'
|
||||||
|
outputs:
|
||||||
|
should_continue: ${{ steps.check.outputs.should_continue }}
|
||||||
|
steps:
|
||||||
|
- name: Check CI status
|
||||||
|
id: check
|
||||||
|
run: |
|
||||||
|
if [[ "${{ github.event.workflow_run.conclusion }}" == "success" ]]; then
|
||||||
|
echo "should_continue=true" >> $GITHUB_OUTPUT
|
||||||
|
echo "CI Check passed, proceeding with build"
|
||||||
|
else
|
||||||
|
echo "should_continue=false" >> $GITHUB_OUTPUT
|
||||||
|
echo "CI Check did not succeed (status: ${{ github.event.workflow_run.conclusion }}), skipping build"
|
||||||
|
fi
|
||||||
|
|
||||||
build:
|
build:
|
||||||
|
needs: [wait-for-ci]
|
||||||
|
# 运行条件:workflow_run 触发且 CI 成功,或者其他触发方式
|
||||||
|
if: |
|
||||||
|
always() &&
|
||||||
|
(needs.wait-for-ci.result == 'skipped' || needs.wait-for-ci.outputs.should_continue == 'true')
|
||||||
strategy:
|
strategy:
|
||||||
fail-fast: false
|
fail-fast: false
|
||||||
matrix:
|
matrix:
|
||||||
@@ -27,7 +60,7 @@ jobs:
|
|||||||
- os: ubuntu-latest
|
- os: ubuntu-latest
|
||||||
platform: linux-64
|
platform: linux-64
|
||||||
env_file: unilabos-linux-64.yaml
|
env_file: unilabos-linux-64.yaml
|
||||||
- os: macos-13 # Intel
|
- os: macos-15 # Intel (via Rosetta)
|
||||||
platform: osx-64
|
platform: osx-64
|
||||||
env_file: unilabos-osx-64.yaml
|
env_file: unilabos-osx-64.yaml
|
||||||
- os: macos-latest # ARM64
|
- os: macos-latest # ARM64
|
||||||
@@ -44,8 +77,10 @@ jobs:
|
|||||||
shell: bash -l {0}
|
shell: bash -l {0}
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v6
|
||||||
with:
|
with:
|
||||||
|
# 如果是 workflow_run 触发,使用触发 CI Check 的 commit
|
||||||
|
ref: ${{ github.event.workflow_run.head_sha || github.ref }}
|
||||||
fetch-depth: 0
|
fetch-depth: 0
|
||||||
|
|
||||||
- name: Check if platform should be built
|
- name: Check if platform should be built
|
||||||
@@ -69,7 +104,6 @@ jobs:
|
|||||||
channels: conda-forge,robostack-staging,defaults
|
channels: conda-forge,robostack-staging,defaults
|
||||||
channel-priority: strict
|
channel-priority: strict
|
||||||
activate-environment: build-env
|
activate-environment: build-env
|
||||||
auto-activate-base: false
|
|
||||||
auto-update-conda: false
|
auto-update-conda: false
|
||||||
show-channel-urls: true
|
show-channel-urls: true
|
||||||
|
|
||||||
@@ -115,7 +149,7 @@ jobs:
|
|||||||
|
|
||||||
- name: Upload conda package artifacts
|
- name: Upload conda package artifacts
|
||||||
if: steps.should_build.outputs.should_build == 'true'
|
if: steps.should_build.outputs.should_build == 'true'
|
||||||
uses: actions/upload-artifact@v4
|
uses: actions/upload-artifact@v6
|
||||||
with:
|
with:
|
||||||
name: conda-package-${{ matrix.platform }}
|
name: conda-package-${{ matrix.platform }}
|
||||||
path: conda-packages-temp
|
path: conda-packages-temp
|
||||||
|
|||||||
115
.github/workflows/unilabos-conda-build.yml
vendored
115
.github/workflows/unilabos-conda-build.yml
vendored
@@ -1,32 +1,69 @@
|
|||||||
name: UniLabOS Conda Build
|
name: UniLabOS Conda Build
|
||||||
|
|
||||||
on:
|
on:
|
||||||
|
# 在 CI Check 成功后自动触发
|
||||||
|
workflow_run:
|
||||||
|
workflows: ["CI Check"]
|
||||||
|
types: [completed]
|
||||||
|
branches: [main, dev]
|
||||||
|
# 标签推送时直接触发(发布版本)
|
||||||
push:
|
push:
|
||||||
branches: [main, dev]
|
|
||||||
tags: ['v*']
|
tags: ['v*']
|
||||||
pull_request:
|
# 手动触发
|
||||||
branches: [main, dev]
|
|
||||||
workflow_dispatch:
|
workflow_dispatch:
|
||||||
inputs:
|
inputs:
|
||||||
platforms:
|
platforms:
|
||||||
description: '选择构建平台 (逗号分隔): linux-64, osx-64, osx-arm64, win-64'
|
description: '选择构建平台 (逗号分隔): linux-64, osx-64, osx-arm64, win-64'
|
||||||
required: false
|
required: false
|
||||||
default: 'linux-64'
|
default: 'linux-64'
|
||||||
|
build_full:
|
||||||
|
description: '是否构建 unilabos-full 完整包 (默认只构建 unilabos 基础包)'
|
||||||
|
required: false
|
||||||
|
default: false
|
||||||
|
type: boolean
|
||||||
upload_to_anaconda:
|
upload_to_anaconda:
|
||||||
description: '是否上传到Anaconda.org'
|
description: '是否上传到Anaconda.org'
|
||||||
required: false
|
required: false
|
||||||
default: false
|
default: false
|
||||||
type: boolean
|
type: boolean
|
||||||
|
skip_ci_check:
|
||||||
|
description: '跳过等待 CI Check (手动触发时可选)'
|
||||||
|
required: false
|
||||||
|
default: false
|
||||||
|
type: boolean
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
|
# 等待 CI Check 完成的 job (仅用于 workflow_run 触发)
|
||||||
|
wait-for-ci:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
if: github.event_name == 'workflow_run'
|
||||||
|
outputs:
|
||||||
|
should_continue: ${{ steps.check.outputs.should_continue }}
|
||||||
|
steps:
|
||||||
|
- name: Check CI status
|
||||||
|
id: check
|
||||||
|
run: |
|
||||||
|
if [[ "${{ github.event.workflow_run.conclusion }}" == "success" ]]; then
|
||||||
|
echo "should_continue=true" >> $GITHUB_OUTPUT
|
||||||
|
echo "CI Check passed, proceeding with build"
|
||||||
|
else
|
||||||
|
echo "should_continue=false" >> $GITHUB_OUTPUT
|
||||||
|
echo "CI Check did not succeed (status: ${{ github.event.workflow_run.conclusion }}), skipping build"
|
||||||
|
fi
|
||||||
|
|
||||||
build:
|
build:
|
||||||
|
needs: [wait-for-ci]
|
||||||
|
# 运行条件:workflow_run 触发且 CI 成功,或者其他触发方式
|
||||||
|
if: |
|
||||||
|
always() &&
|
||||||
|
(needs.wait-for-ci.result == 'skipped' || needs.wait-for-ci.outputs.should_continue == 'true')
|
||||||
strategy:
|
strategy:
|
||||||
fail-fast: false
|
fail-fast: false
|
||||||
matrix:
|
matrix:
|
||||||
include:
|
include:
|
||||||
- os: ubuntu-latest
|
- os: ubuntu-latest
|
||||||
platform: linux-64
|
platform: linux-64
|
||||||
- os: macos-13 # Intel
|
- os: macos-15 # Intel (via Rosetta)
|
||||||
platform: osx-64
|
platform: osx-64
|
||||||
- os: macos-latest # ARM64
|
- os: macos-latest # ARM64
|
||||||
platform: osx-arm64
|
platform: osx-arm64
|
||||||
@@ -40,8 +77,10 @@ jobs:
|
|||||||
shell: bash -l {0}
|
shell: bash -l {0}
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v6
|
||||||
with:
|
with:
|
||||||
|
# 如果是 workflow_run 触发,使用触发 CI Check 的 commit
|
||||||
|
ref: ${{ github.event.workflow_run.head_sha || github.ref }}
|
||||||
fetch-depth: 0
|
fetch-depth: 0
|
||||||
|
|
||||||
- name: Check if platform should be built
|
- name: Check if platform should be built
|
||||||
@@ -65,7 +104,6 @@ jobs:
|
|||||||
channels: conda-forge,robostack-staging,uni-lab,defaults
|
channels: conda-forge,robostack-staging,uni-lab,defaults
|
||||||
channel-priority: strict
|
channel-priority: strict
|
||||||
activate-environment: build-env
|
activate-environment: build-env
|
||||||
auto-activate-base: false
|
|
||||||
auto-update-conda: false
|
auto-update-conda: false
|
||||||
show-channel-urls: true
|
show-channel-urls: true
|
||||||
|
|
||||||
@@ -81,12 +119,61 @@ jobs:
|
|||||||
conda list | grep -E "(rattler-build|anaconda-client)"
|
conda list | grep -E "(rattler-build|anaconda-client)"
|
||||||
echo "Platform: ${{ matrix.platform }}"
|
echo "Platform: ${{ matrix.platform }}"
|
||||||
echo "OS: ${{ matrix.os }}"
|
echo "OS: ${{ matrix.os }}"
|
||||||
echo "Building UniLabOS package"
|
echo "Build full package: ${{ github.event.inputs.build_full || 'false' }}"
|
||||||
|
echo "Building packages:"
|
||||||
|
echo " - unilabos-env (environment dependencies)"
|
||||||
|
echo " - unilabos (with pip package)"
|
||||||
|
if [[ "${{ github.event.inputs.build_full }}" == "true" ]]; then
|
||||||
|
echo " - unilabos-full (complete package)"
|
||||||
|
fi
|
||||||
|
|
||||||
- name: Build conda package
|
- name: Build unilabos-env (conda environment only, noarch)
|
||||||
if: steps.should_build.outputs.should_build == 'true'
|
if: steps.should_build.outputs.should_build == 'true'
|
||||||
run: |
|
run: |
|
||||||
rattler-build build -r .conda/recipe.yaml -c uni-lab -c robostack-staging -c conda-forge
|
echo "Building unilabos-env (conda environment dependencies)..."
|
||||||
|
rattler-build build -r .conda/environment/recipe.yaml -c uni-lab -c robostack-staging -c conda-forge
|
||||||
|
|
||||||
|
- name: Upload unilabos-env to Anaconda.org (if enabled)
|
||||||
|
if: steps.should_build.outputs.should_build == 'true' && github.event.inputs.upload_to_anaconda == 'true'
|
||||||
|
run: |
|
||||||
|
echo "Uploading unilabos-env to uni-lab organization..."
|
||||||
|
for package in $(find ./output -name "unilabos-env*.conda"); do
|
||||||
|
anaconda -t ${{ secrets.ANACONDA_API_TOKEN }} upload --user uni-lab --force "$package"
|
||||||
|
done
|
||||||
|
|
||||||
|
- name: Build unilabos (with pip package)
|
||||||
|
if: steps.should_build.outputs.should_build == 'true'
|
||||||
|
run: |
|
||||||
|
echo "Building unilabos package..."
|
||||||
|
# 如果已上传到 Anaconda,从 uni-lab channel 获取 unilabos-env;否则从本地 output 获取
|
||||||
|
rattler-build build -r .conda/base/recipe.yaml -c uni-lab -c robostack-staging -c conda-forge --channel ./output
|
||||||
|
|
||||||
|
- name: Upload unilabos to Anaconda.org (if enabled)
|
||||||
|
if: steps.should_build.outputs.should_build == 'true' && github.event.inputs.upload_to_anaconda == 'true'
|
||||||
|
run: |
|
||||||
|
echo "Uploading unilabos to uni-lab organization..."
|
||||||
|
for package in $(find ./output -name "unilabos-0*.conda" -o -name "unilabos-[0-9]*.conda"); do
|
||||||
|
anaconda -t ${{ secrets.ANACONDA_API_TOKEN }} upload --user uni-lab --force "$package"
|
||||||
|
done
|
||||||
|
|
||||||
|
- name: Build unilabos-full - Only when explicitly requested
|
||||||
|
if: |
|
||||||
|
steps.should_build.outputs.should_build == 'true' &&
|
||||||
|
github.event.inputs.build_full == 'true'
|
||||||
|
run: |
|
||||||
|
echo "Building unilabos-full package on ${{ matrix.platform }}..."
|
||||||
|
rattler-build build -r .conda/full/recipe.yaml -c uni-lab -c robostack-staging -c conda-forge --channel ./output
|
||||||
|
|
||||||
|
- name: Upload unilabos-full to Anaconda.org (if enabled)
|
||||||
|
if: |
|
||||||
|
steps.should_build.outputs.should_build == 'true' &&
|
||||||
|
github.event.inputs.build_full == 'true' &&
|
||||||
|
github.event.inputs.upload_to_anaconda == 'true'
|
||||||
|
run: |
|
||||||
|
echo "Uploading unilabos-full to uni-lab organization..."
|
||||||
|
for package in $(find ./output -name "unilabos-full*.conda"); do
|
||||||
|
anaconda -t ${{ secrets.ANACONDA_API_TOKEN }} upload --user uni-lab --force "$package"
|
||||||
|
done
|
||||||
|
|
||||||
- name: List built packages
|
- name: List built packages
|
||||||
if: steps.should_build.outputs.should_build == 'true'
|
if: steps.should_build.outputs.should_build == 'true'
|
||||||
@@ -108,17 +195,9 @@ jobs:
|
|||||||
|
|
||||||
- name: Upload conda package artifacts
|
- name: Upload conda package artifacts
|
||||||
if: steps.should_build.outputs.should_build == 'true'
|
if: steps.should_build.outputs.should_build == 'true'
|
||||||
uses: actions/upload-artifact@v4
|
uses: actions/upload-artifact@v6
|
||||||
with:
|
with:
|
||||||
name: conda-package-unilabos-${{ matrix.platform }}
|
name: conda-package-unilabos-${{ matrix.platform }}
|
||||||
path: conda-packages-temp
|
path: conda-packages-temp
|
||||||
if-no-files-found: warn
|
if-no-files-found: warn
|
||||||
retention-days: 30
|
retention-days: 30
|
||||||
|
|
||||||
- name: Upload to Anaconda.org (uni-lab organization)
|
|
||||||
if: github.event.inputs.upload_to_anaconda == 'true'
|
|
||||||
run: |
|
|
||||||
for package in $(find ./output -name "*.conda"); do
|
|
||||||
echo "Uploading $package to uni-lab organization..."
|
|
||||||
anaconda -t ${{ secrets.ANACONDA_API_TOKEN }} upload --user uni-lab --force "$package"
|
|
||||||
done
|
|
||||||
|
|||||||
1
.gitignore
vendored
1
.gitignore
vendored
@@ -1,3 +1,4 @@
|
|||||||
|
cursor_docs/
|
||||||
configs/
|
configs/
|
||||||
temp/
|
temp/
|
||||||
output/
|
output/
|
||||||
|
|||||||
@@ -1,4 +1,5 @@
|
|||||||
recursive-include unilabos/test *
|
recursive-include unilabos/test *
|
||||||
|
recursive-include unilabos/utils *
|
||||||
recursive-include unilabos/registry *.yaml
|
recursive-include unilabos/registry *.yaml
|
||||||
recursive-include unilabos/app/web/static *
|
recursive-include unilabos/app/web/static *
|
||||||
recursive-include unilabos/app/web/templates *
|
recursive-include unilabos/app/web/templates *
|
||||||
|
|||||||
17
NOTICE
Normal file
17
NOTICE
Normal file
@@ -0,0 +1,17 @@
|
|||||||
|
# Uni-Lab-OS Licensing Notice
|
||||||
|
|
||||||
|
This project uses a dual licensing structure:
|
||||||
|
|
||||||
|
## 1. Main Framework - GPL-3.0
|
||||||
|
|
||||||
|
- unilabos/ (except unilabos/devices/)
|
||||||
|
- docs/
|
||||||
|
- tests/
|
||||||
|
|
||||||
|
See [LICENSE](LICENSE) for details.
|
||||||
|
|
||||||
|
## 2. Device Drivers - DP Technology Proprietary License
|
||||||
|
|
||||||
|
- unilabos/devices/
|
||||||
|
|
||||||
|
See [unilabos/devices/LICENSE](unilabos/devices/LICENSE) for details.
|
||||||
90
README.md
90
README.md
@@ -8,17 +8,13 @@
|
|||||||
|
|
||||||
**English** | [中文](README_zh.md)
|
**English** | [中文](README_zh.md)
|
||||||
|
|
||||||
[](https://github.com/dptech-corp/Uni-Lab-OS/stargazers)
|
[](https://github.com/deepmodeling/Uni-Lab-OS/stargazers)
|
||||||
[](https://github.com/dptech-corp/Uni-Lab-OS/network/members)
|
[](https://github.com/deepmodeling/Uni-Lab-OS/network/members)
|
||||||
[](https://github.com/dptech-corp/Uni-Lab-OS/issues)
|
[](https://github.com/deepmodeling/Uni-Lab-OS/issues)
|
||||||
[](https://github.com/dptech-corp/Uni-Lab-OS/blob/main/LICENSE)
|
[](https://github.com/deepmodeling/Uni-Lab-OS/blob/main/LICENSE)
|
||||||
|
|
||||||
Uni-Lab-OS is a platform for laboratory automation, designed to connect and control various experimental equipment, enabling automation and standardization of experimental workflows.
|
Uni-Lab-OS is a platform for laboratory automation, designed to connect and control various experimental equipment, enabling automation and standardization of experimental workflows.
|
||||||
|
|
||||||
## 🏆 Competition
|
|
||||||
|
|
||||||
Join the [Intelligent Organic Chemistry Synthesis Competition](https://bohrium.dp.tech/competitions/1451645258) to explore automated synthesis with Uni-Lab-OS!
|
|
||||||
|
|
||||||
## Key Features
|
## Key Features
|
||||||
|
|
||||||
- Multi-device integration management
|
- Multi-device integration management
|
||||||
@@ -31,41 +27,89 @@ Join the [Intelligent Organic Chemistry Synthesis Competition](https://bohrium.d
|
|||||||
|
|
||||||
Detailed documentation can be found at:
|
Detailed documentation can be found at:
|
||||||
|
|
||||||
- [Online Documentation](https://xuwznln.github.io/Uni-Lab-OS-Doc/)
|
- [Online Documentation](https://deepmodeling.github.io/Uni-Lab-OS/)
|
||||||
|
|
||||||
## Quick Start
|
## Quick Start
|
||||||
|
|
||||||
Uni-Lab-OS recommends using `mamba` for environment management. Choose the appropriate environment file for your operating system:
|
### 1. Setup Conda Environment
|
||||||
|
|
||||||
|
Uni-Lab-OS recommends using `mamba` for environment management. Choose the package that fits your needs:
|
||||||
|
|
||||||
|
| Package | Use Case | Contents |
|
||||||
|
|---------|----------|----------|
|
||||||
|
| `unilabos` | **Recommended for most users** | Complete package, ready to use |
|
||||||
|
| `unilabos-env` | Developers (editable install) | Environment only, install unilabos via pip |
|
||||||
|
| `unilabos-full` | Simulation/Visualization | unilabos + ROS2 Desktop + Gazebo + MoveIt |
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
# Create new environment
|
# Create new environment
|
||||||
mamba create -n unilab python=3.11.11
|
mamba create -n unilab python=3.11.14
|
||||||
mamba activate unilab
|
mamba activate unilab
|
||||||
mamba install -n unilab uni-lab::unilabos -c robostack-staging -c conda-forge
|
|
||||||
|
# Option A: Standard installation (recommended for most users)
|
||||||
|
mamba install uni-lab::unilabos -c robostack-staging -c conda-forge
|
||||||
|
|
||||||
|
# Option B: For developers (editable mode development)
|
||||||
|
mamba install uni-lab::unilabos-env -c robostack-staging -c conda-forge
|
||||||
|
# Then install unilabos and dependencies:
|
||||||
|
git clone https://github.com/deepmodeling/Uni-Lab-OS.git && cd Uni-Lab-OS
|
||||||
|
pip install -e .
|
||||||
|
uv pip install -r unilabos/utils/requirements.txt
|
||||||
|
|
||||||
|
# Option C: Full installation (simulation/visualization)
|
||||||
|
mamba install uni-lab::unilabos-full -c robostack-staging -c conda-forge
|
||||||
```
|
```
|
||||||
|
|
||||||
## Install Dev Uni-Lab-OS
|
**When to use which?**
|
||||||
|
- **unilabos**: Standard installation for production deployment and general usage (recommended)
|
||||||
|
- **unilabos-env**: For developers who need `pip install -e .` editable mode, modify source code
|
||||||
|
- **unilabos-full**: For simulation (Gazebo), visualization (rviz2), and Jupyter notebooks
|
||||||
|
|
||||||
|
### 2. Clone Repository (Optional, for developers)
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
# Clone the repository
|
# Clone the repository (only needed for development or examples)
|
||||||
git clone https://github.com/dptech-corp/Uni-Lab-OS.git
|
git clone https://github.com/deepmodeling/Uni-Lab-OS.git
|
||||||
cd Uni-Lab-OS
|
cd Uni-Lab-OS
|
||||||
|
|
||||||
# Install Uni-Lab-OS
|
|
||||||
pip install .
|
|
||||||
```
|
```
|
||||||
|
|
||||||
3. Start Uni-Lab System:
|
3. Start Uni-Lab System
|
||||||
|
|
||||||
Please refer to [Documentation - Boot Examples](https://xuwznln.github.io/Uni-Lab-OS-Doc/boot_examples/index.html)
|
Please refer to [Documentation - Boot Examples](https://deepmodeling.github.io/Uni-Lab-OS/boot_examples/index.html)
|
||||||
|
|
||||||
|
4. Best Practice
|
||||||
|
|
||||||
|
See [Best Practice Guide](https://deepmodeling.github.io/Uni-Lab-OS/user_guide/best_practice.html)
|
||||||
|
|
||||||
## Message Format
|
## Message Format
|
||||||
|
|
||||||
Uni-Lab-OS uses pre-built `unilabos_msgs` for system communication. You can find the built versions on the [GitHub Releases](https://github.com/dptech-corp/Uni-Lab-OS/releases) page.
|
Uni-Lab-OS uses pre-built `unilabos_msgs` for system communication. You can find the built versions on the [GitHub Releases](https://github.com/deepmodeling/Uni-Lab-OS/releases) page.
|
||||||
|
|
||||||
|
## Citation
|
||||||
|
|
||||||
|
If you use [Uni-Lab-OS](https://arxiv.org/abs/2512.21766) in academic research, please cite:
|
||||||
|
|
||||||
|
```bibtex
|
||||||
|
@article{gao2025unilabos,
|
||||||
|
title = {UniLabOS: An AI-Native Operating System for Autonomous Laboratories},
|
||||||
|
doi = {10.48550/arXiv.2512.21766},
|
||||||
|
publisher = {arXiv},
|
||||||
|
author = {Gao, Jing and Chang, Junhan and Que, Haohui and Xiong, Yanfei and
|
||||||
|
Zhang, Shixiang and Qi, Xianwei and Liu, Zhen and Wang, Jun-Jie and
|
||||||
|
Ding, Qianjun and Li, Xinyu and Pan, Ziwei and Xie, Qiming and
|
||||||
|
Yan, Zhuang and Yan, Junchi and Zhang, Linfeng},
|
||||||
|
year = {2025}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
## License
|
## License
|
||||||
|
|
||||||
This project is licensed under GPL-3.0 - see the [LICENSE](LICENSE) file for details.
|
This project uses a dual licensing structure:
|
||||||
|
|
||||||
|
- **Main Framework**: GPL-3.0 - see [LICENSE](LICENSE)
|
||||||
|
- **Device Drivers** (`unilabos/devices/`): DP Technology Proprietary License
|
||||||
|
|
||||||
|
See [NOTICE](NOTICE) for complete licensing details.
|
||||||
|
|
||||||
## Project Statistics
|
## Project Statistics
|
||||||
|
|
||||||
@@ -77,4 +121,4 @@ This project is licensed under GPL-3.0 - see the [LICENSE](LICENSE) file for det
|
|||||||
|
|
||||||
## Contact Us
|
## Contact Us
|
||||||
|
|
||||||
- GitHub Issues: [https://github.com/dptech-corp/Uni-Lab-OS/issues](https://github.com/dptech-corp/Uni-Lab-OS/issues)
|
- GitHub Issues: [https://github.com/deepmodeling/Uni-Lab-OS/issues](https://github.com/deepmodeling/Uni-Lab-OS/issues)
|
||||||
|
|||||||
90
README_zh.md
90
README_zh.md
@@ -8,17 +8,13 @@
|
|||||||
|
|
||||||
[English](README.md) | **中文**
|
[English](README.md) | **中文**
|
||||||
|
|
||||||
[](https://github.com/dptech-corp/Uni-Lab-OS/stargazers)
|
[](https://github.com/deepmodeling/Uni-Lab-OS/stargazers)
|
||||||
[](https://github.com/dptech-corp/Uni-Lab-OS/network/members)
|
[](https://github.com/deepmodeling/Uni-Lab-OS/network/members)
|
||||||
[](https://github.com/dptech-corp/Uni-Lab-OS/issues)
|
[](https://github.com/deepmodeling/Uni-Lab-OS/issues)
|
||||||
[](https://github.com/dptech-corp/Uni-Lab-OS/blob/main/LICENSE)
|
[](https://github.com/deepmodeling/Uni-Lab-OS/blob/main/LICENSE)
|
||||||
|
|
||||||
Uni-Lab-OS 是一个用于实验室自动化的综合平台,旨在连接和控制各种实验设备,实现实验流程的自动化和标准化。
|
Uni-Lab-OS 是一个用于实验室自动化的综合平台,旨在连接和控制各种实验设备,实现实验流程的自动化和标准化。
|
||||||
|
|
||||||
## 🏆 比赛
|
|
||||||
|
|
||||||
欢迎参加[有机化学合成智能实验大赛](https://bohrium.dp.tech/competitions/1451645258),使用 Uni-Lab-OS 探索自动化合成!
|
|
||||||
|
|
||||||
## 核心特点
|
## 核心特点
|
||||||
|
|
||||||
- 多设备集成管理
|
- 多设备集成管理
|
||||||
@@ -31,43 +27,89 @@ Uni-Lab-OS 是一个用于实验室自动化的综合平台,旨在连接和控
|
|||||||
|
|
||||||
详细文档可在以下位置找到:
|
详细文档可在以下位置找到:
|
||||||
|
|
||||||
- [在线文档](https://xuwznln.github.io/Uni-Lab-OS-Doc/)
|
- [在线文档](https://deepmodeling.github.io/Uni-Lab-OS/)
|
||||||
|
|
||||||
## 快速开始
|
## 快速开始
|
||||||
|
|
||||||
1. 配置 Conda 环境
|
### 1. 配置 Conda 环境
|
||||||
|
|
||||||
Uni-Lab-OS 建议使用 `mamba` 管理环境。根据您的操作系统选择适当的环境文件:
|
Uni-Lab-OS 建议使用 `mamba` 管理环境。根据您的需求选择合适的安装包:
|
||||||
|
|
||||||
|
| 安装包 | 适用场景 | 包含内容 |
|
||||||
|
|--------|----------|----------|
|
||||||
|
| `unilabos` | **推荐大多数用户** | 完整安装包,开箱即用 |
|
||||||
|
| `unilabos-env` | 开发者(可编辑安装) | 仅环境依赖,通过 pip 安装 unilabos |
|
||||||
|
| `unilabos-full` | 仿真/可视化 | unilabos + ROS2 桌面版 + Gazebo + MoveIt |
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
# 创建新环境
|
# 创建新环境
|
||||||
mamba create -n unilab python=3.11.11
|
mamba create -n unilab python=3.11.14
|
||||||
mamba activate unilab
|
mamba activate unilab
|
||||||
mamba install -n unilab uni-lab::unilabos -c robostack-staging -c conda-forge
|
|
||||||
|
# 方案 A:标准安装(推荐大多数用户)
|
||||||
|
mamba install uni-lab::unilabos -c robostack-staging -c conda-forge
|
||||||
|
|
||||||
|
# 方案 B:开发者环境(可编辑模式开发)
|
||||||
|
mamba install uni-lab::unilabos-env -c robostack-staging -c conda-forge
|
||||||
|
# 然后安装 unilabos 和依赖:
|
||||||
|
git clone https://github.com/deepmodeling/Uni-Lab-OS.git && cd Uni-Lab-OS
|
||||||
|
pip install -e .
|
||||||
|
uv pip install -r unilabos/utils/requirements.txt
|
||||||
|
|
||||||
|
# 方案 C:完整安装(仿真/可视化)
|
||||||
|
mamba install uni-lab::unilabos-full -c robostack-staging -c conda-forge
|
||||||
```
|
```
|
||||||
|
|
||||||
2. 安装开发版 Uni-Lab-OS:
|
**如何选择?**
|
||||||
|
- **unilabos**:标准安装,适用于生产部署和日常使用(推荐)
|
||||||
|
- **unilabos-env**:开发者使用,支持 `pip install -e .` 可编辑模式,可修改源代码
|
||||||
|
- **unilabos-full**:需要仿真(Gazebo)、可视化(rviz2)或 Jupyter Notebook
|
||||||
|
|
||||||
|
### 2. 克隆仓库(可选,供开发者使用)
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
# 克隆仓库
|
# 克隆仓库(仅开发或查看示例时需要)
|
||||||
git clone https://github.com/dptech-corp/Uni-Lab-OS.git
|
git clone https://github.com/deepmodeling/Uni-Lab-OS.git
|
||||||
cd Uni-Lab-OS
|
cd Uni-Lab-OS
|
||||||
|
|
||||||
# 安装 Uni-Lab-OS
|
|
||||||
pip install .
|
|
||||||
```
|
```
|
||||||
|
|
||||||
3. 启动 Uni-Lab 系统:
|
3. 启动 Uni-Lab 系统
|
||||||
|
|
||||||
请见[文档-启动样例](https://xuwznln.github.io/Uni-Lab-OS-Doc/boot_examples/index.html)
|
请见[文档-启动样例](https://deepmodeling.github.io/Uni-Lab-OS/boot_examples/index.html)
|
||||||
|
|
||||||
|
4. 最佳实践
|
||||||
|
|
||||||
|
请见[最佳实践指南](https://deepmodeling.github.io/Uni-Lab-OS/user_guide/best_practice.html)
|
||||||
|
|
||||||
## 消息格式
|
## 消息格式
|
||||||
|
|
||||||
Uni-Lab-OS 使用预构建的 `unilabos_msgs` 进行系统通信。您可以在 [GitHub Releases](https://github.com/dptech-corp/Uni-Lab-OS/releases) 页面找到已构建的版本。
|
Uni-Lab-OS 使用预构建的 `unilabos_msgs` 进行系统通信。您可以在 [GitHub Releases](https://github.com/deepmodeling/Uni-Lab-OS/releases) 页面找到已构建的版本。
|
||||||
|
|
||||||
|
## 引用
|
||||||
|
|
||||||
|
如果您在学术研究中使用 [Uni-Lab-OS](https://arxiv.org/abs/2512.21766),请引用:
|
||||||
|
|
||||||
|
```bibtex
|
||||||
|
@article{gao2025unilabos,
|
||||||
|
title = {UniLabOS: An AI-Native Operating System for Autonomous Laboratories},
|
||||||
|
doi = {10.48550/arXiv.2512.21766},
|
||||||
|
publisher = {arXiv},
|
||||||
|
author = {Gao, Jing and Chang, Junhan and Que, Haohui and Xiong, Yanfei and
|
||||||
|
Zhang, Shixiang and Qi, Xianwei and Liu, Zhen and Wang, Jun-Jie and
|
||||||
|
Ding, Qianjun and Li, Xinyu and Pan, Ziwei and Xie, Qiming and
|
||||||
|
Yan, Zhuang and Yan, Junchi and Zhang, Linfeng},
|
||||||
|
year = {2025}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
## 许可证
|
## 许可证
|
||||||
|
|
||||||
此项目采用 GPL-3.0 许可 - 详情请参阅 [LICENSE](LICENSE) 文件。
|
本项目采用双许可证结构:
|
||||||
|
|
||||||
|
- **主框架**:GPL-3.0 - 详见 [LICENSE](LICENSE)
|
||||||
|
- **设备驱动** (`unilabos/devices/`):深势科技专有许可证
|
||||||
|
|
||||||
|
完整许可证说明请参阅 [NOTICE](NOTICE)。
|
||||||
|
|
||||||
## 项目统计
|
## 项目统计
|
||||||
|
|
||||||
@@ -79,4 +121,4 @@ Uni-Lab-OS 使用预构建的 `unilabos_msgs` 进行系统通信。您可以在
|
|||||||
|
|
||||||
## 联系我们
|
## 联系我们
|
||||||
|
|
||||||
- GitHub Issues: [https://github.com/dptech-corp/Uni-Lab-OS/issues](https://github.com/dptech-corp/Uni-Lab-OS/issues)
|
- GitHub Issues: [https://github.com/deepmodeling/Uni-Lab-OS/issues](https://github.com/deepmodeling/Uni-Lab-OS/issues)
|
||||||
|
|||||||
@@ -24,7 +24,7 @@ extensions = [
|
|||||||
"sphinx.ext.autodoc",
|
"sphinx.ext.autodoc",
|
||||||
"sphinx.ext.napoleon", # 如果您使用 Google 或 NumPy 风格的 docstrings
|
"sphinx.ext.napoleon", # 如果您使用 Google 或 NumPy 风格的 docstrings
|
||||||
"sphinx_rtd_theme",
|
"sphinx_rtd_theme",
|
||||||
"sphinxcontrib.mermaid"
|
"sphinxcontrib.mermaid",
|
||||||
]
|
]
|
||||||
|
|
||||||
source_suffix = {
|
source_suffix = {
|
||||||
@@ -58,7 +58,7 @@ html_theme = "sphinx_rtd_theme"
|
|||||||
|
|
||||||
# sphinx-book-theme 主题选项
|
# sphinx-book-theme 主题选项
|
||||||
html_theme_options = {
|
html_theme_options = {
|
||||||
"repository_url": "https://github.com/用户名/Uni-Lab",
|
"repository_url": "https://github.com/deepmodeling/Uni-Lab-OS",
|
||||||
"use_repository_button": True,
|
"use_repository_button": True,
|
||||||
"use_issues_button": True,
|
"use_issues_button": True,
|
||||||
"use_edit_page_button": True,
|
"use_edit_page_button": True,
|
||||||
|
|||||||
File diff suppressed because it is too large
Load Diff
@@ -12,3 +12,7 @@ sphinx-copybutton>=0.5.0
|
|||||||
|
|
||||||
# 用于自动摘要生成
|
# 用于自动摘要生成
|
||||||
sphinx-autobuild>=2024.2.4
|
sphinx-autobuild>=2024.2.4
|
||||||
|
|
||||||
|
# 用于PDF导出 (rinohtype方案,纯Python无需LaTeX)
|
||||||
|
rinohtype>=0.5.4
|
||||||
|
sphinx-simplepdf>=1.6.0
|
||||||
@@ -31,6 +31,14 @@
|
|||||||
|
|
||||||
详细的安装步骤请参考 [安装指南](installation.md)。
|
详细的安装步骤请参考 [安装指南](installation.md)。
|
||||||
|
|
||||||
|
**选择合适的安装包:**
|
||||||
|
|
||||||
|
| 安装包 | 适用场景 | 包含组件 |
|
||||||
|
|--------|----------|----------|
|
||||||
|
| `unilabos` | **推荐大多数用户**,生产部署 | 完整安装包,开箱即用 |
|
||||||
|
| `unilabos-env` | 开发者(可编辑安装) | 仅环境依赖,通过 pip 安装 unilabos |
|
||||||
|
| `unilabos-full` | 仿真/可视化 | unilabos + 完整 ROS2 桌面版 + Gazebo + MoveIt |
|
||||||
|
|
||||||
**关键步骤:**
|
**关键步骤:**
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
@@ -38,15 +46,30 @@
|
|||||||
# 下载 Miniforge: https://github.com/conda-forge/miniforge/releases
|
# 下载 Miniforge: https://github.com/conda-forge/miniforge/releases
|
||||||
|
|
||||||
# 2. 创建 Conda 环境
|
# 2. 创建 Conda 环境
|
||||||
mamba create -n unilab python=3.11.11
|
mamba create -n unilab python=3.11.14
|
||||||
|
|
||||||
# 3. 激活环境
|
# 3. 激活环境
|
||||||
mamba activate unilab
|
mamba activate unilab
|
||||||
|
|
||||||
# 4. 安装 Uni-Lab-OS
|
# 4. 安装 Uni-Lab-OS(选择其一)
|
||||||
|
|
||||||
|
# 方案 A:标准安装(推荐大多数用户)
|
||||||
mamba install uni-lab::unilabos -c robostack-staging -c conda-forge
|
mamba install uni-lab::unilabos -c robostack-staging -c conda-forge
|
||||||
|
|
||||||
|
# 方案 B:开发者环境(可编辑模式开发)
|
||||||
|
mamba install uni-lab::unilabos-env -c robostack-staging -c conda-forge
|
||||||
|
pip install -e /path/to/Uni-Lab-OS # 可编辑安装
|
||||||
|
uv pip install -r unilabos/utils/requirements.txt # 安装 pip 依赖
|
||||||
|
|
||||||
|
# 方案 C:完整版(仿真/可视化)
|
||||||
|
mamba install uni-lab::unilabos-full -c robostack-staging -c conda-forge
|
||||||
```
|
```
|
||||||
|
|
||||||
|
**选择建议:**
|
||||||
|
- **日常使用/生产部署**:使用 `unilabos`(推荐),完整功能,开箱即用
|
||||||
|
- **开发者**:使用 `unilabos-env` + `pip install -e .` + `uv pip install -r unilabos/utils/requirements.txt`,代码修改立即生效
|
||||||
|
- **仿真/可视化**:使用 `unilabos-full`,含 Gazebo、rviz2、MoveIt
|
||||||
|
|
||||||
#### 1.2 验证安装
|
#### 1.2 验证安装
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
@@ -416,6 +439,9 @@ unilab --ak your_ak --sk your_sk -g test/experiments/mock_devices/mock_all.json
|
|||||||
1. 访问 Web 界面,进入"仪器耗材"模块
|
1. 访问 Web 界面,进入"仪器耗材"模块
|
||||||
2. 在"仪器设备"区域找到并添加上述设备
|
2. 在"仪器设备"区域找到并添加上述设备
|
||||||
3. 在"物料耗材"区域找到并添加容器
|
3. 在"物料耗材"区域找到并添加容器
|
||||||
|
4. 在workstation中配置protocol_type包含PumpTransferProtocol
|
||||||
|
|
||||||
|

|
||||||
|
|
||||||

|

|
||||||
|
|
||||||
@@ -426,8 +452,9 @@ unilab --ak your_ak --sk your_sk -g test/experiments/mock_devices/mock_all.json
|
|||||||
**操作步骤:**
|
**操作步骤:**
|
||||||
|
|
||||||
1. 将两个 `container` 拖拽到 `workstation` 中
|
1. 将两个 `container` 拖拽到 `workstation` 中
|
||||||
2. 将 `virtual_transfer_pump` 拖拽到 `workstation` 中
|
2. 将 `virtual_multiway_valve` 拖拽到 `workstation` 中
|
||||||
3. 在画布上连接它们(建立父子关系)
|
3. 将 `virtual_transfer_pump` 拖拽到 `workstation` 中
|
||||||
|
4. 在画布上连接它们(建立父子关系)
|
||||||
|
|
||||||

|

|
||||||
|
|
||||||
@@ -768,7 +795,43 @@ Waiting for host service...

 For a detailed guide to writing device drivers, see [Adding a Device Driver](../developer_guide/add_device.md).

-#### 9.1 Why do you need custom devices?
+#### 9.1 Preparing the development environment
+
+**The recommended setup for device development is `unilabos-env` + `pip install -e .` + `uv pip install`:**
+
+```bash
+# 1. Create the environment and install unilabos-env (ROS2 + conda dependencies + uv)
+mamba create -n unilab python=3.11.14
+conda activate unilab
+mamba install uni-lab::unilabos-env -c robostack-staging -c conda-forge
+
+# 2. Clone the code
+git clone https://github.com/deepmodeling/Uni-Lab-OS.git
+cd Uni-Lab-OS
+
+# 3. Install in editable mode (the script is recommended; it auto-detects a Chinese locale)
+python scripts/dev_install.py
+
+# Or install manually:
+pip install -e .
+uv pip install -r unilabos/utils/requirements.txt
+```
+
+**Why this approach?**
+- `unilabos-env` provides the ROS2 core components and uv (installed via conda, so nothing needs to be compiled)
+- `unilabos/utils/requirements.txt` contains all pip dependencies needed at runtime
+- `dev_install.py` detects a Chinese locale and automatically switches to the Tsinghua mirror on Chinese systems (see the sketch after this list)
+- `uv` is used instead of `pip`, which makes installation faster
+- Editable mode: code changes take effect **immediately**, no reinstall required
+
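As an illustration of the mirror-selection behaviour described above, the locale check shipped in `scripts/dev_install.py` (the full script appears later in this diff) reduces to a few lines. This is a trimmed sketch of that logic, not an additional API.

```python
# Trimmed sketch of the locale check scripts/dev_install.py uses to decide
# whether to pass the Tsinghua mirror to pip/uv (full script appears later in this diff).
import locale

TSINGHUA_MIRROR = "https://mirrors.tuna.tsinghua.edu.cn/pypi/web/simple"


def is_chinese_locale() -> bool:
    try:
        lang = locale.getdefaultlocale()[0]
        return bool(lang) and ("zh" in lang.lower() or "chinese" in lang.lower())
    except Exception:
        return False


extra_args = ["-i", TSINGHUA_MIRROR] if is_chinese_locale() else []
print("pip/uv extra args:", extra_args)
```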
+**If the installation fails or is too slow**, you can run the commands manually (using the Tsinghua mirror):
+
+```bash
+pip install -e . -i https://mirrors.tuna.tsinghua.edu.cn/pypi/web/simple
+uv pip install -r unilabos/utils/requirements.txt -i https://mirrors.tuna.tsinghua.edu.cn/pypi/web/simple
+```
+
+#### 9.2 Why do you need custom devices?

 Uni-Lab-OS ships with common devices, but your lab may have special devices that need to be integrated:

@@ -777,7 +840,7 @@ Uni-Lab-OS ships with common devices, but your lab may have special devices
 - Special experimental workflows
 - Third-party device integration

-#### 9.2 Create a Python package
+#### 9.3 Create a Python package

 To make development and maintenance easier, it is recommended to create a separate Python package for your lab.

@@ -814,7 +877,7 @@ touch my_lab_devices/my_lab_devices/__init__.py
 touch my_lab_devices/my_lab_devices/devices/__init__.py
 ```

-#### 9.3 Create setup.py
+#### 9.4 Create setup.py

 ```python
 # my_lab_devices/setup.py
@@ -845,7 +908,7 @@ setup(
 )
 ```

-#### 9.4 Development install
+#### 9.5 Development install

 Use the `-e` flag for an editable install so that code changes take effect immediately:

@@ -860,7 +923,7 @@ pip install -e . -i https://mirrors.tuna.tsinghua.edu.cn/pypi/web/simple
 - Easy to debug and test
 - Works with version control (git)

-#### 9.5 Write the device driver
+#### 9.6 Write the device driver

 Create the device driver file:

@@ -1001,7 +1064,7 @@ class MyPump:
 - **Return a Dict**: every action method returns a dictionary
 - **Docstrings**: describe parameters and behaviour in detail

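To make the two conventions above concrete, here is a hedged sketch of what one action method on a driver like the `MyPump` class referenced in this hunk might look like. The method name, parameters, and keys of the returned dictionary are illustrative assumptions, not part of the documented API.

```python
# Hypothetical action method following the conventions above: a detailed
# docstring plus a Dict return value. The names (set_flow_rate, "status",
# "flow_rate_ml_min") are illustrative assumptions, not a documented API.
from typing import Dict


class MyPump:
    def set_flow_rate(self, flow_rate: float) -> Dict[str, object]:
        """Set the pump flow rate.

        Args:
            flow_rate: Target flow rate in mL/min; must be non-negative.

        Returns:
            A dictionary describing the outcome, e.g. a status field and the value applied.
        """
        if flow_rate < 0:
            return {"status": "error", "message": "flow_rate must be >= 0"}
        self._flow_rate = flow_rate  # value the (virtual) hardware would use
        return {"status": "ok", "flow_rate_ml_min": flow_rate}
```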
-#### 9.6 Test the device driver
+#### 9.7 Test the device driver

 Create a simple test script:

@@ -1807,7 +1870,7 @@ unilab --ak your_ak --sk your_sk -g graph.json \

 #### 14.5 Community support

-- **GitHub Issues**: [https://github.com/dptech-corp/Uni-Lab-OS/issues](https://github.com/dptech-corp/Uni-Lab-OS/issues)
+- **GitHub Issues**: [https://github.com/deepmodeling/Uni-Lab-OS/issues](https://github.com/deepmodeling/Uni-Lab-OS/issues)
 - **Official website**: [https://uni-lab.bohrium.com](https://uni-lab.bohrium.com)

 ---

@@ -463,7 +463,7 @@ Uni-Lab uses the `ResourceDictInstance.get_resource_instance_from_dict()` method
 ### Usage example

 ```python
-from unilabos.ros.nodes.resource_tracker import ResourceDictInstance
+from unilabos.resources.resource_tracker import ResourceDictInstance

 # Old-format node
 old_format_node = {
@@ -477,10 +477,10 @@ old_format_node = {
 instance = ResourceDictInstance.get_resource_instance_from_dict(old_format_node)

 # Access the normalized data
 print(instance.res_content.id)      # "pump_1"
 print(instance.res_content.uuid)    # auto-generated UUID
 print(instance.res_content.config)  # {}
 print(instance.res_content.data)    # {}
 ```

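The example above normalizes a single node; when migrating a whole graph it is common to run every old-format node through the same call in a loop. The sketch below only reuses the API shown above (`get_resource_instance_from_dict` and the `res_content` fields); the list and dictionary names are illustrative, and the node dictionaries are trimmed to an `id` for brevity, so they may need the additional fields from the example above.

```python
# Minimal sketch: normalize a batch of old-format nodes and collect the UUIDs
# that are auto-generated. Only the API shown above is used; old_nodes and
# uuid_by_id are illustrative names, and the trimmed node dicts are assumptions.
from unilabos.resources.resource_tracker import ResourceDictInstance

old_nodes = [
    {"id": "pump_1"},
    {"id": "valve_1"},
]

uuid_by_id = {}
for node in old_nodes:
    instance = ResourceDictInstance.get_resource_instance_from_dict(node)
    uuid_by_id[instance.res_content.id] = instance.res_content.uuid

print(uuid_by_id)  # e.g. {"pump_1": "<uuid>", "valve_1": "<uuid>"}
```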
 ### Format migration advice
@@ -857,4 +857,4 @@ class ResourceDictPosition(BaseModel):
 - Create resources from templates in the web interface
 - See the example files in the `test/experiments/` directory
 - Read the ResourceDict source code for the complete definitions
-- [GitHub Discussions](https://github.com/dptech-corp/Uni-Lab-OS/discussions)
+- [GitHub Discussions](https://github.com/deepmodeling/Uni-Lab-OS/discussions)

BIN  docs/user_guide/image/add_protocol.png  (new file; binary file not shown; after: 81 KiB)
BIN  (binary file not shown; before: 275 KiB, after: 415 KiB)
@@ -13,15 +13,26 @@
 - Developers need Git and basic Python development knowledge
 - Custom msgs require a GitHub account

+## Choosing an installation package
+
+Uni-Lab-OS ships three installation packages; pick the one that matches your needs:
+
+| Package | Use case | Contents | Disk usage |
+|--------|----------|----------|----------|
+| **unilabos** | **Recommended for most users**, production deployment | Complete package, works out of the box | ~2-3 GB |
+| **unilabos-env** | Developer environment (editable install) | Environment dependencies only; unilabos installed via pip | ~2 GB |
+| **unilabos-full** | Simulation, visualization, full feature set | unilabos + full ROS2 desktop + Gazebo + MoveIt | ~8-10 GB |
+
 ## Choosing an installation method

 Choose the installation method that matches your use case:

-| Method | Audience | Characteristics | Install time |
-| ---------------------- | -------------------- | ------------------------------ | ---------------------------- |
-| **Method 1: one-click install** | Lab users, quick trials | Pre-packaged environment, works offline, no configuration | 5-10 minutes (with a good network) |
-| **Method 2: manual install** | Standard users, production | Flexible configuration, controllable versions | 10-20 minutes |
-| **Method 3: developer install** | Developers who need to modify the source | Editable mode, supports custom msgs | 20-30 minutes |
+| Method | Audience | Recommended package | Characteristics | Install time |
+| ---------------------- | -------------------- | ----------------- | ------------------------------ | ---------------------------- |
+| **Method 1: one-click install** | Quick trials, demos | Pre-packaged environment | Works offline, no configuration | 5-10 minutes (with a good network) |
+| **Method 2: manual install** | **Most users** | `unilabos` | Full functionality, works out of the box | 10-20 minutes |
+| **Method 3: developer install** | Developers who need to modify the source | `unilabos-env` | Editable mode, supports custom development | 20-30 minutes |
+| **Simulation / visualization** | Simulation testing, visual debugging | `unilabos-full` | Includes Gazebo, rviz2, MoveIt | 30-60 minutes |

 ---

@@ -37,7 +48,7 @@

 #### Step 1: Download the pre-packaged environment

-1. Visit [GitHub Actions - Conda Pack Build](https://github.com/dptech-corp/Uni-Lab-OS/actions/workflows/conda-pack-build.yml)
+1. Visit [GitHub Actions - Conda Pack Build](https://github.com/deepmodeling/Uni-Lab-OS/actions/workflows/conda-pack-build.yml)

 2. Pick the latest successful build (green check mark ✓)

@@ -144,17 +155,38 @@ bash Miniforge3-$(uname)-$(uname -m).sh
|
|||||||
使用以下命令创建 Uni-Lab 专用环境:
|
使用以下命令创建 Uni-Lab 专用环境:
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
mamba create -n unilab python=3.11.11 # 目前ros2组件依赖版本大多为3.11.11
|
mamba create -n unilab python=3.11.14 # 目前ros2组件依赖版本大多为3.11.14
|
||||||
mamba activate unilab
|
mamba activate unilab
|
||||||
mamba install -n unilab uni-lab::unilabos -c robostack-staging -c conda-forge
|
|
||||||
|
# 选择安装包(三选一):
|
||||||
|
|
||||||
|
# 方案 A:标准安装(推荐大多数用户)
|
||||||
|
mamba install uni-lab::unilabos -c robostack-staging -c conda-forge
|
||||||
|
|
||||||
|
# 方案 B:开发者环境(可编辑模式开发)
|
||||||
|
mamba install uni-lab::unilabos-env -c robostack-staging -c conda-forge
|
||||||
|
# 然后安装 unilabos 和 pip 依赖:
|
||||||
|
git clone https://github.com/deepmodeling/Uni-Lab-OS.git && cd Uni-Lab-OS
|
||||||
|
pip install -e .
|
||||||
|
uv pip install -r unilabos/utils/requirements.txt
|
||||||
|
|
||||||
|
# 方案 C:完整版(含仿真和可视化工具)
|
||||||
|
mamba install uni-lab::unilabos-full -c robostack-staging -c conda-forge
|
||||||
```
|
```
|
||||||
|
|
||||||
**参数说明**:
|
**参数说明**:
|
||||||
|
|
||||||
- `-n unilab`: 创建名为 "unilab" 的环境
|
- `-n unilab`: 创建名为 "unilab" 的环境
|
||||||
- `uni-lab::unilabos`: 从 uni-lab channel 安装 unilabos 包
|
- `uni-lab::unilabos`: 安装 unilabos 完整包,开箱即用(推荐)
|
||||||
|
- `uni-lab::unilabos-env`: 仅安装环境依赖,适合开发者使用 `pip install -e .`
|
||||||
|
- `uni-lab::unilabos-full`: 安装完整包(含 ROS2 Desktop、Gazebo、MoveIt 等)
|
||||||
- `-c robostack-staging -c conda-forge`: 添加额外的软件源
|
- `-c robostack-staging -c conda-forge`: 添加额外的软件源
|
||||||
|
|
||||||
|
**包选择建议**:
|
||||||
|
- **日常使用/生产部署**:安装 `unilabos`(推荐,完整功能,开箱即用)
|
||||||
|
- **开发者**:安装 `unilabos-env`,然后使用 `uv pip install -r unilabos/utils/requirements.txt` 安装依赖,再 `pip install -e .` 进行可编辑安装
|
||||||
|
- **仿真/可视化**:安装 `unilabos-full`(Gazebo、rviz2、MoveIt)
|
||||||
|
|
||||||
**如果遇到网络问题**,可以使用清华镜像源加速下载:
|
**如果遇到网络问题**,可以使用清华镜像源加速下载:
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
@@ -163,8 +195,14 @@ mamba config --add channels https://mirrors.tuna.tsinghua.edu.cn/anaconda/pkgs/m
|
|||||||
mamba config --add channels https://mirrors.tuna.tsinghua.edu.cn/anaconda/pkgs/free/
|
mamba config --add channels https://mirrors.tuna.tsinghua.edu.cn/anaconda/pkgs/free/
|
||||||
mamba config --add channels https://mirrors.tuna.tsinghua.edu.cn/anaconda/cloud/conda-forge/
|
mamba config --add channels https://mirrors.tuna.tsinghua.edu.cn/anaconda/cloud/conda-forge/
|
||||||
|
|
||||||
# 然后重新执行安装命令
|
# 然后重新执行安装命令(推荐标准安装)
|
||||||
mamba create -n unilab uni-lab::unilabos -c robostack-staging
|
mamba create -n unilab uni-lab::unilabos -c robostack-staging
|
||||||
|
|
||||||
|
# 或完整版(仿真/可视化)
|
||||||
|
mamba create -n unilab uni-lab::unilabos-full -c robostack-staging
|
||||||
|
|
||||||
|
# pip 安装时使用清华镜像(开发者安装时使用)
|
||||||
|
uv pip install -r unilabos/utils/requirements.txt -i https://mirrors.tuna.tsinghua.edu.cn/pypi/web/simple
|
||||||
```
|
```
|
||||||
|
|
||||||
### 第三步:激活环境
|
### 第三步:激活环境
|
||||||
@@ -189,13 +227,13 @@ conda activate unilab
|
|||||||
### 第一步:克隆仓库
|
### 第一步:克隆仓库
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
git clone https://github.com/dptech-corp/Uni-Lab-OS.git
|
git clone https://github.com/deepmodeling/Uni-Lab-OS.git
|
||||||
cd Uni-Lab-OS
|
cd Uni-Lab-OS
|
||||||
```
|
```
|
||||||
|
|
||||||
如果您需要贡献代码,建议先 Fork 仓库:
|
如果您需要贡献代码,建议先 Fork 仓库:
|
||||||
|
|
||||||
1. 访问 https://github.com/dptech-corp/Uni-Lab-OS
|
1. 访问 https://github.com/deepmodeling/Uni-Lab-OS
|
||||||
2. 点击右上角的 "Fork" 按钮
|
2. 点击右上角的 "Fork" 按钮
|
||||||
3. Clone 您的 Fork 版本:
|
3. Clone 您的 Fork 版本:
|
||||||
```bash
|
```bash
|
||||||
@@ -203,58 +241,87 @@ cd Uni-Lab-OS
|
|||||||
cd Uni-Lab-OS
|
cd Uni-Lab-OS
|
||||||
```
|
```
|
||||||
|
|
||||||
### 第二步:安装基础环境
|
### 第二步:安装开发环境(unilabos-env)
|
||||||
|
|
||||||
**推荐方式**:先通过**方式一(一键安装)**或**方式二(手动安装)**完成基础环境的安装,这将包含所有必需的依赖项(ROS2、msgs 等)。
|
**重要**:开发者请使用 `unilabos-env` 包,它专为开发者设计:
|
||||||
|
- 包含 ROS2 核心组件和消息包(ros-humble-ros-core、std-msgs、geometry-msgs 等)
|
||||||
#### 选项 A:通过一键安装(推荐)
|
- 包含 transforms3d、cv-bridge、tf2 等 conda 依赖
|
||||||
|
- 包含 `uv` 工具,用于快速安装 pip 依赖
|
||||||
参考上文"方式一:一键安装",完成基础环境的安装后,激活环境:
|
- **不包含** pip 依赖和 unilabos 包(由 `pip install -e .` 和 `uv pip install` 安装)
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
|
# 创建并激活环境
|
||||||
|
mamba create -n unilab python=3.11.14
|
||||||
conda activate unilab
|
conda activate unilab
|
||||||
|
|
||||||
|
# 安装开发者环境包(ROS2 + conda 依赖 + uv)
|
||||||
|
mamba install uni-lab::unilabos-env -c robostack-staging -c conda-forge
|
||||||
```
|
```
|
||||||
|
|
||||||
#### 选项 B:通过手动安装
|
### 第三步:安装 pip 依赖和可编辑模式安装
|
||||||
|
|
||||||
参考上文"方式二:手动安装",创建并安装环境:
|
克隆代码并安装依赖:
|
||||||
|
|
||||||
```bash
|
|
||||||
mamba create -n unilab python=3.11.11
|
|
||||||
conda activate unilab
|
|
||||||
mamba install -n unilab uni-lab::unilabos -c robostack-staging -c conda-forge
|
|
||||||
```
|
|
||||||
|
|
||||||
**说明**:这会安装包括 Python 3.11.11、ROS2 Humble、ros-humble-unilabos-msgs 和所有必需依赖
|
|
||||||
|
|
||||||
### 第三步:切换到开发版本
|
|
||||||
|
|
||||||
现在你已经有了一个完整可用的 Uni-Lab 环境,接下来将 unilabos 包切换为开发版本:
|
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
# 确保环境已激活
|
# 确保环境已激活
|
||||||
conda activate unilab
|
conda activate unilab
|
||||||
|
|
||||||
# 卸载 pip 安装的 unilabos(保留所有 conda 依赖)
|
# 克隆仓库(如果还未克隆)
|
||||||
pip uninstall unilabos -y
|
git clone https://github.com/deepmodeling/Uni-Lab-OS.git
|
||||||
|
|
||||||
# 克隆 dev 分支(如果还未克隆)
|
|
||||||
cd /path/to/your/workspace
|
|
||||||
git clone -b dev https://github.com/dptech-corp/Uni-Lab-OS.git
|
|
||||||
# 或者如果已经克隆,切换到 dev 分支
|
|
||||||
cd Uni-Lab-OS
|
cd Uni-Lab-OS
|
||||||
|
|
||||||
|
# 切换到 dev 分支(可选)
|
||||||
git checkout dev
|
git checkout dev
|
||||||
git pull
|
git pull
|
||||||
|
|
||||||
# 以可编辑模式安装开发版 unilabos
|
|
||||||
pip install -e . -i https://mirrors.tuna.tsinghua.edu.cn/pypi/web/simple
|
|
||||||
```
|
```
|
||||||
|
|
||||||
**参数说明**:
|
**推荐:使用安装脚本**(自动检测中文环境,使用 uv 加速):
|
||||||
|
|
||||||
- `-e`: editable mode(可编辑模式),代码修改立即生效,无需重新安装
|
```bash
|
||||||
- `-i`: 使用清华镜像源加速下载
|
# 自动检测中文环境,如果是中文系统则使用清华镜像
|
||||||
- `pip uninstall unilabos`: 只卸载 pip 安装的 unilabos 包,不影响 conda 安装的其他依赖(如 ROS2、msgs 等)
|
python scripts/dev_install.py
|
||||||
|
|
||||||
|
# 或者手动指定:
|
||||||
|
python scripts/dev_install.py --china # 强制使用清华镜像
|
||||||
|
python scripts/dev_install.py --no-mirror # 强制使用 PyPI
|
||||||
|
python scripts/dev_install.py --skip-deps # 跳过 pip 依赖安装
|
||||||
|
python scripts/dev_install.py --use-pip # 使用 pip 而非 uv
|
||||||
|
```
|
||||||
|
|
||||||
|
**手动安装**(如果脚本安装失败或速度太慢):
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# 1. 安装 unilabos(可编辑模式)
|
||||||
|
pip install -e .
|
||||||
|
|
||||||
|
# 2. 使用 uv 安装 pip 依赖(推荐,速度更快)
|
||||||
|
uv pip install -r unilabos/utils/requirements.txt
|
||||||
|
|
||||||
|
# 国内用户使用清华镜像:
|
||||||
|
pip install -e . -i https://mirrors.tuna.tsinghua.edu.cn/pypi/web/simple
|
||||||
|
uv pip install -r unilabos/utils/requirements.txt -i https://mirrors.tuna.tsinghua.edu.cn/pypi/web/simple
|
||||||
|
```
|
||||||
|
|
||||||
|
**注意**:
|
||||||
|
- `uv` 已包含在 `unilabos-env` 中,无需单独安装
|
||||||
|
- `unilabos/utils/requirements.txt` 包含运行 unilabos 所需的所有 pip 依赖
|
||||||
|
- 部分特殊包(如 pylabrobot)会在运行时由 unilabos 自动检测并安装
|
||||||
|
|
||||||
|
**为什么使用可编辑模式?**
|
||||||
|
|
||||||
|
- `-e` (editable mode):代码修改**立即生效**,无需重新安装
|
||||||
|
- 适合开发调试:修改代码后直接运行测试
|
||||||
|
- 与 `unilabos-env` 配合:环境依赖由 conda 管理,unilabos 代码由 pip 管理
|
||||||
|
|
||||||
|
**验证安装**:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# 检查 unilabos 版本
|
||||||
|
python -c "import unilabos; print(unilabos.__version__)"
|
||||||
|
|
||||||
|
# 检查安装位置(应该指向你的代码目录)
|
||||||
|
pip show unilabos | grep Location
|
||||||
|
```
|
||||||
|
|
||||||
### 第四步:安装或自定义 ros-humble-unilabos-msgs(可选)
|
### 第四步:安装或自定义 ros-humble-unilabos-msgs(可选)
|
||||||
|
|
||||||
@@ -464,7 +531,45 @@ cd $CONDA_PREFIX/envs/unilab
|
|||||||
|
|
||||||
### 问题 8: 环境很大,有办法减小吗?
|
### 问题 8: 环境很大,有办法减小吗?
|
||||||
|
|
||||||
**解决方案**: 预打包的环境包含所有依赖,通常较大(压缩后 2-5GB)。这是为了确保离线安装和完整功能。如果空间有限,考虑使用方式二手动安装,只安装需要的组件。
|
**解决方案**:
|
||||||
|
|
||||||
|
1. **使用 `unilabos` 标准版**(推荐大多数用户):
|
||||||
|
```bash
|
||||||
|
mamba install uni-lab::unilabos -c robostack-staging -c conda-forge
|
||||||
|
```
|
||||||
|
标准版包含完整功能,环境大小约 2-3GB(相比完整版的 8-10GB)。
|
||||||
|
|
||||||
|
2. **使用 `unilabos-env` 开发者版**(最小化):
|
||||||
|
```bash
|
||||||
|
mamba install uni-lab::unilabos-env -c robostack-staging -c conda-forge
|
||||||
|
# 然后手动安装依赖
|
||||||
|
pip install -e .
|
||||||
|
uv pip install -r unilabos/utils/requirements.txt
|
||||||
|
```
|
||||||
|
开发者版只包含环境依赖,体积最小约 2GB。
|
||||||
|
|
||||||
|
3. **按需安装额外组件**:
|
||||||
|
如果后续需要特定功能,可以单独安装:
|
||||||
|
```bash
|
||||||
|
# 需要 Jupyter
|
||||||
|
mamba install jupyter jupyros
|
||||||
|
|
||||||
|
# 需要可视化
|
||||||
|
mamba install matplotlib opencv
|
||||||
|
|
||||||
|
# 需要仿真(注意:这会安装大量依赖)
|
||||||
|
mamba install ros-humble-gazebo-ros
|
||||||
|
```
|
||||||
|
|
||||||
|
4. **预打包环境问题**:
|
||||||
|
预打包环境(方式一)包含所有依赖,通常较大(压缩后 2-5GB)。这是为了确保离线安装和完整功能。
|
||||||
|
|
||||||
|
**包选择建议**:
|
||||||
|
| 需求 | 推荐包 | 预估大小 |
|
||||||
|
|------|--------|----------|
|
||||||
|
| 日常使用/生产部署 | `unilabos` | ~2-3 GB |
|
||||||
|
| 开发调试(可编辑模式) | `unilabos-env` | ~2 GB |
|
||||||
|
| 仿真/可视化 | `unilabos-full` | ~8-10 GB |
|
||||||
|
|
||||||
### 问题 9: 如何更新到最新版本?
|
### 问题 9: 如何更新到最新版本?
|
||||||
|
|
||||||
@@ -503,14 +608,15 @@ mamba update ros-humble-unilabos-msgs -c uni-lab -c robostack-staging -c conda-f
|
|||||||
## 需要帮助?
|
## 需要帮助?
|
||||||
|
|
||||||
- **故障排查**: 查看更详细的故障排查信息
|
- **故障排查**: 查看更详细的故障排查信息
|
||||||
- **GitHub Issues**: [报告问题](https://github.com/dptech-corp/Uni-Lab-OS/issues)
|
- **GitHub Issues**: [报告问题](https://github.com/deepmodeling/Uni-Lab-OS/issues)
|
||||||
- **开发者文档**: 查看开发者指南获取更多技术细节
|
- **开发者文档**: 查看开发者指南获取更多技术细节
|
||||||
- **社区讨论**: [GitHub Discussions](https://github.com/dptech-corp/Uni-Lab-OS/discussions)
|
- **社区讨论**: [GitHub Discussions](https://github.com/deepmodeling/Uni-Lab-OS/discussions)
|
||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
**提示**:
|
**提示**:
|
||||||
|
|
||||||
- 生产环境推荐使用方式二(手动安装)的稳定版本
|
- **大多数用户**推荐使用方式二(手动安装)的 `unilabos` 标准版
|
||||||
- 开发和测试推荐使用方式三(开发者安装)
|
- **开发者**推荐使用方式三(开发者安装),安装 `unilabos-env` 后使用 `uv pip install -r unilabos/utils/requirements.txt` 安装依赖
|
||||||
- 快速体验和演示推荐使用方式一(一键安装)
|
- **仿真/可视化**推荐安装 `unilabos-full` 完整版
|
||||||
|
- **快速体验和演示**推荐使用方式一(一键安装)
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
package:
|
package:
|
||||||
name: ros-humble-unilabos-msgs
|
name: ros-humble-unilabos-msgs
|
||||||
version: 0.10.13
|
version: 0.10.17
|
||||||
source:
|
source:
|
||||||
path: ../../unilabos_msgs
|
path: ../../unilabos_msgs
|
||||||
target_directory: src
|
target_directory: src
|
||||||
@@ -17,7 +17,7 @@ build:
|
|||||||
- bash $SRC_DIR/build_ament_cmake.sh
|
- bash $SRC_DIR/build_ament_cmake.sh
|
||||||
|
|
||||||
about:
|
about:
|
||||||
repository: https://github.com/dptech-corp/Uni-Lab-OS
|
repository: https://github.com/deepmodeling/Uni-Lab-OS
|
||||||
license: BSD-3-Clause
|
license: BSD-3-Clause
|
||||||
description: "ros-humble-unilabos-msgs is a package that provides message definitions for Uni-Lab-OS."
|
description: "ros-humble-unilabos-msgs is a package that provides message definitions for Uni-Lab-OS."
|
||||||
|
|
||||||
@@ -25,7 +25,7 @@ requirements:
|
|||||||
build:
|
build:
|
||||||
- ${{ compiler('cxx') }}
|
- ${{ compiler('cxx') }}
|
||||||
- ${{ compiler('c') }}
|
- ${{ compiler('c') }}
|
||||||
- python ==3.11.11
|
- python ==3.11.14
|
||||||
- numpy
|
- numpy
|
||||||
- if: build_platform != target_platform
|
- if: build_platform != target_platform
|
||||||
then:
|
then:
|
||||||
@@ -63,14 +63,14 @@ requirements:
|
|||||||
- robostack-staging::ros-humble-rosidl-default-generators
|
- robostack-staging::ros-humble-rosidl-default-generators
|
||||||
- robostack-staging::ros-humble-std-msgs
|
- robostack-staging::ros-humble-std-msgs
|
||||||
- robostack-staging::ros-humble-geometry-msgs
|
- robostack-staging::ros-humble-geometry-msgs
|
||||||
- robostack-staging::ros2-distro-mutex=0.6
|
- robostack-staging::ros2-distro-mutex=0.7
|
||||||
run:
|
run:
|
||||||
- robostack-staging::ros-humble-action-msgs
|
- robostack-staging::ros-humble-action-msgs
|
||||||
- robostack-staging::ros-humble-ros-workspace
|
- robostack-staging::ros-humble-ros-workspace
|
||||||
- robostack-staging::ros-humble-rosidl-default-runtime
|
- robostack-staging::ros-humble-rosidl-default-runtime
|
||||||
- robostack-staging::ros-humble-std-msgs
|
- robostack-staging::ros-humble-std-msgs
|
||||||
- robostack-staging::ros-humble-geometry-msgs
|
- robostack-staging::ros-humble-geometry-msgs
|
||||||
- robostack-staging::ros2-distro-mutex=0.6
|
- robostack-staging::ros2-distro-mutex=0.7
|
||||||
- if: osx and x86_64
|
- if: osx and x86_64
|
||||||
then:
|
then:
|
||||||
- __osx >=${{ MACOSX_DEPLOYMENT_TARGET|default('10.14') }}
|
- __osx >=${{ MACOSX_DEPLOYMENT_TARGET|default('10.14') }}
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
package:
|
package:
|
||||||
name: unilabos
|
name: unilabos
|
||||||
version: "0.10.13"
|
version: "0.10.17"
|
||||||
|
|
||||||
source:
|
source:
|
||||||
path: ../..
|
path: ../..
|
||||||
|
|||||||
@@ -85,7 +85,7 @@ Verification:
|
|||||||
-------------
|
-------------
|
||||||
|
|
||||||
The verify_installation.py script will check:
|
The verify_installation.py script will check:
|
||||||
- Python version (3.11.11)
|
- Python version (3.11.14)
|
||||||
- ROS2 rclpy installation
|
- ROS2 rclpy installation
|
||||||
- UniLabOS installation and dependencies
|
- UniLabOS installation and dependencies
|
||||||
|
|
||||||
@@ -104,7 +104,7 @@ Build Information:
|
|||||||
|
|
||||||
Branch: {branch}
|
Branch: {branch}
|
||||||
Platform: {platform}
|
Platform: {platform}
|
||||||
Python: 3.11.11
|
Python: 3.11.14
|
||||||
Date: {build_date}
|
Date: {build_date}
|
||||||
|
|
||||||
Troubleshooting:
|
Troubleshooting:
|
||||||
@@ -126,7 +126,7 @@ If installation fails:
|
|||||||
For more help:
|
For more help:
|
||||||
- Documentation: docs/user_guide/installation.md
|
- Documentation: docs/user_guide/installation.md
|
||||||
- Quick Start: QUICK_START_CONDA_PACK.md
|
- Quick Start: QUICK_START_CONDA_PACK.md
|
||||||
- Issues: https://github.com/dptech-corp/Uni-Lab-OS/issues
|
- Issues: https://github.com/deepmodeling/Uni-Lab-OS/issues
|
||||||
|
|
||||||
License:
|
License:
|
||||||
--------
|
--------
|
||||||
@@ -134,7 +134,7 @@ License:
|
|||||||
UniLabOS is licensed under GPL-3.0-only.
|
UniLabOS is licensed under GPL-3.0-only.
|
||||||
See LICENSE file for details.
|
See LICENSE file for details.
|
||||||
|
|
||||||
Repository: https://github.com/dptech-corp/Uni-Lab-OS
|
Repository: https://github.com/deepmodeling/Uni-Lab-OS
|
||||||
"""
|
"""
|
||||||
|
|
||||||
return readme
|
return readme
|
||||||
|
|||||||
214  scripts/dev_install.py  (new file)
@@ -0,0 +1,214 @@
|
#!/usr/bin/env python3
|
||||||
|
"""
|
||||||
|
Development installation script for UniLabOS.
|
||||||
|
Auto-detects Chinese locale and uses appropriate mirror.
|
||||||
|
|
||||||
|
Usage:
|
||||||
|
python scripts/dev_install.py
|
||||||
|
python scripts/dev_install.py --no-mirror # Force no mirror
|
||||||
|
python scripts/dev_install.py --china # Force China mirror
|
||||||
|
python scripts/dev_install.py --skip-deps # Skip pip dependencies installation
|
||||||
|
|
||||||
|
Flow:
|
||||||
|
1. pip install -e . (install unilabos in editable mode)
|
||||||
|
2. Detect Chinese locale
|
||||||
|
3. Use uv to install pip dependencies from requirements.txt
|
||||||
|
4. Special packages (like pylabrobot) are handled by environment_check.py at runtime
|
||||||
|
"""
|
||||||
|
|
||||||
|
import locale
|
||||||
|
import subprocess
|
||||||
|
import sys
|
||||||
|
import argparse
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
# Tsinghua mirror URL
|
||||||
|
TSINGHUA_MIRROR = "https://mirrors.tuna.tsinghua.edu.cn/pypi/web/simple"
|
||||||
|
|
||||||
|
|
||||||
|
def is_chinese_locale() -> bool:
|
||||||
|
"""
|
||||||
|
Detect if system is in Chinese locale.
|
||||||
|
Same logic as EnvironmentChecker._is_chinese_locale()
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
lang = locale.getdefaultlocale()[0]
|
||||||
|
if lang and ("zh" in lang.lower() or "chinese" in lang.lower()):
|
||||||
|
return True
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
|
return False
|
||||||
|
|
||||||
|
|
||||||
|
def run_command(cmd: list, description: str, retry: int = 2) -> bool:
|
||||||
|
"""Run command with retry support."""
|
||||||
|
print(f"[INFO] {description}")
|
||||||
|
print(f"[CMD] {' '.join(cmd)}")
|
||||||
|
|
||||||
|
for attempt in range(retry + 1):
|
||||||
|
try:
|
||||||
|
result = subprocess.run(cmd, check=True, timeout=600)
|
||||||
|
print(f"[OK] {description}")
|
||||||
|
return True
|
||||||
|
except subprocess.CalledProcessError as e:
|
||||||
|
if attempt < retry:
|
||||||
|
print(f"[WARN] Attempt {attempt + 1} failed, retrying...")
|
||||||
|
else:
|
||||||
|
print(f"[ERROR] {description} failed: {e}")
|
||||||
|
return False
|
||||||
|
except subprocess.TimeoutExpired:
|
||||||
|
print(f"[ERROR] {description} timed out")
|
||||||
|
return False
|
||||||
|
return False
|
||||||
|
|
||||||
|
|
||||||
|
def install_editable(project_root: Path, use_mirror: bool) -> bool:
|
||||||
|
"""Install unilabos in editable mode using pip."""
|
||||||
|
cmd = [sys.executable, "-m", "pip", "install", "-e", str(project_root)]
|
||||||
|
if use_mirror:
|
||||||
|
cmd.extend(["-i", TSINGHUA_MIRROR])
|
||||||
|
|
||||||
|
return run_command(cmd, "Installing unilabos in editable mode")
|
||||||
|
|
||||||
|
|
||||||
|
def install_requirements_uv(requirements_file: Path, use_mirror: bool) -> bool:
|
||||||
|
"""Install pip dependencies using uv (installed via conda-forge::uv)."""
|
||||||
|
cmd = ["uv", "pip", "install", "-r", str(requirements_file)]
|
||||||
|
if use_mirror:
|
||||||
|
cmd.extend(["-i", TSINGHUA_MIRROR])
|
||||||
|
|
||||||
|
return run_command(cmd, "Installing pip dependencies with uv", retry=2)
|
||||||
|
|
||||||
|
|
||||||
|
def install_requirements_pip(requirements_file: Path, use_mirror: bool) -> bool:
|
||||||
|
"""Fallback: Install pip dependencies using pip."""
|
||||||
|
cmd = [sys.executable, "-m", "pip", "install", "-r", str(requirements_file)]
|
||||||
|
if use_mirror:
|
||||||
|
cmd.extend(["-i", TSINGHUA_MIRROR])
|
||||||
|
|
||||||
|
return run_command(cmd, "Installing pip dependencies with pip", retry=2)
|
||||||
|
|
||||||
|
|
||||||
|
def check_uv_available() -> bool:
|
||||||
|
"""Check if uv is available (installed via conda-forge::uv)."""
|
||||||
|
try:
|
||||||
|
subprocess.run(["uv", "--version"], capture_output=True, check=True)
|
||||||
|
return True
|
||||||
|
except (subprocess.CalledProcessError, FileNotFoundError):
|
||||||
|
return False
|
||||||
|
|
||||||
|
|
||||||
|
def main():
|
||||||
|
parser = argparse.ArgumentParser(description="Development installation script for UniLabOS")
|
||||||
|
parser.add_argument("--china", action="store_true", help="Force use China mirror (Tsinghua)")
|
||||||
|
parser.add_argument("--no-mirror", action="store_true", help="Force use default PyPI (no mirror)")
|
||||||
|
parser.add_argument(
|
||||||
|
"--skip-deps", action="store_true", help="Skip pip dependencies installation (only install unilabos)"
|
||||||
|
)
|
||||||
|
parser.add_argument("--use-pip", action="store_true", help="Use pip instead of uv for dependencies")
|
||||||
|
args = parser.parse_args()
|
||||||
|
|
||||||
|
# Determine project root
|
||||||
|
script_dir = Path(__file__).parent
|
||||||
|
project_root = script_dir.parent
|
||||||
|
requirements_file = project_root / "unilabos" / "utils" / "requirements.txt"
|
||||||
|
|
||||||
|
if not (project_root / "setup.py").exists():
|
||||||
|
print(f"[ERROR] setup.py not found in {project_root}")
|
||||||
|
sys.exit(1)
|
||||||
|
|
||||||
|
print("=" * 60)
|
||||||
|
print("UniLabOS Development Installation")
|
||||||
|
print("=" * 60)
|
||||||
|
print(f"Project root: {project_root}")
|
||||||
|
print()
|
||||||
|
|
||||||
|
# Determine mirror usage based on locale
|
||||||
|
if args.no_mirror:
|
||||||
|
use_mirror = False
|
||||||
|
print("[INFO] Mirror disabled by --no-mirror flag")
|
||||||
|
elif args.china:
|
||||||
|
use_mirror = True
|
||||||
|
print("[INFO] China mirror enabled by --china flag")
|
||||||
|
else:
|
||||||
|
use_mirror = is_chinese_locale()
|
||||||
|
if use_mirror:
|
||||||
|
print("[INFO] Chinese locale detected, using Tsinghua mirror")
|
||||||
|
else:
|
||||||
|
print("[INFO] Non-Chinese locale detected, using default PyPI")
|
||||||
|
|
||||||
|
print()
|
||||||
|
|
||||||
|
# Step 1: Install unilabos in editable mode
|
||||||
|
print("[STEP 1] Installing unilabos in editable mode...")
|
||||||
|
if not install_editable(project_root, use_mirror):
|
||||||
|
print("[ERROR] Failed to install unilabos")
|
||||||
|
print()
|
||||||
|
print("Manual fallback:")
|
||||||
|
if use_mirror:
|
||||||
|
print(f" pip install -e {project_root} -i {TSINGHUA_MIRROR}")
|
||||||
|
else:
|
||||||
|
print(f" pip install -e {project_root}")
|
||||||
|
sys.exit(1)
|
||||||
|
|
||||||
|
print()
|
||||||
|
|
||||||
|
# Step 2: Install pip dependencies
|
||||||
|
if args.skip_deps:
|
||||||
|
print("[INFO] Skipping pip dependencies installation (--skip-deps)")
|
||||||
|
else:
|
||||||
|
print("[STEP 2] Installing pip dependencies...")
|
||||||
|
|
||||||
|
if not requirements_file.exists():
|
||||||
|
print(f"[WARN] Requirements file not found: {requirements_file}")
|
||||||
|
print("[INFO] Skipping dependencies installation")
|
||||||
|
else:
|
||||||
|
# Try uv first (faster), fallback to pip
|
||||||
|
if args.use_pip:
|
||||||
|
print("[INFO] Using pip (--use-pip flag)")
|
||||||
|
success = install_requirements_pip(requirements_file, use_mirror)
|
||||||
|
elif check_uv_available():
|
||||||
|
print("[INFO] Using uv (installed via conda-forge::uv)")
|
||||||
|
success = install_requirements_uv(requirements_file, use_mirror)
|
||||||
|
if not success:
|
||||||
|
print("[WARN] uv failed, falling back to pip...")
|
||||||
|
success = install_requirements_pip(requirements_file, use_mirror)
|
||||||
|
else:
|
||||||
|
print("[WARN] uv not available (should be installed via: mamba install conda-forge::uv)")
|
||||||
|
print("[INFO] Falling back to pip...")
|
||||||
|
success = install_requirements_pip(requirements_file, use_mirror)
|
||||||
|
|
||||||
|
if not success:
|
||||||
|
print()
|
||||||
|
print("[WARN] Failed to install some dependencies automatically.")
|
||||||
|
print("You can manually install them:")
|
||||||
|
if use_mirror:
|
||||||
|
print(f" uv pip install -r {requirements_file} -i {TSINGHUA_MIRROR}")
|
||||||
|
print(" or:")
|
||||||
|
print(f" pip install -r {requirements_file} -i {TSINGHUA_MIRROR}")
|
||||||
|
else:
|
||||||
|
print(f" uv pip install -r {requirements_file}")
|
||||||
|
print(" or:")
|
||||||
|
print(f" pip install -r {requirements_file}")
|
||||||
|
|
||||||
|
print()
|
||||||
|
print("=" * 60)
|
||||||
|
print("Installation complete!")
|
||||||
|
print("=" * 60)
|
||||||
|
print()
|
||||||
|
print("Note: Some special packages (like pylabrobot) are installed")
|
||||||
|
print("automatically at runtime by unilabos if needed.")
|
||||||
|
print()
|
||||||
|
print("Verify installation:")
|
||||||
|
print(' python -c "import unilabos; print(unilabos.__version__)"')
|
||||||
|
print()
|
||||||
|
print("If you encounter issues, you can manually install dependencies:")
|
||||||
|
if use_mirror:
|
||||||
|
print(f" uv pip install -r unilabos/utils/requirements.txt -i {TSINGHUA_MIRROR}")
|
||||||
|
else:
|
||||||
|
print(" uv pip install -r unilabos/utils/requirements.txt")
|
||||||
|
print()
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
main()
|
||||||
2  setup.py
@@ -4,7 +4,7 @@ package_name = 'unilabos'

 setup(
     name=package_name,
-    version='0.10.13',
+    version='0.10.17',
     packages=find_packages(),
     include_package_data=True,
     install_requires=['setuptools'],

15  tests/devices/liquid_handling/README.md  (new file)
@@ -0,0 +1,15 @@
# Liquid handling integration tests

`test_transfer_liquid.py` now drives PRCXI's RViz simulation backend. Before running it, make sure that:

1. the runtime environment includes `pylabrobot` and `rclpy`;
2. the ROS dependencies are running (`rviz` is optional, but `rviz_backend` creates a ROS node);
3. `UNILAB_SIM_TEST=1` is set in the shell, otherwise pytest automatically skips these slow cases:

```bash
export UNILAB_SIM_TEST=1
pytest tests/devices/liquid_handling/test_transfer_liquid.py -m slow
```

If you only need to verify the logic layer (without the simulation), run `tests/devices/liquid_handling/unit_test.py` directly; it uses a Fake backend and works well as a fast CI test.
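The auto-skip behaviour mentioned in point 3 above is typically implemented with a pytest skip condition gated on the environment variable. The following is a hedged sketch of such a guard; the marker usage and test body are assumptions, since `test_transfer_liquid.py` itself is not shown in this diff, and only the `UNILAB_SIM_TEST` variable and the `slow` marker come from the README.

```python
# Hedged sketch of an environment-gated skip, as described in the README above.
import os

import pytest

requires_sim = pytest.mark.skipif(
    os.environ.get("UNILAB_SIM_TEST") != "1",
    reason="set UNILAB_SIM_TEST=1 to run the slow RViz simulation tests",
)


@requires_sim
@pytest.mark.slow
def test_transfer_liquid_smoke():
    # Placeholder body: the real test drives the PRCXI RViz backend.
    assert True
```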
547  tests/devices/liquid_handling/unit_test.py  (new file)
@@ -0,0 +1,547 @@
|
import asyncio
|
||||||
|
from dataclasses import dataclass
|
||||||
|
from typing import Any, Iterable, List, Optional, Sequence, Tuple
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
from unilabos.devices.liquid_handling.liquid_handler_abstract import LiquidHandlerAbstract
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass(frozen=True)
|
||||||
|
class DummyContainer:
|
||||||
|
name: str
|
||||||
|
|
||||||
|
def __repr__(self) -> str: # pragma: no cover
|
||||||
|
return f"DummyContainer({self.name})"
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass(frozen=True)
|
||||||
|
class DummyTipSpot:
|
||||||
|
name: str
|
||||||
|
|
||||||
|
def __repr__(self) -> str: # pragma: no cover
|
||||||
|
return f"DummyTipSpot({self.name})"
|
||||||
|
|
||||||
|
|
||||||
|
def make_tip_iter(n: int = 256) -> Iterable[List[DummyTipSpot]]:
|
||||||
|
"""Yield lists so code can safely call `tip.extend(next(self.current_tip))`."""
|
||||||
|
for i in range(n):
|
||||||
|
yield [DummyTipSpot(f"tip_{i}")]
|
||||||
|
|
||||||
|
|
||||||
|
class FakeLiquidHandler(LiquidHandlerAbstract):
|
||||||
|
"""不初始化真实 backend/deck;仅用来记录 transfer_liquid 内部调用序列。"""
|
||||||
|
|
||||||
|
def __init__(self, channel_num: int = 8):
|
||||||
|
# 不调用 super().__init__,避免真实硬件/后端依赖
|
||||||
|
self.channel_num = channel_num
|
||||||
|
self.support_touch_tip = True
|
||||||
|
self.current_tip = iter(make_tip_iter())
|
||||||
|
self.calls: List[Tuple[str, Any]] = []
|
||||||
|
|
||||||
|
async def pick_up_tips(self, tip_spots, use_channels=None, offsets=None, **backend_kwargs):
|
||||||
|
self.calls.append(("pick_up_tips", {"tips": list(tip_spots), "use_channels": use_channels}))
|
||||||
|
|
||||||
|
async def aspirate(
|
||||||
|
self,
|
||||||
|
resources: Sequence[Any],
|
||||||
|
vols: List[float],
|
||||||
|
use_channels: Optional[List[int]] = None,
|
||||||
|
flow_rates: Optional[List[Optional[float]]] = None,
|
||||||
|
offsets: Any = None,
|
||||||
|
liquid_height: Any = None,
|
||||||
|
blow_out_air_volume: Any = None,
|
||||||
|
spread: str = "wide",
|
||||||
|
**backend_kwargs,
|
||||||
|
):
|
||||||
|
self.calls.append(
|
||||||
|
(
|
||||||
|
"aspirate",
|
||||||
|
{
|
||||||
|
"resources": list(resources),
|
||||||
|
"vols": list(vols),
|
||||||
|
"use_channels": list(use_channels) if use_channels is not None else None,
|
||||||
|
"flow_rates": list(flow_rates) if flow_rates is not None else None,
|
||||||
|
"offsets": list(offsets) if offsets is not None else None,
|
||||||
|
"liquid_height": list(liquid_height) if liquid_height is not None else None,
|
||||||
|
"blow_out_air_volume": list(blow_out_air_volume) if blow_out_air_volume is not None else None,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
async def dispense(
|
||||||
|
self,
|
||||||
|
resources: Sequence[Any],
|
||||||
|
vols: List[float],
|
||||||
|
use_channels: Optional[List[int]] = None,
|
||||||
|
flow_rates: Optional[List[Optional[float]]] = None,
|
||||||
|
offsets: Any = None,
|
||||||
|
liquid_height: Any = None,
|
||||||
|
blow_out_air_volume: Any = None,
|
||||||
|
spread: str = "wide",
|
||||||
|
**backend_kwargs,
|
||||||
|
):
|
||||||
|
self.calls.append(
|
||||||
|
(
|
||||||
|
"dispense",
|
||||||
|
{
|
||||||
|
"resources": list(resources),
|
||||||
|
"vols": list(vols),
|
||||||
|
"use_channels": list(use_channels) if use_channels is not None else None,
|
||||||
|
"flow_rates": list(flow_rates) if flow_rates is not None else None,
|
||||||
|
"offsets": list(offsets) if offsets is not None else None,
|
||||||
|
"liquid_height": list(liquid_height) if liquid_height is not None else None,
|
||||||
|
"blow_out_air_volume": list(blow_out_air_volume) if blow_out_air_volume is not None else None,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
async def discard_tips(self, use_channels=None, *args, **kwargs):
|
||||||
|
# 有的分支是 discard_tips(use_channels=[0]),有的分支是 discard_tips([0..7])(位置参数)
|
||||||
|
self.calls.append(("discard_tips", {"use_channels": list(use_channels) if use_channels is not None else None}))
|
||||||
|
|
||||||
|
async def custom_delay(self, seconds=0, msg=None):
|
||||||
|
self.calls.append(("custom_delay", {"seconds": seconds, "msg": msg}))
|
||||||
|
|
||||||
|
async def touch_tip(self, targets):
|
||||||
|
# 原实现会访问 targets.get_size_x() 等;测试里只记录调用
|
||||||
|
self.calls.append(("touch_tip", {"targets": targets}))
|
||||||
|
|
||||||
|
def run(coro):
|
||||||
|
return asyncio.run(coro)
|
||||||
|
|
||||||
|
|
||||||
|
def test_one_to_one_single_channel_basic_calls():
|
||||||
|
lh = FakeLiquidHandler(channel_num=1)
|
||||||
|
lh.current_tip = iter(make_tip_iter(64))
|
||||||
|
|
||||||
|
sources = [DummyContainer(f"S{i}") for i in range(3)]
|
||||||
|
targets = [DummyContainer(f"T{i}") for i in range(3)]
|
||||||
|
|
||||||
|
run(
|
||||||
|
lh.transfer_liquid(
|
||||||
|
sources=sources,
|
||||||
|
targets=targets,
|
||||||
|
tip_racks=[],
|
||||||
|
use_channels=[0],
|
||||||
|
asp_vols=[1, 2, 3],
|
||||||
|
dis_vols=[4, 5, 6],
|
||||||
|
mix_times=None, # 应该仍能执行(不 mix)
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
assert [c[0] for c in lh.calls].count("pick_up_tips") == 3
|
||||||
|
assert [c[0] for c in lh.calls].count("aspirate") == 3
|
||||||
|
assert [c[0] for c in lh.calls].count("dispense") == 3
|
||||||
|
assert [c[0] for c in lh.calls].count("discard_tips") == 3
|
||||||
|
|
||||||
|
# 每次 aspirate/dispense 都是单孔列表
|
||||||
|
aspirates = [payload for name, payload in lh.calls if name == "aspirate"]
|
||||||
|
assert aspirates[0]["resources"] == [sources[0]]
|
||||||
|
assert aspirates[0]["vols"] == [1.0]
|
||||||
|
|
||||||
|
dispenses = [payload for name, payload in lh.calls if name == "dispense"]
|
||||||
|
assert dispenses[2]["resources"] == [targets[2]]
|
||||||
|
assert dispenses[2]["vols"] == [6.0]
|
||||||
|
|
||||||
|
|
||||||
|
def test_one_to_one_single_channel_before_stage_mixes_prior_to_aspirate():
|
||||||
|
lh = FakeLiquidHandler(channel_num=1)
|
||||||
|
lh.current_tip = iter(make_tip_iter(16))
|
||||||
|
|
||||||
|
source = DummyContainer("S0")
|
||||||
|
target = DummyContainer("T0")
|
||||||
|
|
||||||
|
run(
|
||||||
|
lh.transfer_liquid(
|
||||||
|
sources=[source],
|
||||||
|
targets=[target],
|
||||||
|
tip_racks=[],
|
||||||
|
use_channels=[0],
|
||||||
|
asp_vols=[5],
|
||||||
|
dis_vols=[5],
|
||||||
|
mix_stage="before",
|
||||||
|
mix_times=1,
|
||||||
|
mix_vol=3,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
aspirate_calls = [(idx, payload) for idx, (name, payload) in enumerate(lh.calls) if name == "aspirate"]
|
||||||
|
assert len(aspirate_calls) >= 2
|
||||||
|
mix_idx, mix_payload = aspirate_calls[0]
|
||||||
|
assert mix_payload["resources"] == [target]
|
||||||
|
assert mix_payload["vols"] == [3]
|
||||||
|
transfer_idx, transfer_payload = aspirate_calls[1]
|
||||||
|
assert transfer_payload["resources"] == [source]
|
||||||
|
assert mix_idx < transfer_idx
|
||||||
|
|
||||||
|
|
||||||
|
def test_one_to_one_eight_channel_groups_by_8():
|
||||||
|
lh = FakeLiquidHandler(channel_num=8)
|
||||||
|
lh.current_tip = iter(make_tip_iter(256))
|
||||||
|
|
||||||
|
sources = [DummyContainer(f"S{i}") for i in range(16)]
|
||||||
|
targets = [DummyContainer(f"T{i}") for i in range(16)]
|
||||||
|
asp_vols = list(range(1, 17))
|
||||||
|
dis_vols = list(range(101, 117))
|
||||||
|
|
||||||
|
run(
|
||||||
|
lh.transfer_liquid(
|
||||||
|
sources=sources,
|
||||||
|
targets=targets,
|
||||||
|
tip_racks=[],
|
||||||
|
use_channels=list(range(8)),
|
||||||
|
asp_vols=asp_vols,
|
||||||
|
dis_vols=dis_vols,
|
||||||
|
mix_times=0, # 触发逻辑但不 mix
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
# 16 个任务 -> 2 组,每组 8 通道一起做
|
||||||
|
assert [c[0] for c in lh.calls].count("pick_up_tips") == 2
|
||||||
|
aspirates = [payload for name, payload in lh.calls if name == "aspirate"]
|
||||||
|
dispenses = [payload for name, payload in lh.calls if name == "dispense"]
|
||||||
|
assert len(aspirates) == 2
|
||||||
|
assert len(dispenses) == 2
|
||||||
|
|
||||||
|
assert aspirates[0]["resources"] == sources[0:8]
|
||||||
|
assert aspirates[0]["vols"] == [float(v) for v in asp_vols[0:8]]
|
||||||
|
assert dispenses[1]["resources"] == targets[8:16]
|
||||||
|
assert dispenses[1]["vols"] == [float(v) for v in dis_vols[8:16]]
|
||||||
|
|
||||||
|
|
||||||
|
def test_one_to_one_eight_channel_requires_multiple_of_8_targets():
|
||||||
|
lh = FakeLiquidHandler(channel_num=8)
|
||||||
|
lh.current_tip = iter(make_tip_iter(64))
|
||||||
|
|
||||||
|
sources = [DummyContainer(f"S{i}") for i in range(9)]
|
||||||
|
targets = [DummyContainer(f"T{i}") for i in range(9)]
|
||||||
|
|
||||||
|
with pytest.raises(ValueError, match="multiple of 8"):
|
||||||
|
run(
|
||||||
|
lh.transfer_liquid(
|
||||||
|
sources=sources,
|
||||||
|
targets=targets,
|
||||||
|
tip_racks=[],
|
||||||
|
use_channels=list(range(8)),
|
||||||
|
asp_vols=[1] * 9,
|
||||||
|
dis_vols=[1] * 9,
|
||||||
|
mix_times=0,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def test_one_to_one_eight_channel_parameter_lists_are_chunked_per_8():
|
||||||
|
lh = FakeLiquidHandler(channel_num=8)
|
||||||
|
lh.current_tip = iter(make_tip_iter(512))
|
||||||
|
|
||||||
|
sources = [DummyContainer(f"S{i}") for i in range(16)]
|
||||||
|
targets = [DummyContainer(f"T{i}") for i in range(16)]
|
||||||
|
asp_vols = [i + 1 for i in range(16)]
|
||||||
|
dis_vols = [200 + i for i in range(16)]
|
||||||
|
asp_flow_rates = [0.1 * (i + 1) for i in range(16)]
|
||||||
|
dis_flow_rates = [0.2 * (i + 1) for i in range(16)]
|
||||||
|
offsets = [f"offset_{i}" for i in range(16)]
|
||||||
|
liquid_heights = [i * 0.5 for i in range(16)]
|
||||||
|
blow_out_air_volume = [i + 0.05 for i in range(16)]
|
||||||
|
|
||||||
|
run(
|
||||||
|
lh.transfer_liquid(
|
||||||
|
sources=sources,
|
||||||
|
targets=targets,
|
||||||
|
tip_racks=[],
|
||||||
|
use_channels=list(range(8)),
|
||||||
|
asp_vols=asp_vols,
|
||||||
|
dis_vols=dis_vols,
|
||||||
|
asp_flow_rates=asp_flow_rates,
|
||||||
|
dis_flow_rates=dis_flow_rates,
|
||||||
|
offsets=offsets,
|
||||||
|
liquid_height=liquid_heights,
|
||||||
|
blow_out_air_volume=blow_out_air_volume,
|
||||||
|
mix_times=0,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
aspirates = [payload for name, payload in lh.calls if name == "aspirate"]
|
||||||
|
dispenses = [payload for name, payload in lh.calls if name == "dispense"]
|
||||||
|
assert len(aspirates) == len(dispenses) == 2
|
||||||
|
|
||||||
|
for batch_idx in range(2):
|
||||||
|
start = batch_idx * 8
|
||||||
|
end = start + 8
|
||||||
|
asp_call = aspirates[batch_idx]
|
||||||
|
dis_call = dispenses[batch_idx]
|
||||||
|
assert asp_call["resources"] == sources[start:end]
|
||||||
|
assert asp_call["flow_rates"] == asp_flow_rates[start:end]
|
||||||
|
assert asp_call["offsets"] == offsets[start:end]
|
||||||
|
assert asp_call["liquid_height"] == liquid_heights[start:end]
|
||||||
|
assert asp_call["blow_out_air_volume"] == blow_out_air_volume[start:end]
|
||||||
|
assert dis_call["flow_rates"] == dis_flow_rates[start:end]
|
||||||
|
assert dis_call["offsets"] == offsets[start:end]
|
||||||
|
assert dis_call["liquid_height"] == liquid_heights[start:end]
|
||||||
|
assert dis_call["blow_out_air_volume"] == blow_out_air_volume[start:end]
|
||||||
|
|
||||||
|
|
||||||
|
def test_one_to_one_eight_channel_handles_32_tasks_four_batches():
|
||||||
|
lh = FakeLiquidHandler(channel_num=8)
|
||||||
|
lh.current_tip = iter(make_tip_iter(1024))
|
||||||
|
|
||||||
|
sources = [DummyContainer(f"S{i}") for i in range(32)]
|
||||||
|
targets = [DummyContainer(f"T{i}") for i in range(32)]
|
||||||
|
asp_vols = [i + 1 for i in range(32)]
|
||||||
|
dis_vols = [300 + i for i in range(32)]
|
||||||
|
|
||||||
|
run(
|
||||||
|
lh.transfer_liquid(
|
||||||
|
sources=sources,
|
||||||
|
targets=targets,
|
||||||
|
tip_racks=[],
|
||||||
|
use_channels=list(range(8)),
|
||||||
|
asp_vols=asp_vols,
|
||||||
|
dis_vols=dis_vols,
|
||||||
|
mix_times=0,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
pick_calls = [name for name, _ in lh.calls if name == "pick_up_tips"]
|
||||||
|
aspirates = [payload for name, payload in lh.calls if name == "aspirate"]
|
||||||
|
dispenses = [payload for name, payload in lh.calls if name == "dispense"]
|
||||||
|
assert len(pick_calls) == 4
|
||||||
|
assert len(aspirates) == len(dispenses) == 4
|
||||||
|
assert aspirates[0]["resources"] == sources[0:8]
|
||||||
|
assert aspirates[-1]["resources"] == sources[24:32]
|
||||||
|
assert dispenses[0]["resources"] == targets[0:8]
|
||||||
|
assert dispenses[-1]["resources"] == targets[24:32]
|
||||||
|
|
||||||
|
|
||||||
|
def test_one_to_many_single_channel_aspirates_total_when_asp_vol_too_small():
|
||||||
|
lh = FakeLiquidHandler(channel_num=1)
|
||||||
|
lh.current_tip = iter(make_tip_iter(64))
|
||||||
|
|
||||||
|
source = DummyContainer("SRC")
|
||||||
|
targets = [DummyContainer(f"T{i}") for i in range(3)]
|
||||||
|
dis_vols = [10, 20, 30] # sum=60
|
||||||
|
|
||||||
|
run(
|
||||||
|
lh.transfer_liquid(
|
||||||
|
sources=[source],
|
||||||
|
targets=targets,
|
||||||
|
tip_racks=[],
|
||||||
|
use_channels=[0],
|
||||||
|
asp_vols=10, # 小于 sum(dis_vols) -> 应吸 60
|
||||||
|
dis_vols=dis_vols,
|
||||||
|
mix_times=0,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
aspirates = [payload for name, payload in lh.calls if name == "aspirate"]
|
||||||
|
assert len(aspirates) == 1
|
||||||
|
assert aspirates[0]["resources"] == [source]
|
||||||
|
assert aspirates[0]["vols"] == [60.0]
|
||||||
|
assert aspirates[0]["use_channels"] == [0]
|
||||||
|
dispenses = [payload for name, payload in lh.calls if name == "dispense"]
|
||||||
|
assert [d["vols"][0] for d in dispenses] == [10.0, 20.0, 30.0]
|
||||||
|
|
||||||
|
|
||||||
|
def test_one_to_many_eight_channel_basic():
|
||||||
|
lh = FakeLiquidHandler(channel_num=8)
|
||||||
|
lh.current_tip = iter(make_tip_iter(128))
|
||||||
|
|
||||||
|
source = DummyContainer("SRC")
|
||||||
|
targets = [DummyContainer(f"T{i}") for i in range(8)]
|
||||||
|
dis_vols = [i + 1 for i in range(8)]
|
||||||
|
|
||||||
|
run(
|
||||||
|
lh.transfer_liquid(
|
||||||
|
sources=[source],
|
||||||
|
targets=targets,
|
||||||
|
tip_racks=[],
|
||||||
|
use_channels=list(range(8)),
|
||||||
|
asp_vols=999, # one-to-many 8ch 会按 dis_vols 吸(每通道各自)
|
||||||
|
dis_vols=dis_vols,
|
||||||
|
mix_times=0,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
aspirates = [payload for name, payload in lh.calls if name == "aspirate"]
|
||||||
|
assert aspirates[0]["resources"] == [source] * 8
|
||||||
|
assert aspirates[0]["vols"] == [float(v) for v in dis_vols]
|
||||||
|
dispenses = [payload for name, payload in lh.calls if name == "dispense"]
|
||||||
|
assert dispenses[0]["resources"] == targets
|
||||||
|
assert dispenses[0]["vols"] == [float(v) for v in dis_vols]
|
||||||
|
|
||||||
|
|
||||||
|
def test_many_to_one_single_channel_standard_dispense_equals_asp_by_default():
|
||||||
|
lh = FakeLiquidHandler(channel_num=1)
|
||||||
|
lh.current_tip = iter(make_tip_iter(128))
|
||||||
|
|
||||||
|
sources = [DummyContainer(f"S{i}") for i in range(3)]
|
||||||
|
target = DummyContainer("T")
|
||||||
|
asp_vols = [5, 6, 7]
|
||||||
|
|
||||||
|
run(
|
||||||
|
lh.transfer_liquid(
|
||||||
|
sources=sources,
|
||||||
|
targets=[target],
|
||||||
|
tip_racks=[],
|
||||||
|
use_channels=[0],
|
||||||
|
asp_vols=asp_vols,
|
||||||
|
dis_vols=1, # many-to-one 允许标量;非比例模式下实际每次分液=对应 asp_vol
|
||||||
|
mix_times=0,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
dispenses = [payload for name, payload in lh.calls if name == "dispense"]
|
||||||
|
assert [d["vols"][0] for d in dispenses] == [float(v) for v in asp_vols]
|
||||||
|
assert all(d["resources"] == [target] for d in dispenses)
|
||||||
|
|
||||||
|
|
||||||
|
def test_many_to_one_single_channel_before_stage_mixes_target_once():
|
||||||
|
lh = FakeLiquidHandler(channel_num=1)
|
||||||
|
lh.current_tip = iter(make_tip_iter(128))
|
||||||
|
|
||||||
|
sources = [DummyContainer("S0"), DummyContainer("S1")]
|
||||||
|
target = DummyContainer("T")
|
||||||
|
|
||||||
|
run(
|
||||||
|
lh.transfer_liquid(
|
||||||
|
sources=sources,
|
||||||
|
targets=[target],
|
||||||
|
tip_racks=[],
|
||||||
|
use_channels=[0],
|
||||||
|
asp_vols=[5, 6],
|
||||||
|
dis_vols=1,
|
||||||
|
mix_stage="before",
|
||||||
|
mix_times=2,
|
||||||
|
mix_vol=4,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
aspirate_calls = [(idx, payload) for idx, (name, payload) in enumerate(lh.calls) if name == "aspirate"]
|
||||||
|
assert len(aspirate_calls) >= 1
|
||||||
|
mix_idx, mix_payload = aspirate_calls[0]
|
||||||
|
assert mix_payload["resources"] == [target]
|
||||||
|
assert mix_payload["vols"] == [4]
|
||||||
|
# 第一個 mix 之後會真正開始吸 source
|
||||||
|
assert any(call["resources"] == [sources[0]] for _, call in aspirate_calls[1:])
|
||||||
|
|
||||||
|
|
||||||
|
def test_many_to_one_single_channel_proportional_mixing_uses_dis_vols_per_source():
|
||||||
|
lh = FakeLiquidHandler(channel_num=1)
|
||||||
|
lh.current_tip = iter(make_tip_iter(128))
|
||||||
|
|
||||||
|
sources = [DummyContainer(f"S{i}") for i in range(3)]
|
||||||
|
target = DummyContainer("T")
|
||||||
|
asp_vols = [5, 6, 7]
|
||||||
|
dis_vols = [1, 2, 3]
|
||||||
|
|
||||||
|
run(
|
||||||
|
lh.transfer_liquid(
|
||||||
|
sources=sources,
|
||||||
|
targets=[target],
|
||||||
|
tip_racks=[],
|
||||||
|
use_channels=[0],
|
||||||
|
asp_vols=asp_vols,
|
||||||
|
dis_vols=dis_vols, # 比例模式
|
||||||
|
mix_times=0,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
dispenses = [payload for name, payload in lh.calls if name == "dispense"]
|
||||||
|
assert [d["vols"][0] for d in dispenses] == [float(v) for v in dis_vols]
|
||||||
|
|
||||||
|
|
||||||
|
def test_many_to_one_eight_channel_basic():
|
||||||
|
lh = FakeLiquidHandler(channel_num=8)
|
||||||
|
lh.current_tip = iter(make_tip_iter(256))
|
||||||
|
|
||||||
|
sources = [DummyContainer(f"S{i}") for i in range(8)]
|
||||||
|
target = DummyContainer("T")
|
||||||
|
asp_vols = [10 + i for i in range(8)]
|
||||||
|
|
||||||
|
run(
|
||||||
|
lh.transfer_liquid(
|
||||||
|
sources=sources,
|
||||||
|
targets=[target],
|
||||||
|
tip_racks=[],
|
||||||
|
use_channels=list(range(8)),
|
||||||
|
asp_vols=asp_vols,
|
||||||
|
dis_vols=999, # 非比例模式下每通道分液=对应 asp_vol
|
||||||
|
mix_times=0,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
aspirates = [payload for name, payload in lh.calls if name == "aspirate"]
|
||||||
|
dispenses = [payload for name, payload in lh.calls if name == "dispense"]
|
||||||
|
assert aspirates[0]["resources"] == sources
|
||||||
|
assert aspirates[0]["vols"] == [float(v) for v in asp_vols]
|
||||||
|
assert dispenses[0]["resources"] == [target] * 8
|
||||||
|
assert dispenses[0]["vols"] == [float(v) for v in asp_vols]
|
||||||
|
|
||||||
|
|
||||||
|
def test_transfer_liquid_mode_detection_unsupported_shape_raises():
|
||||||
|
lh = FakeLiquidHandler(channel_num=8)
|
||||||
|
lh.current_tip = iter(make_tip_iter(64))
|
||||||
|
|
||||||
|
sources = [DummyContainer("S0"), DummyContainer("S1")]
|
||||||
|
targets = [DummyContainer("T0"), DummyContainer("T1"), DummyContainer("T2")]
|
||||||
|
|
||||||
|
with pytest.raises(ValueError, match="Unsupported transfer mode"):
|
||||||
|
run(
|
||||||
|
lh.transfer_liquid(
|
||||||
|
sources=sources,
|
||||||
|
targets=targets,
|
||||||
|
tip_racks=[],
|
||||||
|
use_channels=[0],
|
||||||
|
asp_vols=[1, 1],
|
||||||
|
dis_vols=[1, 1, 1],
|
||||||
|
mix_times=0,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def test_mix_single_target_produces_matching_cycles():
|
||||||
|
lh = FakeLiquidHandler(channel_num=1)
|
||||||
|
target = DummyContainer("T_mix")
|
||||||
|
|
||||||
|
run(lh.mix(targets=[target], mix_time=2, mix_vol=5))
|
||||||
|
|
||||||
|
aspirates = [payload for name, payload in lh.calls if name == "aspirate"]
|
||||||
|
dispenses = [payload for name, payload in lh.calls if name == "dispense"]
|
||||||
|
assert len(aspirates) == len(dispenses) == 2
|
||||||
|
assert all(call["resources"] == [target] for call in aspirates)
|
||||||
|
assert all(call["vols"] == [5] for call in aspirates)
|
||||||
|
assert all(call["resources"] == [target] for call in dispenses)
|
||||||
|
assert all(call["vols"] == [5] for call in dispenses)
|
||||||
|
|
||||||
|
|
||||||
|
def test_mix_multiple_targets_supports_per_target_offsets():
|
||||||
|
lh = FakeLiquidHandler(channel_num=1)
|
||||||
|
targets = [DummyContainer("T0"), DummyContainer("T1")]
|
||||||
|
offsets = ["left", "right"]
|
||||||
|
heights = [0.1, 0.2]
|
||||||
|
rates = [0.5, 1.0]
|
||||||
|
|
||||||
|
run(
|
||||||
|
lh.mix(
|
||||||
|
targets=targets,
|
||||||
|
mix_time=1,
|
||||||
|
mix_vol=3,
|
||||||
|
offsets=offsets,
|
||||||
|
height_to_bottom=heights,
|
||||||
|
mix_rate=rates,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
aspirates = [payload for name, payload in lh.calls if name == "aspirate"]
|
||||||
|
assert len(aspirates) == 2
|
||||||
|
assert aspirates[0]["resources"] == [targets[0]]
|
||||||
|
assert aspirates[0]["offsets"] == [offsets[0]]
|
||||||
|
assert aspirates[0]["liquid_height"] == [heights[0]]
|
||||||
|
assert aspirates[0]["flow_rates"] == [rates[0]]
|
||||||
|
assert aspirates[1]["resources"] == [targets[1]]
|
||||||
|
assert aspirates[1]["offsets"] == [offsets[1]]
|
||||||
|
assert aspirates[1]["liquid_height"] == [heights[1]]
|
||||||
|
assert aspirates[1]["flow_rates"] == [rates[1]]
|
||||||
|
|
||||||
|
|
||||||
@@ -2,9 +2,8 @@ import pytest
 import json
 import os

-from pylabrobot.resources import Resource as ResourcePLR
 from unilabos.resources.graphio import resource_bioyond_to_plr
-from unilabos.ros.nodes.resource_tracker import ResourceTreeSet
+from unilabos.resources.resource_tracker import ResourceTreeSet
 from unilabos.registry.registry import lab_registry

 from unilabos.resources.bioyond.decks import BIOYOND_PolymerReactionStation_Deck

213  tests/workflow/test.json  (new file)
@@ -0,0 +1,213 @@
|
{
|
||||||
|
"workflow": [
|
||||||
|
{
|
||||||
|
"action": "transfer_liquid",
|
||||||
|
"action_args": {
|
||||||
|
"sources": "cell_lines",
|
||||||
|
"targets": "Liquid_1",
|
||||||
|
"asp_vol": 100.0,
|
||||||
|
"dis_vol": 74.75,
|
||||||
|
"asp_flow_rate": 94.0,
|
||||||
|
"dis_flow_rate": 95.5
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"action": "transfer_liquid",
|
||||||
|
"action_args": {
|
||||||
|
"sources": "cell_lines",
|
||||||
|
"targets": "Liquid_2",
|
||||||
|
"asp_vol": 100.0,
|
||||||
|
"dis_vol": 74.75,
|
||||||
|
"asp_flow_rate": 94.0,
|
||||||
|
"dis_flow_rate": 95.5
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"action": "transfer_liquid",
|
||||||
|
"action_args": {
|
||||||
|
"sources": "cell_lines",
|
||||||
|
"targets": "Liquid_3",
|
||||||
|
"asp_vol": 100.0,
|
||||||
|
"dis_vol": 74.75,
|
||||||
|
"asp_flow_rate": 94.0,
|
||||||
|
"dis_flow_rate": 95.5
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"action": "transfer_liquid",
|
||||||
|
"action_args": {
|
||||||
|
"sources": "cell_lines_2",
|
||||||
|
"targets": "Liquid_4",
|
||||||
|
"asp_vol": 100.0,
|
||||||
|
"dis_vol": 74.75,
|
||||||
|
"asp_flow_rate": 94.0,
|
||||||
|
"dis_flow_rate": 95.5
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"action": "transfer_liquid",
|
||||||
|
"action_args": {
|
||||||
|
"sources": "cell_lines_2",
|
||||||
|
"targets": "Liquid_5",
|
||||||
|
"asp_vol": 100.0,
|
||||||
|
"dis_vol": 74.75,
|
||||||
|
"asp_flow_rate": 94.0,
|
||||||
|
"dis_flow_rate": 95.5
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"action": "transfer_liquid",
|
||||||
|
"action_args": {
|
||||||
|
"sources": "cell_lines_2",
|
||||||
|
"targets": "Liquid_6",
|
||||||
|
"asp_vol": 100.0,
|
||||||
|
"dis_vol": 74.75,
|
||||||
|
"asp_flow_rate": 94.0,
|
||||||
|
"dis_flow_rate": 95.5
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"action": "transfer_liquid",
|
||||||
|
"action_args": {
|
||||||
|
"sources": "cell_lines_3",
|
||||||
|
"targets": "dest_set",
|
||||||
|
"asp_vol": 100.0,
|
||||||
|
"dis_vol": 74.75,
|
||||||
|
"asp_flow_rate": 94.0,
|
||||||
|
"dis_flow_rate": 95.5
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"action": "transfer_liquid",
|
||||||
|
"action_args": {
|
||||||
|
"sources": "cell_lines_3",
|
||||||
|
"targets": "dest_set_2",
|
||||||
|
"asp_vol": 100.0,
|
||||||
|
"dis_vol": 74.75,
|
||||||
|
"asp_flow_rate": 94.0,
|
||||||
|
"dis_flow_rate": 95.5
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"action": "transfer_liquid",
|
||||||
|
"action_args": {
|
||||||
|
"sources": "cell_lines_3",
|
||||||
|
"targets": "dest_set_3",
|
||||||
|
"asp_vol": 100.0,
|
||||||
|
"dis_vol": 74.75,
|
||||||
|
"asp_flow_rate": 94.0,
|
||||||
|
"dis_flow_rate": 95.5
|
||||||
|
}
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"reagent": {
|
||||||
|
"Liquid_1": {
|
||||||
|
"slot": 1,
|
||||||
|
"well": [
|
||||||
|
"A4",
|
||||||
|
"A7",
|
||||||
|
"A10"
|
||||||
|
],
|
||||||
|
"labware": "rep 1"
|
||||||
|
},
|
||||||
|
"Liquid_4": {
|
||||||
|
"slot": 1,
|
||||||
|
"well": [
|
||||||
|
"A4",
|
||||||
|
"A7",
|
||||||
|
"A10"
|
||||||
|
],
|
||||||
|
"labware": "rep 1"
|
||||||
|
},
|
||||||
|
"dest_set": {
|
||||||
|
"slot": 1,
|
||||||
|
"well": [
|
||||||
|
"A4",
|
||||||
|
"A7",
|
||||||
|
"A10"
|
||||||
|
],
|
||||||
|
"labware": "rep 1"
|
||||||
|
},
|
||||||
|
"Liquid_2": {
|
||||||
|
"slot": 2,
|
||||||
|
"well": [
|
||||||
|
"A3",
|
||||||
|
"A5",
|
||||||
|
"A8"
|
||||||
|
],
|
||||||
|
"labware": "rep 2"
|
||||||
|
},
|
||||||
|
"Liquid_5": {
|
||||||
|
"slot": 2,
|
||||||
|
"well": [
|
||||||
|
"A3",
|
||||||
|
"A5",
|
||||||
|
"A8"
|
||||||
|
],
|
||||||
|
"labware": "rep 2"
|
||||||
|
},
|
||||||
|
"dest_set_2": {
|
||||||
|
"slot": 2,
|
||||||
|
"well": [
|
||||||
|
"A3",
|
||||||
|
"A5",
|
||||||
|
"A8"
|
||||||
|
],
|
||||||
|
"labware": "rep 2"
|
||||||
|
},
|
||||||
|
"Liquid_3": {
|
||||||
|
"slot": 3,
|
||||||
|
"well": [
|
||||||
|
"A4",
|
||||||
|
"A6",
|
||||||
|
"A10"
|
||||||
|
],
|
||||||
|
"labware": "rep 3"
|
||||||
|
},
|
||||||
|
"Liquid_6": {
|
||||||
|
"slot": 3,
|
||||||
|
"well": [
|
||||||
|
"A4",
|
||||||
|
"A6",
|
||||||
|
"A10"
|
||||||
|
],
|
||||||
|
"labware": "rep 3"
|
||||||
|
},
|
||||||
|
"dest_set_3": {
|
||||||
|
"slot": 3,
|
||||||
|
"well": [
|
||||||
|
"A4",
|
||||||
|
"A6",
|
||||||
|
"A10"
|
||||||
|
],
|
||||||
|
"labware": "rep 3"
|
||||||
|
},
|
||||||
|
"cell_lines": {
|
||||||
|
"slot": 4,
|
||||||
|
"well": [
|
||||||
|
"A1",
|
||||||
|
"A3",
|
||||||
|
"A5"
|
||||||
|
],
|
||||||
|
"labware": "DRUG + YOYO-MEDIA"
|
||||||
|
},
|
||||||
|
"cell_lines_2": {
|
||||||
|
"slot": 4,
|
||||||
|
"well": [
|
||||||
|
"A1",
|
||||||
|
"A3",
|
||||||
|
"A5"
|
||||||
|
],
|
||||||
|
"labware": "DRUG + YOYO-MEDIA"
|
||||||
|
},
|
||||||
|
"cell_lines_3": {
|
||||||
|
"slot": 4,
|
||||||
|
"well": [
|
||||||
|
"A1",
|
||||||
|
"A3",
|
||||||
|
"A5"
|
||||||
|
],
|
||||||
|
"labware": "DRUG + YOYO-MEDIA"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
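The fixture above is plain data; as a minimal sketch (not part of this diff) it could be loaded and sanity-checked like the following in a test. Only the file path and key names come from the JSON shown here; everything else is illustrative.

import json

with open("tests/workflow/test.json", encoding="utf-8") as f:
    spec = json.load(f)

# Every step is a transfer_liquid action, and every source/target name
# must resolve to an entry in the "reagent" table.
assert all(step["action"] == "transfer_liquid" for step in spec["workflow"])
names = {step["action_args"][k] for step in spec["workflow"] for k in ("sources", "targets")}
assert names <= set(spec["reagent"])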
@@ -1 +1 @@
-__version__ = "0.10.13"
+__version__ = "0.10.17"
@@ -1,6 +1,6 @@
 import threading
 
-from unilabos.ros.nodes.resource_tracker import ResourceTreeSet
+from unilabos.resources.resource_tracker import ResourceTreeSet
 from unilabos.utils import logger
 
 
@@ -7,7 +7,6 @@ import sys
 import threading
 import time
 from typing import Dict, Any, List
 
 import networkx as nx
 import yaml
 
@@ -17,9 +16,14 @@ unilabos_dir = os.path.dirname(os.path.dirname(current_dir))
 if unilabos_dir not in sys.path:
     sys.path.append(unilabos_dir)
 
+from unilabos.app.utils import cleanup_for_restart
 from unilabos.utils.banner_print import print_status, print_unilab_banner
 from unilabos.config.config import load_config, BasicConfig, HTTPConfig
 
+# Global restart flags (used by ws_client and web/server)
+_restart_requested: bool = False
+_restart_reason: str = ""
+
 
 def load_config_from_file(config_path):
     if config_path is None:
@@ -156,6 +160,17 @@ def parse_args():
         default=False,
         help="Complete registry information",
     )
+    parser.add_argument(
+        "--check_mode",
+        action="store_true",
+        default=False,
+        help="Run in check mode for CI: validates registry imports and ensures no file changes",
+    )
+    parser.add_argument(
+        "--no_update_feedback",
+        action="store_true",
+        help="Disable sending update feedback to server",
+    )
     # workflow upload subcommand
     workflow_parser = subparsers.add_parser(
         "workflow_upload",
@@ -201,7 +216,10 @@ def main():
     args_dict = vars(args)
 
     # 环境检查 - 检查并自动安装必需的包 (可选)
-    if not args_dict.get("skip_env_check", False):
+    skip_env_check = args_dict.get("skip_env_check", False)
+    check_mode = args_dict.get("check_mode", False)
+
+    if not skip_env_check:
         from unilabos.utils.environment_check import check_environment
 
         if not check_environment(auto_install=True):
@@ -212,7 +230,21 @@ def main():
 
     # 加载配置文件,优先加载config,然后从env读取
     config_path = args_dict.get("config")
-    if os.getcwd().endswith("unilabos_data"):
+
+    if check_mode:
+        args_dict["working_dir"] = os.path.abspath(os.getcwd())
+    # 当 skip_env_check 时,默认使用当前目录作为 working_dir
+    if skip_env_check and not args_dict.get("working_dir") and not config_path:
+        working_dir = os.path.abspath(os.getcwd())
+        print_status(f"跳过环境检查模式:使用当前目录作为工作目录 {working_dir}", "info")
+        # 检查当前目录是否有 local_config.py
+        local_config_in_cwd = os.path.join(working_dir, "local_config.py")
+        if os.path.exists(local_config_in_cwd):
+            config_path = local_config_in_cwd
+            print_status(f"发现本地配置文件: {config_path}", "info")
+        else:
+            print_status(f"未指定config路径,可通过 --config 传入 local_config.py 文件路径", "info")
+    elif os.getcwd().endswith("unilabos_data"):
         working_dir = os.path.abspath(os.getcwd())
     else:
         working_dir = os.path.abspath(os.path.join(os.getcwd(), "unilabos_data"))
@@ -231,7 +263,7 @@ def main():
         working_dir = os.path.dirname(config_path)
     elif os.path.exists(working_dir) and os.path.exists(os.path.join(working_dir, "local_config.py")):
         config_path = os.path.join(working_dir, "local_config.py")
-    elif not config_path and (
+    elif not skip_env_check and not config_path and (
         not os.path.exists(working_dir) or not os.path.exists(os.path.join(working_dir, "local_config.py"))
     ):
         print_status(f"未指定config路径,可通过 --config 传入 local_config.py 文件路径", "info")
@@ -245,9 +277,11 @@ def main():
             print_status(f"已创建 local_config.py 路径: {config_path}", "info")
         else:
             os._exit(1)
-    # 加载配置文件
+
+    # 加载配置文件 (check_mode 跳过)
     print_status(f"当前工作目录为 {working_dir}", "info")
-    load_config_from_file(config_path)
+    if not check_mode:
+        load_config_from_file(config_path)
 
     # 根据配置重新设置日志级别
     from unilabos.utils.log import configure_logger, logger
@@ -297,11 +331,13 @@ def main():
     BasicConfig.is_host_mode = not args_dict.get("is_slave", False)
     BasicConfig.slave_no_host = args_dict.get("slave_no_host", False)
     BasicConfig.upload_registry = args_dict.get("upload_registry", False)
+    BasicConfig.no_update_feedback = args_dict.get("no_update_feedback", False)
     BasicConfig.communication_protocol = "websocket"
     machine_name = os.popen("hostname").read().strip()
     machine_name = "".join([c if c.isalnum() or c == "_" else "_" for c in machine_name])
     BasicConfig.machine_name = machine_name
     BasicConfig.vis_2d_enable = args_dict["2d_vis"]
+    BasicConfig.check_mode = check_mode
 
     from unilabos.resources.graphio import (
         read_node_link_json,
@@ -315,15 +351,19 @@ def main():
     from unilabos.app.web import start_server
     from unilabos.app.register import register_devices_and_resources
     from unilabos.resources.graphio import modify_to_backend_format
-    from unilabos.ros.nodes.resource_tracker import ResourceTreeSet, ResourceDict
+    from unilabos.resources.resource_tracker import ResourceTreeSet, ResourceDict
 
     # 显示启动横幅
     print_unilab_banner(args_dict)
 
-    # 注册表
-    lab_registry = build_registry(
-        args_dict["registry_path"], args_dict.get("complete_registry", False), BasicConfig.upload_registry
-    )
+    # 注册表 - check_mode 时强制启用 complete_registry
+    complete_registry = args_dict.get("complete_registry", False) or check_mode
+    lab_registry = build_registry(args_dict["registry_path"], complete_registry, BasicConfig.upload_registry)
+
+    # Check mode: complete_registry 完成后直接退出,git diff 检测由 CI workflow 执行
+    if check_mode:
+        print_status("Check mode: complete_registry 完成,退出", "info")
+        os._exit(0)
 
     if BasicConfig.upload_registry:
         # 设备注册到服务端 - 需要 ak 和 sk
@@ -497,13 +537,19 @@ def main():
                 time.sleep(1)
         else:
             start_backend(**args_dict)
-            start_server(
+            restart_requested = start_server(
                 open_browser=not args_dict["disable_browser"],
                 port=BasicConfig.port,
             )
+            if restart_requested:
+                print_status("[Main] Restart requested, cleaning up...", "info")
+                cleanup_for_restart()
+                return
     else:
         start_backend(**args_dict)
-        start_server(
+
+        # 启动服务器(默认支持WebSocket触发重启)
+        restart_requested = start_server(
             open_browser=not args_dict["disable_browser"],
             port=BasicConfig.port,
         )
@@ -54,6 +54,7 @@ class JobAddReq(BaseModel):
     action_type: str = Field(
         examples=["unilabos_msgs.action._str_single_input.StrSingleInput"], description="action type", default=""
     )
+    sample_material: dict = Field(examples=[{"string": "string"}], description="sample uuid to material uuid")
     action_args: dict = Field(examples=[{"string": "string"}], description="action arguments", default_factory=dict)
     task_id: str = Field(examples=["task_id"], description="task uuid (auto-generated if empty)", default="")
     job_id: str = Field(examples=["job_id"], description="goal uuid (auto-generated if empty)", default="")
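For illustration only, a request carrying the new sample_material mapping might be built as below. The field names are those visible in this model and in the job_start handler further down; all values are made up.

req = JobAddReq(
    device_id="liquid_handler_1",
    action="transfer_liquid",
    action_args={"asp_vol": 100.0, "dis_vol": 74.75},
    sample_material={"sample-uuid-1": "material-uuid-1"},  # sample uuid -> material uuid
)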
new file: unilabos/app/utils.py (176 lines)

"""
UniLabOS 应用工具函数

提供清理、重启等工具函数
"""

import glob
import os
import shutil
import sys


def patch_rclpy_dll_windows():
    """在 Windows + conda 环境下为 rclpy 打 DLL 加载补丁"""
    if sys.platform != "win32" or not os.environ.get("CONDA_PREFIX"):
        return
    try:
        import rclpy

        return
    except ImportError as e:
        if not str(e).startswith("DLL load failed"):
            return
    cp = os.environ["CONDA_PREFIX"]
    impl = os.path.join(cp, "Lib", "site-packages", "rclpy", "impl", "implementation_singleton.py")
    pyd = glob.glob(os.path.join(cp, "Lib", "site-packages", "rclpy", "_rclpy_pybind11*.pyd"))
    if not os.path.exists(impl) or not pyd:
        return
    with open(impl, "r", encoding="utf-8") as f:
        content = f.read()
    lib_bin = os.path.join(cp, "Library", "bin").replace("\\", "/")
    patch = f'# UniLabOS DLL Patch\nimport os,ctypes\nos.add_dll_directory("{lib_bin}") if hasattr(os,"add_dll_directory") else None\ntry: ctypes.CDLL("{pyd[0].replace(chr(92),"/")}")\nexcept: pass\n# End Patch\n'
    shutil.copy2(impl, impl + ".bak")
    with open(impl, "w", encoding="utf-8") as f:
        f.write(patch + content)


patch_rclpy_dll_windows()

import gc
import threading
import time

from unilabos.utils.banner_print import print_status


def cleanup_for_restart() -> bool:
    """
    Clean up all resources for restart without exiting the process.

    This function prepares the system for re-initialization by:
    1. Stopping all communication clients
    2. Destroying ROS nodes
    3. Resetting singletons
    4. Waiting for threads to finish

    Returns:
        bool: True if cleanup was successful, False otherwise
    """
    print_status("[Restart] Starting cleanup for restart...", "info")

    # Step 1: Stop WebSocket communication client
    print_status("[Restart] Step 1: Stopping WebSocket client...", "info")
    try:
        from unilabos.app.communication import get_communication_client

        comm_client = get_communication_client()
        if comm_client is not None:
            comm_client.stop()
            print_status("[Restart] WebSocket client stopped", "info")
    except Exception as e:
        print_status(f"[Restart] Error stopping WebSocket: {e}", "warning")

    # Step 2: Get HostNode and cleanup ROS
    print_status("[Restart] Step 2: Cleaning up ROS nodes...", "info")
    try:
        from unilabos.ros.nodes.presets.host_node import HostNode
        import rclpy
        from rclpy.timer import Timer

        host_instance = HostNode.get_instance(timeout=5)
        if host_instance is not None:
            print_status(f"[Restart] Found HostNode: {host_instance.device_id}", "info")

            # Gracefully shutdown background threads
            print_status("[Restart] Shutting down background threads...", "info")
            HostNode.shutdown_background_threads(timeout=5.0)
            print_status("[Restart] Background threads shutdown complete", "info")

            # Stop discovery timer
            if hasattr(host_instance, "_discovery_timer") and isinstance(host_instance._discovery_timer, Timer):
                host_instance._discovery_timer.cancel()
                print_status("[Restart] Discovery timer cancelled", "info")

            # Destroy device nodes
            device_count = len(host_instance.devices_instances)
            print_status(f"[Restart] Destroying {device_count} device instances...", "info")
            for device_id, device_node in list(host_instance.devices_instances.items()):
                try:
                    if hasattr(device_node, "ros_node_instance") and device_node.ros_node_instance is not None:
                        device_node.ros_node_instance.destroy_node()
                        print_status(f"[Restart] Device {device_id} destroyed", "info")
                except Exception as e:
                    print_status(f"[Restart] Error destroying device {device_id}: {e}", "warning")

            # Clear devices instances
            host_instance.devices_instances.clear()
            host_instance.devices_names.clear()

            # Destroy host node
            try:
                host_instance.destroy_node()
                print_status("[Restart] HostNode destroyed", "info")
            except Exception as e:
                print_status(f"[Restart] Error destroying HostNode: {e}", "warning")

            # Reset HostNode state
            HostNode.reset_state()
            print_status("[Restart] HostNode state reset", "info")

        # Shutdown executor first (to stop executor.spin() gracefully)
        if hasattr(rclpy, "__executor") and rclpy.__executor is not None:
            try:
                rclpy.__executor.shutdown()
                rclpy.__executor = None  # Clear for restart
                print_status("[Restart] ROS executor shutdown complete", "info")
            except Exception as e:
                print_status(f"[Restart] Error shutting down executor: {e}", "warning")

        # Shutdown rclpy
        if rclpy.ok():
            rclpy.shutdown()
            print_status("[Restart] rclpy shutdown complete", "info")

    except ImportError as e:
        print_status(f"[Restart] ROS modules not available: {e}", "warning")
    except Exception as e:
        print_status(f"[Restart] Error in ROS cleanup: {e}", "warning")
        return False

    # Step 3: Reset communication client singleton
    print_status("[Restart] Step 3: Resetting singletons...", "info")
    try:
        from unilabos.app import communication

        if hasattr(communication, "_communication_client"):
            communication._communication_client = None
            print_status("[Restart] Communication client singleton reset", "info")
    except Exception as e:
        print_status(f"[Restart] Error resetting communication singleton: {e}", "warning")

    # Step 4: Wait for threads to finish
    print_status("[Restart] Step 4: Waiting for threads to finish...", "info")
    time.sleep(3)  # Give threads time to finish

    # Check remaining threads
    remaining_threads = []
    for t in threading.enumerate():
        if t.name != "MainThread" and t.is_alive():
            remaining_threads.append(t.name)

    if remaining_threads:
        print_status(
            f"[Restart] Warning: {len(remaining_threads)} threads still running: {remaining_threads}", "warning"
        )
    else:
        print_status("[Restart] All threads stopped", "info")

    # Step 5: Force garbage collection
    print_status("[Restart] Step 5: Running garbage collection...", "info")
    gc.collect()
    gc.collect()  # Run twice for weak references
    print_status("[Restart] Garbage collection complete", "info")

    print_status("[Restart] Cleanup complete. Ready for re-initialization.", "info")
    return True
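A short usage sketch, assuming the helper is called exactly as in the main.py hunk above (nothing beyond what this diff adds): the caller invokes it after start_server() reports a restart request and only re-initializes when cleanup succeeds.

from unilabos.app.utils import cleanup_for_restart

if cleanup_for_restart():
    # rclpy shut down, singletons reset, worker threads drained:
    # safe to return to the caller and rebuild the node graph.
    print("ready for re-initialization")
else:
    print("cleanup failed; a full process restart may be needed")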
@@ -6,12 +6,10 @@ HTTP客户端模块
 
 import json
 import os
-import time
-from threading import Thread
 from typing import List, Dict, Any, Optional
 
 import requests
-from unilabos.ros.nodes.resource_tracker import ResourceTreeSet
+from unilabos.resources.resource_tracker import ResourceTreeSet
 from unilabos.utils.log import info
 from unilabos.config.config import HTTPConfig, BasicConfig
 from unilabos.utils import logger
@@ -361,9 +359,7 @@ class HTTPClient:
         Returns:
             Dict: API响应数据,包含 code 和 data (uuid, name)
         """
-        # target_lab_uuid 暂时使用默认值,后续由后端根据 ak/sk 获取
         payload = {
-            "target_lab_uuid": "28c38bb0-63f6-4352-b0d8-b5b8eb1766d5",
             "name": name,
             "data": {
                 "workflow_uuid": workflow_uuid,
@@ -327,6 +327,7 @@ def job_add(req: JobAddReq) -> JobData:
         queue_item,
         action_type=action_type,
         action_kwargs=action_args,
+        sample_material=req.sample_material,
         server_info=server_info,
     )
 
@@ -6,7 +6,6 @@ Web服务器模块
 
 import webbrowser
 
-import uvicorn
 from fastapi import FastAPI, Request
 from fastapi.middleware.cors import CORSMiddleware
 from starlette.responses import Response
@@ -96,7 +95,7 @@ def setup_server() -> FastAPI:
     return app
 
 
-def start_server(host: str = "0.0.0.0", port: int = 8002, open_browser: bool = True) -> None:
+def start_server(host: str = "0.0.0.0", port: int = 8002, open_browser: bool = True) -> bool:
     """
     启动服务器
 
@@ -104,7 +103,14 @@ def start_server(host: str = "0.0.0.0", port: int = 8002, open_browser: bool = True)
         host: 服务器主机
         port: 服务器端口
         open_browser: 是否自动打开浏览器
+
+    Returns:
+        bool: True if restart was requested, False otherwise
     """
+    import threading
+    import time
+    from uvicorn import Config, Server
+
     # 设置服务器
     setup_server()
 
@@ -123,7 +129,37 @@ def start_server(host: str = "0.0.0.0", port: int = 8002, open_browser: bool = True)
 
     # 启动服务器
     info(f"[Web] 启动FastAPI服务器: {host}:{port}")
-    uvicorn.run(app, host=host, port=port, log_config=log_config)
+
+    # 使用支持重启的模式
+    config = Config(app=app, host=host, port=port, log_config=log_config)
+    server = Server(config)
+
+    # 启动服务器线程
+    server_thread = threading.Thread(target=server.run, daemon=True, name="uvicorn_server")
+    server_thread.start()
+
+    info("[Web] Server started, monitoring for restart requests...")
+
+    # 监控重启标志
+    import unilabos.app.main as main_module
+
+    while server_thread.is_alive():
+        if hasattr(main_module, "_restart_requested") and main_module._restart_requested:
+            info(
+                f"[Web] Restart requested via WebSocket, reason: {getattr(main_module, '_restart_reason', 'unknown')}"
+            )
+            main_module._restart_requested = False
+
+            # 停止服务器
+            server.should_exit = True
+            server_thread.join(timeout=5)
+
+            info("[Web] Server stopped, ready for restart")
+            return True
+
+        time.sleep(1)
+
+    return False
 
 
 # 当脚本直接运行时启动服务器
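Illustrative only: any component that imports unilabos.app.main can request a restart by setting the two module-level flags added in this changeset; start_server() polls them about once per second. The helper name below is made up and does not exist in the diff.

import unilabos.app.main as main_module

def request_restart(reason: str) -> None:  # hypothetical convenience wrapper
    main_module._restart_reason = reason
    main_module._restart_requested = True  # picked up by the monitor loop in start_server()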
@@ -359,7 +359,7 @@ class MessageProcessor:
         self.device_manager = device_manager
         self.queue_processor = None  # 延迟设置
         self.websocket_client = None  # 延迟设置
-        self.session_id = ""
+        self.session_id = str(uuid.uuid4())[:6]  # 产生一个随机的session_id
 
         # WebSocket连接
         self.websocket = None
@@ -488,7 +488,16 @@ class MessageProcessor:
             async for message in self.websocket:
                 try:
                     data = json.loads(message)
-                    await self._process_message(data)
+                    message_type = data.get("action", "")
+                    message_data = data.get("data")
+                    if self.session_id and self.session_id == data.get("edge_session"):
+                        await self._process_message(message_type, message_data)
+                    else:
+                        if message_type.endswith("_material"):
+                            logger.trace(f"[MessageProcessor] 收到一条归属 {data.get('edge_session')} 的旧消息:{data}")
+                            logger.debug(f"[MessageProcessor] 跳过了一条归属 {data.get('edge_session')} 的旧消息: {data.get('action')}")
+                        else:
+                            await self._process_message(message_type, message_data)
                 except json.JSONDecodeError:
                     logger.error(f"[MessageProcessor] Invalid JSON received: {message}")
                 except Exception as e:
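A minimal sketch of the filtering rule this hunk implements, pulled out as a standalone function for clarity (assuming session_id is always non-empty, which holds once it is seeded from uuid4 above): a message is handled when its edge_session matches, stale *_material messages from other sessions are dropped, and everything else still goes through.

def should_process(session_id: str, data: dict) -> bool:
    same_session = bool(session_id) and session_id == data.get("edge_session")
    is_material = data.get("action", "").endswith("_material")
    return same_session or not is_material

assert should_process("abc123", {"action": "update_material", "edge_session": "abc123"})
assert not should_process("abc123", {"action": "update_material", "edge_session": "old999"})
assert should_process("abc123", {"action": "job_start", "edge_session": "old999"})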
@@ -531,7 +540,7 @@ class MessageProcessor:
                 try:
                     message_str = json.dumps(msg, ensure_ascii=False)
                     await self.websocket.send(message_str)
-                    logger.trace(f"[MessageProcessor] Message sent: {msg.get('action', 'unknown')}")  # type: ignore # noqa: E501
+                    # logger.trace(f"[MessageProcessor] Message sent: {msg.get('action', 'unknown')}")  # type: ignore # noqa: E501
                 except Exception as e:
                     logger.error(f"[MessageProcessor] Failed to send message: {str(e)}")
                     logger.error(traceback.format_exc())
@@ -554,11 +563,8 @@ class MessageProcessor:
         finally:
             logger.debug("[MessageProcessor] Send handler stopped")
 
-    async def _process_message(self, data: Dict[str, Any]):
+    async def _process_message(self, message_type: str, message_data: Dict[str, Any]):
         """处理收到的消息"""
-        message_type = data.get("action", "")
-        message_data = data.get("data")
-
         logger.debug(f"[MessageProcessor] Processing message: {message_type}")
 
         try:
@@ -571,16 +577,19 @@ class MessageProcessor:
             elif message_type == "cancel_action" or message_type == "cancel_task":
                 await self._handle_cancel_action(message_data)
             elif message_type == "add_material":
+                # noinspection PyTypeChecker
                 await self._handle_resource_tree_update(message_data, "add")
             elif message_type == "update_material":
+                # noinspection PyTypeChecker
                 await self._handle_resource_tree_update(message_data, "update")
             elif message_type == "remove_material":
+                # noinspection PyTypeChecker
                 await self._handle_resource_tree_update(message_data, "remove")
-            elif message_type == "session_id":
-                self.session_id = message_data.get("session_id")
-                logger.info(f"[MessageProcessor] Session ID: {self.session_id}")
-            elif message_type == "request_reload":
-                await self._handle_request_reload(message_data)
+            # elif message_type == "session_id":
+            #     self.session_id = message_data.get("session_id")
+            #     logger.info(f"[MessageProcessor] Session ID: {self.session_id}")
+            elif message_type == "request_restart":
+                await self._handle_request_restart(message_data)
             else:
                 logger.debug(f"[MessageProcessor] Unknown message type: {message_type}")
 
@@ -643,6 +652,8 @@ class MessageProcessor:
     async def _handle_job_start(self, data: Dict[str, Any]):
         """处理job_start消息"""
         try:
+            if not data.get("sample_material"):
+                data["sample_material"] = {}
             req = JobAddReq(**data)
 
             job_log = format_job_log(req.job_id, req.task_id, req.device_id, req.action)
@@ -674,6 +685,7 @@ class MessageProcessor:
                 queue_item,
                 action_type=req.action_type,
                 action_kwargs=req.action_args,
+                sample_material=req.sample_material,
                 server_info=req.server_info,
             )
 
@@ -839,7 +851,7 @@ class MessageProcessor:
                     device_action_groups[key_add].append(item["uuid"])
 
                     logger.info(
-                        f"[MessageProcessor] Resource migrated: {item['uuid'][:8]} from {device_old_id} to {device_id}"
+                        f"[资源同步] 跨站Transfer: {item['uuid'][:8]} from {device_old_id} to {device_id}"
                     )
                 else:
                     # 正常update
@@ -854,11 +866,11 @@ class MessageProcessor:
                     device_action_groups[key] = []
                 device_action_groups[key].append(item["uuid"])
 
-        logger.info(f"触发物料更新 {action} 分组数量: {len(device_action_groups)}, 总数量: {len(resource_uuid_list)}")
+        logger.trace(f"[资源同步] 动作 {action} 分组数量: {len(device_action_groups)}, 总数量: {len(resource_uuid_list)}")
 
         # 为每个(device_id, action)创建独立的更新线程
         for (device_id, actual_action), items in device_action_groups.items():
-            logger.info(f"设备 {device_id} 物料更新 {actual_action} 数量: {len(items)}")
+            logger.trace(f"[资源同步] {device_id} 物料动作 {actual_action} 数量: {len(items)}")
 
             def _notify_resource_tree(dev_id, act, item_list):
                 try:
@@ -890,19 +902,48 @@ class MessageProcessor:
             )
             thread.start()
 
-    async def _handle_request_reload(self, data: Dict[str, Any]):
+    async def _handle_request_restart(self, data: Dict[str, Any]):
         """
-        处理重载请求
+        处理重启请求
 
-        当LabGo发送request_reload时,重新发送设备注册信息
+        当LabGo发送request_restart时,执行清理并触发重启
         """
         reason = data.get("reason", "unknown")
-        logger.info(f"[MessageProcessor] Received reload request, reason: {reason}")
+        delay = data.get("delay", 2)  # 默认延迟2秒
+        logger.info(f"[MessageProcessor] Received restart request, reason: {reason}, delay: {delay}s")
 
-        # 重新发送host_node_ready信息
+        # 发送确认消息
         if self.websocket_client:
-            self.websocket_client.publish_host_ready()
-            logger.info("[MessageProcessor] Re-sent host_node_ready after reload request")
+            await self.websocket_client.send_message({
+                "action": "restart_acknowledged",
+                "data": {"reason": reason, "delay": delay}
+            })
+
+        # 设置全局重启标志
+        import unilabos.app.main as main_module
+        main_module._restart_requested = True
+        main_module._restart_reason = reason
+
+        # 延迟后执行清理
+        await asyncio.sleep(delay)
+
+        # 在新线程中执行清理,避免阻塞当前事件循环
+        def do_cleanup():
+            import time
+            time.sleep(0.5)  # 给当前消息处理完成的时间
+            logger.info(f"[MessageProcessor] Starting cleanup for restart, reason: {reason}")
+            try:
+                from unilabos.app.utils import cleanup_for_restart
+                if cleanup_for_restart():
+                    logger.info("[MessageProcessor] Cleanup successful, main() will restart")
+                else:
+                    logger.error("[MessageProcessor] Cleanup failed")
+            except Exception as e:
+                logger.error(f"[MessageProcessor] Error during cleanup: {e}")
+
+        cleanup_thread = threading.Thread(target=do_cleanup, name="RestartCleanupThread", daemon=True)
+        cleanup_thread.start()
+        logger.info(f"[MessageProcessor] Restart cleanup scheduled")
 
     async def _send_action_state_response(
         self, device_id: str, action_name: str, task_id: str, job_id: str, typ: str, free: bool, need_more: int
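For reference, a request_restart message consistent with the handler above would look like the following. The reason string is arbitrary, delay defaults to 2 seconds if omitted, and edge_session must match the client's current session for the message to be processed at all (see the receive-loop hunk earlier).

restart_msg = {
    "action": "request_restart",
    "edge_session": "<current session_id>",
    "data": {"reason": "registry updated on server", "delay": 2},
}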
@@ -1256,7 +1297,7 @@ class WebSocketClient(BaseCommunicationClient):
             },
         }
         self.message_processor.send_message(message)
-        logger.trace(f"[WebSocketClient] Device status published: {device_id}.{property_name}")
+        # logger.trace(f"[WebSocketClient] Device status published: {device_id}.{property_name}")
 
     def publish_job_status(
         self, feedback_data: dict, item: QueueItem, status: str, return_info: Optional[dict] = None
@@ -95,8 +95,29 @@ def get_vessel_liquid_volume(G: nx.DiGraph, vessel: str) -> float:
     return total_volume
 
 
-def is_integrated_pump(node_name):
-    return "pump" in node_name and "valve" in node_name
+def is_integrated_pump(node_class: str, node_name: str = "") -> bool:
+    """
+    判断是否为泵阀一体设备
+    """
+    class_lower = (node_class or "").lower()
+    name_lower = (node_name or "").lower()
+
+    if "pump" not in class_lower and "pump" not in name_lower:
+        return False
+
+    integrated_markers = [
+        "valve",
+        "pump_valve",
+        "pumpvalve",
+        "integrated",
+        "transfer_pump",
+    ]
+
+    for marker in integrated_markers:
+        if marker in class_lower or marker in name_lower:
+            return True
+
+    return False
 
 
 def find_connected_pump(G, valve_node):
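The rewritten predicate can be exercised directly; these calls follow the implementation shown above (the device class and node names are invented):

assert is_integrated_pump("PumpValveDevice")               # "pump" plus the "valve" marker in the class
assert is_integrated_pump("", "integrated_pump_3")         # markers may also come from the node name
assert not is_integrated_pump("SyringePump", "pump_body")  # a plain pump carries no integrated marker
assert not is_integrated_pump("RotaryValve")               # no "pump" at all, so it is rejected early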
@@ -186,7 +207,9 @@ def build_pump_valve_maps(G, pump_backbone):
     debug_print(f"🔧 过滤后的骨架: {filtered_backbone}")
 
     for node in filtered_backbone:
-        if is_integrated_pump(G.nodes[node]["class"]):
+        node_data = G.nodes.get(node, {})
+        node_class = node_data.get("class", "") or ""
+        if is_integrated_pump(node_class, node):
             pumps_from_node[node] = node
             valve_from_node[node] = node
             debug_print(f" - 集成泵-阀: {node}")
@@ -16,6 +16,7 @@ class BasicConfig:
     upload_registry = False
     machine_name = "undefined"
     vis_2d_enable = False
+    no_update_feedback = False
     enable_resource_load = True
     communication_protocol = "websocket"
     startup_json_path = None  # 填写绝对路径
@@ -6,7 +6,7 @@ Coin Cell Assembly Workstation
 """
 from typing import Dict, Any, List, Optional, Union
 
-from unilabos.ros.nodes.resource_tracker import DeviceNodeResourceTracker
+from unilabos.resources.resource_tracker import DeviceNodeResourceTracker
 from unilabos.device_comms.workstation_base import WorkstationBase, WorkflowInfo
 from unilabos.device_comms.workstation_communication import (
     WorkstationCommunicationBase, CommunicationConfig, CommunicationProtocol, CoinCellCommunication
@@ -61,7 +61,7 @@ class CoinCellAssemblyWorkstation(WorkstationBase):
 
         # 创建资源跟踪器(如果没有提供)
         if resource_tracker is None:
-            from unilabos.ros.nodes.resource_tracker import DeviceNodeResourceTracker
+            from unilabos.resources.resource_tracker import DeviceNodeResourceTracker
             resource_tracker = DeviceNodeResourceTracker()
 
         # 初始化基类
new file: unilabos/devices/LICENSE (73 lines)

Uni-Lab-OS软件许可使用准则

本软件使用准则(以下简称"本准则")旨在规范用户在使用Uni-Lab-OS软件(以下简称"本软件")过程中的行为和义务。在下载、安装、使用或以任何方式访问本软件之前,请务必仔细阅读并理解以下条款和条件。若您不同意本准则的全部或部分内容,请您立即停止使用本软件。一旦您开始访问、下载、安装、使用本软件,即表示您已阅读、理解并同意接受本准则的约束。

1、使用许可
1.1 本软件的所有权及版权归北京深势科技有限公司(以下简称"深势科技")所有。在遵守本准则的前提下,深势科技特此授予学术用户(以下简称"您")一个全球范围内的、非排他性的、免版权费用的使用许可,可为了满足学术目的而使用本软件。

1.2 本准则下授予的许可仅适用于本软件的二进制代码版本。您不对本软件源代码拥有任何权利。

2、使用限制
2.1 本准则仅授予学术用户出于学术目的使用本软件,任何商业组织、商业机构或其他非学术用户不得使用本软件,如果违反本条款,深势科技将保留一切追诉的权利。
2.2 您将本软件用于任何商业行为,应取得深势科技的商业许可。
2.3 您不得将本软件或任何形式的衍生作品用于任何商业目的,也不得将其出售、出租、转让、分发或以其他方式提供给任何第三方。您必须确保本软件的使用仅限于您个人学术研究,禁止您为任何其他实体的利益使用本软件(无论是否收费)。
2.4 您不得以任何方式修改、破解、反编译、反汇编、反向工程、隔离、分离或以其他方式从任何程序或文档中提取源代码或试图发现本软件的源代码。您不得以任何方式去除、修改或屏蔽本软件中的任何版权、商标或其他专有权利声明。您不得使用本软件进行任何非法活动,包括但不限于侵犯他人的知识产权、隐私权等。
2.5 您同意将本软件仅用于合法的学术目的,且遵守您所在国家或地区的法律法规,您将承担因违反法律法规而产生的一切法律责任。

3、软件所有权
本软件在此仅作使用许可,并非出售。本软件及与软件有关的全部文档的所有权及其他所有权利(包括但不限于知识产权和商业秘密),始终是深势科技的专有财产,您不拥有任何权利,但本准则下被明确授予的有限的使用许可权利除外。

4、衍生作品传播规范
若您传播基于Uni-Lab-OS程序修改形成的作品,须同时满足以下全部条件:
4.1 作品必须包含显著声明,明确标注修改内容及修改日期;
4.2 作品必须声明本作品依据本许可协议发布;
4.3 必须将整个作品(包括修改部分)作为整体授予获取副本者本许可协议的保障,且该许可将自动延伸适用于作品全组件(无论其以何种形式打包);
4.4 若衍生作品含交互式用户界面:每个界面均须显示合规法律声明,若原始Uni-Lab-OS程序的交互界面未展示法律声明,您的衍生作品可免除此义务。

5、提出建议
您可以对本软件提出建议,前提是:
(i)您声明并保证,该建议未侵害任何第三方的任何知识产权;
(ii)您承认,深势科技有权使用该建议,但无使用该建议的义务;
(iii)您授予深势科技一项非独占的、不可撤销的、可分许可的、无版权费的、全球范围的著作权许可,以复制、分发、传播、公开展示、公开表演、修改、翻译、基于其制作衍生作品、生产、制作、推销、销售、提供销售和/或以其他方式整体或部分地使用该建议和基于其的衍生作品,包括但不限于,通过将该建议整体或部分地纳入深势科技的软件和/或其他软件,以及在现存的或将来任何时候存在的任何媒介中或通过该媒介体现,以及为从事上述活动而授予多个分许可;
(iv)您特此授予深势科技一项永久的、全球范围的、非独占性的、免费的、免特许权使用费的、不可撤销的专利许可,许可其制造、委托制造、使用、要约销售、销售、进口及以其他方式转让该建议和基于其的衍生专利。上述专利许可的适用范围仅限于以下专利权利要求:您有权许可的、且仅因您的建议本身,或因您的建议与所提交的本软件结合而必然构成侵权的专利权利要求。若任何实体针对您或其他实体提起专利诉讼(包括诉讼中的交叉诉讼或反诉),主张该建议或您所贡献的软件构成直接或间接专利侵权,则依据本协议授予的、针对该建议或软件的任何专利许可,自该诉讼提起之日起终止。
(v)您放弃对该建议的任何权利或主张,深势科技无需承担任何义务、版税或基于知识产权或其他方面的限制。

6、引用要求
如您使用本软件获得的成果发表在出版物上,您应在成果中承认对Uni-Lab-OS软件的使用并标注权利人名称。引用 Uni-Lab-OS时请使用以下内容:
@article{gao2025unilabos,
  title = {UniLabOS: An AI-Native Operating System for Autonomous Laboratories},
  doi = {10.48550/arXiv.2512.21766},
  publisher = {arXiv},
  author = {Gao, Jing and Chang, Junhan and Que, Haohui and Xiong, Yanfei and Zhang, Shixiang and Qi, Xianwei and Liu, Zhen and Wang, Jun-Jie and Ding, Qianjun and Li, Xinyu and Pan, Ziwei and Xie, Qiming and Yan, Zhuang and Yan, Junchi and Zhang, Linfeng},
  year = {2025}
}

7、保留权利
您认可,所有未被明确授予您的本软件的权利,无论是当前或今后存在的,均由深势科技予以保留,任何未经深势科技明确授权而使用本软件的行为将被视为侵权,深势科技有权追究侵权者的一切法律责任。

8、保密信息
您同意将本软件代码及相关文档视为深势科技的机密信息,您不会向任何第三方提供相关代码,并将采取合理审慎的使用态度来防止本软件代码及相关文档被泄露。

9、无保证
该软件是"按原样"提供的,没有任何明示或暗示的保证,不包含任何代码或规范没有缺陷、适销性、适用于特定目的或不侵犯第三方权利的保证。您同意您自主承担使用本软件或与本准则有关的全部风险。

10、免责条款
在任何情况下,无论基于侵权(包括过失)、合同或其他法律理论,除非适用法律强制规定(如故意或重大过失行为)或另有书面协议,深势科技不对被许可人因软件许可、使用或无法使用软件所致损害承担责任(包括任何性质的直接、间接、特殊、偶发或后果性损害,例如但不限于商誉损失、停工损失、计算机故障或失灵造成的损害,以及其他一切商业损害或损失),即使深势科技已被告知发生此类损害的可能性亦不例外。
被许可人在再分发软件或其衍生作品时,仅能以自身名义独立承担责任进行操作,不得代表深势科技或其他被许可人。

11、终止
如果您以任何方式违反本准则或未能遵守本准则的任何重要条款或条件,则您被授予的所有权利将自动终止。

12、举报
如果您认为有人违反了本准则,请向深势科技进行举报,深势科技将对您的身份进行严格保密,举报邮箱changjh@dp.tech。

13、法律管辖
本准则中的任何内容均不得解释为通过暗示、禁止反悔或其他方式授予本准则中授予的许可或权利以外的任何许可或权利。如果本准则的任何条款被认定为不可执行,则仅在必要的范围内对该条款进行修改,使其可执行。本准则应受中华人民共和国法律管辖,不适用法律冲突条款及《联合国国际货物销售合同公约》,因本准则产生的一切争议由北京市海淀区人民法院管辖。

14、未来版本
深势科技保留不经事先通知随时变更或停止本软件或本准则的权利。

15、语言优先
本准则同时具有中文版本和英文版本,如果英文版本和中文版本有冲突,以中文版本为准。
new file: unilabos/devices/LICENSE_eng (73 lines)

Uni-Lab-OS License Agreement

Preamble
This License Agreement (the "Agreement") is instituted to govern user conduct and obligations in relation to the utilization of the Uni-Lab-OS (the "Software"). By accessing, downloading, installing, or utilizing the Software in any manner, you hereby acknowledge that you have meticulously reviewed, comprehended, and consented to be legally bound by the terms herein. If you dissent from any provision of this Agreement, you must forthwith cease all interaction with the Software.

1. Grant of License
1.1 The proprietary rights to the Software are exclusively retained by Beijing DP Technology Co., Ltd. ("DP Technology"). Subject to full compliance with this Agreement, DP Technology hereby grants academic users ("Licensee") a worldwide, non-exclusive, royalty-free license to utilise the Software solely for non-commercial academic pursuits.

1.2 The foregoing license applies exclusively to the Software's executable binary code. No rights whatsoever are conferred to the Software's source code.

2. Usage Restrictions
2.1 This license is restricted to academic users engaging in scholastic activities. Commercial entities, institutions, or any non-academic parties are expressly prohibited from utilizing the Software. Violations of this clause shall entitle DP Technology to pursue all available legal remedies.
2.2 The Licensee shall obtain a commercial license from DP Technology for any commercial use of the Software.
2.3 The Licensee shall not utilise the Software or any derivative works for commercial purposes, nor distribute, sublicense, lease, transfer, or otherwise disseminate the Software to third parties. The Licensee is strictly prohibited from utilizing the Software for the benefit of any third-party entity, whether gratuitously or otherwise.
2.4 Reverse engineering, decompilation, disassembly, code isolation, or any attempt to derive source code from the Software is strictly prohibited. The Licensee shall not alter, circumvent, or remove copyright notices, trademarks, or proprietary legends embedded in the Software. Use of the Software for unlawful activities—including but not limited to intellectual property infringement or privacy violations—is categorically barred.
2.5 The Licensee warrants that the Software shall be utilised solely for lawful academic purposes in compliance with applicable jurisdictional statutes. All legal liabilities arising from noncompliance shall be borne exclusively by the Licensee.

3. Proprietary Rights
This Agreement confers a license to utilise the Software, not a transfer of ownership. All intellectual property rights—including copyrights, patents, trade secrets, and documentation—remain the exclusive dominion of DP Technology. The Licensee acquires no entitlements beyond the limited usage privileges expressly delineated herein.

4. Derivative Work
You may convey a work based on the Software, or the modifications to produce it from the Software, provided that you meet all of these conditions:
4.1 The work must carry prominent notices stating that you modified it, and giving a relevant date.
4.2 The work must carry prominent notices stating that it is released under this License.
4.3 You must license the entire work, as a whole, under this License to anyone who comes into possession of a copy. This License will therefore apply to the whole of the work, and all its parts, regardless of how they are packaged. This License gives no permission to license the work in any other way, but it does not invalidate such permission if you have separately received it.
4.4 If the work has interactive user interfaces, each must display Appropriate Legal Notices; however, if the Software has interactive interfaces that do not display Appropriate Legal Notices, your work need not make them do so.

5. Feedback and Proposals
Licensees may submit proposals, suggestions, or improvements pertaining to the Software ("Feedback") under the following conditions:
(a) Licensee represents and warrants that such Feedback does not infringe upon any third-party intellectual property rights;
(b) Licensee acknowledges that DP Technology reserves the right, but assumes no obligation, to utilize such Feedback;
(c) Licensee irrevocably grants DP Technology a non-exclusive, royalty-free, perpetual, worldwide, sublicensable copyright license to reproduce, distribute, modify, publicly perform or display, translate, create derivative works of, commercialize, and otherwise exploit the Feedback in any medium or format, whether now known or hereafter devised, including the right to grant multiple tiers of sublicenses to enable such activities;
(d) Licensee hereby grants DP Technology a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Feedback and such Derivative Works, where such license applies only to those patent claims licensable by Licensee that are necessarily infringed by the Feedback(s) alone or by combination of the Feedback(s) with the Software to which such Feedback(s) were submitted. If any entity institutes patent litigation against Licensee or any other entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Feedback, or the Software to which you have contributed, constitutes direct or contributory patent infringement, then any patent licenses granted under this Agreement for the Feedback or Software shall terminate as of the date such litigation is filed.
(e) Licensee hereby waives all claims, proprietary rights, or restrictions related to DP Technology's use of such Feedback.

6. Citation Requirement
If academic or research output generated using the Software is published, Licensee must explicitly acknowledge the use of Uni-Lab-OS and attribute ownership to DP Technology. The following citation must be included:
@article{gao2025unilabos,
  title = {UniLabOS: An AI-Native Operating System for Autonomous Laboratories},
  doi = {10.48550/arXiv.2512.21766},
  publisher = {arXiv},
  author = {Gao, Jing and Chang, Junhan and Que, Haohui and Xiong, Yanfei and Zhang, Shixiang and Qi, Xianwei and Liu, Zhen and Wang, Jun-Jie and Ding, Qianjun and Li, Xinyu and Pan, Ziwei and Xie, Qiming and Yan, Zhuang and Yan, Junchi and Zhang, Linfeng},
  year = {2025}
}

7. Reservation of Rights
All rights not expressly granted herein, whether existing now or arising in the future, are exclusively reserved by DP Technology. Any unauthorized use of the Software beyond the scope of this Agreement constitutes infringement, and DP Technology reserves all legal rights to pursue remedies against violators.

8. Confidentiality
Licensee agrees to treat the Software's code, documentation, and related materials as confidential information. Licensee shall not disclose such materials to third parties and shall employ reasonable safeguards to prevent unauthorized access, dissemination, or misuse.

9. Disclaimer of Warranties
The software is provided "as is," without warranties of any kind, express or implied, including but not limited to warranties of merchantability, fitness for a particular purpose, non-infringement, or error-free operation. Licensee accepts all risks associated with the use of the software.

10. Limitation of Liability
In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall DP Technology be liable to Licensee for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the software (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if DP Technology has been advised of the possibility of such damages.
While redistributing the Software or Derivative Works thereof, Licensee may act only on Licensee's own behalf and on Licensee's sole responsibility, not on behalf of DP Technology or any other Licensee.

11. Termination
All rights granted herein shall terminate immediately and automatically if Licensee materially breaches any provision of this Agreement.

12. Reporting Violations
To report suspected violations of this Agreement, notify DP Technology via the designated email address: changjh@dp.tech. DP Technology shall maintain the confidentiality of the reporter's identity.

13. Governing Law and Dispute Resolution
This Agreement shall be governed by the laws of the People's Republic of China, excluding its conflict of laws principles and the United Nations Convention on Contracts for the International Sale of Goods. Any dispute arising from this Agreement shall be exclusively adjudicated by the Haidian District People's Court in Beijing.

14. Amendments and Updates
DP Technology reserves the right to modify, suspend, or terminate the Software or this Agreement at any time without prior notice.

15. Language Priority
This Agreement is provided in both Chinese and English. In the event of any discrepancy, the Chinese version shall prevail.
new file: unilabos/devices/Qone_nmr/__init__.py (empty)
@@ -13,7 +13,7 @@ from pylabrobot.resources import (
 import copy
 from unilabos_msgs.msg import Resource
 
-from unilabos.ros.nodes.resource_tracker import DeviceNodeResourceTracker  # type: ignore
+from unilabos.resources.resource_tracker import DeviceNodeResourceTracker  # type: ignore
 
 
 class LiquidHandlerBiomek:
File diff suppressed because it is too large.
@@ -15,35 +15,35 @@ class VirtualPumpMode(Enum):

 class VirtualTransferPump:
     """虚拟转移泵类 - 模拟泵的基本功能,无需实际硬件 🚰"""

     _ros_node: BaseROS2DeviceNode

     def __init__(self, device_id: str = None, config: dict = None, **kwargs):
         """
         初始化虚拟转移泵

         Args:
             device_id: 设备ID
             config: 配置字典,包含max_volume, port等参数
             **kwargs: 其他参数,确保兼容性
         """
         self.device_id = device_id or "virtual_transfer_pump"

         # 从config或kwargs中获取参数,确保类型正确
         if config:
-            self.max_volume = float(config.get('max_volume', 25.0))
-            self.port = config.get('port', 'VIRTUAL')
+            self.max_volume = float(config.get("max_volume", 25.0))
+            self.port = config.get("port", "VIRTUAL")
         else:
-            self.max_volume = float(kwargs.get('max_volume', 25.0))
-            self.port = kwargs.get('port', 'VIRTUAL')
+            self.max_volume = float(kwargs.get("max_volume", 25.0))
+            self.port = kwargs.get("port", "VIRTUAL")

-        self._transfer_rate = float(kwargs.get('transfer_rate', 0))
-        self.mode = kwargs.get('mode', VirtualPumpMode.Normal)
+        self._transfer_rate = float(kwargs.get("transfer_rate", 0))
+        self.mode = kwargs.get("mode", VirtualPumpMode.Normal)

         # 状态变量 - 确保都是正确类型
         self._status = "Idle"
         self._position = 0.0  # float
         self._max_velocity = 5.0  # float
         self._current_volume = 0.0  # float

         # 🚀 新增:快速模式设置 - 大幅缩短执行时间
@@ -52,14 +52,16 @@ class VirtualTransferPump:
         self._fast_dispense_time = 1.0  # 快速喷射时间(秒)

         self.logger = logging.getLogger(f"VirtualTransferPump.{self.device_id}")

         print(f"🚰 === 虚拟转移泵 {self.device_id} 已创建 === ✨")
-        print(f"💨 快速模式: {'启用' if self._fast_mode else '禁用'} | 移动时间: {self._fast_move_time}s | 喷射时间: {self._fast_dispense_time}s")
+        print(
+            f"💨 快速模式: {'启用' if self._fast_mode else '禁用'} | 移动时间: {self._fast_move_time}s | 喷射时间: {self._fast_dispense_time}s"
+        )
         print(f"📊 最大容量: {self.max_volume}mL | 端口: {self.port}")

     def post_init(self, ros_node: BaseROS2DeviceNode):
         self._ros_node = ros_node

     async def initialize(self) -> bool:
         """初始化虚拟泵 🚀"""
         self.logger.info(f"🔧 初始化虚拟转移泵 {self.device_id} ✨")
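The constructor above prefers an explicit config dict, falls back to **kwargs, and coerces numeric values to float. A standalone sketch of that lookup pattern; the helper name and defaults are illustrative, not from the repository:

    from typing import Optional

    def resolve_float(name: str, default: float, config: Optional[dict] = None, **kwargs) -> float:
        """Prefer the config dict, then kwargs, then the default; always return a float."""
        if config and name in config:
            return float(config[name])
        return float(kwargs.get(name, default))

    assert resolve_float("max_volume", 25.0, config={"max_volume": "50"}) == 50.0
    assert resolve_float("max_volume", 25.0, transfer_rate=0) == 25.0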
@@ -68,33 +70,33 @@ class VirtualTransferPump:
|
|||||||
self._current_volume = 0.0
|
self._current_volume = 0.0
|
||||||
self.logger.info(f"✅ 转移泵 {self.device_id} 初始化完成 🚰")
|
self.logger.info(f"✅ 转移泵 {self.device_id} 初始化完成 🚰")
|
||||||
return True
|
return True
|
||||||
|
|
||||||
async def cleanup(self) -> bool:
|
async def cleanup(self) -> bool:
|
||||||
"""清理虚拟泵 🧹"""
|
"""清理虚拟泵 🧹"""
|
||||||
self.logger.info(f"🧹 清理虚拟转移泵 {self.device_id} 🔚")
|
self.logger.info(f"🧹 清理虚拟转移泵 {self.device_id} 🔚")
|
||||||
self._status = "Idle"
|
self._status = "Idle"
|
||||||
self.logger.info(f"✅ 转移泵 {self.device_id} 清理完成 💤")
|
self.logger.info(f"✅ 转移泵 {self.device_id} 清理完成 💤")
|
||||||
return True
|
return True
|
||||||
|
|
||||||
# 基本属性
|
# 基本属性
|
||||||
@property
|
@property
|
||||||
def status(self) -> str:
|
def status(self) -> str:
|
||||||
return self._status
|
return self._status
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def position(self) -> float:
|
def position(self) -> float:
|
||||||
"""当前柱塞位置 (ml) 📍"""
|
"""当前柱塞位置 (ml) 📍"""
|
||||||
return self._position
|
return self._position
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def current_volume(self) -> float:
|
def current_volume(self) -> float:
|
||||||
"""当前注射器中的体积 (ml) 💧"""
|
"""当前注射器中的体积 (ml) 💧"""
|
||||||
return self._current_volume
|
return self._current_volume
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def max_velocity(self) -> float:
|
def max_velocity(self) -> float:
|
||||||
return self._max_velocity
|
return self._max_velocity
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def transfer_rate(self) -> float:
|
def transfer_rate(self) -> float:
|
||||||
return self._transfer_rate
|
return self._transfer_rate
|
||||||
@@ -103,17 +105,17 @@ class VirtualTransferPump:
|
|||||||
"""设置最大速度 (ml/s) 🌊"""
|
"""设置最大速度 (ml/s) 🌊"""
|
||||||
self._max_velocity = max(0.1, min(50.0, velocity)) # 限制在合理范围内
|
self._max_velocity = max(0.1, min(50.0, velocity)) # 限制在合理范围内
|
||||||
self.logger.info(f"🌊 设置最大速度为 {self._max_velocity} mL/s")
|
self.logger.info(f"🌊 设置最大速度为 {self._max_velocity} mL/s")
|
||||||
|
|
||||||
def get_status(self) -> str:
|
def get_status(self) -> str:
|
||||||
"""获取泵状态 📋"""
|
"""获取泵状态 📋"""
|
||||||
return self._status
|
return self._status
|
||||||
|
|
||||||
async def _simulate_operation(self, duration: float):
|
async def _simulate_operation(self, duration: float):
|
||||||
"""模拟操作延时 ⏱️"""
|
"""模拟操作延时 ⏱️"""
|
||||||
self._status = "Busy"
|
self._status = "Busy"
|
||||||
await self._ros_node.sleep(duration)
|
await self._ros_node.sleep(duration)
|
||||||
self._status = "Idle"
|
self._status = "Idle"
|
||||||
|
|
||||||
def _calculate_duration(self, volume: float, velocity: float = None) -> float:
|
def _calculate_duration(self, volume: float, velocity: float = None) -> float:
|
||||||
"""
|
"""
|
||||||
计算操作持续时间 ⏰
|
计算操作持续时间 ⏰
|
||||||
@@ -121,10 +123,10 @@ class VirtualTransferPump:
|
|||||||
"""
|
"""
|
||||||
if velocity is None:
|
if velocity is None:
|
||||||
velocity = self._max_velocity
|
velocity = self._max_velocity
|
||||||
|
|
||||||
# 📊 计算理论时间(用于日志显示)
|
# 📊 计算理论时间(用于日志显示)
|
||||||
theoretical_duration = abs(volume) / velocity
|
theoretical_duration = abs(volume) / velocity
|
||||||
|
|
||||||
# 🚀 如果启用快速模式,使用固定的快速时间
|
# 🚀 如果启用快速模式,使用固定的快速时间
|
||||||
if self._fast_mode:
|
if self._fast_mode:
|
||||||
# 根据操作类型选择快速时间
|
# 根据操作类型选择快速时间
|
||||||
@@ -132,13 +134,13 @@ class VirtualTransferPump:
|
|||||||
actual_duration = self._fast_move_time
|
actual_duration = self._fast_move_time
|
||||||
else: # 很小的操作
|
else: # 很小的操作
|
||||||
actual_duration = 0.5
|
actual_duration = 0.5
|
||||||
|
|
||||||
self.logger.debug(f"⚡ 快速模式: 理论时间 {theoretical_duration:.2f}s → 实际时间 {actual_duration:.2f}s")
|
self.logger.debug(f"⚡ 快速模式: 理论时间 {theoretical_duration:.2f}s → 实际时间 {actual_duration:.2f}s")
|
||||||
return actual_duration
|
return actual_duration
|
||||||
else:
|
else:
|
||||||
# 正常模式使用理论时间
|
# 正常模式使用理论时间
|
||||||
return theoretical_duration
|
return theoretical_duration
|
||||||
|
|
||||||
def _calculate_display_duration(self, volume: float, velocity: float = None) -> float:
|
def _calculate_display_duration(self, volume: float, velocity: float = None) -> float:
|
||||||
"""
|
"""
|
||||||
计算显示用的持续时间(用于日志) 📊
|
计算显示用的持续时间(用于日志) 📊
|
||||||
@@ -147,16 +149,16 @@ class VirtualTransferPump:
|
|||||||
if velocity is None:
|
if velocity is None:
|
||||||
velocity = self._max_velocity
|
velocity = self._max_velocity
|
||||||
return abs(volume) / velocity
|
return abs(volume) / velocity
|
||||||
|
|
||||||
# 新的set_position方法 - 专门用于SetPumpPosition动作
|
# 新的set_position方法 - 专门用于SetPumpPosition动作
|
||||||
async def set_position(self, position: float, max_velocity: float = None):
|
async def set_position(self, position: float, max_velocity: float = None):
|
||||||
"""
|
"""
|
||||||
移动到绝对位置 - 专门用于SetPumpPosition动作 🎯
|
移动到绝对位置 - 专门用于SetPumpPosition动作 🎯
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
position (float): 目标位置 (ml)
|
position (float): 目标位置 (ml)
|
||||||
max_velocity (float): 移动速度 (ml/s)
|
max_velocity (float): 移动速度 (ml/s)
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
dict: 符合SetPumpPosition.action定义的结果
|
dict: 符合SetPumpPosition.action定义的结果
|
||||||
"""
|
"""
|
||||||
@@ -164,19 +166,19 @@ class VirtualTransferPump:
|
|||||||
# 验证并转换参数
|
# 验证并转换参数
|
||||||
target_position = float(position)
|
target_position = float(position)
|
||||||
velocity = float(max_velocity) if max_velocity is not None else self._max_velocity
|
velocity = float(max_velocity) if max_velocity is not None else self._max_velocity
|
||||||
|
|
||||||
# 限制位置在有效范围内
|
# 限制位置在有效范围内
|
||||||
target_position = max(0.0, min(float(self.max_volume), target_position))
|
target_position = max(0.0, min(float(self.max_volume), target_position))
|
||||||
|
|
||||||
# 计算移动距离
|
# 计算移动距离
|
||||||
volume_to_move = abs(target_position - self._position)
|
volume_to_move = abs(target_position - self._position)
|
||||||
|
|
||||||
# 📊 计算显示用的时间(用于日志)
|
# 📊 计算显示用的时间(用于日志)
|
||||||
display_duration = self._calculate_display_duration(volume_to_move, velocity)
|
display_duration = self._calculate_display_duration(volume_to_move, velocity)
|
||||||
|
|
||||||
# ⚡ 计算实际执行时间(快速模式)
|
# ⚡ 计算实际执行时间(快速模式)
|
||||||
actual_duration = self._calculate_duration(volume_to_move, velocity)
|
actual_duration = self._calculate_duration(volume_to_move, velocity)
|
||||||
|
|
||||||
# 🎯 确定操作类型和emoji
|
# 🎯 确定操作类型和emoji
|
||||||
if target_position > self._position:
|
if target_position > self._position:
|
||||||
operation_type = "吸液"
|
operation_type = "吸液"
|
||||||
@@ -187,28 +189,34 @@ class VirtualTransferPump:
|
|||||||
else:
|
else:
|
||||||
operation_type = "保持"
|
operation_type = "保持"
|
||||||
operation_emoji = "📍"
|
operation_emoji = "📍"
|
||||||
|
|
||||||
self.logger.info(f"🎯 SET_POSITION: {operation_type} {operation_emoji}")
|
self.logger.info(f"🎯 SET_POSITION: {operation_type} {operation_emoji}")
|
||||||
self.logger.info(f" 📍 位置: {self._position:.2f}mL → {target_position:.2f}mL (移动 {volume_to_move:.2f}mL)")
|
self.logger.info(
|
||||||
|
f" 📍 位置: {self._position:.2f}mL → {target_position:.2f}mL (移动 {volume_to_move:.2f}mL)"
|
||||||
|
)
|
||||||
self.logger.info(f" 🌊 速度: {velocity:.2f} mL/s")
|
self.logger.info(f" 🌊 速度: {velocity:.2f} mL/s")
|
||||||
self.logger.info(f" ⏰ 预计时间: {display_duration:.2f}s")
|
self.logger.info(f" ⏰ 预计时间: {display_duration:.2f}s")
|
||||||
|
|
||||||
if self._fast_mode:
|
if self._fast_mode:
|
||||||
self.logger.info(f" ⚡ 快速模式: 实际用时 {actual_duration:.2f}s")
|
self.logger.info(f" ⚡ 快速模式: 实际用时 {actual_duration:.2f}s")
|
||||||
|
|
||||||
# 🚀 模拟移动过程
|
# 🚀 模拟移动过程
|
||||||
if volume_to_move > 0.01: # 只有当移动距离足够大时才显示进度
|
if volume_to_move > 0.01: # 只有当移动距离足够大时才显示进度
|
||||||
start_position = self._position
|
start_position = self._position
|
||||||
steps = 5 if actual_duration > 0.5 else 2 # 根据实际时间调整步数
|
steps = 5 if actual_duration > 0.5 else 2 # 根据实际时间调整步数
|
||||||
step_duration = actual_duration / steps
|
step_duration = actual_duration / steps
|
||||||
|
|
||||||
self.logger.info(f"🚀 开始{operation_type}... {operation_emoji}")
|
self.logger.info(f"🚀 开始{operation_type}... {operation_emoji}")
|
||||||
|
|
||||||
for i in range(steps + 1):
|
for i in range(steps + 1):
|
||||||
# 计算当前位置和进度
|
# 计算当前位置和进度
|
||||||
progress = (i / steps) * 100 if steps > 0 else 100
|
progress = (i / steps) * 100 if steps > 0 else 100
|
||||||
current_pos = start_position + (target_position - start_position) * (i / steps) if steps > 0 else target_position
|
current_pos = (
|
||||||
|
start_position + (target_position - start_position) * (i / steps)
|
||||||
|
if steps > 0
|
||||||
|
else target_position
|
||||||
|
)
|
||||||
|
|
||||||
# 更新状态
|
# 更新状态
|
||||||
if i < steps:
|
if i < steps:
|
||||||
self._status = f"{operation_type}中"
|
self._status = f"{operation_type}中"
|
||||||
@@ -216,10 +224,10 @@ class VirtualTransferPump:
|
|||||||
else:
|
else:
|
||||||
self._status = "Idle"
|
self._status = "Idle"
|
||||||
status_emoji = "✅"
|
status_emoji = "✅"
|
||||||
|
|
||||||
self._position = current_pos
|
self._position = current_pos
|
||||||
self._current_volume = current_pos
|
self._current_volume = current_pos
|
||||||
|
|
||||||
# 显示进度(每25%或最后一步)
|
# 显示进度(每25%或最后一步)
|
||||||
if i == 0:
|
if i == 0:
|
||||||
self.logger.debug(f" 🔄 {operation_type}开始: {progress:.0f}%")
|
self.logger.debug(f" 🔄 {operation_type}开始: {progress:.0f}%")
|
||||||
@@ -227,7 +235,7 @@ class VirtualTransferPump:
|
|||||||
self.logger.debug(f" 🔄 {operation_type}进度: {progress:.0f}%")
|
self.logger.debug(f" 🔄 {operation_type}进度: {progress:.0f}%")
|
||||||
elif i == steps:
|
elif i == steps:
|
||||||
self.logger.info(f" ✅ {operation_type}完成: {progress:.0f}% | 当前位置: {current_pos:.2f}mL")
|
self.logger.info(f" ✅ {operation_type}完成: {progress:.0f}% | 当前位置: {current_pos:.2f}mL")
|
||||||
|
|
||||||
# 等待一小步时间
|
# 等待一小步时间
|
||||||
if i < steps and step_duration > 0:
|
if i < steps and step_duration > 0:
|
||||||
await self._ros_node.sleep(step_duration)
|
await self._ros_node.sleep(step_duration)
|
||||||
@@ -236,25 +244,27 @@ class VirtualTransferPump:
|
|||||||
self._position = target_position
|
self._position = target_position
|
||||||
self._current_volume = target_position
|
self._current_volume = target_position
|
||||||
self.logger.info(f" 📍 微调完成: {target_position:.2f}mL")
|
self.logger.info(f" 📍 微调完成: {target_position:.2f}mL")
|
||||||
|
|
||||||
# 确保最终位置准确
|
# 确保最终位置准确
|
||||||
self._position = target_position
|
self._position = target_position
|
||||||
self._current_volume = target_position
|
self._current_volume = target_position
|
||||||
self._status = "Idle"
|
self._status = "Idle"
|
||||||
|
|
||||||
# 📊 最终状态日志
|
# 📊 最终状态日志
|
||||||
if volume_to_move > 0.01:
|
if volume_to_move > 0.01:
|
||||||
self.logger.info(f"🎉 SET_POSITION 完成! 📍 最终位置: {self._position:.2f}mL | 💧 当前体积: {self._current_volume:.2f}mL")
|
self.logger.info(
|
||||||
|
f"🎉 SET_POSITION 完成! 📍 最终位置: {self._position:.2f}mL | 💧 当前体积: {self._current_volume:.2f}mL"
|
||||||
|
)
|
||||||
|
|
||||||
# 返回符合action定义的结果
|
# 返回符合action定义的结果
|
||||||
return {
|
return {
|
||||||
"success": True,
|
"success": True,
|
||||||
"message": f"✅ 成功移动到位置 {self._position:.2f}mL ({operation_type})",
|
"message": f"✅ 成功移动到位置 {self._position:.2f}mL ({operation_type})",
|
||||||
"final_position": self._position,
|
"final_position": self._position,
|
||||||
"final_volume": self._current_volume,
|
"final_volume": self._current_volume,
|
||||||
"operation_type": operation_type
|
"operation_type": operation_type,
|
||||||
}
|
}
|
||||||
|
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
error_msg = f"❌ 设置位置失败: {str(e)}"
|
error_msg = f"❌ 设置位置失败: {str(e)}"
|
||||||
self.logger.error(error_msg)
|
self.logger.error(error_msg)
|
||||||
@@ -262,134 +272,136 @@ class VirtualTransferPump:
|
|||||||
"success": False,
|
"success": False,
|
||||||
"message": error_msg,
|
"message": error_msg,
|
||||||
"final_position": self._position,
|
"final_position": self._position,
|
||||||
"final_volume": self._current_volume
|
"final_volume": self._current_volume,
|
||||||
}
|
}
|
||||||
|
|
||||||
# 其他泵操作方法
|
# 其他泵操作方法
|
||||||
async def pull_plunger(self, volume: float, velocity: float = None):
|
async def pull_plunger(self, volume: float, velocity: float = None):
|
||||||
"""
|
"""
|
||||||
拉取柱塞(吸液) 📥
|
拉取柱塞(吸液) 📥
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
volume (float): 要拉取的体积 (ml)
|
volume (float): 要拉取的体积 (ml)
|
||||||
velocity (float): 拉取速度 (ml/s)
|
velocity (float): 拉取速度 (ml/s)
|
||||||
"""
|
"""
|
||||||
new_position = min(self.max_volume, self._position + volume)
|
new_position = min(self.max_volume, self._position + volume)
|
||||||
actual_volume = new_position - self._position
|
actual_volume = new_position - self._position
|
||||||
|
|
||||||
if actual_volume <= 0:
|
if actual_volume <= 0:
|
||||||
self.logger.warning("⚠️ 无法吸液 - 已达到最大容量")
|
self.logger.warning("⚠️ 无法吸液 - 已达到最大容量")
|
||||||
return
|
return
|
||||||
|
|
||||||
display_duration = self._calculate_display_duration(actual_volume, velocity)
|
display_duration = self._calculate_display_duration(actual_volume, velocity)
|
||||||
actual_duration = self._calculate_duration(actual_volume, velocity)
|
actual_duration = self._calculate_duration(actual_volume, velocity)
|
||||||
|
|
||||||
self.logger.info(f"📥 开始吸液: {actual_volume:.2f}mL")
|
self.logger.info(f"📥 开始吸液: {actual_volume:.2f}mL")
|
||||||
self.logger.info(f" 📍 位置: {self._position:.2f}mL → {new_position:.2f}mL")
|
self.logger.info(f" 📍 位置: {self._position:.2f}mL → {new_position:.2f}mL")
|
||||||
self.logger.info(f" ⏰ 预计时间: {display_duration:.2f}s")
|
self.logger.info(f" ⏰ 预计时间: {display_duration:.2f}s")
|
||||||
|
|
||||||
if self._fast_mode:
|
if self._fast_mode:
|
||||||
self.logger.info(f" ⚡ 快速模式: 实际用时 {actual_duration:.2f}s")
|
self.logger.info(f" ⚡ 快速模式: 实际用时 {actual_duration:.2f}s")
|
||||||
|
|
||||||
await self._simulate_operation(actual_duration)
|
await self._simulate_operation(actual_duration)
|
||||||
|
|
||||||
self._position = new_position
|
self._position = new_position
|
||||||
self._current_volume = new_position
|
self._current_volume = new_position
|
||||||
|
|
||||||
self.logger.info(f"✅ 吸液完成: {actual_volume:.2f}mL | 💧 当前体积: {self._current_volume:.2f}mL")
|
self.logger.info(f"✅ 吸液完成: {actual_volume:.2f}mL | 💧 当前体积: {self._current_volume:.2f}mL")
|
||||||
|
|
||||||
async def push_plunger(self, volume: float, velocity: float = None):
|
async def push_plunger(self, volume: float, velocity: float = None):
|
||||||
"""
|
"""
|
||||||
推出柱塞(排液) 📤
|
推出柱塞(排液) 📤
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
volume (float): 要推出的体积 (ml)
|
volume (float): 要推出的体积 (ml)
|
||||||
velocity (float): 推出速度 (ml/s)
|
velocity (float): 推出速度 (ml/s)
|
||||||
"""
|
"""
|
||||||
new_position = max(0, self._position - volume)
|
new_position = max(0, self._position - volume)
|
||||||
actual_volume = self._position - new_position
|
actual_volume = self._position - new_position
|
||||||
|
|
||||||
if actual_volume <= 0:
|
if actual_volume <= 0:
|
||||||
self.logger.warning("⚠️ 无法排液 - 已达到最小容量")
|
self.logger.warning("⚠️ 无法排液 - 已达到最小容量")
|
||||||
return
|
return
|
||||||
|
|
||||||
display_duration = self._calculate_display_duration(actual_volume, velocity)
|
display_duration = self._calculate_display_duration(actual_volume, velocity)
|
||||||
actual_duration = self._calculate_duration(actual_volume, velocity)
|
actual_duration = self._calculate_duration(actual_volume, velocity)
|
||||||
|
|
||||||
self.logger.info(f"📤 开始排液: {actual_volume:.2f}mL")
|
self.logger.info(f"📤 开始排液: {actual_volume:.2f}mL")
|
||||||
self.logger.info(f" 📍 位置: {self._position:.2f}mL → {new_position:.2f}mL")
|
self.logger.info(f" 📍 位置: {self._position:.2f}mL → {new_position:.2f}mL")
|
||||||
self.logger.info(f" ⏰ 预计时间: {display_duration:.2f}s")
|
self.logger.info(f" ⏰ 预计时间: {display_duration:.2f}s")
|
||||||
|
|
||||||
if self._fast_mode:
|
if self._fast_mode:
|
||||||
self.logger.info(f" ⚡ 快速模式: 实际用时 {actual_duration:.2f}s")
|
self.logger.info(f" ⚡ 快速模式: 实际用时 {actual_duration:.2f}s")
|
||||||
|
|
||||||
await self._simulate_operation(actual_duration)
|
await self._simulate_operation(actual_duration)
|
||||||
|
|
||||||
self._position = new_position
|
self._position = new_position
|
||||||
self._current_volume = new_position
|
self._current_volume = new_position
|
||||||
|
|
||||||
self.logger.info(f"✅ 排液完成: {actual_volume:.2f}mL | 💧 当前体积: {self._current_volume:.2f}mL")
|
self.logger.info(f"✅ 排液完成: {actual_volume:.2f}mL | 💧 当前体积: {self._current_volume:.2f}mL")
|
||||||
|
|
||||||
# 便捷操作方法
|
# 便捷操作方法
|
||||||
async def aspirate(self, volume: float, velocity: float = None):
|
async def aspirate(self, volume: float, velocity: float = None):
|
||||||
"""吸液操作 📥"""
|
"""吸液操作 📥"""
|
||||||
await self.pull_plunger(volume, velocity)
|
await self.pull_plunger(volume, velocity)
|
||||||
|
|
||||||
async def dispense(self, volume: float, velocity: float = None):
|
async def dispense(self, volume: float, velocity: float = None):
|
||||||
"""排液操作 📤"""
|
"""排液操作 📤"""
|
||||||
await self.push_plunger(volume, velocity)
|
await self.push_plunger(volume, velocity)
|
||||||
|
|
||||||
async def transfer(self, volume: float, aspirate_velocity: float = None, dispense_velocity: float = None):
|
async def transfer(self, volume: float, aspirate_velocity: float = None, dispense_velocity: float = None):
|
||||||
"""转移操作(先吸后排) 🔄"""
|
"""转移操作(先吸后排) 🔄"""
|
||||||
self.logger.info(f"🔄 开始转移操作: {volume:.2f}mL")
|
self.logger.info(f"🔄 开始转移操作: {volume:.2f}mL")
|
||||||
|
|
||||||
# 吸液
|
# 吸液
|
||||||
await self.aspirate(volume, aspirate_velocity)
|
await self.aspirate(volume, aspirate_velocity)
|
||||||
|
|
||||||
# 短暂停顿
|
# 短暂停顿
|
||||||
self.logger.debug("⏸️ 短暂停顿...")
|
self.logger.debug("⏸️ 短暂停顿...")
|
||||||
await self._ros_node.sleep(0.1)
|
await self._ros_node.sleep(0.1)
|
||||||
|
|
||||||
# 排液
|
# 排液
|
||||||
await self.dispense(volume, dispense_velocity)
|
await self.dispense(volume, dispense_velocity)
|
||||||
|
|
||||||
async def empty_syringe(self, velocity: float = None):
|
async def empty_syringe(self, velocity: float = None):
|
||||||
"""清空注射器"""
|
"""清空注射器"""
|
||||||
await self.set_position(0, velocity)
|
await self.set_position(0, velocity)
|
||||||
|
|
||||||
async def fill_syringe(self, velocity: float = None):
|
async def fill_syringe(self, velocity: float = None):
|
||||||
"""充满注射器"""
|
"""充满注射器"""
|
||||||
await self.set_position(self.max_volume, velocity)
|
await self.set_position(self.max_volume, velocity)
|
||||||
|
|
||||||
async def stop_operation(self):
|
async def stop_operation(self):
|
||||||
"""停止当前操作"""
|
"""停止当前操作"""
|
||||||
self._status = "Idle"
|
self._status = "Idle"
|
||||||
self.logger.info("Operation stopped")
|
self.logger.info("Operation stopped")
|
||||||
|
|
||||||
# 状态查询方法
|
# 状态查询方法
|
||||||
def get_position(self) -> float:
|
def get_position(self) -> float:
|
||||||
"""获取当前位置"""
|
"""获取当前位置"""
|
||||||
return self._position
|
return self._position
|
||||||
|
|
||||||
def get_current_volume(self) -> float:
|
def get_current_volume(self) -> float:
|
||||||
"""获取当前体积"""
|
"""获取当前体积"""
|
||||||
return self._current_volume
|
return self._current_volume
|
||||||
|
|
||||||
def get_remaining_capacity(self) -> float:
|
def get_remaining_capacity(self) -> float:
|
||||||
"""获取剩余容量"""
|
"""获取剩余容量"""
|
||||||
return self.max_volume - self._current_volume
|
return self.max_volume - self._current_volume
|
||||||
|
|
||||||
def is_empty(self) -> bool:
|
def is_empty(self) -> bool:
|
||||||
"""检查是否为空"""
|
"""检查是否为空"""
|
||||||
return self._current_volume <= 0.01 # 允许小量误差
|
return self._current_volume <= 0.01 # 允许小量误差
|
||||||
|
|
||||||
def is_full(self) -> bool:
|
def is_full(self) -> bool:
|
||||||
"""检查是否已满"""
|
"""检查是否已满"""
|
||||||
return self._current_volume >= (self.max_volume - 0.01) # 允许小量误差
|
return self._current_volume >= (self.max_volume - 0.01) # 允许小量误差
|
||||||
|
|
||||||
def __str__(self):
|
def __str__(self):
|
||||||
return f"VirtualTransferPump({self.device_id}: {self._current_volume:.2f}/{self.max_volume} ml, {self._status})"
|
return (
|
||||||
|
f"VirtualTransferPump({self.device_id}: {self._current_volume:.2f}/{self.max_volume} ml, {self._status})"
|
||||||
|
)
|
||||||
|
|
||||||
def __repr__(self):
|
def __repr__(self):
|
||||||
return self.__str__()
|
return self.__str__()
|
||||||
|
|
||||||
@@ -398,20 +410,20 @@ class VirtualTransferPump:
|
|||||||
async def demo():
|
async def demo():
|
||||||
"""虚拟泵使用示例"""
|
"""虚拟泵使用示例"""
|
||||||
pump = VirtualTransferPump("demo_pump", {"max_volume": 50.0})
|
pump = VirtualTransferPump("demo_pump", {"max_volume": 50.0})
|
||||||
|
|
||||||
await pump.initialize()
|
await pump.initialize()
|
||||||
|
|
||||||
print(f"Initial state: {pump}")
|
print(f"Initial state: {pump}")
|
||||||
|
|
||||||
# 测试set_position方法
|
# 测试set_position方法
|
||||||
result = await pump.set_position(10.0, max_velocity=2.0)
|
result = await pump.set_position(10.0, max_velocity=2.0)
|
||||||
print(f"Set position result: {result}")
|
print(f"Set position result: {result}")
|
||||||
print(f"After setting position to 10ml: {pump}")
|
print(f"After setting position to 10ml: {pump}")
|
||||||
|
|
||||||
# 吸液测试
|
# 吸液测试
|
||||||
await pump.aspirate(5.0, velocity=2.0)
|
await pump.aspirate(5.0, velocity=2.0)
|
||||||
print(f"After aspirating 5ml: {pump}")
|
print(f"After aspirating 5ml: {pump}")
|
||||||
|
|
||||||
# 清空测试
|
# 清空测试
|
||||||
result = await pump.set_position(0.0)
|
result = await pump.set_position(0.0)
|
||||||
print(f"Empty result: {result}")
|
print(f"Empty result: {result}")
|
||||||
|
|||||||
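A hedged usage sketch, not part of the diff: the virtual pump awaits self._ros_node.sleep(...) inside its operations, so exercising it outside ROS needs a stand-in node with an async sleep installed via post_init(). The import path below is hypothetical (this compare view does not show the pump's file name), and StubNode is not a real Uni-Lab-OS class.

    import asyncio

    from unilabos.devices.virtual.transfer_pump import VirtualTransferPump  # hypothetical path

    class StubNode:
        async def sleep(self, seconds: float) -> None:
            await asyncio.sleep(seconds)

    async def main() -> None:
        pump = VirtualTransferPump("test_pump", {"max_volume": 25.0})
        pump.post_init(StubNode())  # satisfies the _ros_node dependency used by the sleep calls
        await pump.initialize()
        await pump.set_position(10.0, max_velocity=2.0)  # absolute move, reported as 吸液
        await pump.transfer(5.0)  # aspirate then dispense 5 mL
        print(pump)

    asyncio.run(main())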
742
unilabos/devices/virtual/workbench.py
Normal file
@@ -0,0 +1,742 @@
"""
Virtual Workbench Device - 模拟工作台设备

包含:
- 1个机械臂 (每次操作3s, 独占锁)
- 3个加热台 (每次加热10s, 可并行)

工作流程:
1. A1-A5 物料同时启动,竞争机械臂
2. 机械臂将物料移动到空闲加热台
3. 加热完成后,机械臂将物料移动到C1-C5

注意:调用来自线程池,使用 threading.Lock 进行同步
"""

import logging
import time
from typing import Dict, Any, Optional, List
from dataclasses import dataclass
from enum import Enum
from threading import Lock, RLock

from typing_extensions import TypedDict

from unilabos.ros.nodes.base_device_node import BaseROS2DeviceNode
from unilabos.utils.decorator import not_action
from unilabos.resources.resource_tracker import SampleUUIDsType, LabSample, RETURN_UNILABOS_SAMPLES


# ============ TypedDict 返回类型定义 ============


|
class MoveToHeatingStationResult(TypedDict):
|
||||||
|
"""move_to_heating_station 返回类型"""
|
||||||
|
|
||||||
|
success: bool
|
||||||
|
station_id: int
|
||||||
|
material_id: str
|
||||||
|
material_number: int
|
||||||
|
message: str
|
||||||
|
unilabos_samples: List[LabSample]
|
||||||
|
|
||||||
|
|
||||||
|
class StartHeatingResult(TypedDict):
|
||||||
|
"""start_heating 返回类型"""
|
||||||
|
|
||||||
|
success: bool
|
||||||
|
station_id: int
|
||||||
|
material_id: str
|
||||||
|
material_number: int
|
||||||
|
message: str
|
||||||
|
unilabos_samples: List[LabSample]
|
||||||
|
|
||||||
|
|
||||||
|
class MoveToOutputResult(TypedDict):
|
||||||
|
"""move_to_output 返回类型"""
|
||||||
|
|
||||||
|
success: bool
|
||||||
|
station_id: int
|
||||||
|
material_id: str
|
||||||
|
unilabos_samples: List[LabSample]
|
||||||
|
|
||||||
|
|
||||||
|
class PrepareMaterialsResult(TypedDict):
|
||||||
|
"""prepare_materials 返回类型 - 批量准备物料"""
|
||||||
|
|
||||||
|
success: bool
|
||||||
|
count: int
|
||||||
|
material_1: int # 物料编号1
|
||||||
|
material_2: int # 物料编号2
|
||||||
|
material_3: int # 物料编号3
|
||||||
|
material_4: int # 物料编号4
|
||||||
|
material_5: int # 物料编号5
|
||||||
|
message: str
|
||||||
|
unilabos_samples: List[LabSample]
|
||||||
|
|
||||||
|
|
||||||
|
# ============ 状态枚举 ============
|
||||||
|
|
||||||
|
|
||||||
|
class HeatingStationState(Enum):
|
||||||
|
"""加热台状态枚举"""
|
||||||
|
|
||||||
|
IDLE = "idle" # 空闲
|
||||||
|
OCCUPIED = "occupied" # 已放置物料,等待加热
|
||||||
|
HEATING = "heating" # 加热中
|
||||||
|
COMPLETED = "completed" # 加热完成,等待取走
|
||||||
|
|
||||||
|
|
||||||
|
class ArmState(Enum):
|
||||||
|
"""机械臂状态枚举"""
|
||||||
|
|
||||||
|
IDLE = "idle" # 空闲
|
||||||
|
BUSY = "busy" # 工作中
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
|
||||||
|
class HeatingStation:
|
||||||
|
"""加热台数据结构"""
|
||||||
|
|
||||||
|
station_id: int
|
||||||
|
state: HeatingStationState = HeatingStationState.IDLE
|
||||||
|
current_material: Optional[str] = None # 当前物料 (如 "A1", "A2")
|
||||||
|
material_number: Optional[int] = None # 物料编号 (1-5)
|
||||||
|
heating_start_time: Optional[float] = None
|
||||||
|
heating_progress: float = 0.0
|
||||||
|
|
||||||
|
|
||||||
|
class VirtualWorkbench:
|
||||||
|
"""
|
||||||
|
Virtual Workbench Device - 虚拟工作台设备
|
||||||
|
|
||||||
|
模拟一个包含1个机械臂和3个加热台的工作站
|
||||||
|
- 机械臂操作耗时3秒,同一时间只能执行一个操作
|
||||||
|
- 加热台加热耗时10秒,3个加热台可并行工作
|
||||||
|
|
||||||
|
工作流:
|
||||||
|
1. 物料A1-A5并发启动(线程池),竞争机械臂使用权
|
||||||
|
2. 获取机械臂后,查找空闲加热台
|
||||||
|
3. 机械臂将物料放入加热台,开始加热
|
||||||
|
4. 加热完成后,机械臂将物料移动到目标位置Cn
|
||||||
|
"""
|
||||||
|
|
||||||
|
_ros_node: BaseROS2DeviceNode
|
||||||
|
|
||||||
|
# 配置常量
|
||||||
|
ARM_OPERATION_TIME: float = 3.0 # 机械臂操作时间(秒)
|
||||||
|
HEATING_TIME: float = 10.0 # 加热时间(秒)
|
||||||
|
NUM_HEATING_STATIONS: int = 3 # 加热台数量
|
||||||
|
|
||||||
|
def __init__(self, device_id: Optional[str] = None, config: Optional[Dict[str, Any]] = None, **kwargs):
|
||||||
|
# 处理可能的不同调用方式
|
||||||
|
if device_id is None and "id" in kwargs:
|
||||||
|
device_id = kwargs.pop("id")
|
||||||
|
if config is None and "config" in kwargs:
|
||||||
|
config = kwargs.pop("config")
|
||||||
|
|
||||||
|
self.device_id = device_id or "virtual_workbench"
|
||||||
|
self.config = config or {}
|
||||||
|
|
||||||
|
self.logger = logging.getLogger(f"VirtualWorkbench.{self.device_id}")
|
||||||
|
self.data: Dict[str, Any] = {}
|
||||||
|
|
||||||
|
# 从config中获取可配置参数
|
||||||
|
self.ARM_OPERATION_TIME = float(self.config.get("arm_operation_time", 3.0))
|
||||||
|
self.HEATING_TIME = float(self.config.get("heating_time", 10.0))
|
||||||
|
self.NUM_HEATING_STATIONS = int(self.config.get("num_heating_stations", 3))
|
||||||
|
|
||||||
|
# 机械臂状态和锁 (使用threading.Lock)
|
||||||
|
self._arm_lock = Lock()
|
||||||
|
self._arm_state = ArmState.IDLE
|
||||||
|
self._arm_current_task: Optional[str] = None
|
||||||
|
|
||||||
|
# 加热台状态 (station_id -> HeatingStation) - 立即初始化,不依赖initialize()
|
||||||
|
self._heating_stations: Dict[int, HeatingStation] = {
|
||||||
|
i: HeatingStation(station_id=i) for i in range(1, self.NUM_HEATING_STATIONS + 1)
|
||||||
|
}
|
||||||
|
self._stations_lock = RLock() # 可重入锁,保护加热台状态
|
||||||
|
|
||||||
|
# 任务追踪
|
||||||
|
self._active_tasks: Dict[str, Dict[str, Any]] = {} # material_id -> task_info
|
||||||
|
self._tasks_lock = Lock()
|
||||||
|
|
||||||
|
# 处理其他kwargs参数
|
||||||
|
skip_keys = {"arm_operation_time", "heating_time", "num_heating_stations"}
|
||||||
|
for key, value in kwargs.items():
|
||||||
|
if key not in skip_keys and not hasattr(self, key):
|
||||||
|
setattr(self, key, value)
|
||||||
|
|
||||||
|
self.logger.info(f"=== 虚拟工作台 {self.device_id} 已创建 ===")
|
||||||
|
self.logger.info(
|
||||||
|
f"机械臂操作时间: {self.ARM_OPERATION_TIME}s | "
|
||||||
|
f"加热时间: {self.HEATING_TIME}s | "
|
||||||
|
f"加热台数量: {self.NUM_HEATING_STATIONS}"
|
||||||
|
)
|
||||||
|
|
||||||
|
@not_action
|
||||||
|
def post_init(self, ros_node: BaseROS2DeviceNode):
|
||||||
|
"""ROS节点初始化后回调"""
|
||||||
|
self._ros_node = ros_node
|
||||||
|
|
||||||
|
@not_action
|
||||||
|
def initialize(self) -> bool:
|
||||||
|
"""初始化虚拟工作台"""
|
||||||
|
self.logger.info(f"初始化虚拟工作台 {self.device_id}")
|
||||||
|
|
||||||
|
# 重置加热台状态 (已在__init__中创建,这里重置为初始状态)
|
||||||
|
with self._stations_lock:
|
||||||
|
for station in self._heating_stations.values():
|
||||||
|
station.state = HeatingStationState.IDLE
|
||||||
|
station.current_material = None
|
||||||
|
station.material_number = None
|
||||||
|
station.heating_progress = 0.0
|
||||||
|
|
||||||
|
# 初始化状态
|
||||||
|
self.data.update(
|
||||||
|
{
|
||||||
|
"status": "Ready",
|
||||||
|
"arm_state": ArmState.IDLE.value,
|
||||||
|
"arm_current_task": None,
|
||||||
|
"heating_stations": self._get_stations_status(),
|
||||||
|
"active_tasks_count": 0,
|
||||||
|
"message": "工作台就绪",
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
self.logger.info(f"工作台初始化完成: {self.NUM_HEATING_STATIONS}个加热台就绪")
|
||||||
|
return True
|
||||||
|
|
||||||
|
@not_action
|
||||||
|
def cleanup(self) -> bool:
|
||||||
|
"""清理虚拟工作台"""
|
||||||
|
self.logger.info(f"清理虚拟工作台 {self.device_id}")
|
||||||
|
|
||||||
|
self._arm_state = ArmState.IDLE
|
||||||
|
self._arm_current_task = None
|
||||||
|
|
||||||
|
with self._stations_lock:
|
||||||
|
self._heating_stations.clear()
|
||||||
|
|
||||||
|
with self._tasks_lock:
|
||||||
|
self._active_tasks.clear()
|
||||||
|
|
||||||
|
self.data.update(
|
||||||
|
{
|
||||||
|
"status": "Offline",
|
||||||
|
"arm_state": ArmState.IDLE.value,
|
||||||
|
"heating_stations": {},
|
||||||
|
"message": "工作台已关闭",
|
||||||
|
}
|
||||||
|
)
|
||||||
|
return True
|
||||||
|
|
||||||
|
def _get_stations_status(self) -> Dict[int, Dict[str, Any]]:
|
||||||
|
"""获取所有加热台状态"""
|
||||||
|
with self._stations_lock:
|
||||||
|
return {
|
||||||
|
station_id: {
|
||||||
|
"state": station.state.value,
|
||||||
|
"current_material": station.current_material,
|
||||||
|
"material_number": station.material_number,
|
||||||
|
"heating_progress": station.heating_progress,
|
||||||
|
}
|
||||||
|
for station_id, station in self._heating_stations.items()
|
||||||
|
}
|
||||||
|
|
||||||
|
def _update_data_status(self, message: Optional[str] = None):
|
||||||
|
"""更新状态数据"""
|
||||||
|
self.data.update(
|
||||||
|
{
|
||||||
|
"arm_state": self._arm_state.value,
|
||||||
|
"arm_current_task": self._arm_current_task,
|
||||||
|
"heating_stations": self._get_stations_status(),
|
||||||
|
"active_tasks_count": len(self._active_tasks),
|
||||||
|
}
|
||||||
|
)
|
||||||
|
if message:
|
||||||
|
self.data["message"] = message
|
||||||
|
|
||||||
|
def _find_available_heating_station(self) -> Optional[int]:
|
||||||
|
"""查找空闲的加热台
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
空闲加热台ID,如果没有则返回None
|
||||||
|
"""
|
||||||
|
with self._stations_lock:
|
||||||
|
for station_id, station in self._heating_stations.items():
|
||||||
|
if station.state == HeatingStationState.IDLE:
|
||||||
|
return station_id
|
||||||
|
return None
|
||||||
|
|
||||||
|
def _acquire_arm(self, task_description: str) -> bool:
|
||||||
|
"""获取机械臂使用权(阻塞直到获取)
|
||||||
|
|
||||||
|
Args:
|
||||||
|
task_description: 任务描述,用于日志
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
是否成功获取
|
||||||
|
"""
|
||||||
|
self.logger.info(f"[{task_description}] 等待获取机械臂...")
|
||||||
|
|
||||||
|
# 阻塞等待获取锁
|
||||||
|
self._arm_lock.acquire()
|
||||||
|
|
||||||
|
self._arm_state = ArmState.BUSY
|
||||||
|
self._arm_current_task = task_description
|
||||||
|
self._update_data_status(f"机械臂执行: {task_description}")
|
||||||
|
|
||||||
|
self.logger.info(f"[{task_description}] 成功获取机械臂使用权")
|
||||||
|
return True
|
||||||
|
|
||||||
|
def _release_arm(self):
|
||||||
|
"""释放机械臂"""
|
||||||
|
task = self._arm_current_task
|
||||||
|
self._arm_state = ArmState.IDLE
|
||||||
|
self._arm_current_task = None
|
||||||
|
self._arm_lock.release()
|
||||||
|
self._update_data_status(f"机械臂已释放 (完成: {task})")
|
||||||
|
self.logger.info(f"机械臂已释放 (完成: {task})")
|
||||||
|
|
||||||
|
def prepare_materials(
|
||||||
|
self,
|
||||||
|
sample_uuids: SampleUUIDsType,
|
||||||
|
count: int = 5,
|
||||||
|
) -> PrepareMaterialsResult:
|
||||||
|
"""
|
||||||
|
批量准备物料 - 虚拟起始节点
|
||||||
|
|
||||||
|
作为工作流的起始节点,生成指定数量的物料编号供后续节点使用。
|
||||||
|
输出5个handle (material_1 ~ material_5),分别对应实验1~5。
|
||||||
|
|
||||||
|
Args:
|
||||||
|
count: 待生成的物料数量,默认5 (生成 A1-A5)
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
PrepareMaterialsResult: 包含 material_1 ~ material_5 用于传递给 move_to_heating_station
|
||||||
|
"""
|
||||||
|
# 生成物料列表 A1 - A{count}
|
||||||
|
materials = [i for i in range(1, count + 1)]
|
||||||
|
|
||||||
|
self.logger.info(f"[准备物料] 生成 {count} 个物料: " f"A1-A{count} -> material_1~material_{count}")
|
||||||
|
|
||||||
|
return {
|
||||||
|
"success": True,
|
||||||
|
"count": count,
|
||||||
|
"material_1": materials[0] if len(materials) > 0 else 0,
|
||||||
|
"material_2": materials[1] if len(materials) > 1 else 0,
|
||||||
|
"material_3": materials[2] if len(materials) > 2 else 0,
|
||||||
|
"material_4": materials[3] if len(materials) > 3 else 0,
|
||||||
|
"material_5": materials[4] if len(materials) > 4 else 0,
|
||||||
|
"message": f"已准备 {count} 个物料: A1-A{count}",
|
||||||
|
"unilabos_samples": [LabSample(sample_uuid=sample_uuid, oss_path="", extra={"material_uuid": content} if isinstance(content, str) else content.serialize()) for sample_uuid, content in sample_uuids.items()]
|
||||||
|
}
|
||||||
|
|
||||||
|
def move_to_heating_station(
|
||||||
|
self,
|
||||||
|
sample_uuids: SampleUUIDsType,
|
||||||
|
material_number: int,
|
||||||
|
) -> MoveToHeatingStationResult:
|
||||||
|
"""
|
||||||
|
将物料从An位置移动到加热台
|
||||||
|
|
||||||
|
多线程并发调用时,会竞争机械臂使用权,并自动查找空闲加热台
|
||||||
|
|
||||||
|
Args:
|
||||||
|
material_number: 物料编号 (1-5)
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
MoveToHeatingStationResult: 包含 station_id, material_number 等用于传递给下一个节点
|
||||||
|
"""
|
||||||
|
# 根据物料编号生成物料ID
|
||||||
|
material_id = f"A{material_number}"
|
||||||
|
task_desc = f"移动{material_id}到加热台"
|
||||||
|
self.logger.info(f"[任务] {task_desc} - 开始执行")
|
||||||
|
|
||||||
|
# 记录任务
|
||||||
|
with self._tasks_lock:
|
||||||
|
self._active_tasks[material_id] = {
|
||||||
|
"status": "waiting_for_arm",
|
||||||
|
"start_time": time.time(),
|
||||||
|
}
|
||||||
|
|
||||||
|
try:
|
||||||
|
# 步骤1: 等待获取机械臂使用权(竞争)
|
||||||
|
with self._tasks_lock:
|
||||||
|
self._active_tasks[material_id]["status"] = "waiting_for_arm"
|
||||||
|
self._acquire_arm(task_desc)
|
||||||
|
|
||||||
|
# 步骤2: 查找空闲加热台
|
||||||
|
with self._tasks_lock:
|
||||||
|
self._active_tasks[material_id]["status"] = "finding_station"
|
||||||
|
station_id = None
|
||||||
|
|
||||||
|
# 循环等待直到找到空闲加热台
|
||||||
|
while station_id is None:
|
||||||
|
station_id = self._find_available_heating_station()
|
||||||
|
if station_id is None:
|
||||||
|
self.logger.info(f"[{material_id}] 没有空闲加热台,等待中...")
|
||||||
|
# 释放机械臂,等待后重试
|
||||||
|
self._release_arm()
|
||||||
|
time.sleep(0.5)
|
||||||
|
self._acquire_arm(task_desc)
|
||||||
|
|
||||||
|
# 步骤3: 占用加热台 - 立即标记为OCCUPIED,防止其他任务选择同一加热台
|
||||||
|
with self._stations_lock:
|
||||||
|
self._heating_stations[station_id].state = HeatingStationState.OCCUPIED
|
||||||
|
self._heating_stations[station_id].current_material = material_id
|
||||||
|
self._heating_stations[station_id].material_number = material_number
|
||||||
|
|
||||||
|
# 步骤4: 模拟机械臂移动操作 (3秒)
|
||||||
|
with self._tasks_lock:
|
||||||
|
self._active_tasks[material_id]["status"] = "arm_moving"
|
||||||
|
self._active_tasks[material_id]["assigned_station"] = station_id
|
||||||
|
self.logger.info(f"[{material_id}] 机械臂正在移动到加热台{station_id}...")
|
||||||
|
|
||||||
|
time.sleep(self.ARM_OPERATION_TIME)
|
||||||
|
|
||||||
|
# 步骤5: 放入加热台完成
|
||||||
|
self._update_data_status(f"{material_id}已放入加热台{station_id}")
|
||||||
|
self.logger.info(f"[{material_id}] 已放入加热台{station_id} (用时{self.ARM_OPERATION_TIME}s)")
|
||||||
|
|
||||||
|
# 释放机械臂
|
||||||
|
self._release_arm()
|
||||||
|
|
||||||
|
with self._tasks_lock:
|
||||||
|
self._active_tasks[material_id]["status"] = "placed_on_station"
|
||||||
|
|
||||||
|
return {
|
||||||
|
"success": True,
|
||||||
|
"station_id": station_id,
|
||||||
|
"material_id": material_id,
|
||||||
|
"material_number": material_number,
|
||||||
|
"message": f"{material_id}已成功移动到加热台{station_id}",
|
||||||
|
"unilabos_samples": [
|
||||||
|
LabSample(sample_uuid=sample_uuid, oss_path="", extra={"material_uuid": content} if isinstance(content, str) else content.serialize()) for
|
||||||
|
sample_uuid, content in sample_uuids.items()]
|
||||||
|
}
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
self.logger.error(f"[{material_id}] 移动失败: {str(e)}")
|
||||||
|
if self._arm_lock.locked():
|
||||||
|
self._release_arm()
|
||||||
|
return {
|
||||||
|
"success": False,
|
||||||
|
"station_id": -1,
|
||||||
|
"material_id": material_id,
|
||||||
|
"material_number": material_number,
|
||||||
|
"message": f"移动失败: {str(e)}",
|
||||||
|
"unilabos_samples": [
|
||||||
|
LabSample(sample_uuid=sample_uuid, oss_path="", extra={"material_uuid": content} if isinstance(content, str) else content.serialize()) for
|
||||||
|
sample_uuid, content in sample_uuids.items()]
|
||||||
|
}
|
||||||
|
|
||||||
|
def start_heating(
|
||||||
|
self,
|
||||||
|
sample_uuids: SampleUUIDsType,
|
||||||
|
station_id: int,
|
||||||
|
material_number: int,
|
||||||
|
) -> StartHeatingResult:
|
||||||
|
"""
|
||||||
|
启动指定加热台的加热程序
|
||||||
|
|
||||||
|
Args:
|
||||||
|
station_id: 加热台ID (1-3),从 move_to_heating_station 的 handle 传入
|
||||||
|
material_number: 物料编号,从 move_to_heating_station 的 handle 传入
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
StartHeatingResult: 包含 station_id, material_number 等用于传递给下一个节点
|
||||||
|
"""
|
||||||
|
self.logger.info(f"[加热台{station_id}] 开始加热")
|
||||||
|
|
||||||
|
if station_id not in self._heating_stations:
|
||||||
|
return {
|
||||||
|
"success": False,
|
||||||
|
"station_id": station_id,
|
||||||
|
"material_id": "",
|
||||||
|
"material_number": material_number,
|
||||||
|
"message": f"无效的加热台ID: {station_id}",
|
||||||
|
"unilabos_samples": [
|
||||||
|
LabSample(sample_uuid=sample_uuid, oss_path="", extra={"material_uuid": content} if isinstance(content, str) else content.serialize()) for
|
||||||
|
sample_uuid, content in sample_uuids.items()]
|
||||||
|
}
|
||||||
|
|
||||||
|
with self._stations_lock:
|
||||||
|
station = self._heating_stations[station_id]
|
||||||
|
|
||||||
|
if station.current_material is None:
|
||||||
|
return {
|
||||||
|
"success": False,
|
||||||
|
"station_id": station_id,
|
||||||
|
"material_id": "",
|
||||||
|
"material_number": material_number,
|
||||||
|
"message": f"加热台{station_id}上没有物料",
|
||||||
|
"unilabos_samples": [
|
||||||
|
LabSample(sample_uuid=sample_uuid, oss_path="", extra={"material_uuid": content} if isinstance(content, str) else content.serialize()) for
|
||||||
|
sample_uuid, content in sample_uuids.items()]
|
||||||
|
}
|
||||||
|
|
||||||
|
if station.state == HeatingStationState.HEATING:
|
||||||
|
return {
|
||||||
|
"success": False,
|
||||||
|
"station_id": station_id,
|
||||||
|
"material_id": station.current_material,
|
||||||
|
"material_number": material_number,
|
||||||
|
"message": f"加热台{station_id}已经在加热中",
|
||||||
|
"unilabos_samples": [
|
||||||
|
LabSample(sample_uuid=sample_uuid, oss_path="", extra={"material_uuid": content} if isinstance(content, str) else content.serialize()) for
|
||||||
|
sample_uuid, content in sample_uuids.items()]
|
||||||
|
}
|
||||||
|
|
||||||
|
material_id = station.current_material
|
||||||
|
|
||||||
|
# 开始加热
|
||||||
|
station.state = HeatingStationState.HEATING
|
||||||
|
station.heating_start_time = time.time()
|
||||||
|
station.heating_progress = 0.0
|
||||||
|
|
||||||
|
with self._tasks_lock:
|
||||||
|
if material_id in self._active_tasks:
|
||||||
|
self._active_tasks[material_id]["status"] = "heating"
|
||||||
|
|
||||||
|
self._update_data_status(f"加热台{station_id}开始加热{material_id}")
|
||||||
|
|
||||||
|
# 模拟加热过程 (10秒)
|
||||||
|
start_time = time.time()
|
||||||
|
while True:
|
||||||
|
elapsed = time.time() - start_time
|
||||||
|
progress = min(100.0, (elapsed / self.HEATING_TIME) * 100)
|
||||||
|
|
||||||
|
with self._stations_lock:
|
||||||
|
self._heating_stations[station_id].heating_progress = progress
|
||||||
|
|
||||||
|
self._update_data_status(f"加热台{station_id}加热中: {progress:.1f}%")
|
||||||
|
|
||||||
|
if elapsed >= self.HEATING_TIME:
|
||||||
|
break
|
||||||
|
|
||||||
|
time.sleep(1.0)
|
||||||
|
|
||||||
|
# 加热完成
|
||||||
|
with self._stations_lock:
|
||||||
|
self._heating_stations[station_id].state = HeatingStationState.COMPLETED
|
||||||
|
self._heating_stations[station_id].heating_progress = 100.0
|
||||||
|
|
||||||
|
with self._tasks_lock:
|
||||||
|
if material_id in self._active_tasks:
|
||||||
|
self._active_tasks[material_id]["status"] = "heating_completed"
|
||||||
|
|
||||||
|
self._update_data_status(f"加热台{station_id}加热完成")
|
||||||
|
self.logger.info(f"[加热台{station_id}] {material_id}加热完成 (用时{self.HEATING_TIME}s)")
|
||||||
|
|
||||||
|
return {
|
||||||
|
"success": True,
|
||||||
|
"station_id": station_id,
|
||||||
|
"material_id": material_id,
|
||||||
|
"material_number": material_number,
|
||||||
|
"message": f"加热台{station_id}加热完成",
|
||||||
|
"unilabos_samples": [
|
||||||
|
LabSample(sample_uuid=sample_uuid, oss_path="", extra={"material_uuid": content} if isinstance(content, str) else content.serialize()) for
|
||||||
|
sample_uuid, content in sample_uuids.items()]
|
||||||
|
}
|
||||||
|
|
||||||
|
def move_to_output(
|
||||||
|
self,
|
||||||
|
sample_uuids: SampleUUIDsType,
|
||||||
|
station_id: int,
|
||||||
|
material_number: int,
|
||||||
|
) -> MoveToOutputResult:
|
||||||
|
"""
|
||||||
|
将物料从加热台移动到输出位置Cn
|
||||||
|
|
||||||
|
Args:
|
||||||
|
station_id: 加热台ID (1-3),从 start_heating 的 handle 传入
|
||||||
|
material_number: 物料编号,从 start_heating 的 handle 传入,用于确定输出位置 Cn
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
MoveToOutputResult: 包含执行结果
|
||||||
|
"""
|
||||||
|
output_number = material_number # 物料编号决定输出位置
|
||||||
|
|
||||||
|
if station_id not in self._heating_stations:
|
||||||
|
return {
|
||||||
|
"success": False,
|
||||||
|
"station_id": station_id,
|
||||||
|
"material_id": "",
|
||||||
|
"output_position": f"C{output_number}",
|
||||||
|
"message": f"无效的加热台ID: {station_id}",
|
||||||
|
"unilabos_samples": [
|
||||||
|
LabSample(sample_uuid=sample_uuid, oss_path="", extra={"material_uuid": content} if isinstance(content, str) else content.serialize()) for
|
||||||
|
sample_uuid, content in sample_uuids.items()]
|
||||||
|
}
|
||||||
|
|
||||||
|
with self._stations_lock:
|
||||||
|
station = self._heating_stations[station_id]
|
||||||
|
material_id = station.current_material
|
||||||
|
|
||||||
|
if material_id is None:
|
||||||
|
return {
|
||||||
|
"success": False,
|
||||||
|
"station_id": station_id,
|
||||||
|
"material_id": "",
|
||||||
|
"output_position": f"C{output_number}",
|
||||||
|
"message": f"加热台{station_id}上没有物料",
|
||||||
|
"unilabos_samples": [
|
||||||
|
LabSample(sample_uuid=sample_uuid, oss_path="", extra={"material_uuid": content} if isinstance(content, str) else content.serialize()) for
|
||||||
|
sample_uuid, content in sample_uuids.items()]
|
||||||
|
}
|
||||||
|
|
||||||
|
if station.state != HeatingStationState.COMPLETED:
|
||||||
|
return {
|
||||||
|
"success": False,
|
||||||
|
"station_id": station_id,
|
||||||
|
"material_id": material_id,
|
||||||
|
"output_position": f"C{output_number}",
|
||||||
|
"message": f"加热台{station_id}尚未完成加热 (当前状态: {station.state.value})",
|
||||||
|
"unilabos_samples": [
|
||||||
|
LabSample(sample_uuid=sample_uuid, oss_path="", extra={"material_uuid": content} if isinstance(content, str) else content.serialize()) for
|
||||||
|
sample_uuid, content in sample_uuids.items()]
|
||||||
|
}
|
||||||
|
|
||||||
|
output_position = f"C{output_number}"
|
||||||
|
task_desc = f"从加热台{station_id}移动{material_id}到{output_position}"
|
||||||
|
self.logger.info(f"[任务] {task_desc}")
|
||||||
|
|
||||||
|
try:
|
||||||
|
with self._tasks_lock:
|
||||||
|
if material_id in self._active_tasks:
|
||||||
|
self._active_tasks[material_id]["status"] = "waiting_for_arm_output"
|
||||||
|
|
||||||
|
# 获取机械臂
|
||||||
|
self._acquire_arm(task_desc)
|
||||||
|
|
||||||
|
with self._tasks_lock:
|
||||||
|
if material_id in self._active_tasks:
|
||||||
|
self._active_tasks[material_id]["status"] = "arm_moving_to_output"
|
||||||
|
|
||||||
|
# 模拟机械臂操作 (3秒)
|
||||||
|
self.logger.info(f"[{material_id}] 机械臂正在从加热台{station_id}取出并移动到{output_position}...")
|
||||||
|
time.sleep(self.ARM_OPERATION_TIME)
|
||||||
|
|
||||||
|
# 清空加热台
|
||||||
|
with self._stations_lock:
|
||||||
|
self._heating_stations[station_id].state = HeatingStationState.IDLE
|
||||||
|
self._heating_stations[station_id].current_material = None
|
||||||
|
self._heating_stations[station_id].material_number = None
|
||||||
|
self._heating_stations[station_id].heating_progress = 0.0
|
||||||
|
self._heating_stations[station_id].heating_start_time = None
|
||||||
|
|
||||||
|
# 释放机械臂
|
||||||
|
self._release_arm()
|
||||||
|
|
||||||
|
# 任务完成
|
||||||
|
with self._tasks_lock:
|
||||||
|
if material_id in self._active_tasks:
|
||||||
|
self._active_tasks[material_id]["status"] = "completed"
|
||||||
|
self._active_tasks[material_id]["end_time"] = time.time()
|
||||||
|
|
||||||
|
self._update_data_status(f"{material_id}已移动到{output_position}")
|
||||||
|
self.logger.info(f"[{material_id}] 已成功移动到{output_position} (用时{self.ARM_OPERATION_TIME}s)")
|
||||||
|
|
||||||
|
return {
|
||||||
|
"success": True,
|
||||||
|
"station_id": station_id,
|
||||||
|
"material_id": material_id,
|
||||||
|
"output_position": output_position,
|
||||||
|
"message": f"{material_id}已成功移动到{output_position}",
|
||||||
|
"unilabos_samples": [
|
||||||
|
LabSample(sample_uuid=sample_uuid, oss_path="", extra={"material_uuid": content} if isinstance(content, str) else content.serialize()) for
|
||||||
|
sample_uuid, content in sample_uuids.items()]
|
||||||
|
}
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
self.logger.error(f"移动到输出位置失败: {str(e)}")
|
||||||
|
if self._arm_lock.locked():
|
||||||
|
self._release_arm()
|
||||||
|
return {
|
||||||
|
"success": False,
|
||||||
|
"station_id": station_id,
|
||||||
|
"material_id": "",
|
||||||
|
"output_position": output_position,
|
||||||
|
"message": f"移动失败: {str(e)}",
|
||||||
|
"unilabos_samples": [
|
||||||
|
LabSample(sample_uuid=sample_uuid, oss_path="", extra={"material_uuid": content} if isinstance(content, str) else content.serialize()) for
|
||||||
|
sample_uuid, content in sample_uuids.items()]
|
||||||
|
}
|
||||||
|
|
||||||
|
# ============ 状态属性 ============
|
||||||
|
|
||||||
|
@property
|
||||||
|
def status(self) -> str:
|
||||||
|
return self.data.get("status", "Unknown")
|
||||||
|
|
||||||
|
@property
|
||||||
|
def arm_state(self) -> str:
|
||||||
|
return self._arm_state.value
|
||||||
|
|
||||||
|
@property
|
||||||
|
def arm_current_task(self) -> str:
|
||||||
|
return self._arm_current_task or ""
|
||||||
|
|
||||||
|
@property
|
||||||
|
def heating_station_1_state(self) -> str:
|
||||||
|
with self._stations_lock:
|
||||||
|
station = self._heating_stations.get(1)
|
||||||
|
return station.state.value if station else "unknown"
|
||||||
|
|
||||||
|
@property
|
||||||
|
def heating_station_1_material(self) -> str:
|
||||||
|
with self._stations_lock:
|
||||||
|
station = self._heating_stations.get(1)
|
||||||
|
return station.current_material or "" if station else ""
|
||||||
|
|
||||||
|
@property
|
||||||
|
def heating_station_1_progress(self) -> float:
|
||||||
|
with self._stations_lock:
|
||||||
|
station = self._heating_stations.get(1)
|
||||||
|
return station.heating_progress if station else 0.0
|
||||||
|
|
||||||
|
@property
|
||||||
|
def heating_station_2_state(self) -> str:
|
||||||
|
with self._stations_lock:
|
||||||
|
station = self._heating_stations.get(2)
|
||||||
|
return station.state.value if station else "unknown"
|
||||||
|
|
||||||
|
@property
|
||||||
|
def heating_station_2_material(self) -> str:
|
||||||
|
with self._stations_lock:
|
||||||
|
station = self._heating_stations.get(2)
|
||||||
|
return station.current_material or "" if station else ""
|
||||||
|
|
||||||
|
@property
|
||||||
|
def heating_station_2_progress(self) -> float:
|
||||||
|
with self._stations_lock:
|
||||||
|
station = self._heating_stations.get(2)
|
||||||
|
return station.heating_progress if station else 0.0
|
||||||
|
|
||||||
|
@property
|
||||||
|
def heating_station_3_state(self) -> str:
|
||||||
|
with self._stations_lock:
|
||||||
|
station = self._heating_stations.get(3)
|
||||||
|
return station.state.value if station else "unknown"
|
||||||
|
|
||||||
|
@property
|
||||||
|
def heating_station_3_material(self) -> str:
|
||||||
|
with self._stations_lock:
|
||||||
|
station = self._heating_stations.get(3)
|
||||||
|
return station.current_material or "" if station else ""
|
||||||
|
|
||||||
|
@property
|
||||||
|
def heating_station_3_progress(self) -> float:
|
||||||
|
with self._stations_lock:
|
||||||
|
station = self._heating_stations.get(3)
|
||||||
|
return station.heating_progress if station else 0.0
|
||||||
|
|
||||||
|
@property
|
||||||
|
def active_tasks_count(self) -> int:
|
||||||
|
with self._tasks_lock:
|
||||||
|
return len(self._active_tasks)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def message(self) -> str:
|
||||||
|
return self.data.get("message", "")
|
||||||
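A minimal concurrency sketch, not part of the diff, showing how the thread-pool workflow described in the workbench module docstring could be driven: five materials compete for the single arm, heat in parallel on the three stations, and are moved to C1-C5. It assumes the package is importable from the path shown above, that an empty sample_uuids mapping is acceptable for a dry run, and shortens the timing values for the example.

    from concurrent.futures import ThreadPoolExecutor

    from unilabos.devices.virtual.workbench import VirtualWorkbench

    wb = VirtualWorkbench("demo_workbench", {"arm_operation_time": 0.2, "heating_time": 0.5})
    wb.initialize()

    def run_one(material_number: int):
        # Step 1: compete for the arm and claim an idle heating station
        placed = wb.move_to_heating_station({}, material_number)
        if not placed["success"]:
            return placed
        # Step 2: heat on the assigned station (the three stations run in parallel)
        heated = wb.start_heating({}, placed["station_id"], material_number)
        if not heated["success"]:
            return heated
        # Step 3: compete for the arm again and move the material to its output slot Cn
        return wb.move_to_output({}, heated["station_id"], material_number)

    with ThreadPoolExecutor(max_workers=5) as pool:
        results = list(pool.map(run_one, range(1, 6)))

    for r in results:
        print(r["message"])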
0
unilabos/devices/xrd_d7mate/__init__.py
Normal file
0
unilabos/devices/zhida_hplc/__init__.py
Normal file
@@ -638,7 +638,7 @@ liquid_handler:
 placeholder_keys: {}
 result: {}
 schema:
-description: 吸头迭代函数。用于自动管理和切换吸头架中的吸头,实现批量实验中的吸头自动分配和追踪。该函数监控吸头使用状态,自动切换到下一个可用吸头位置,确保实验流程的连续性。适用于高通量实验、批量处理、自动化流水线等需要大量吸头管理的应用场景。
+description: 吸头迭代函数。用于自动管理和切换枪头盒中的吸头,实现批量实验中的吸头自动分配和追踪。该函数监控吸头使用状态,自动切换到下一个可用吸头位置,确保实验流程的连续性。适用于高通量实验、批量处理、自动化流水线等需要大量吸头管理的应用场景。
 properties:
 feedback: {}
 goal:
@@ -712,6 +712,43 @@ liquid_handler:
 title: set_group参数
 type: object
 type: UniLabJsonCommand
+auto-set_liquid_from_plate:
+feedback: {}
+goal: {}
+goal_default:
+liquid_names: null
+plate: null
+volumes: null
+well_names: null
+handles: {}
+placeholder_keys: {}
+result: {}
+schema:
+description: ''
+properties:
+feedback: {}
+goal:
+properties:
+liquid_names:
+type: string
+plate:
+type: string
+volumes:
+type: string
+well_names:
+type: string
+required:
+- plate
+- well_names
+- liquid_names
+- volumes
+type: object
+result: {}
+required:
+- goal
+title: set_liquid_from_plate参数
+type: object
+type: UniLabJsonCommand
 auto-set_tiprack:
 feedback: {}
 goal: {}
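Illustrative only: a Python goal payload shaped like the auto-set_liquid_from_plate schema added above. The schema requires plate, well_names, liquid_names, and volumes as strings; the comma-separated encoding and the concrete values are assumptions for the example, not taken from the registry.

    set_liquid_goal = {
        "plate": "plate_1",
        "well_names": "A1,A2,A3",
        "liquid_names": "water,buffer,sample",
        "volumes": "100,100,50",
    }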
@@ -721,7 +758,7 @@ liquid_handler:
 placeholder_keys: {}
 result: {}
 schema:
-description: 吸头架设置函数。用于配置和初始化液体处理系统的吸头架信息,包括吸头架位置、类型、容量等参数。该函数建立吸头资源管理系统,为后续的吸头选择和使用提供基础配置。适用于系统初始化、吸头架更换、实验配置等需要吸头资源管理的操作场景。
+description: 枪头盒设置函数。用于配置和初始化液体处理系统的枪头盒信息,包括枪头盒位置、类型、容量等参数。该函数建立吸头资源管理系统,为后续的吸头选择和使用提供基础配置。适用于系统初始化、枪头盒更换、实验配置等需要吸头资源管理的操作场景。
 properties:
 feedback: {}
 goal:
@@ -4093,32 +4130,43 @@ liquid_handler:
|
|||||||
- 0
|
- 0
|
||||||
handles:
|
handles:
|
||||||
input:
|
input:
|
||||||
- data_key: liquid
|
- data_key: sources
|
||||||
data_source: handle
|
data_source: handle
|
||||||
data_type: resource
|
data_type: resource
|
||||||
handler_key: sources
|
handler_key: sources
|
||||||
label: sources
|
label: sources
|
||||||
- data_key: liquid
|
- data_key: targets
|
||||||
data_source: executor
|
data_source: handle
|
||||||
data_type: resource
|
data_type: resource
|
||||||
handler_key: targets
|
handler_key: targets
|
||||||
label: targets
|
label: targets
|
||||||
- data_key: liquid
|
- data_key: tip_racks
|
||||||
data_source: executor
|
data_source: handle
|
||||||
|
data_type: resource
|
||||||
|
handler_key: tip_racks
|
||||||
|
label: tip_racks
|
||||||
|
output:
|
||||||
|
- data_key: sources
|
||||||
|
data_source: handle
|
||||||
|
data_type: resource
|
||||||
|
handler_key: targets
|
||||||
|
label: 转移目标
|
||||||
|
- data_key: tip_racks
|
||||||
|
data_source: handle
|
||||||
data_type: resource
|
data_type: resource
|
||||||
handler_key: tip_rack
|
handler_key: tip_rack
|
||||||
label: tip_rack
|
label: 枪头盒
|
||||||
output:
|
output:
|
||||||
- data_key: liquid
|
- data_key: sources.@flatten
|
||||||
data_source: handle
|
data_source: executor
|
||||||
data_type: resource
|
data_type: resource
|
||||||
handler_key: sources_out
|
handler_key: sources_out
|
||||||
label: sources
|
label: sources
|
||||||
- data_key: liquid
|
- data_key: targets
|
||||||
data_source: executor
|
data_source: handle
|
||||||
data_type: resource
|
data_type: resource
|
||||||
handler_key: targets_out
|
handler_key: targets_out
|
||||||
label: targets
|
label: 移液后目标孔
|
||||||
placeholder_keys:
|
placeholder_keys:
|
||||||
sources: unilabos_resources
|
sources: unilabos_resources
|
||||||
targets: unilabos_resources
|
targets: unilabos_resources
|
||||||
@@ -4764,13 +4812,13 @@ liquid_handler.biomek:
 targets: ''
 handles:
 input:
-- data_key: liquid
+- data_key: sources
 data_source: handle
 data_type: resource
 handler_key: sources
 label: sources
 output:
-- data_key: liquid
+- data_key: targets
 data_source: handle
 data_type: resource
 handler_key: targets
@@ -4923,29 +4971,29 @@ liquid_handler.biomek:
  volume: 0.0
  handles:
  input:
- - data_key: liquid
+ - data_key: sources
    data_source: handle
    data_type: resource
    handler_key: sources
    label: sources
- - data_key: liquid
-   data_source: executor
+ - data_key: targets
+   data_source: handle
    data_type: resource
    handler_key: targets
    label: targets
- - data_key: liquid
-   data_source: executor
+ - data_key: tip_racks
+   data_source: handle
    data_type: resource
-   handler_key: tip_rack
-   label: tip_rack
+   handler_key: tip_racks
+   label: tip_racks
  output:
- - data_key: liquid
+ - data_key: sources
    data_source: handle
    data_type: resource
    handler_key: sources_out
    label: sources
- - data_key: liquid
-   data_source: executor
+ - data_key: targets
+   data_source: handle
    data_type: resource
    handler_key: targets_out
    label: targets
@@ -5114,19 +5162,32 @@ liquid_handler.biomek:
  - 0
  handles:
  input:
- - data_key: liquid
+ - data_key: sources
    data_source: handle
    data_type: resource
-   handler_key: liquid-input
-   io_type: target
-   label: Liquid Input
- output:
- - data_key: liquid
-   data_source: executor
+   handler_key: sources
+   label: sources
+ - data_key: targets
+   data_source: handle
    data_type: resource
-   handler_key: liquid-output
-   io_type: source
-   label: Liquid Output
+   handler_key: targets
+   label: targets
+ - data_key: tip_racks
+   data_source: handle
+   data_type: resource
+   handler_key: tip_racks
+   label: tip_racks
+ output:
+ - data_key: sources
+   data_source: handle
+   data_type: resource
+   handler_key: sources_out
+   label: sources
+ - data_key: targets
+   data_source: handle
+   data_type: resource
+   handler_key: targets_out
+   label: targets
  placeholder_keys:
    sources: unilabos_resources
    targets: unilabos_resources
@@ -7604,6 +7665,43 @@ liquid_handler.prcxi:
  title: iter_tips参数
  type: object
  type: UniLabJsonCommand
+ auto-magnetic_action:
+   feedback: {}
+   goal: {}
+   goal_default:
+     height: null
+     is_wait: null
+     module_no: null
+     time: null
+   handles: {}
+   placeholder_keys: {}
+   result: {}
+   schema:
+     description: ''
+     properties:
+       feedback: {}
+       goal:
+         properties:
+           height:
+             type: integer
+           is_wait:
+             type: boolean
+           module_no:
+             type: integer
+           time:
+             type: integer
+         required:
+         - time
+         - module_no
+         - height
+         - is_wait
+         type: object
+       result: {}
+     required:
+     - goal
+     title: magnetic_action参数
+     type: object
+   type: UniLabJsonCommandAsync
  auto-move_to:
    feedback: {}
    goal: {}
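As a quick illustration of the new auto-magnetic_action command, a goal only has to carry the four required fields declared in the schema above. The payload below is a sketch: the concrete values, and the units they imply, are made up for the example, since the schema itself does not state them.

# Example goal payload for auto-magnetic_action, matching the schema's required fields.
magnetic_action_goal = {
    "time": 60,        # integer; unit not specified by the schema
    "module_no": 1,    # which magnetic module to drive
    "height": 5,       # integer height setting
    "is_wait": True,   # block until the action finishes
}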
@@ -7637,6 +7735,31 @@ liquid_handler.prcxi:
  title: move_to参数
  type: object
  type: UniLabJsonCommandAsync
+ auto-plr_pos_to_prcxi:
+   feedback: {}
+   goal: {}
+   goal_default:
+     resource: null
+   handles: {}
+   placeholder_keys: {}
+   result: {}
+   schema:
+     description: ''
+     properties:
+       feedback: {}
+       goal:
+         properties:
+           resource:
+             type: object
+         required:
+         - resource
+         type: object
+       result: {}
+     required:
+     - goal
+     title: plr_pos_to_prcxi参数
+     type: object
+   type: UniLabJsonCommand
  auto-post_init:
    feedback: {}
    goal: {}
@@ -7757,6 +7880,47 @@ liquid_handler.prcxi:
  title: shaker_action参数
  type: object
  type: UniLabJsonCommandAsync
+ auto-shaking_incubation_action:
+   feedback: {}
+   goal: {}
+   goal_default:
+     amplitude: null
+     is_wait: null
+     module_no: null
+     temperature: null
+     time: null
+   handles: {}
+   placeholder_keys: {}
+   result: {}
+   schema:
+     description: ''
+     properties:
+       feedback: {}
+       goal:
+         properties:
+           amplitude:
+             type: integer
+           is_wait:
+             type: boolean
+           module_no:
+             type: integer
+           temperature:
+             type: integer
+           time:
+             type: integer
+         required:
+         - time
+         - module_no
+         - amplitude
+         - is_wait
+         - temperature
+         type: object
+       result: {}
+     required:
+     - goal
+     title: shaking_incubation_action参数
+     type: object
+   type: UniLabJsonCommandAsync
  auto-touch_tip:
    feedback: {}
    goal: {}
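Because the auto-shaking_incubation_action entry embeds a plain JSON Schema, a goal can be checked on the client before it is submitted. A minimal sketch, assuming the third-party jsonschema package is available and that the goal sub-schema is extracted exactly as written above.

# Validate a shaking-incubation goal against the embedded JSON Schema (illustrative).
from jsonschema import validate

goal_schema = {
    "type": "object",
    "properties": {
        "amplitude": {"type": "integer"},
        "is_wait": {"type": "boolean"},
        "module_no": {"type": "integer"},
        "temperature": {"type": "integer"},
        "time": {"type": "integer"},
    },
    "required": ["time", "module_no", "amplitude", "is_wait", "temperature"],
}

goal = {"time": 300, "module_no": 2, "amplitude": 3, "is_wait": False, "temperature": 37}
validate(instance=goal, schema=goal_schema)  # raises ValidationError on a bad payload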
@@ -8491,7 +8655,19 @@ liquid_handler.prcxi:
  z: 0.0
  sample_id: ''
  type: ''
- handles: {}
+ handles:
+   input:
+   - data_key: plate
+     data_source: handle
+     data_type: resource
+     handler_key: plate
+     label: plate
+   output:
+   - data_key: plate
+     data_source: handle
+     data_type: resource
+     handler_key: plate
+     label: plate
  placeholder_keys:
    plate: unilabos_resources
    to: unilabos_resources
@@ -9278,7 +9454,19 @@ liquid_handler.prcxi:
  z: 0.0
  sample_id: ''
  type: ''
- handles: {}
+ handles:
+   input:
+   - data_key: wells
+     data_source: handle
+     data_type: resource
+     handler_key: input_wells
+     label: 待设定液体孔
+   output:
+   - data_key: wells.@flatten
+     data_source: executor
+     data_type: resource
+     handler_key: output_wells
+     label: 已设定液体孔
  placeholder_keys:
    wells: unilabos_resources
  result: {}
@@ -9394,6 +9582,352 @@ liquid_handler.prcxi:
|
|||||||
title: LiquidHandlerSetLiquid
|
title: LiquidHandlerSetLiquid
|
||||||
type: object
|
type: object
|
||||||
type: LiquidHandlerSetLiquid
|
type: LiquidHandlerSetLiquid
|
||||||
|
set_liquid_from_plate:
|
||||||
|
feedback: {}
|
||||||
|
goal: {}
|
||||||
|
goal_default:
|
||||||
|
liquid_names: null
|
||||||
|
plate: null
|
||||||
|
volumes: null
|
||||||
|
well_names: null
|
||||||
|
handles:
|
||||||
|
input:
|
||||||
|
- data_key: '@this.0@@@plate'
|
||||||
|
data_source: handle
|
||||||
|
data_type: resource
|
||||||
|
handler_key: input_plate
|
||||||
|
label: 待设定液体板
|
||||||
|
output:
|
||||||
|
- data_key: plate.@flatten
|
||||||
|
data_source: executor
|
||||||
|
data_type: resource
|
||||||
|
handler_key: output_plate
|
||||||
|
label: 已设定液体板
|
||||||
|
- data_key: wells.@flatten
|
||||||
|
data_source: executor
|
||||||
|
data_type: resource
|
||||||
|
handler_key: output_wells
|
||||||
|
label: 已设定液体孔
|
||||||
|
- data_key: volumes
|
||||||
|
data_source: executor
|
||||||
|
data_type: number_array
|
||||||
|
handler_key: output_volumes
|
||||||
|
label: 各孔设定体积
|
||||||
|
placeholder_keys:
|
||||||
|
plate: unilabos_resources
|
||||||
|
result: {}
|
||||||
|
schema:
|
||||||
|
description: ''
|
||||||
|
properties:
|
||||||
|
feedback: {}
|
||||||
|
goal:
|
||||||
|
properties:
|
||||||
|
liquid_names:
|
||||||
|
items:
|
||||||
|
type: string
|
||||||
|
type: array
|
||||||
|
plate:
|
||||||
|
properties:
|
||||||
|
category:
|
||||||
|
type: string
|
||||||
|
children:
|
||||||
|
items:
|
||||||
|
type: string
|
||||||
|
type: array
|
||||||
|
config:
|
||||||
|
type: string
|
||||||
|
data:
|
||||||
|
type: string
|
||||||
|
id:
|
||||||
|
type: string
|
||||||
|
name:
|
||||||
|
type: string
|
||||||
|
parent:
|
||||||
|
type: string
|
||||||
|
pose:
|
||||||
|
properties:
|
||||||
|
orientation:
|
||||||
|
properties:
|
||||||
|
w:
|
||||||
|
type: number
|
||||||
|
x:
|
||||||
|
type: number
|
||||||
|
y:
|
||||||
|
type: number
|
||||||
|
z:
|
||||||
|
type: number
|
||||||
|
required:
|
||||||
|
- x
|
||||||
|
- y
|
||||||
|
- z
|
||||||
|
- w
|
||||||
|
title: orientation
|
||||||
|
type: object
|
||||||
|
position:
|
||||||
|
properties:
|
||||||
|
x:
|
||||||
|
type: number
|
||||||
|
y:
|
||||||
|
type: number
|
||||||
|
z:
|
||||||
|
type: number
|
||||||
|
required:
|
||||||
|
- x
|
||||||
|
- y
|
||||||
|
- z
|
||||||
|
title: position
|
||||||
|
type: object
|
||||||
|
required:
|
||||||
|
- position
|
||||||
|
- orientation
|
||||||
|
title: pose
|
||||||
|
type: object
|
||||||
|
sample_id:
|
||||||
|
type: string
|
||||||
|
type:
|
||||||
|
type: string
|
||||||
|
required:
|
||||||
|
- id
|
||||||
|
- name
|
||||||
|
- sample_id
|
||||||
|
- children
|
||||||
|
- parent
|
||||||
|
- type
|
||||||
|
- category
|
||||||
|
- pose
|
||||||
|
- config
|
||||||
|
- data
|
||||||
|
title: plate
|
||||||
|
type: object
|
||||||
|
volumes:
|
||||||
|
items:
|
||||||
|
type: number
|
||||||
|
type: array
|
||||||
|
well_names:
|
||||||
|
items:
|
||||||
|
type: string
|
||||||
|
type: array
|
||||||
|
required:
|
||||||
|
- plate
|
||||||
|
- well_names
|
||||||
|
- liquid_names
|
||||||
|
- volumes
|
||||||
|
type: object
|
||||||
|
result:
|
||||||
|
$defs:
|
||||||
|
ResourceDict:
|
||||||
|
properties:
|
||||||
|
class:
|
||||||
|
description: Resource class name
|
||||||
|
title: Class
|
||||||
|
type: string
|
||||||
|
config:
|
||||||
|
additionalProperties: true
|
||||||
|
description: Resource configuration
|
||||||
|
title: Config
|
||||||
|
type: object
|
||||||
|
data:
|
||||||
|
additionalProperties: true
|
||||||
|
description: 'Resource data, eg: container liquid data'
|
||||||
|
title: Data
|
||||||
|
type: object
|
||||||
|
description:
|
||||||
|
default: ''
|
||||||
|
description: Resource description
|
||||||
|
title: Description
|
||||||
|
type: string
|
||||||
|
extra:
|
||||||
|
additionalProperties: true
|
||||||
|
description: 'Extra data, eg: slot index'
|
||||||
|
title: Extra
|
||||||
|
type: object
|
||||||
|
icon:
|
||||||
|
default: ''
|
||||||
|
description: Resource icon
|
||||||
|
title: Icon
|
||||||
|
type: string
|
||||||
|
id:
|
||||||
|
description: Resource ID
|
||||||
|
title: Id
|
||||||
|
type: string
|
||||||
|
model:
|
||||||
|
additionalProperties: true
|
||||||
|
description: Resource model
|
||||||
|
title: Model
|
||||||
|
type: object
|
||||||
|
name:
|
||||||
|
description: Resource name
|
||||||
|
title: Name
|
||||||
|
type: string
|
||||||
|
parent:
|
||||||
|
anyOf:
|
||||||
|
- $ref: '#/$defs/ResourceDict'
|
||||||
|
- type: 'null'
|
||||||
|
default: null
|
||||||
|
description: Parent resource object
|
||||||
|
parent_uuid:
|
||||||
|
anyOf:
|
||||||
|
- type: string
|
||||||
|
- type: 'null'
|
||||||
|
default: null
|
||||||
|
description: Parent resource uuid
|
||||||
|
title: Parent Uuid
|
||||||
|
pose:
|
||||||
|
$ref: '#/$defs/ResourceDictPosition'
|
||||||
|
description: Resource position
|
||||||
|
schema:
|
||||||
|
additionalProperties: true
|
||||||
|
description: Resource schema
|
||||||
|
title: Schema
|
||||||
|
type: object
|
||||||
|
type:
|
||||||
|
anyOf:
|
||||||
|
- const: device
|
||||||
|
type: string
|
||||||
|
- type: string
|
||||||
|
description: Resource type
|
||||||
|
title: Type
|
||||||
|
uuid:
|
||||||
|
description: Resource UUID
|
||||||
|
title: Uuid
|
||||||
|
type: string
|
||||||
|
required:
|
||||||
|
- id
|
||||||
|
- uuid
|
||||||
|
- name
|
||||||
|
- type
|
||||||
|
- class
|
||||||
|
- config
|
||||||
|
- data
|
||||||
|
- extra
|
||||||
|
title: ResourceDict
|
||||||
|
type: object
|
||||||
|
ResourceDictPosition:
|
||||||
|
properties:
|
||||||
|
cross_section_type:
|
||||||
|
default: rectangle
|
||||||
|
description: Cross section type
|
||||||
|
enum:
|
||||||
|
- rectangle
|
||||||
|
- circle
|
||||||
|
- rounded_rectangle
|
||||||
|
title: Cross Section Type
|
||||||
|
type: string
|
||||||
|
layout:
|
||||||
|
default: x-y
|
||||||
|
description: Resource layout
|
||||||
|
enum:
|
||||||
|
- 2d
|
||||||
|
- x-y
|
||||||
|
- z-y
|
||||||
|
- x-z
|
||||||
|
title: Layout
|
||||||
|
type: string
|
||||||
|
position:
|
||||||
|
$ref: '#/$defs/ResourceDictPositionObject'
|
||||||
|
description: Resource position
|
||||||
|
position3d:
|
||||||
|
$ref: '#/$defs/ResourceDictPositionObject'
|
||||||
|
description: Resource position in 3D space
|
||||||
|
rotation:
|
||||||
|
$ref: '#/$defs/ResourceDictPositionObject'
|
||||||
|
description: Resource rotation
|
||||||
|
scale:
|
||||||
|
$ref: '#/$defs/ResourceDictPositionScale'
|
||||||
|
description: Resource scale
|
||||||
|
size:
|
||||||
|
$ref: '#/$defs/ResourceDictPositionSize'
|
||||||
|
description: Resource size
|
||||||
|
title: ResourceDictPosition
|
||||||
|
type: object
|
||||||
|
ResourceDictPositionObject:
|
||||||
|
properties:
|
||||||
|
x:
|
||||||
|
default: 0.0
|
||||||
|
description: X coordinate
|
||||||
|
title: X
|
||||||
|
type: number
|
||||||
|
y:
|
||||||
|
default: 0.0
|
||||||
|
description: Y coordinate
|
||||||
|
title: Y
|
||||||
|
type: number
|
||||||
|
z:
|
||||||
|
default: 0.0
|
||||||
|
description: Z coordinate
|
||||||
|
title: Z
|
||||||
|
type: number
|
||||||
|
title: ResourceDictPositionObject
|
||||||
|
type: object
|
||||||
|
ResourceDictPositionScale:
|
||||||
|
properties:
|
||||||
|
x:
|
||||||
|
default: 0.0
|
||||||
|
description: x scale
|
||||||
|
title: X
|
||||||
|
type: number
|
||||||
|
y:
|
||||||
|
default: 0.0
|
||||||
|
description: y scale
|
||||||
|
title: Y
|
||||||
|
type: number
|
||||||
|
z:
|
||||||
|
default: 0.0
|
||||||
|
description: z scale
|
||||||
|
title: Z
|
||||||
|
type: number
|
||||||
|
title: ResourceDictPositionScale
|
||||||
|
type: object
|
||||||
|
ResourceDictPositionSize:
|
||||||
|
properties:
|
||||||
|
depth:
|
||||||
|
default: 0.0
|
||||||
|
description: Depth
|
||||||
|
title: Depth
|
||||||
|
type: number
|
||||||
|
height:
|
||||||
|
default: 0.0
|
||||||
|
description: Height
|
||||||
|
title: Height
|
||||||
|
type: number
|
||||||
|
width:
|
||||||
|
default: 0.0
|
||||||
|
description: Width
|
||||||
|
title: Width
|
||||||
|
type: number
|
||||||
|
title: ResourceDictPositionSize
|
||||||
|
type: object
|
||||||
|
properties:
|
||||||
|
plate:
|
||||||
|
items:
|
||||||
|
items:
|
||||||
|
$ref: '#/$defs/ResourceDict'
|
||||||
|
type: array
|
||||||
|
title: Plate
|
||||||
|
type: array
|
||||||
|
volumes:
|
||||||
|
items:
|
||||||
|
type: number
|
||||||
|
title: Volumes
|
||||||
|
type: array
|
||||||
|
wells:
|
||||||
|
items:
|
||||||
|
items:
|
||||||
|
$ref: '#/$defs/ResourceDict'
|
||||||
|
type: array
|
||||||
|
title: Wells
|
||||||
|
type: array
|
||||||
|
required:
|
||||||
|
- plate
|
||||||
|
- wells
|
||||||
|
- volumes
|
||||||
|
title: SetLiquidFromPlateReturn
|
||||||
|
type: object
|
||||||
|
required:
|
||||||
|
- goal
|
||||||
|
title: set_liquid_from_plate参数
|
||||||
|
type: object
|
||||||
|
type: UniLabJsonCommand
|
||||||
set_tiprack:
|
set_tiprack:
|
||||||
feedback: {}
|
feedback: {}
|
||||||
goal:
|
goal:
|
||||||
@@ -9739,32 +10273,32 @@ liquid_handler.prcxi:
  - 0
  handles:
  input:
- - data_key: liquid
+ - data_key: sources
    data_source: handle
    data_type: resource
    handler_key: sources
    label: sources
- - data_key: liquid
-   data_source: executor
+ - data_key: targets
+   data_source: handle
    data_type: resource
    handler_key: targets
    label: targets
- - data_key: liquid
-   data_source: executor
+ - data_key: tip_racks
+   data_source: handle
    data_type: resource
-   handler_key: tip_rack
-   label: tip_rack
+   handler_key: tip_racks
+   label: tip_racks
  output:
- - data_key: liquid
+ - data_key: sources
    data_source: handle
    data_type: resource
    handler_key: sources_out
    label: sources
- - data_key: liquid
-   data_source: executor
+ - data_key: targets
+   data_source: handle
    data_type: resource
    handler_key: targets_out
-   label: targets
+   label: 移液后目标孔
  placeholder_keys:
    sources: unilabos_resources
    targets: unilabos_resources
@@ -10145,6 +10679,12 @@ liquid_handler.prcxi:
    type: string
  deck:
    type: object
+ deck_y:
+   default: 400
+   type: string
+ deck_z:
+   default: 300
+   type: string
  host:
    type: string
  is_9320:
@@ -10155,17 +10695,44 @@ liquid_handler.prcxi:
    type: string
  port:
    type: integer
+ rail_interval:
+   default: 0
+   type: string
+ rail_nums:
+   default: 4
+   type: string
+ rail_width:
+   default: 27.5
+   type: string
  setup:
    default: true
    type: string
  simulator:
    default: false
    type: string
+ start_rail:
+   default: 2
+   type: string
  step_mode:
    default: false
    type: string
  timeout:
    type: number
+ x_increase:
+   default: -0.003636
+   type: string
+ x_offset:
+   default: -0.8
+   type: string
+ xy_coupling:
+   default: -0.0045
+   type: string
+ y_increase:
+   default: -0.003636
+   type: string
+ y_offset:
+   default: -37.98
+   type: string
  required:
  - deck
  - host
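Taken together with the deck_y/deck_z fields added in the previous hunk, the prcxi init schema now exposes the deck footprint, rail layout and X/Y calibration offsets (note that the schema declares them as strings even though the defaults are numeric). The snippet below is only a sketch of a device config that uses the new keys: every value is simply the default declared above, while the host and port entries are placeholders rather than values taken from a real lab config.

# Illustrative prcxi liquid-handler init config drawing on the new parameters and their defaults.
prcxi_config = {
    "host": "192.168.1.10",     # placeholder address
    "port": 9999,               # placeholder port
    "deck_y": 400,
    "deck_z": 300,
    "rail_nums": 4,
    "rail_width": 27.5,
    "rail_interval": 0,
    "start_rail": 2,
    "x_offset": -0.8,
    "x_increase": -0.003636,
    "y_offset": -37.98,
    "y_increase": -0.003636,
    "xy_coupling": -0.0045,
    "setup": True,
    "simulator": False,
}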
@@ -5792,3 +5792,481 @@ virtual_vacuum_pump:
|
|||||||
- status
|
- status
|
||||||
type: object
|
type: object
|
||||||
version: 1.0.0
|
version: 1.0.0
|
||||||
|
virtual_workbench:
|
||||||
|
category:
|
||||||
|
- virtual_device
|
||||||
|
class:
|
||||||
|
action_value_mappings:
|
||||||
|
auto-move_to_heating_station:
|
||||||
|
feedback: {}
|
||||||
|
goal: {}
|
||||||
|
goal_default:
|
||||||
|
material_number: null
|
||||||
|
handles:
|
||||||
|
input:
|
||||||
|
- data_key: material_number
|
||||||
|
data_source: handle
|
||||||
|
data_type: workbench_material
|
||||||
|
handler_key: material_input
|
||||||
|
label: 物料编号
|
||||||
|
output:
|
||||||
|
- data_key: station_id
|
||||||
|
data_source: executor
|
||||||
|
data_type: workbench_station
|
||||||
|
handler_key: heating_station_output
|
||||||
|
label: 加热台ID
|
||||||
|
- data_key: material_number
|
||||||
|
data_source: executor
|
||||||
|
data_type: workbench_material
|
||||||
|
handler_key: material_number_output
|
||||||
|
label: 物料编号
|
||||||
|
placeholder_keys: {}
|
||||||
|
result: {}
|
||||||
|
schema:
|
||||||
|
description: 将物料从An位置移动到空闲加热台,返回分配的加热台ID
|
||||||
|
properties:
|
||||||
|
feedback: {}
|
||||||
|
goal:
|
||||||
|
properties:
|
||||||
|
material_number:
|
||||||
|
description: 物料编号,1-5,物料ID自动生成为A{n}
|
||||||
|
type: integer
|
||||||
|
required:
|
||||||
|
- material_number
|
||||||
|
type: object
|
||||||
|
result:
|
||||||
|
$defs:
|
||||||
|
LabSample:
|
||||||
|
properties:
|
||||||
|
extra:
|
||||||
|
additionalProperties: true
|
||||||
|
title: Extra
|
||||||
|
type: object
|
||||||
|
oss_path:
|
||||||
|
title: Oss Path
|
||||||
|
type: string
|
||||||
|
sample_uuid:
|
||||||
|
title: Sample Uuid
|
||||||
|
type: string
|
||||||
|
required:
|
||||||
|
- sample_uuid
|
||||||
|
- oss_path
|
||||||
|
- extra
|
||||||
|
title: LabSample
|
||||||
|
type: object
|
||||||
|
description: move_to_heating_station 返回类型
|
||||||
|
properties:
|
||||||
|
material_id:
|
||||||
|
title: Material Id
|
||||||
|
type: string
|
||||||
|
material_number:
|
||||||
|
title: Material Number
|
||||||
|
type: integer
|
||||||
|
message:
|
||||||
|
title: Message
|
||||||
|
type: string
|
||||||
|
station_id:
|
||||||
|
description: 分配的加热台ID
|
||||||
|
title: Station Id
|
||||||
|
type: integer
|
||||||
|
success:
|
||||||
|
title: Success
|
||||||
|
type: boolean
|
||||||
|
unilabos_samples:
|
||||||
|
items:
|
||||||
|
$ref: '#/$defs/LabSample'
|
||||||
|
title: Unilabos Samples
|
||||||
|
type: array
|
||||||
|
required:
|
||||||
|
- success
|
||||||
|
- station_id
|
||||||
|
- material_id
|
||||||
|
- material_number
|
||||||
|
- message
|
||||||
|
- unilabos_samples
|
||||||
|
title: MoveToHeatingStationResult
|
||||||
|
type: object
|
||||||
|
required:
|
||||||
|
- goal
|
||||||
|
title: move_to_heating_station参数
|
||||||
|
type: object
|
||||||
|
type: UniLabJsonCommand
|
||||||
|
auto-move_to_output:
|
||||||
|
feedback: {}
|
||||||
|
goal: {}
|
||||||
|
goal_default:
|
||||||
|
material_number: null
|
||||||
|
station_id: null
|
||||||
|
handles:
|
||||||
|
input:
|
||||||
|
- data_key: station_id
|
||||||
|
data_source: handle
|
||||||
|
data_type: workbench_station
|
||||||
|
handler_key: output_station_input
|
||||||
|
label: 加热台ID
|
||||||
|
- data_key: material_number
|
||||||
|
data_source: handle
|
||||||
|
data_type: workbench_material
|
||||||
|
handler_key: output_material_input
|
||||||
|
label: 物料编号
|
||||||
|
placeholder_keys: {}
|
||||||
|
result: {}
|
||||||
|
schema:
|
||||||
|
description: 将物料从加热台移动到输出位置Cn
|
||||||
|
properties:
|
||||||
|
feedback: {}
|
||||||
|
goal:
|
||||||
|
properties:
|
||||||
|
material_number:
|
||||||
|
description: 物料编号,用于确定输出位置Cn
|
||||||
|
type: integer
|
||||||
|
station_id:
|
||||||
|
description: 加热台ID,1-3,从上一节点传入
|
||||||
|
type: integer
|
||||||
|
required:
|
||||||
|
- station_id
|
||||||
|
- material_number
|
||||||
|
type: object
|
||||||
|
result:
|
||||||
|
$defs:
|
||||||
|
LabSample:
|
||||||
|
properties:
|
||||||
|
extra:
|
||||||
|
additionalProperties: true
|
||||||
|
title: Extra
|
||||||
|
type: object
|
||||||
|
oss_path:
|
||||||
|
title: Oss Path
|
||||||
|
type: string
|
||||||
|
sample_uuid:
|
||||||
|
title: Sample Uuid
|
||||||
|
type: string
|
||||||
|
required:
|
||||||
|
- sample_uuid
|
||||||
|
- oss_path
|
||||||
|
- extra
|
||||||
|
title: LabSample
|
||||||
|
type: object
|
||||||
|
description: move_to_output 返回类型
|
||||||
|
properties:
|
||||||
|
material_id:
|
||||||
|
title: Material Id
|
||||||
|
type: string
|
||||||
|
station_id:
|
||||||
|
title: Station Id
|
||||||
|
type: integer
|
||||||
|
success:
|
||||||
|
title: Success
|
||||||
|
type: boolean
|
||||||
|
unilabos_samples:
|
||||||
|
items:
|
||||||
|
$ref: '#/$defs/LabSample'
|
||||||
|
title: Unilabos Samples
|
||||||
|
type: array
|
||||||
|
required:
|
||||||
|
- success
|
||||||
|
- station_id
|
||||||
|
- material_id
|
||||||
|
- unilabos_samples
|
||||||
|
title: MoveToOutputResult
|
||||||
|
type: object
|
||||||
|
required:
|
||||||
|
- goal
|
||||||
|
title: move_to_output参数
|
||||||
|
type: object
|
||||||
|
type: UniLabJsonCommand
|
||||||
|
auto-prepare_materials:
|
||||||
|
feedback: {}
|
||||||
|
goal: {}
|
||||||
|
goal_default:
|
||||||
|
count: 5
|
||||||
|
handles:
|
||||||
|
output:
|
||||||
|
- data_key: material_1
|
||||||
|
data_source: executor
|
||||||
|
data_type: workbench_material
|
||||||
|
handler_key: channel_1
|
||||||
|
label: 实验1
|
||||||
|
- data_key: material_2
|
||||||
|
data_source: executor
|
||||||
|
data_type: workbench_material
|
||||||
|
handler_key: channel_2
|
||||||
|
label: 实验2
|
||||||
|
- data_key: material_3
|
||||||
|
data_source: executor
|
||||||
|
data_type: workbench_material
|
||||||
|
handler_key: channel_3
|
||||||
|
label: 实验3
|
||||||
|
- data_key: material_4
|
||||||
|
data_source: executor
|
||||||
|
data_type: workbench_material
|
||||||
|
handler_key: channel_4
|
||||||
|
label: 实验4
|
||||||
|
- data_key: material_5
|
||||||
|
data_source: executor
|
||||||
|
data_type: workbench_material
|
||||||
|
handler_key: channel_5
|
||||||
|
label: 实验5
|
||||||
|
placeholder_keys: {}
|
||||||
|
result: {}
|
||||||
|
schema:
|
||||||
|
description: 批量准备物料 - 虚拟起始节点,生成A1-A5物料,输出5个handle供后续节点使用
|
||||||
|
properties:
|
||||||
|
feedback: {}
|
||||||
|
goal:
|
||||||
|
properties:
|
||||||
|
count:
|
||||||
|
default: 5
|
||||||
|
description: 待生成的物料数量,默认5 (生成 A1-A5)
|
||||||
|
type: integer
|
||||||
|
required: []
|
||||||
|
type: object
|
||||||
|
result:
|
||||||
|
$defs:
|
||||||
|
LabSample:
|
||||||
|
properties:
|
||||||
|
extra:
|
||||||
|
additionalProperties: true
|
||||||
|
title: Extra
|
||||||
|
type: object
|
||||||
|
oss_path:
|
||||||
|
title: Oss Path
|
||||||
|
type: string
|
||||||
|
sample_uuid:
|
||||||
|
title: Sample Uuid
|
||||||
|
type: string
|
||||||
|
required:
|
||||||
|
- sample_uuid
|
||||||
|
- oss_path
|
||||||
|
- extra
|
||||||
|
title: LabSample
|
||||||
|
type: object
|
||||||
|
description: prepare_materials 返回类型 - 批量准备物料
|
||||||
|
properties:
|
||||||
|
count:
|
||||||
|
title: Count
|
||||||
|
type: integer
|
||||||
|
material_1:
|
||||||
|
title: Material 1
|
||||||
|
type: integer
|
||||||
|
material_2:
|
||||||
|
title: Material 2
|
||||||
|
type: integer
|
||||||
|
material_3:
|
||||||
|
title: Material 3
|
||||||
|
type: integer
|
||||||
|
material_4:
|
||||||
|
title: Material 4
|
||||||
|
type: integer
|
||||||
|
material_5:
|
||||||
|
title: Material 5
|
||||||
|
type: integer
|
||||||
|
message:
|
||||||
|
title: Message
|
||||||
|
type: string
|
||||||
|
success:
|
||||||
|
title: Success
|
||||||
|
type: boolean
|
||||||
|
unilabos_samples:
|
||||||
|
items:
|
||||||
|
$ref: '#/$defs/LabSample'
|
||||||
|
title: Unilabos Samples
|
||||||
|
type: array
|
||||||
|
required:
|
||||||
|
- success
|
||||||
|
- count
|
||||||
|
- material_1
|
||||||
|
- material_2
|
||||||
|
- material_3
|
||||||
|
- material_4
|
||||||
|
- material_5
|
||||||
|
- message
|
||||||
|
- unilabos_samples
|
||||||
|
title: PrepareMaterialsResult
|
||||||
|
type: object
|
||||||
|
required:
|
||||||
|
- goal
|
||||||
|
title: prepare_materials参数
|
||||||
|
type: object
|
||||||
|
type: UniLabJsonCommand
|
||||||
|
auto-start_heating:
|
||||||
|
feedback: {}
|
||||||
|
goal: {}
|
||||||
|
goal_default:
|
||||||
|
material_number: null
|
||||||
|
station_id: null
|
||||||
|
handles:
|
||||||
|
input:
|
||||||
|
- data_key: station_id
|
||||||
|
data_source: handle
|
||||||
|
data_type: workbench_station
|
||||||
|
handler_key: station_id_input
|
||||||
|
label: 加热台ID
|
||||||
|
- data_key: material_number
|
||||||
|
data_source: handle
|
||||||
|
data_type: workbench_material
|
||||||
|
handler_key: material_number_input
|
||||||
|
label: 物料编号
|
||||||
|
output:
|
||||||
|
- data_key: station_id
|
||||||
|
data_source: executor
|
||||||
|
data_type: workbench_station
|
||||||
|
handler_key: heating_done_station
|
||||||
|
label: 加热完成-加热台ID
|
||||||
|
- data_key: material_number
|
||||||
|
data_source: executor
|
||||||
|
data_type: workbench_material
|
||||||
|
handler_key: heating_done_material
|
||||||
|
label: 加热完成-物料编号
|
||||||
|
placeholder_keys: {}
|
||||||
|
result: {}
|
||||||
|
schema:
|
||||||
|
description: 启动指定加热台的加热程序
|
||||||
|
properties:
|
||||||
|
feedback: {}
|
||||||
|
goal:
|
||||||
|
properties:
|
||||||
|
material_number:
|
||||||
|
description: 物料编号,从上一节点传入
|
||||||
|
type: integer
|
||||||
|
station_id:
|
||||||
|
description: 加热台ID,1-3,从上一节点传入
|
||||||
|
type: integer
|
||||||
|
required:
|
||||||
|
- station_id
|
||||||
|
- material_number
|
||||||
|
type: object
|
||||||
|
result:
|
||||||
|
$defs:
|
||||||
|
LabSample:
|
||||||
|
properties:
|
||||||
|
extra:
|
||||||
|
additionalProperties: true
|
||||||
|
title: Extra
|
||||||
|
type: object
|
||||||
|
oss_path:
|
||||||
|
title: Oss Path
|
||||||
|
type: string
|
||||||
|
sample_uuid:
|
||||||
|
title: Sample Uuid
|
||||||
|
type: string
|
||||||
|
required:
|
||||||
|
- sample_uuid
|
||||||
|
- oss_path
|
||||||
|
- extra
|
||||||
|
title: LabSample
|
||||||
|
type: object
|
||||||
|
description: start_heating 返回类型
|
||||||
|
properties:
|
||||||
|
material_id:
|
||||||
|
title: Material Id
|
||||||
|
type: string
|
||||||
|
material_number:
|
||||||
|
title: Material Number
|
||||||
|
type: integer
|
||||||
|
message:
|
||||||
|
title: Message
|
||||||
|
type: string
|
||||||
|
station_id:
|
||||||
|
title: Station Id
|
||||||
|
type: integer
|
||||||
|
success:
|
||||||
|
title: Success
|
||||||
|
type: boolean
|
||||||
|
unilabos_samples:
|
||||||
|
items:
|
||||||
|
$ref: '#/$defs/LabSample'
|
||||||
|
title: Unilabos Samples
|
||||||
|
type: array
|
||||||
|
required:
|
||||||
|
- success
|
||||||
|
- station_id
|
||||||
|
- material_id
|
||||||
|
- material_number
|
||||||
|
- message
|
||||||
|
- unilabos_samples
|
||||||
|
title: StartHeatingResult
|
||||||
|
type: object
|
||||||
|
required:
|
||||||
|
- goal
|
||||||
|
title: start_heating参数
|
||||||
|
type: object
|
||||||
|
type: UniLabJsonCommand
|
||||||
|
module: unilabos.devices.virtual.workbench:VirtualWorkbench
|
||||||
|
status_types:
|
||||||
|
active_tasks_count: int
|
||||||
|
arm_current_task: str
|
||||||
|
arm_state: str
|
||||||
|
heating_station_1_material: str
|
||||||
|
heating_station_1_progress: float
|
||||||
|
heating_station_1_state: str
|
||||||
|
heating_station_2_material: str
|
||||||
|
heating_station_2_progress: float
|
||||||
|
heating_station_2_state: str
|
||||||
|
heating_station_3_material: str
|
||||||
|
heating_station_3_progress: float
|
||||||
|
heating_station_3_state: str
|
||||||
|
message: str
|
||||||
|
status: str
|
||||||
|
type: python
|
||||||
|
config_info: []
|
||||||
|
description: Virtual Workbench with 1 robotic arm and 3 heating stations for concurrent
|
||||||
|
material processing
|
||||||
|
handles: []
|
||||||
|
icon: ''
|
||||||
|
init_param_schema:
|
||||||
|
config:
|
||||||
|
properties:
|
||||||
|
config:
|
||||||
|
type: string
|
||||||
|
device_id:
|
||||||
|
type: string
|
||||||
|
required: []
|
||||||
|
type: object
|
||||||
|
data:
|
||||||
|
properties:
|
||||||
|
active_tasks_count:
|
||||||
|
type: integer
|
||||||
|
arm_current_task:
|
||||||
|
type: string
|
||||||
|
arm_state:
|
||||||
|
type: string
|
||||||
|
heating_station_1_material:
|
||||||
|
type: string
|
||||||
|
heating_station_1_progress:
|
||||||
|
type: number
|
||||||
|
heating_station_1_state:
|
||||||
|
type: string
|
||||||
|
heating_station_2_material:
|
||||||
|
type: string
|
||||||
|
heating_station_2_progress:
|
||||||
|
type: number
|
||||||
|
heating_station_2_state:
|
||||||
|
type: string
|
||||||
|
heating_station_3_material:
|
||||||
|
type: string
|
||||||
|
heating_station_3_progress:
|
||||||
|
type: number
|
||||||
|
heating_station_3_state:
|
||||||
|
type: string
|
||||||
|
message:
|
||||||
|
type: string
|
||||||
|
status:
|
||||||
|
type: string
|
||||||
|
required:
|
||||||
|
- status
|
||||||
|
- arm_state
|
||||||
|
- arm_current_task
|
||||||
|
- heating_station_1_state
|
||||||
|
- heating_station_1_material
|
||||||
|
- heating_station_1_progress
|
||||||
|
- heating_station_2_state
|
||||||
|
- heating_station_2_material
|
||||||
|
- heating_station_2_progress
|
||||||
|
- heating_station_3_state
|
||||||
|
- heating_station_3_material
|
||||||
|
- heating_station_3_progress
|
||||||
|
- active_tasks_count
|
||||||
|
- message
|
||||||
|
type: object
|
||||||
|
version: 1.0.0
|
||||||
|
@@ -4,6 +4,8 @@ import os
  import sys
  import inspect
  import importlib
+ import threading
+ from concurrent.futures import ThreadPoolExecutor, as_completed
  from pathlib import Path
  from typing import Any, Dict, List, Union, Tuple
 
@@ -60,6 +62,7 @@ class Registry:
          self.device_module_to_registry = {}
          self.resource_type_registry = {}
          self._setup_called = False  # 跟踪setup是否已调用
+         self._registry_lock = threading.Lock()  # 多线程加载时的锁
          # 其他状态变量
          # self.is_host_mode = False  # 移至BasicConfig中
 
@@ -71,6 +74,20 @@ class Registry:
 
          from unilabos.app.web.utils.action_utils import get_yaml_from_goal_type
 
+         # 获取 HostNode 类的增强信息,用于自动生成 action schema
+         host_node_enhanced_info = get_enhanced_class_info(
+             "unilabos.ros.nodes.presets.host_node:HostNode", use_dynamic=True
+         )
+
+         # 为 test_latency 生成 schema,保留原有 description
+         test_latency_method_info = host_node_enhanced_info.get("action_methods", {}).get("test_latency", {})
+         test_latency_schema = self._generate_unilab_json_command_schema(
+             test_latency_method_info.get("args", []),
+             "test_latency",
+             test_latency_method_info.get("return_annotation"),
+         )
+         test_latency_schema["description"] = "用于测试延迟的动作,返回延迟时间和时间差。"
+
          self.device_type_registry.update(
              {
                  "host_node": {
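For context, _generate_unilab_json_command_schema (shown further down in this diff) wraps the introspected method arguments into a goal object and returns a skeleton of the form sketched below. The exact property list for test_latency depends on the HostNode signature, which is not visible here, so the goal body is left empty as an assumption rather than copied from the real method.

# Rough shape of the generated schema, based on the skeleton visible later in this diff.
expected_test_latency_schema = {
    "title": "test_latency参数",
    "description": "用于测试延迟的动作,返回延迟时间和时间差。",  # set explicitly right after generation
    "type": "object",
    "properties": {
        "goal": {"type": "object", "properties": {}},  # filled from the introspected args (assumed empty here)
        "feedback": {},
        "result": {},
    },
    "required": ["goal"],
}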
@@ -124,28 +141,47 @@ class Registry:
                      "output": [
                          {
                              "handler_key": "labware",
-                             "label": "Labware",
                              "data_type": "resource",
-                             "data_source": "handle",
-                             "data_key": "liquid",
-                         }
+                             "label": "Labware",
+                             "data_source": "executor",
+                             "data_key": "created_resource_tree.@flatten",
+                         },
+                         {
+                             "handler_key": "liquid_slots",
+                             "data_type": "resource",
+                             "label": "LiquidSlots",
+                             "data_source": "executor",
+                             "data_key": "liquid_input_resource_tree.@flatten",
+                         },
+                         {
+                             "handler_key": "materials",
+                             "data_type": "resource",
+                             "label": "AllMaterials",
+                             "data_source": "executor",
+                             "data_key": "[created_resource_tree,liquid_input_resource_tree].@flatten.@flatten",
+                         },
                      ]
                  },
                  "placeholder_keys": {
                      "res_id": "unilabos_resources",  # 将当前实验室的全部物料id作为下拉框可选择
                      "device_id": "unilabos_devices",  # 将当前实验室的全部设备id作为下拉框可选择
                      "parent": "unilabos_nodes",  # 将当前实验室的设备/物料作为下拉框可选择
+                     "class_name": "unilabos_class",
                  },
              },
              "test_latency": {
-                 "type": self.EmptyIn,
+                 "type": (
+                     "UniLabJsonCommandAsync"
+                     if test_latency_method_info.get("is_async", False)
+                     else "UniLabJsonCommand"
+                 ),
                  "goal": {},
                  "feedback": {},
                  "result": {},
-                 "schema": ros_action_to_json_schema(
-                     self.EmptyIn, "用于测试延迟的动作,返回延迟时间和时间差。"
-                 ),
-                 "goal_default": {},
+                 "schema": test_latency_schema,
+                 "goal_default": {
+                     arg["name"]: arg["default"] for arg in test_latency_method_info.get("args", [])
+                 },
                  "handles": {},
              },
              "auto-test_resource": {
@@ -186,7 +222,17 @@ class Registry:
                      "resources": "unilabos_resources",
                  },
                  "goal_default": {},
-                 "handles": {},
+                 "handles": {
+                     "input": [
+                         {
+                             "handler_key": "input_resources",
+                             "data_type": "resource",
+                             "label": "InputResources",
+                             "data_source": "handle",
+                             "data_key": "resources",  # 不为空
+                         },
+                     ]
+                 },
              },
          },
      },
@@ -218,67 +264,115 @@ class Registry:
          # 标记setup已被调用
          self._setup_called = True
 
+     def _load_single_resource_file(
+         self, file: Path, complete_registry: bool, upload_registry: bool
+     ) -> Tuple[Dict[str, Any], Dict[str, Any], bool]:
+         """
+         加载单个资源文件 (线程安全)
+
+         Returns:
+             (data, complete_data, is_valid): 资源数据, 完整数据, 是否有效
+         """
+         try:
+             with open(file, encoding="utf-8", mode="r") as f:
+                 data = yaml.safe_load(io.StringIO(f.read()))
+         except Exception as e:
+             logger.warning(f"[UniLab Registry] 读取资源文件失败: {file}, 错误: {e}")
+             return {}, {}, False
+
+         if not data:
+             return {}, {}, False
+
+         complete_data = {}
+         for resource_id, resource_info in data.items():
+             if "version" not in resource_info:
+                 resource_info["version"] = "1.0.0"
+             if "category" not in resource_info:
+                 resource_info["category"] = [file.stem]
+             elif file.stem not in resource_info["category"]:
+                 resource_info["category"].append(file.stem)
+             elif not isinstance(resource_info.get("category"), list):
+                 resource_info["category"] = [resource_info["category"]]
+             if "config_info" not in resource_info:
+                 resource_info["config_info"] = []
+             if "icon" not in resource_info:
+                 resource_info["icon"] = ""
+             if "handles" not in resource_info:
+                 resource_info["handles"] = []
+             if "init_param_schema" not in resource_info:
+                 resource_info["init_param_schema"] = {}
+             if "config_info" in resource_info:
+                 del resource_info["config_info"]
+             if "file_path" in resource_info:
+                 del resource_info["file_path"]
+             complete_data[resource_id] = copy.deepcopy(dict(sorted(resource_info.items())))
+             if upload_registry:
+                 class_info = resource_info.get("class", {})
+                 if len(class_info) and "module" in class_info:
+                     if class_info.get("type") == "pylabrobot":
+                         res_class = get_class(class_info["module"])
+                         if callable(res_class) and not isinstance(res_class, type):
+                             res_instance = res_class(res_class.__name__)
+                             res_ulr = tree_to_list([resource_plr_to_ulab(res_instance)])
+                             resource_info["config_info"] = res_ulr
+             resource_info["registry_type"] = "resource"
+             resource_info["file_path"] = str(file.absolute()).replace("\\", "/")
+
+         complete_data = dict(sorted(complete_data.items()))
+         complete_data = copy.deepcopy(complete_data)
+
+         if complete_registry:
+             try:
+                 with open(file, "w", encoding="utf-8") as f:
+                     yaml.dump(complete_data, f, allow_unicode=True, default_flow_style=False, Dumper=NoAliasDumper)
+             except Exception as e:
+                 logger.warning(f"[UniLab Registry] 写入资源文件失败: {file}, 错误: {e}")
+
+         return data, complete_data, True
+
      def load_resource_types(self, path: os.PathLike, complete_registry: bool, upload_registry: bool):
          abs_path = Path(path).absolute()
          resource_path = abs_path / "resources"
          files = list(resource_path.glob("*/*.yaml"))
-         logger.trace(f"[UniLab Registry] load resources? {resource_path.exists()}, total: {len(files)}")
-         current_resource_number = len(self.resource_type_registry) + 1
-         for i, file in enumerate(files):
-             with open(file, encoding="utf-8", mode="r") as f:
-                 data = yaml.safe_load(io.StringIO(f.read()))
-             complete_data = {}
-             if data:
-                 # 为每个资源添加文件路径信息
-                 for resource_id, resource_info in data.items():
-                     if "version" not in resource_info:
-                         resource_info["version"] = "1.0.0"
-                     if "category" not in resource_info:
-                         resource_info["category"] = [file.stem]
-                     elif file.stem not in resource_info["category"]:
-                         resource_info["category"].append(file.stem)
-                     elif not isinstance(resource_info.get("category"), list):
-                         resource_info["category"] = [resource_info["category"]]
-                     if "config_info" not in resource_info:
-                         resource_info["config_info"] = []
-                     if "icon" not in resource_info:
-                         resource_info["icon"] = ""
-                     if "handles" not in resource_info:
-                         resource_info["handles"] = []
-                     if "init_param_schema" not in resource_info:
-                         resource_info["init_param_schema"] = {}
-                     if "config_info" in resource_info:
-                         del resource_info["config_info"]
-                     if "file_path" in resource_info:
-                         del resource_info["file_path"]
-                     complete_data[resource_id] = copy.deepcopy(dict(sorted(resource_info.items())))
-                     if upload_registry:
-                         class_info = resource_info.get("class", {})
-                         if len(class_info) and "module" in class_info:
-                             if class_info.get("type") == "pylabrobot":
-                                 res_class = get_class(class_info["module"])
-                                 if callable(res_class) and not isinstance(
-                                     res_class, type
-                                 ):  # 有的是类,有的是函数,这里暂时只登记函数类的
-                                     res_instance = res_class(res_class.__name__)
-                                     res_ulr = tree_to_list([resource_plr_to_ulab(res_instance)])
-                                     resource_info["config_info"] = res_ulr
-                     resource_info["registry_type"] = "resource"
-                     resource_info["file_path"] = str(file.absolute()).replace("\\", "/")
-                 complete_data = dict(sorted(complete_data.items()))
-                 complete_data = copy.deepcopy(complete_data)
-                 if complete_registry:
-                     with open(file, "w", encoding="utf-8") as f:
-                         yaml.dump(complete_data, f, allow_unicode=True, default_flow_style=False, Dumper=NoAliasDumper)
+         logger.debug(f"[UniLab Registry] resources: {resource_path.exists()}, total: {len(files)}")
+
+         if not files:
+             return
+
+         # 使用线程池并行加载
+         max_workers = min(8, len(files))
+         results = []
+
+         with ThreadPoolExecutor(max_workers=max_workers) as executor:
+             future_to_file = {
+                 executor.submit(self._load_single_resource_file, file, complete_registry, upload_registry): file
+                 for file in files
+             }
+             for future in as_completed(future_to_file):
+                 file = future_to_file[future]
+                 try:
+                     data, complete_data, is_valid = future.result()
+                     if is_valid:
+                         results.append((file, data))
+                 except Exception as e:
+                     logger.warning(f"[UniLab Registry] 处理资源文件异常: {file}, 错误: {e}")
+
+         # 线程安全地更新注册表
+         current_resource_number = len(self.resource_type_registry) + 1
+         with self._registry_lock:
+             for i, (file, data) in enumerate(results):
                  self.resource_type_registry.update(data)
-                 logger.trace( # type: ignore
-                     f"[UniLab Registry] Resource-{current_resource_number} File-{i+1}/{len(files)} "
+                 logger.trace(
+                     f"[UniLab Registry] Resource-{current_resource_number} File-{i+1}/{len(results)} "
                      + f"Add {list(data.keys())}"
                  )
                  current_resource_number += 1
-             else:
-                 logger.debug(f"[UniLab Registry] Res File-{i+1}/{len(files)} Not Valid YAML File: {file.absolute()}")
+         # 记录无效文件
+         valid_files = {r[0] for r in results}
+         for file in files:
+             if file not in valid_files:
+                 logger.debug(f"[UniLab Registry] Res File Not Valid YAML File: {file.absolute()}")
 
      def _extract_class_docstrings(self, module_string: str) -> Dict[str, str]:
          """
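The refactor above follows the usual submit/as_completed pattern: each file is parsed in a worker, a failure stays confined to its own future, and the shared registry is only touched afterwards under the lock. A stripped-down version of that pattern, independent of the UniLab registry classes and using made-up file names, looks like this.

# Generic sketch of the parallel-load pattern used above (no UniLab types involved).
import threading
from concurrent.futures import ThreadPoolExecutor, as_completed

def parse(path: str) -> dict:
    return {path: len(path)}  # stand-in for yaml.safe_load plus post-processing

paths = ["a.yaml", "b.yaml", "c.yaml"]
registry: dict = {}
lock = threading.Lock()
results = []

with ThreadPoolExecutor(max_workers=min(8, len(paths))) as pool:
    futures = {pool.submit(parse, p): p for p in paths}
    for fut in as_completed(futures):
        try:
            results.append(fut.result())   # exceptions stay scoped to the one file
        except Exception as exc:
            print(f"skipping {futures[fut]}: {exc}")

with lock:                                 # single writer section, mirroring _registry_lock
    for item in results:
        registry.update(item)

print(registry)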
@@ -455,7 +549,11 @@ class Registry:
          return status_schema
 
      def _generate_unilab_json_command_schema(
-         self, method_args: List[Dict[str, Any]], method_name: str, return_annotation: Any = None
+         self,
+         method_args: List[Dict[str, Any]],
+         method_name: str,
+         return_annotation: Any = None,
+         previous_schema: Dict[str, Any] | None = None,
      ) -> Dict[str, Any]:
          """
          根据UniLabJsonCommand方法信息生成JSON Schema,暂不支持嵌套类型
@@ -464,6 +562,7 @@ class Registry:
              method_args: 方法信息字典,包含args等
              method_name: 方法名称
              return_annotation: 返回类型注解,用于生成result schema(仅支持TypedDict)
+             previous_schema: 之前的 schema,用于保留 goal/feedback/result 下一级字段的 description
 
          Returns:
              JSON Schema格式的参数schema
@@ -497,7 +596,7 @@ class Registry:
          if return_annotation is not None and self._is_typed_dict(return_annotation):
              result_schema = self._generate_typed_dict_result_schema(return_annotation)
 
-         return {
+         final_schema = {
              "title": f"{method_name}参数",
              "description": f"",
              "type": "object",
@@ -505,6 +604,40 @@ class Registry:
              "required": ["goal"],
          }
 
+         # 保留之前 schema 中 goal/feedback/result 下一级字段的 description
+         if previous_schema:
+             self._preserve_field_descriptions(final_schema, previous_schema)
+
+         return final_schema
+
+     def _preserve_field_descriptions(self, new_schema: Dict[str, Any], previous_schema: Dict[str, Any]) -> None:
+         """
+         保留之前 schema 中 goal/feedback/result 下一级字段的 description 和 title
+
+         Args:
+             new_schema: 新生成的 schema(会被修改)
+             previous_schema: 之前的 schema
+         """
+         for section in ["goal", "feedback", "result"]:
+             new_section = new_schema.get("properties", {}).get(section, {})
+             prev_section = previous_schema.get("properties", {}).get(section, {})
+
+             if not new_section or not prev_section:
+                 continue
+
+             new_props = new_section.get("properties", {})
+             prev_props = prev_section.get("properties", {})
+
+             for field_name, field_schema in new_props.items():
+                 if field_name in prev_props:
+                     prev_field = prev_props[field_name]
+                     # 保留字段的 description
+                     if "description" in prev_field and prev_field["description"]:
+                         field_schema["description"] = prev_field["description"]
+                     # 保留字段的 title(用户自定义的中文名)
+                     if "title" in prev_field and prev_field["title"]:
+                         field_schema["title"] = prev_field["title"]
+
      def _is_typed_dict(self, annotation: Any) -> bool:
          """
          检查类型注解是否是TypedDict
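To make the merge behaviour concrete: only the description and title of fields that exist in both the old and the regenerated schema are carried over, and fields that are new to the regenerated schema keep whatever the generator produced. A small worked example with made-up field names:

# Worked example of the description/title carry-over performed by _preserve_field_descriptions.
prev = {"properties": {"goal": {"properties": {"volume": {"type": "number", "description": "移液体积", "title": "体积"}}}}}
new = {"properties": {"goal": {"properties": {"volume": {"type": "number"}, "speed": {"type": "number"}}}}}

for field, schema in new["properties"]["goal"]["properties"].items():
    old_field = prev["properties"]["goal"]["properties"].get(field, {})
    if old_field.get("description"):
        schema["description"] = old_field["description"]
    if old_field.get("title"):
        schema["title"] = old_field["title"]

# "volume" now carries the old description/title; "speed" is untouched because it
# did not exist in the previous schema.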
@@ -591,209 +724,244 @@ class Registry:
|
|||||||
"handles": {},
|
"handles": {},
|
||||||
}
|
}
|
||||||
|
|
||||||
|
def _load_single_device_file(
|
||||||
|
self, file: Path, complete_registry: bool, get_yaml_from_goal_type
|
||||||
|
) -> Tuple[Dict[str, Any], Dict[str, Any], bool, List[str]]:
|
||||||
|
"""
|
||||||
|
加载单个设备文件 (线程安全)
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
(data, complete_data, is_valid, device_ids): 设备数据, 完整数据, 是否有效, 设备ID列表
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
with open(file, encoding="utf-8", mode="r") as f:
|
||||||
|
data = yaml.safe_load(io.StringIO(f.read()))
|
||||||
|
except Exception as e:
|
||||||
|
logger.warning(f"[UniLab Registry] 读取设备文件失败: {file}, 错误: {e}")
|
||||||
|
return {}, {}, False, []
|
||||||
|
|
||||||
|
if not data:
|
||||||
|
return {}, {}, False, []
|
||||||
|
|
||||||
|
complete_data = {}
|
||||||
|
action_str_type_mapping = {
|
||||||
|
"UniLabJsonCommand": "UniLabJsonCommand",
|
||||||
|
"UniLabJsonCommandAsync": "UniLabJsonCommandAsync",
|
||||||
|
}
|
||||||
|
status_str_type_mapping = {}
|
||||||
|
device_ids = []
|
||||||
|
|
||||||
|
for device_id, device_config in data.items():
|
||||||
|
if "version" not in device_config:
|
||||||
|
device_config["version"] = "1.0.0"
|
||||||
|
if "category" not in device_config:
|
||||||
|
device_config["category"] = [file.stem]
|
||||||
|
elif file.stem not in device_config["category"]:
|
||||||
|
device_config["category"].append(file.stem)
|
||||||
|
if "config_info" not in device_config:
|
||||||
|
device_config["config_info"] = []
|
||||||
|
if "description" not in device_config:
|
||||||
|
device_config["description"] = ""
|
||||||
|
if "icon" not in device_config:
|
||||||
|
device_config["icon"] = ""
|
||||||
|
if "handles" not in device_config:
|
||||||
|
device_config["handles"] = []
|
||||||
|
if "init_param_schema" not in device_config:
|
||||||
|
device_config["init_param_schema"] = {}
|
||||||
|
if "class" in device_config:
|
||||||
|
if "status_types" not in device_config["class"] or device_config["class"]["status_types"] is None:
|
||||||
|
device_config["class"]["status_types"] = {}
|
||||||
|
if (
|
||||||
|
"action_value_mappings" not in device_config["class"]
|
||||||
|
or device_config["class"]["action_value_mappings"] is None
|
||||||
|
):
|
||||||
|
device_config["class"]["action_value_mappings"] = {}
|
||||||
|
enhanced_info = {}
|
||||||
|
if complete_registry:
|
||||||
|
device_config["class"]["status_types"].clear()
|
||||||
|
enhanced_info = get_enhanced_class_info(device_config["class"]["module"], use_dynamic=True)
|
||||||
|
if not enhanced_info.get("dynamic_import_success", False):
|
||||||
|
continue
|
||||||
|
device_config["class"]["status_types"].update(
|
||||||
|
{k: v["return_type"] for k, v in enhanced_info["status_methods"].items()}
|
||||||
|
)
|
||||||
|
for status_name, status_type in device_config["class"]["status_types"].items():
|
||||||
|
if isinstance(status_type, tuple) or status_type in ["Any", "None", "Unknown"]:
|
||||||
|
status_type = "String"
|
||||||
|
device_config["class"]["status_types"][status_name] = status_type
|
||||||
|
try:
|
||||||
|
target_type = self._replace_type_with_class(status_type, device_id, f"状态 {status_name}")
|
||||||
|
except ROSMsgNotFound:
|
||||||
|
continue
|
||||||
|
if target_type in [dict, list]:
|
||||||
|
target_type = String
|
||||||
|
status_str_type_mapping[status_type] = target_type
|
||||||
|
device_config["class"]["status_types"] = dict(sorted(device_config["class"]["status_types"].items()))
|
||||||
|
+if complete_registry:
+    old_action_configs = {}
+    for action_name, action_config in device_config["class"]["action_value_mappings"].items():
+        old_action_configs[action_name] = action_config
+
+    device_config["class"]["action_value_mappings"] = {
+        k: v
+        for k, v in device_config["class"]["action_value_mappings"].items()
+        if not k.startswith("auto-")
+    }
+    device_config["class"]["action_value_mappings"].update(
+        {
+            f"auto-{k}": {
+                "type": "UniLabJsonCommandAsync" if v["is_async"] else "UniLabJsonCommand",
+                "goal": {},
+                "feedback": {},
+                "result": {},
+                "schema": self._generate_unilab_json_command_schema(
+                    v["args"],
+                    k,
+                    v.get("return_annotation"),
+                    old_action_configs.get(f"auto-{k}", {}).get("schema"),
+                ),
+                "goal_default": {i["name"]: i["default"] for i in v["args"]},
+                "handles": old_action_configs.get(f"auto-{k}", {}).get("handles", []),
+                "placeholder_keys": {
+                    i["name"]: (
+                        "unilabos_resources"
+                        if i["type"] == "unilabos.registry.placeholder_type:ResourceSlot"
+                        or i["type"] == ("list", "unilabos.registry.placeholder_type:ResourceSlot")
+                        else "unilabos_devices"
+                    )
+                    for i in v["args"]
+                    if i.get("type", "")
+                    in [
+                        "unilabos.registry.placeholder_type:ResourceSlot",
+                        "unilabos.registry.placeholder_type:DeviceSlot",
+                        ("list", "unilabos.registry.placeholder_type:ResourceSlot"),
+                        ("list", "unilabos.registry.placeholder_type:DeviceSlot"),
+                    ]
+                },
+            }
+            for k, v in enhanced_info["action_methods"].items()
+            if k not in device_config["class"]["action_value_mappings"]
+        }
+    )
+    for action_name, old_config in old_action_configs.items():
+        if action_name in device_config["class"]["action_value_mappings"]:
+            old_schema = old_config.get("schema", {})
+            if "description" in old_schema and old_schema["description"]:
+                device_config["class"]["action_value_mappings"][action_name]["schema"][
+                    "description"
+                ] = old_schema["description"]
+    device_config["init_param_schema"] = {}
+    device_config["init_param_schema"]["config"] = self._generate_unilab_json_command_schema(
+        enhanced_info["init_params"], "__init__"
+    )["properties"]["goal"]
+    device_config["init_param_schema"]["data"] = self._generate_status_types_schema(
+        enhanced_info["status_methods"]
+    )
+
+device_config.pop("schema", None)
+device_config["class"]["action_value_mappings"] = dict(
+    sorted(device_config["class"]["action_value_mappings"].items())
+)
+for action_name, action_config in device_config["class"]["action_value_mappings"].items():
+    if "handles" not in action_config:
+        action_config["handles"] = {}
+    elif isinstance(action_config["handles"], list):
+        if len(action_config["handles"]):
+            logger.error(f"设备{device_id} {action_name} 的handles配置错误,应该是字典类型")
+            continue
+        else:
+            action_config["handles"] = {}
+    if "type" in action_config:
+        action_type_str: str = action_config["type"]
+        if not action_type_str.startswith("UniLabJsonCommand"):
+            try:
+                target_type = self._replace_type_with_class(
+                    action_type_str, device_id, f"动作 {action_name}"
+                )
+            except ROSMsgNotFound:
+                continue
+            action_str_type_mapping[action_type_str] = target_type
+            if target_type is not None:
+                action_config["goal_default"] = yaml.safe_load(
+                    io.StringIO(get_yaml_from_goal_type(target_type.Goal))
+                )
+                action_config["schema"] = ros_action_to_json_schema(target_type)
+            else:
+                logger.warning(
+                    f"[UniLab Registry] 设备 {device_id} 的动作 {action_name} 类型为空,跳过替换"
+                )
+complete_data[device_id] = copy.deepcopy(dict(sorted(device_config.items())))
+for status_name, status_type in device_config["class"]["status_types"].items():
+    device_config["class"]["status_types"][status_name] = status_str_type_mapping[status_type]
+for action_name, action_config in device_config["class"]["action_value_mappings"].items():
+    if action_config["type"] not in action_str_type_mapping:
+        continue
+    action_config["type"] = action_str_type_mapping[action_config["type"]]
+self._add_builtin_actions(device_config, device_id)
+device_config["file_path"] = str(file.absolute()).replace("\\", "/")
+device_config["registry_type"] = "device"
+device_ids.append(device_id)
+
+complete_data = dict(sorted(complete_data.items()))
+complete_data = copy.deepcopy(complete_data)
+try:
+    with open(file, "w", encoding="utf-8") as f:
+        yaml.dump(complete_data, f, allow_unicode=True, default_flow_style=False, Dumper=NoAliasDumper)
+except Exception as e:
+    logger.warning(f"[UniLab Registry] 写入设备文件失败: {file}, 错误: {e}")
+
+return data, complete_data, True, device_ids
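For orientation, the added block above builds one "auto-<method>" entry per introspected driver method. A hypothetical entry for a driver method def set_temperature(self, target: float = 25.0) might look roughly like the sketch below; the field names follow the diff, but the values and the omitted schema contents are only illustrative, not the project's actual output.

    # Illustrative shape of an auto-generated action mapping entry (values are made up).
    example_entry = {
        "auto-set_temperature": {
            "type": "UniLabJsonCommand",   # async drivers get UniLabJsonCommandAsync
            "goal": {},
            "feedback": {},
            "result": {},
            "goal_default": {"target": 25.0},  # taken from the method's default arguments
            "handles": [],
            "placeholder_keys": {},            # filled only for ResourceSlot/DeviceSlot parameters
        }
    }
    print(example_entry)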
 def load_device_types(self, path: os.PathLike, complete_registry: bool):
-    # return
     abs_path = Path(path).absolute()
     devices_path = abs_path / "devices"
     device_comms_path = abs_path / "device_comms"
     files = list(devices_path.glob("*.yaml")) + list(device_comms_path.glob("*.yaml"))
-    logger.trace(  # type: ignore
+    logger.trace(
         f"[UniLab Registry] devices: {devices_path.exists()}, device_comms: {device_comms_path.exists()}, "
         + f"total: {len(files)}"
     )
-    current_device_number = len(self.device_type_registry) + 1
+    if not files:
+        return

     from unilabos.app.web.utils.action_utils import get_yaml_from_goal_type

-    for i, file in enumerate(files):
-        with open(file, encoding="utf-8", mode="r") as f:
-            data = yaml.safe_load(io.StringIO(f.read()))
-        complete_data = {}
-        action_str_type_mapping = {
-            "UniLabJsonCommand": "UniLabJsonCommand",
-            "UniLabJsonCommandAsync": "UniLabJsonCommandAsync",
-        }
-        status_str_type_mapping = {}
-        if data:
-            # 在添加到注册表前处理类型替换
-            for device_id, device_config in data.items():
-                # 添加文件路径信息 - 使用规范化的完整文件路径
-                if "version" not in device_config:
-                    device_config["version"] = "1.0.0"
-                if "category" not in device_config:
-                    device_config["category"] = [file.stem]
-                elif file.stem not in device_config["category"]:
-                    device_config["category"].append(file.stem)
-                if "config_info" not in device_config:
-                    device_config["config_info"] = []
-                if "description" not in device_config:
-                    device_config["description"] = ""
-                if "icon" not in device_config:
-                    device_config["icon"] = ""
-                if "handles" not in device_config:
-                    device_config["handles"] = []
-                if "init_param_schema" not in device_config:
-                    device_config["init_param_schema"] = {}
-                if "class" in device_config:
-                    if (
-                        "status_types" not in device_config["class"]
-                        or device_config["class"]["status_types"] is None
-                    ):
-                        device_config["class"]["status_types"] = {}
-                    if (
-                        "action_value_mappings" not in device_config["class"]
-                        or device_config["class"]["action_value_mappings"] is None
-                    ):
-                        device_config["class"]["action_value_mappings"] = {}
-                    enhanced_info = {}
-                    if complete_registry:
-                        device_config["class"]["status_types"].clear()
-                        enhanced_info = get_enhanced_class_info(device_config["class"]["module"], use_dynamic=True)
-                        if not enhanced_info.get("dynamic_import_success", False):
-                            continue
-                        device_config["class"]["status_types"].update(
-                            {k: v["return_type"] for k, v in enhanced_info["status_methods"].items()}
-                        )
-                    for status_name, status_type in device_config["class"]["status_types"].items():
-                        if isinstance(status_type, tuple) or status_type in ["Any", "None", "Unknown"]:
-                            status_type = "String"  # 替换成ROS的String,便于显示
-                            device_config["class"]["status_types"][status_name] = status_type
-                        try:
-                            target_type = self._replace_type_with_class(
-                                status_type, device_id, f"状态 {status_name}"
-                            )
-                        except ROSMsgNotFound:
-                            continue
-                        if target_type in [
-                            dict,
-                            list,
-                        ]:  # 对于嵌套类型返回的对象,暂时处理成字符串,无法直接进行转换
-                            target_type = String
-                        status_str_type_mapping[status_type] = target_type
-                    device_config["class"]["status_types"] = dict(
-                        sorted(device_config["class"]["status_types"].items())
-                    )
-                    if complete_registry:
-                        # 保存原有的description信息
-                        old_descriptions = {}
-                        for action_name, action_config in device_config["class"]["action_value_mappings"].items():
-                            if "description" in action_config.get("schema", {}):
-                                description = action_config["schema"]["description"]
-                                if len(description):
-                                    old_descriptions[action_name] = action_config["schema"]["description"]
-
-                        device_config["class"]["action_value_mappings"] = {
-                            k: v
-                            for k, v in device_config["class"]["action_value_mappings"].items()
-                            if not k.startswith("auto-")
-                        }
-                        # 处理动作值映射
-                        device_config["class"]["action_value_mappings"].update(
-                            {
-                                f"auto-{k}": {
-                                    "type": "UniLabJsonCommandAsync" if v["is_async"] else "UniLabJsonCommand",
-                                    "goal": {},
-                                    "feedback": {},
-                                    "result": {},
-                                    "schema": self._generate_unilab_json_command_schema(
-                                        v["args"], k, v.get("return_annotation")
-                                    ),
-                                    "goal_default": {i["name"]: i["default"] for i in v["args"]},
-                                    "handles": [],
-                                    "placeholder_keys": {
-                                        i["name"]: (
-                                            "unilabos_resources"
-                                            if i["type"] == "unilabos.registry.placeholder_type:ResourceSlot"
-                                            or i["type"]
-                                            == ("list", "unilabos.registry.placeholder_type:ResourceSlot")
-                                            else "unilabos_devices"
-                                        )
-                                        for i in v["args"]
-                                        if i.get("type", "")
-                                        in [
-                                            "unilabos.registry.placeholder_type:ResourceSlot",
-                                            "unilabos.registry.placeholder_type:DeviceSlot",
-                                            ("list", "unilabos.registry.placeholder_type:ResourceSlot"),
-                                            ("list", "unilabos.registry.placeholder_type:DeviceSlot"),
-                                        ]
-                                    },
-                                }
-                                # 不生成已配置action的动作
-                                for k, v in enhanced_info["action_methods"].items()
-                                if k not in device_config["class"]["action_value_mappings"]
-                            }
-                        )
-                        # 恢复原有的description信息(auto开头的不修改)
-                        for action_name, description in old_descriptions.items():
-                            if action_name in device_config["class"]["action_value_mappings"]:  # 有一些会被删除
-                                device_config["class"]["action_value_mappings"][action_name]["schema"][
-                                    "description"
-                                ] = description
-                        device_config["init_param_schema"] = {}
-                        device_config["init_param_schema"]["config"] = self._generate_unilab_json_command_schema(
-                            enhanced_info["init_params"], "__init__"
-                        )["properties"]["goal"]
-                        device_config["init_param_schema"]["data"] = self._generate_status_types_schema(
-                            enhanced_info["status_methods"]
-                        )
-
-                    device_config.pop("schema", None)
-                    device_config["class"]["action_value_mappings"] = dict(
-                        sorted(device_config["class"]["action_value_mappings"].items())
-                    )
-                    for action_name, action_config in device_config["class"]["action_value_mappings"].items():
-                        if "handles" not in action_config:
-                            action_config["handles"] = {}
-                        elif isinstance(action_config["handles"], list):
-                            if len(action_config["handles"]):
-                                logger.error(f"设备{device_id} {action_name} 的handles配置错误,应该是字典类型")
-                                continue
-                            else:
-                                action_config["handles"] = {}
-                        if "type" in action_config:
-                            action_type_str: str = action_config["type"]
-                            # 通过Json发放指令,而不是通过特殊的ros action进行处理
-                            if not action_type_str.startswith("UniLabJsonCommand"):
-                                try:
-                                    target_type = self._replace_type_with_class(
-                                        action_type_str, device_id, f"动作 {action_name}"
-                                    )
-                                except ROSMsgNotFound:
-                                    continue
-                                action_str_type_mapping[action_type_str] = target_type
-                                if target_type is not None:
-                                    action_config["goal_default"] = yaml.safe_load(
-                                        io.StringIO(get_yaml_from_goal_type(target_type.Goal))
-                                    )
-                                    action_config["schema"] = ros_action_to_json_schema(target_type)
-                                else:
-                                    logger.warning(
-                                        f"[UniLab Registry] 设备 {device_id} 的动作 {action_name} 类型为空,跳过替换"
-                                    )
-                    complete_data[device_id] = copy.deepcopy(dict(sorted(device_config.items())))  # 稍后dump到文件
-                    for status_name, status_type in device_config["class"]["status_types"].items():
-                        device_config["class"]["status_types"][status_name] = status_str_type_mapping[status_type]
-                    for action_name, action_config in device_config["class"]["action_value_mappings"].items():
-                        if action_config["type"] not in action_str_type_mapping:
-                            continue
-                        action_config["type"] = action_str_type_mapping[action_config["type"]]
-                    # 添加内置的驱动命令动作
-                    self._add_builtin_actions(device_config, device_id)
-                    device_config["file_path"] = str(file.absolute()).replace("\\", "/")
-                    device_config["registry_type"] = "device"
-                    logger.trace(  # type: ignore
-                        f"[UniLab Registry] Device-{current_device_number} File-{i+1}/{len(files)} Add {device_id} "
-                        + f"[{data[device_id].get('name', '未命名设备')}]"
-                    )
-                    current_device_number += 1
-            complete_data = dict(sorted(complete_data.items()))
-            complete_data = copy.deepcopy(complete_data)
-            with open(file, "w", encoding="utf-8") as f:
-                yaml.dump(complete_data, f, allow_unicode=True, default_flow_style=False, Dumper=NoAliasDumper)
-            self.device_type_registry.update(data)
-        else:
-            logger.debug(
-                f"[UniLab Registry] Device File-{i+1}/{len(files)} Not Valid YAML File: {file.absolute()}"
-            )
+    # 使用线程池并行加载
+    max_workers = min(8, len(files))
+    results = []
+
+    with ThreadPoolExecutor(max_workers=max_workers) as executor:
+        future_to_file = {
+            executor.submit(self._load_single_device_file, file, complete_registry, get_yaml_from_goal_type): file
+            for file in files
+        }
+        for future in as_completed(future_to_file):
+            file = future_to_file[future]
+            try:
+                data, complete_data, is_valid, device_ids = future.result()
+                if is_valid:
+                    results.append((file, data, device_ids))
+            except Exception as e:
+                logger.warning(f"[UniLab Registry] 处理设备文件异常: {file}, 错误: {e}")
+
+    # 线程安全地更新注册表
+    current_device_number = len(self.device_type_registry) + 1
+    with self._registry_lock:
+        for file, data, device_ids in results:
+            self.device_type_registry.update(data)
+            for device_id in device_ids:
+                logger.trace(
+                    f"[UniLab Registry] Device-{current_device_number} Add {device_id} "
+                    + f"[{data[device_id].get('name', '未命名设备')}]"
+                )
+                current_device_number += 1
+
+    # 记录无效文件
+    valid_files = {r[0] for r in results}
+    for file in files:
+        if file not in valid_files:
+            logger.debug(f"[UniLab Registry] Device File Not Valid YAML File: {file.absolute()}")


 def obtain_registry_device_info(self):
     devices = []
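The rewritten load_device_types above fans per-file YAML parsing out to a thread pool and then merges the results under a lock. A minimal standalone sketch of the same stdlib pattern follows; the names load_one and load_all are illustrative stand-ins, not the project's API.

    from concurrent.futures import ThreadPoolExecutor, as_completed
    from pathlib import Path

    def load_one(path: Path) -> dict:
        # Stand-in for the real per-file loader: parse one file and return its data.
        return {"file": str(path)}

    def load_all(files: list) -> list:
        results = []
        max_workers = min(8, len(files)) or 1  # ThreadPoolExecutor requires max_workers >= 1
        with ThreadPoolExecutor(max_workers=max_workers) as executor:
            future_to_file = {executor.submit(load_one, f): f for f in files}
            for future in as_completed(future_to_file):
                src = future_to_file[future]
                try:
                    results.append(future.result())
                except Exception as exc:  # one bad file should not stop the whole load
                    print(f"failed to load {src}: {exc}")
        return results

    print(load_all([Path("a.yaml"), Path("b.yaml")]))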
@@ -27,7 +27,7 @@ class RegularContainer(Container):
 def get_regular_container(name="container"):
     r = RegularContainer(name=name)
     r.category = "container"
-    return RegularContainer(name=name)
+    return r


 #
 # class RegularContainer(object):

@@ -13,7 +13,7 @@ from unilabos.config.config import BasicConfig
 from unilabos.resources.container import RegularContainer
 from unilabos.resources.itemized_carrier import ItemizedCarrier, BottleCarrier
 from unilabos.ros.msgs.message_converter import convert_to_ros_msg
-from unilabos.ros.nodes.resource_tracker import (
+from unilabos.resources.resource_tracker import (
     ResourceDictInstance,
     ResourceTreeSet,
 )
@@ -151,12 +151,40 @@ def canonicalize_links_ports(links: List[Dict[str, Any]], resource_tree_set: Res
     """
     # 构建 id 到 uuid 的映射
     id_to_uuid: Dict[str, str] = {}
+    uuid_to_id: Dict[str, str] = {}
     for node in resource_tree_set.all_nodes:
         id_to_uuid[node.res_content.id] = node.res_content.uuid
+        uuid_to_id[node.res_content.uuid] = node.res_content.id
+
+    # 第三遍处理:为每个 link 添加 source_uuid 和 target_uuid
+    for link in links:
+        source_id = link.get("source")
+        target_id = link.get("target")
+
+        # 添加 source_uuid
+        if source_id and source_id in id_to_uuid:
+            link["source_uuid"] = id_to_uuid[source_id]
+
+        # 添加 target_uuid
+        if target_id and target_id in id_to_uuid:
+            link["target_uuid"] = id_to_uuid[target_id]
+
+        source_uuid = link.get("source_uuid")
+        target_uuid = link.get("target_uuid")
+
+        # 添加 source_uuid
+        if source_uuid and source_uuid in uuid_to_id:
+            link["source"] = uuid_to_id[source_uuid]
+
+        # 添加 target_uuid
+        if target_uuid and target_uuid in uuid_to_id:
+            link["target"] = uuid_to_id[target_uuid]
+
     # 第一遍处理:将字符串类型的port转换为字典格式
     for link in links:
         port = link.get("port")
+        if port is None:
+            continue
         if link.get("type", "physical") == "physical":
             link["type"] = "fluid"
         if isinstance(port, int):
@@ -179,13 +207,15 @@ def canonicalize_links_ports(links: List[Dict[str, Any]], resource_tree_set: Res
             link["port"] = {link["source"]: None, link["target"]: None}

     # 构建边字典,键为(source节点, target节点),值为对应的port信息
-    edges = {(link["source"], link["target"]): link["port"] for link in links}
+    edges = {(link["source"], link["target"]): link["port"] for link in links if link.get("port")}

     # 第二遍处理:填充反向边的dest信息
     delete_reverses = []
     for i, link in enumerate(links):
         s, t = link["source"], link["target"]
-        current_port = link["port"]
+        current_port = link.get("port")
+        if current_port is None:
+            continue
         if current_port.get(t) is None:
             reverse_key = (t, s)
             reverse_port = edges.get(reverse_key)
@@ -200,20 +230,6 @@ def canonicalize_links_ports(links: List[Dict[str, Any]], resource_tree_set: Res
                 current_port[t] = current_port[s]
     # 删除已被使用反向端口信息的反向边
     standardized_links = [link for i, link in enumerate(links) if i not in delete_reverses]
-
-    # 第三遍处理:为每个 link 添加 source_uuid 和 target_uuid
-    for link in standardized_links:
-        source_id = link.get("source")
-        target_id = link.get("target")
-
-        # 添加 source_uuid
-        if source_id and source_id in id_to_uuid:
-            link["source_uuid"] = id_to_uuid[source_id]
-
-        # 添加 target_uuid
-        if target_id and target_id in id_to_uuid:
-            link["target_uuid"] = id_to_uuid[target_id]
-
     return standardized_links

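The canonicalize_links_ports hunks above maintain a bidirectional id/uuid mapping so every link ends up carrying both forms of its endpoints. A self-contained sketch of that normalization, using made-up link data instead of the project's ResourceTreeSet:

    from typing import Any, Dict, List

    def canonicalize(links: List[Dict[str, Any]], id_to_uuid: Dict[str, str]) -> None:
        uuid_to_id = {v: k for k, v in id_to_uuid.items()}
        for link in links:
            # id -> uuid: attach *_uuid fields when the id side is known
            if link.get("source") in id_to_uuid:
                link["source_uuid"] = id_to_uuid[link["source"]]
            if link.get("target") in id_to_uuid:
                link["target_uuid"] = id_to_uuid[link["target"]]
            # uuid -> id: fill the id side when only the uuid is present
            if link.get("source_uuid") in uuid_to_id:
                link["source"] = uuid_to_id[link["source_uuid"]]
            if link.get("target_uuid") in uuid_to_id:
                link["target"] = uuid_to_id[link["target_uuid"]]

    links = [{"source": "pump_1", "target_uuid": "u-42"}]
    canonicalize(links, {"pump_1": "u-17", "valve_3": "u-42"})
    print(links)  # both endpoints now carry an id and a uuid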
@@ -260,7 +276,7 @@ def read_node_link_json(
     resource_tree_set = canonicalize_nodes_data(nodes)

     # 标准化边数据
-    links = data.get("links", [])
+    links = data.get("links", data.get("edges", []))
     standardized_links = canonicalize_links_ports(links, resource_tree_set)

     # 构建 NetworkX 图(需要转换回 dict 格式)

@@ -284,6 +300,8 @@ def modify_to_backend_format(data: list[dict[str, Any]]) -> list[dict[str, Any]]
             edge["sourceHandle"] = port[source]
         elif "source_port" in edge:
             edge["sourceHandle"] = edge.pop("source_port")
+        elif "source_handle" in edge:
+            edge["sourceHandle"] = edge.pop("source_handle")
         else:
             typ = edge.get("type")
             if typ == "communication":

@@ -292,6 +310,8 @@ def modify_to_backend_format(data: list[dict[str, Any]]) -> list[dict[str, Any]]
             edge["targetHandle"] = port[target]
         elif "target_port" in edge:
             edge["targetHandle"] = edge.pop("target_port")
+        elif "target_handle" in edge:
+            edge["targetHandle"] = edge.pop("target_handle")
         else:
             typ = edge.get("type")
             if typ == "communication":
@@ -597,6 +617,8 @@ def resource_plr_to_ulab(resource_plr: "ResourcePLR", parent_name: str = None, w
         "tube": "tube",
         "bottle_carrier": "bottle_carrier",
         "plate_adapter": "plate_adapter",
+        "electrode_sheet": "electrode_sheet",
+        "material_hole": "material_hole",
     }
     if source in replace_info:
         return replace_info[source]

@@ -1151,11 +1173,7 @@ def initialize_resource(resource_config: dict, resource_type: Any = None) -> Uni
     if resource_class_config["type"] == "pylabrobot":
         resource_plr = RESOURCE(name=resource_config["name"])
         if resource_type != ResourcePLR:
-            tree_sets = ResourceTreeSet.from_plr_resources([resource_plr])
-            # r = resource_plr_to_ulab(resource_plr=resource_plr, parent_name=resource_config.get("parent", None))
-            # # r = resource_plr_to_ulab(resource_plr=resource_plr)
-            # if resource_config.get("position") is not None:
-            #     r["position"] = resource_config["position"]
+            tree_sets = ResourceTreeSet.from_plr_resources([resource_plr], known_newly_created=True)
             r = tree_sets.dump()
         else:
             r = resource_plr
@@ -79,6 +79,7 @@ class ItemizedCarrier(ResourcePLR):
         category: Optional[str] = "carrier",
         model: Optional[str] = None,
         invisible_slots: Optional[str] = None,
+        content_type: Optional[List[str]] = ["bottle", "container", "tube", "bottle_carrier", "tip_rack"],
     ):
         super().__init__(
             name=name,

@@ -92,6 +93,7 @@ class ItemizedCarrier(ResourcePLR):
         self.num_items_x, self.num_items_y, self.num_items_z = num_items_x, num_items_y, num_items_z
         self.invisible_slots = [] if invisible_slots is None else invisible_slots
         self.layout = "z-y" if self.num_items_z > 1 and self.num_items_x == 1 else "x-z" if self.num_items_z > 1 and self.num_items_y == 1 else "x-y"
+        self.content_type = content_type

         if isinstance(sites, dict):
             sites = sites or {}

@@ -149,6 +151,7 @@ class ItemizedCarrier(ResourcePLR):

         if not reassign and self.sites[idx] is not None:
             raise ValueError(f"a site with index {idx} already exists")
+        location = list(self.child_locations.values())[idx]
         super().assign_child_resource(resource, location=location, reassign=reassign)
         self.sites[idx] = resource

@@ -418,7 +421,7 @@ class ItemizedCarrier(ResourcePLR):
                 self[identifier] if isinstance(self[identifier], str) else None,
                 "position": {"x": location.x, "y": location.y, "z": location.z},
                 "size": self.child_size[identifier],
-                "content_type": ["bottle", "container", "tube", "bottle_carrier", "tip_rack"]
+                "content_type": self.content_type
             } for identifier, location in self.child_locations.items()]
         }
@@ -1,10 +1,12 @@
 import inspect
 import traceback
 import uuid
-from pydantic import BaseModel, field_serializer, field_validator
+from pydantic import BaseModel, field_serializer, field_validator, ValidationError
 from pydantic import Field
 from typing import List, Tuple, Any, Dict, Literal, Optional, cast, TYPE_CHECKING, Union

+from typing_extensions import TypedDict
+
 from unilabos.resources.plr_additional_res_reg import register
 from unilabos.utils.log import logger

@@ -13,10 +15,33 @@ if TYPE_CHECKING:
     from pylabrobot.resources import Resource as PLRResource


+EXTRA_CLASS = "unilabos_resource_class"
+EXTRA_SAMPLE_UUID = "sample_uuid"
+EXTRA_UNILABOS_SAMPLE_UUID = "unilabos_sample_uuid"
+
+# 函数参数名常量 - 用于自动注入 sample_uuids 列表
+PARAM_SAMPLE_UUIDS = "sample_uuids"
+
+# JSON Command 中的系统参数字段名
+JSON_UNILABOS_PARAM = "unilabos_param"
+
+# 返回值中的 samples 字段名
+RETURN_UNILABOS_SAMPLES = "unilabos_samples"
+
+# sample_uuids 参数类型 (用于 virtual bench 等设备添加 sample_uuids 参数)
+SampleUUIDsType = Dict[str, Optional["PLRResource"]]
+
+
+class LabSample(TypedDict):
+    sample_uuid: str
+    oss_path: str
+    extra: Dict[str, Any]
+
+
 class ResourceDictPositionSize(BaseModel):
-    depth: float = Field(description="Depth", default=0.0)
-    width: float = Field(description="Width", default=0.0)
-    height: float = Field(description="Height", default=0.0)
+    depth: float = Field(description="Depth", default=0.0)  # z
+    width: float = Field(description="Width", default=0.0)  # x
+    height: float = Field(description="Height", default=0.0)  # y


 class ResourceDictPositionScale(BaseModel):
@@ -147,20 +172,24 @@ class ResourceDictInstance(object):
         if not content.get("extra"):  # MagicCode
             content["extra"] = {}
         if "position" in content:
-            pose = content.get("pose",{})
-            if "position" not in pose :
+            pose = content.get("pose", {})
+            if "position" not in pose:
                 if "position" in content["position"]:
                     pose["position"] = content["position"]["position"]
                 else:
                     pose["position"] = {"x": 0, "y": 0, "z": 0}
             if "size" not in pose:
                 pose["size"] = {
                     "width": content["config"].get("size_x", 0),
                     "height": content["config"].get("size_y", 0),
-                    "depth": content["config"].get("size_z", 0)
+                    "depth": content["config"].get("size_z", 0),
                 }
             content["pose"] = pose
-        return ResourceDictInstance(ResourceDict.model_validate(content))
+        try:
+            res_dict = ResourceDict.model_validate(content)
+            return ResourceDictInstance(res_dict)
+        except ValidationError as err:
+            raise err

     def get_plr_nested_dict(self) -> Dict[str, Any]:
         """获取资源实例的嵌套字典表示"""
@@ -322,7 +351,7 @@ class ResourceTreeSet(object):
         )

     @classmethod
-    def from_plr_resources(cls, resources: List["PLRResource"]) -> "ResourceTreeSet":
+    def from_plr_resources(cls, resources: List["PLRResource"], known_newly_created=False) -> "ResourceTreeSet":
         """
         从plr资源创建ResourceTreeSet
         """

@@ -339,6 +368,8 @@ class ResourceTreeSet(object):
             }
             if source in replace_info:
                 return replace_info[source]
+            elif source is None:
+                return ""
             else:
                 print("转换pylabrobot的时候,出现未知类型", source)
                 return source

@@ -349,7 +380,8 @@ class ResourceTreeSet(object):
             if not uid:
                 uid = str(uuid.uuid4())
                 res.unilabos_uuid = uid
-                logger.warning(f"{res}没有uuid,请设置后再传入,默认填充{uid}!\n{traceback.format_exc()}")
+                if not known_newly_created:
+                    logger.warning(f"{res}没有uuid,请设置后再传入,默认填充{uid}!\n{traceback.format_exc()}")

             # 获取unilabos_extra,默认为空字典
             extra = getattr(res, "unilabos_extra", {})

@@ -386,7 +418,7 @@ class ResourceTreeSet(object):
                 "parent": parent_resource,  # 直接传入 ResourceDict 对象
                 "parent_uuid": parent_uuid,  # 使用 parent_uuid 而不是 parent 对象
                 "type": replace_plr_type(d.get("category", "")),
-                "class": d.get("class", ""),
+                "class": extra.get(EXTRA_CLASS, ""),
                 "position": pos,
                 "pose": pos,
                 "config": {

@@ -436,7 +468,7 @@ class ResourceTreeSet(object):
             trees.append(tree_instance)
         return cls(trees)

-    def to_plr_resources(self) -> List["PLRResource"]:
+    def to_plr_resources(self, skip_devices=True) -> List["PLRResource"]:
         """
         将 ResourceTreeSet 转换为 PLR 资源列表

@@ -448,13 +480,20 @@ class ResourceTreeSet(object):
         from pylabrobot.utils.object_parsing import find_subclass

         # 类型映射
-        TYPE_MAP = {"plate": "Plate", "well": "Well", "deck": "Deck", "container": "RegularContainer", "tip_spot": "TipSpot"}
+        TYPE_MAP = {
+            "plate": "Plate",
+            "well": "Well",
+            "deck": "Deck",
+            "container": "RegularContainer",
+            "tip_spot": "TipSpot",
+        }

         def collect_node_data(node: ResourceDictInstance, name_to_uuid: dict, all_states: dict, name_to_extra: dict):
             """一次遍历收集 name_to_uuid, all_states 和 name_to_extra"""
             name_to_uuid[node.res_content.name] = node.res_content.uuid
             all_states[node.res_content.name] = node.res_content.data
             name_to_extra[node.res_content.name] = node.res_content.extra
+            name_to_extra[node.res_content.name][EXTRA_CLASS] = node.res_content.klass
             for child in node.children:
                 collect_node_data(child, name_to_uuid, all_states, name_to_extra)

@@ -469,9 +508,9 @@ class ResourceTreeSet(object):
                 **res.config,
                 "name": res.name,
                 "type": res.config.get("type", plr_type),
-                "size_x": res.config.get("size_x", 0),
-                "size_y": res.config.get("size_y", 0),
-                "size_z": res.config.get("size_z", 0),
+                "size_x": res.pose.size.width,
+                "size_y": res.pose.size.height,
+                "size_z": res.pose.size.depth,
                 "location": {
                     "x": res.pose.position.x,
                     "y": res.pose.position.y,

@@ -499,7 +538,10 @@ class ResourceTreeSet(object):
             plr_dict = node_to_plr_dict(tree.root_node, has_model)
             try:
                 sub_cls = find_subclass(plr_dict["type"], PLRResource)
-                if sub_cls is None:
+                if skip_devices and plr_dict["type"] == "device":
+                    logger.info(f"跳过更新 {plr_dict['name']} 设备是class")
+                    continue
+                elif sub_cls is None:
                     raise ValueError(
                         f"无法找到类型 {plr_dict['type']} 对应的 PLR 资源类。原始信息:{tree.root_node.res_content}"
                     )

@@ -507,6 +549,11 @@ class ResourceTreeSet(object):
                 if "category" not in spec.parameters:
                     plr_dict.pop("category", None)
                 plr_resource = sub_cls.deserialize(plr_dict, allow_marshal=True)
+                from pylabrobot.resources import Coordinate
+                from pylabrobot.serializer import deserialize
+
+                location = cast(Coordinate, deserialize(plr_dict["location"]))
+                plr_resource.location = location
                 plr_resource.load_all_state(all_states)
                 # 使用 DeviceNodeResourceTracker 设置 UUID 和 Extra
                 tracker.loop_set_uuid(plr_resource, name_to_uuid)
@@ -918,6 +965,33 @@ class DeviceNodeResourceTracker(object):

         return self._traverse_and_process(resource, process)

+    def loop_find_with_uuid(self, resource, target_uuid: str):
+        """
+        递归遍历资源树,根据 uuid 查找并返回对应的资源
+
+        Args:
+            resource: 资源对象(可以是list、dict或实例)
+            target_uuid: 要查找的uuid
+
+        Returns:
+            找到的资源对象,未找到则返回None
+        """
+        found_resource = None
+
+        def process(res):
+            nonlocal found_resource
+            if found_resource is not None:
+                return 0  # 已找到,跳过后续处理
+            current_uuid = self._get_resource_attr(res, "uuid", "unilabos_uuid")
+            if current_uuid and current_uuid == target_uuid:
+                found_resource = res
+                logger.trace(f"找到资源UUID: {target_uuid}")
+                return 1
+            return 0
+
+        self._traverse_and_process(resource, process)
+        return found_resource
+
     def loop_set_extra(self, resource, name_to_extra_map: Dict[str, dict]) -> int:
         """
         递归遍历资源树,根据 name 设置所有节点的 extra

@@ -936,7 +1010,7 @@ class DeviceNodeResourceTracker(object):
                 extra = name_to_extra_map[resource_name]
                 self.set_resource_extra(res, extra)
                 if len(extra):
-                    logger.debug(f"设置资源Extra: {resource_name} -> {extra}")
+                    logger.trace(f"设置资源Extra: {resource_name} -> {extra}")
                 return 1
             return 0

@@ -1103,7 +1177,7 @@ class DeviceNodeResourceTracker(object):
         for key in keys_to_remove:
             self.resource2parent_resource.pop(key, None)

-        logger.debug(f"成功移除资源: {resource}")
+        logger.trace(f"[ResourceTracker] 成功移除资源: {resource}")
         return True

     def clear_resource(self):
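The loop_find_with_uuid method added above walks a resource tree and returns the first node whose uuid matches. A standalone sketch of the same traversal over plain dict/list/object structures (not the tracker's real helpers or attribute names):

    from typing import Any, Optional

    def find_by_uuid(resource: Any, target_uuid: str) -> Optional[Any]:
        # Lists: recurse into each element until a match is found.
        if isinstance(resource, list):
            for item in resource:
                found = find_by_uuid(item, target_uuid)
                if found is not None:
                    return found
            return None
        # Dicts: check the node itself, then its children.
        if isinstance(resource, dict):
            if resource.get("uuid") == target_uuid:
                return resource
            return find_by_uuid(list(resource.get("children", [])), target_uuid)
        # Objects: look for a uuid-like attribute, then recurse into children.
        if getattr(resource, "unilabos_uuid", None) == target_uuid:
            return resource
        return find_by_uuid(list(getattr(resource, "children", [])), target_uuid)

    tree = {"uuid": "root", "children": [{"uuid": "u-1", "children": []}]}
    print(find_by_uuid(tree, "u-1"))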
@@ -5,7 +5,7 @@ from unilabos.ros.msgs.message_converter import (
     get_action_type,
 )
 from unilabos.ros.nodes.base_device_node import init_wrapper, ROS2DeviceNode
-from unilabos.ros.nodes.resource_tracker import ResourceDictInstance
+from unilabos.resources.resource_tracker import ResourceDictInstance

 # 定义泛型类型变量
 T = TypeVar("T")

@@ -1,10 +1,9 @@
-import copy
 from typing import Optional

 from unilabos.registry.registry import lab_registry
 from unilabos.ros.device_node_wrapper import ros2_device_node
 from unilabos.ros.nodes.base_device_node import ROS2DeviceNode, DeviceInitError
-from unilabos.ros.nodes.resource_tracker import ResourceDictInstance
+from unilabos.resources.resource_tracker import ResourceDictInstance
 from unilabos.utils import logger
 from unilabos.utils.exception import DeviceClassInvalid
 from unilabos.utils.import_manager import default_manager

@@ -1,4 +1,5 @@
 import json
+
 # from nt import device_encoding
 import threading
 import time

@@ -10,7 +11,7 @@ from unilabos_msgs.srv._serial_command import SerialCommand_Response

 from unilabos.app.register import register_devices_and_resources
 from unilabos.ros.nodes.presets.resource_mesh_manager import ResourceMeshManager
-from unilabos.ros.nodes.resource_tracker import DeviceNodeResourceTracker, ResourceTreeSet
+from unilabos.resources.resource_tracker import DeviceNodeResourceTracker, ResourceTreeSet
 from unilabos.devices.ros_dev.liquid_handler_joint_publisher import LiquidHandlerJointPublisher
 from unilabos_msgs.srv import SerialCommand  # type: ignore
 from rclpy.executors import MultiThreadedExecutor
@@ -55,7 +56,11 @@ def main(
 ) -> None:
     """主函数"""

-    rclpy.init(args=rclpy_init_args)
+    # Support restart - check if rclpy is already initialized
+    if not rclpy.ok():
+        rclpy.init(args=rclpy_init_args)
+    else:
+        logger.info("[ROS] rclpy already initialized, reusing context")
     executor = rclpy.__executor = MultiThreadedExecutor()
     # 创建主机节点
     host_node = HostNode(

@@ -88,7 +93,7 @@ def main(
     joint_republisher = JointRepublisher("joint_republisher", host_node.resource_tracker)
     # lh_joint_pub = LiquidHandlerJointPublisher(
     #     resources_config=resources_list, resource_tracker=host_node.resource_tracker
     # )
     executor.add_node(resource_mesh_manager)
     executor.add_node(joint_republisher)
     # executor.add_node(lh_joint_pub)
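The main() hunk above guards rclpy.init() so the node stack can be restarted inside one interpreter. A minimal sketch of that guard, assuming a working ROS 2 installation with rclpy available (rclpy.ok() is False before init() and after shutdown()):

    import rclpy

    def ensure_rclpy(args=None):
        # Initialize the default context only when it is not already usable.
        if not rclpy.ok():
            rclpy.init(args=args)
        else:
            print("rclpy already initialized, reusing context")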
@@ -159,10 +159,14 @@ _msg_converter: Dict[Type, Any] = {
             else Pose()
         ),
         config=json.dumps(x.get("config", {})),
-        data=json.dumps(x.get("data", {})),
+        data=json.dumps(obtain_data_with_uuid(x)),
     ),
 }


+def obtain_data_with_uuid(x: dict):
+    data = x.get("data", {})
+    data["unilabos_uuid"] = x.get("uuid", None)
+    return data
+
+
 def json_or_yaml_loads(data: str) -> Any:
     try:

@@ -357,7 +361,14 @@ def convert_to_ros_msg(ros_msg_type: Union[Type, Any], obj: Any) -> Any:
         if hasattr(ros_msg, key):
             attr = getattr(ros_msg, key)
             if isinstance(attr, (float, int, str, bool)):
-                setattr(ros_msg, key, type(attr)(value))
+                # 处理list类型的值,取第一个元素或抛出错误
+                if isinstance(value, list):
+                    if len(value) > 0:
+                        setattr(ros_msg, key, type(attr)(value[0]))
+                    else:
+                        setattr(ros_msg, key, type(attr)())  # 使用默认值
+                else:
+                    setattr(ros_msg, key, type(attr)(value))
             elif isinstance(attr, (list, tuple)) and isinstance(value, Iterable):
                 td = ros_msg.SLOT_TYPES[ind].value_type
                 if isinstance(td, NamespacedType):

@@ -370,9 +381,35 @@ def convert_to_ros_msg(ros_msg_type: Union[Type, Any], obj: Any) -> Any:
                     setattr(ros_msg, key, [])  # FIXME
             elif "array.array" in str(type(attr)):
                 if attr.typecode == "f" or attr.typecode == "d":
+                    # 如果是单个值,转换为列表
+                    if value is None:
+                        value = []
+                    elif not isinstance(value, Iterable) or isinstance(value, (str, bytes)):
+                        value = [value]
                     setattr(ros_msg, key, [float(i) for i in value])
                 else:
-                    setattr(ros_msg, key, value)
+                    # 对于整数数组,需要确保是序列且每个值在有效范围内
+                    if value is None:
+                        value = []
+                    elif not isinstance(value, Iterable) or isinstance(value, (str, bytes)):
+                        # 如果是单个值,转换为列表
+                        value = [value]
+                    # 确保每个整数值在有效范围内(-2147483648 到 2147483647)
+                    converted_value = []
+                    for i in value:
+                        if i is None:
+                            continue  # 跳过 None 值
+                        if isinstance(i, (int, float)):
+                            int_val = int(i)
+                            # 确保在 int32 范围内
+                            if int_val < -2147483648:
+                                int_val = -2147483648
+                            elif int_val > 2147483647:
+                                int_val = 2147483647
+                            converted_value.append(int_val)
+                        else:
+                            converted_value.append(i)
+                    setattr(ros_msg, key, converted_value)
             else:
                 nested_ros_msg = convert_to_ros_msg(type(attr)(), value)
                 setattr(ros_msg, key, nested_ros_msg)
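The integer-array branch above normalizes the incoming value to a sequence and clamps every element into the int32 range before assigning it to the message field. A small sketch of the same normalization, written independently of any ROS types (the simplification here: non-numeric items are dropped rather than passed through):

    from typing import Any, Iterable, List

    INT32_MIN, INT32_MAX = -2147483648, 2147483647

    def to_int32_list(value: Any) -> List[int]:
        if value is None:
            return []
        # Promote a bare scalar (or a string/bytes value) to a one-element list.
        if not isinstance(value, Iterable) or isinstance(value, (str, bytes)):
            value = [value]
        out: List[int] = []
        for item in value:
            if item is None:
                continue
            if isinstance(item, (int, float)):
                out.append(max(INT32_MIN, min(INT32_MAX, int(item))))
        return out

    print(to_int32_list(3.7))               # [3]
    print(to_int32_list([1, None, 2 ** 40]))  # [1, 2147483647]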
@@ -1,19 +1,29 @@
-import copy
 import inspect
 import io
 import json
 import threading
 import time
 import traceback
-from typing import get_type_hints, TypeVar, Generic, Dict, Any, Type, TypedDict, Optional, List, TYPE_CHECKING, Union, \
-    Tuple
+from typing import (
+    get_type_hints,
+    TypeVar,
+    Generic,
+    Dict,
+    Any,
+    Type,
+    TypedDict,
+    Optional,
+    List,
+    TYPE_CHECKING,
+    Union,
+    Tuple,
+)

 from concurrent.futures import ThreadPoolExecutor
 import asyncio

 import rclpy
 import yaml
-from msgcenterpy import ROS2MessageInstance
 from rclpy.node import Node
 from rclpy.action import ActionServer, ActionClient
 from rclpy.action.server import ServerGoalHandle

@@ -22,15 +32,13 @@ from rclpy.callback_groups import ReentrantCallbackGroup
 from rclpy.service import Service
 from unilabos_msgs.action import SendCmd
 from unilabos_msgs.srv._serial_command import SerialCommand_Request, SerialCommand_Response

+from unilabos.config.config import BasicConfig
 from unilabos.utils.decorator import get_topic_config, get_all_subscriptions

 from unilabos.resources.container import RegularContainer
 from unilabos.resources.graphio import (
-    resource_ulab_to_plr,
     initialize_resources,
-    dict_to_tree,
-    resource_plr_to_ulab,
-    tree_to_list,
 )
 from unilabos.resources.plr_additional_res_reg import register
 from unilabos.ros.msgs.message_converter import (

@@ -47,13 +55,15 @@ from unilabos_msgs.srv import (
 )  # type: ignore
 from unilabos_msgs.msg import Resource  # type: ignore

-from unilabos.ros.nodes.resource_tracker import (
+from unilabos.resources.resource_tracker import (
     DeviceNodeResourceTracker,
     ResourceTreeSet,
     ResourceTreeInstance,
     ResourceDictInstance,
+    EXTRA_SAMPLE_UUID,
+    PARAM_SAMPLE_UUIDS,
+    JSON_UNILABOS_PARAM,
 )
-from unilabos.ros.x.rclpyx import get_event_loop
 from unilabos.ros.utils.driver_creator import WorkstationNodeCreator, PyLabRobotCreator, DeviceClassCreator
 from rclpy.task import Task, Future
 from unilabos.utils.import_manager import default_manager
@@ -189,7 +199,7 @@ class PropertyPublisher:
                 f"创建发布者 {name} 失败,可能由于注册表有误,类型: {msg_type},错误: {ex}\n{traceback.format_exc()}"
             )
         self.timer = node.create_timer(self.timer_period, self.publish_property)
-        self.__loop = get_event_loop()
+        self.__loop = ROS2DeviceNode.get_asyncio_loop()
         str_msg_type = str(msg_type)[8:-2]
         self.node.lab_logger().trace(f"发布属性: {name}, 类型: {str_msg_type}, 周期: {initial_period}秒, QoS: {qos}")

@@ -221,14 +231,15 @@ class PropertyPublisher:

     def publish_property(self):
         try:
-            self.node.lab_logger().trace(f"【.publish_property】开始发布属性: {self.name}")
+            # self.node.lab_logger().trace(f"【.publish_property】开始发布属性: {self.name}")
             value = self.get_property()
             if self.print_publish:
-                self.node.lab_logger().trace(f"【.publish_property】发布 {self.msg_type}: {value}")
+                pass
+                # self.node.lab_logger().trace(f"【.publish_property】发布 {self.msg_type}: {value}")
             if value is not None:
                 msg = convert_to_ros_msg(self.msg_type, value)
                 self.publisher_.publish(msg)
-                self.node.lab_logger().trace(f"【.publish_property】属性 {self.name} 发布成功")
+                # self.node.lab_logger().trace(f"【.publish_property】属性 {self.name} 发布成功")
         except Exception as e:
             self.node.lab_logger().error(
                 f"【.publish_property】发布属性 {self.publisher_.topic} 出错: {str(e)}\n{traceback.format_exc()}"
@@ -363,10 +374,10 @@ class BaseROS2DeviceNode(Node, Generic[T]):
             return res

         async def append_resource(req: SerialCommand_Request, res: SerialCommand_Response):
-            from pylabrobot.resources.resource import Resource as ResourcePLR
             from pylabrobot.resources.deck import Deck
             from pylabrobot.resources import Coordinate
             from pylabrobot.resources import Plate

             # 物料传输到对应的node节点
             client = self._resource_clients["c2s_update_resource_tree"]
             request = SerialCommand.Request()

@@ -394,30 +405,27 @@ class BaseROS2DeviceNode(Node, Generic[T]):
             rts: ResourceTreeSet = ResourceTreeSet.from_raw_dict_list(input_resources)
             parent_resource = None
             if bind_parent_id != self.node_name:
-                parent_resource = self.resource_tracker.figure_resource(
-                    {"name": bind_parent_id}
-                )
+                parent_resource = self.resource_tracker.figure_resource({"name": bind_parent_id})
                 for r in rts.root_nodes:
                     # noinspection PyUnresolvedReferences
                     r.res_content.parent_uuid = parent_resource.unilabos_uuid
-
-            if len(LIQUID_INPUT_SLOT) and LIQUID_INPUT_SLOT[0] == -1 and len(rts.root_nodes) == 1 and isinstance(rts.root_nodes[0], RegularContainer):
+            else:
+                for r in rts.root_nodes:
+                    r.res_content.parent_uuid = self.uuid
+            rts_plr_instances = rts.to_plr_resources()
+            if len(rts.root_nodes) == 1 and isinstance(rts_plr_instances[0], RegularContainer):
                 # noinspection PyTypeChecker
-                container_instance: RegularContainer = rts.root_nodes[0]
-                found_resources = self.resource_tracker.figure_resource(
-                    {"id": container_instance.name}, try_mode=True
-                )
+                container_instance: RegularContainer = rts_plr_instances[0]
+                found_resources = self.resource_tracker.figure_resource({"name": container_instance.name}, try_mode=True)
                 if not len(found_resources):
                     self.resource_tracker.add_resource(container_instance)
                     logger.info(f"添加物料{container_instance.name}到资源跟踪器")
                 else:
-                    assert (
-                        len(found_resources) == 1
-                    ), f"找到多个同名物料: {container_instance.name}, 请检查物料系统"
+                    assert len(found_resources) == 1, f"找到多个同名物料: {container_instance.name}, 请检查物料系统"
                     found_resource = found_resources[0]
                     if isinstance(found_resource, RegularContainer):
                         logger.info(f"更新物料{container_instance.name}的数据{found_resource.state}")
-                        found_resource.state.update(json.loads(container_instance.state))
+                        found_resource.state.update(container_instance.state)
                     elif isinstance(found_resource, dict):
                         raise ValueError("已不支持 字典 版本的RegularContainer")
                     else:
@@ -425,21 +433,26 @@ class BaseROS2DeviceNode(Node, Generic[T]):
                             f"更新物料{container_instance.name}出现不支持的数据类型{type(found_resource)} {found_resource}"
                         )
             # noinspection PyUnresolvedReferences
-            request.command = json.dumps({
-                "action": "add",
-                "data": {
-                    "data": rts.dump(),
-                    "mount_uuid": parent_resource.unilabos_uuid if parent_resource is not None else "",
-                    "first_add": True,
-                },
-            })
+            request.command = json.dumps(
+                {
+                    "action": "add",
+                    "data": {
+                        "data": rts.dump(),
+                        "mount_uuid": parent_resource.unilabos_uuid if parent_resource is not None else self.uuid,
+                        "first_add": False,
+                    },
+                }
+            )
             tree_response: SerialCommand.Response = await client.call_async(request)
             uuid_maps = json.loads(tree_response.response)
-            self.resource_tracker.loop_update_uuid(input_resources, uuid_maps)
+            plr_instances = rts.to_plr_resources()
+            for plr_instance in plr_instances:
+                self.resource_tracker.loop_update_uuid(plr_instance, uuid_maps)
+            rts: ResourceTreeSet = ResourceTreeSet.from_plr_resources(plr_instances)
             self.lab_logger().info(f"Resource tree added. UUID mapping: {len(uuid_maps)} nodes")
             final_response = {
-                "created_resources": rts.dump(),
-                "liquid_input_resources": [],
+                "created_resource_tree": rts.dump(),
+                "liquid_input_resource_tree": [],
             }
             res.response = json.dumps(final_response)
             # 如果driver自己就有assign的方法,那就使用driver自己的assign方法

@@ -465,13 +478,15 @@ class BaseROS2DeviceNode(Node, Generic[T]):
                 return res
             try:
                 if len(rts.root_nodes) == 1 and parent_resource is not None:
-                    plr_instance = rts.to_plr_resources()[0]
+                    plr_instance = plr_instances[0]
                     if isinstance(plr_instance, Plate):
                         empty_liquid_info_in: List[Tuple[Optional[str], float]] = [(None, 0)] * plr_instance.num_items
                         if len(ADD_LIQUID_TYPE) == 1 and len(LIQUID_VOLUME) == 1 and len(LIQUID_INPUT_SLOT) > 1:
                             ADD_LIQUID_TYPE = ADD_LIQUID_TYPE * len(LIQUID_INPUT_SLOT)
                             LIQUID_VOLUME = LIQUID_VOLUME * len(LIQUID_INPUT_SLOT)
-                            self.lab_logger().warning(f"增加液体资源时,数量为1,自动补全为 {len(LIQUID_INPUT_SLOT)} 个")
+                            self.lab_logger().warning(
+                                f"增加液体资源时,数量为1,自动补全为 {len(LIQUID_INPUT_SLOT)} 个"
+                            )
                         for liquid_type, liquid_volume, liquid_input_slot in zip(
                             ADD_LIQUID_TYPE, LIQUID_VOLUME, LIQUID_INPUT_SLOT
                         ):

@@ -490,9 +505,15 @@ class BaseROS2DeviceNode(Node, Generic[T]):
                         input_wells = []
                         for r in LIQUID_INPUT_SLOT:
                             input_wells.append(plr_instance.children[r])
-                        final_response["liquid_input_resources"] = ResourceTreeSet.from_plr_resources(input_wells).dump()
+                        final_response["liquid_input_resource_tree"] = ResourceTreeSet.from_plr_resources(
+                            input_wells
+                        ).dump()
                         res.response = json.dumps(final_response)
-                    if issubclass(parent_resource.__class__, Deck) and hasattr(parent_resource, "assign_child_at_slot") and "slot" in other_calling_param:
+                    if (
+                        issubclass(parent_resource.__class__, Deck)
+                        and hasattr(parent_resource, "assign_child_at_slot")
+                        and "slot" in other_calling_param
+                    ):
                         other_calling_param["slot"] = int(other_calling_param["slot"])
                         parent_resource.assign_child_at_slot(plr_instance, **other_calling_param)
                     else:

@@ -504,14 +525,19 @@ class BaseROS2DeviceNode(Node, Generic[T]):
                         )
                 # 调整了液体以及Deck之后要重新Assign
                 # noinspection PyUnresolvedReferences
-                request.command = json.dumps({
-                    "action": "add",
-                    "data": {
-                        "data": ResourceTreeSet.from_plr_resources([parent_resource]).dump(),
-                        "mount_uuid": parent_resource.parent.unilabos_uuid if parent_resource.parent is not None else self.uuid,
-                        "first_add": False,
-                    },
-                })
+                rts_with_parent = ResourceTreeSet.from_plr_resources([parent_resource])
+                if rts_with_parent.root_nodes[0].res_content.uuid_parent is None:
+                    rts_with_parent.root_nodes[0].res_content.parent_uuid = self.uuid
+                request.command = json.dumps(
+                    {
+                        "action": "add",
+                        "data": {
+                            "data": rts_with_parent.dump(),
|
||||||
|
"mount_uuid": rts_with_parent.root_nodes[0].res_content.uuid_parent,
|
||||||
|
"first_add": False,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
)
|
||||||
tree_response: SerialCommand.Response = await client.call_async(request)
|
tree_response: SerialCommand.Response = await client.call_async(request)
|
||||||
uuid_maps = json.loads(tree_response.response)
|
uuid_maps = json.loads(tree_response.response)
|
||||||
self.resource_tracker.loop_update_uuid(input_resources, uuid_maps)
|
self.resource_tracker.loop_update_uuid(input_resources, uuid_maps)
|
||||||
@@ -621,7 +647,7 @@ class BaseROS2DeviceNode(Node, Generic[T]):
        ) # type: ignore
        raw_nodes = json.loads(response.response)
        tree_set = ResourceTreeSet.from_raw_dict_list(raw_nodes)
-        self.lab_logger().debug(f"获取资源结果: {len(tree_set.trees)} 个资源树")
+        self.lab_logger().trace(f"获取资源结果: {len(tree_set.trees)} 个资源树 {tree_set.root_nodes}")
        return tree_set

    async def get_resource_with_dir(self, resource_id: str, with_children: bool = True) -> "ResourcePLR":
@@ -655,61 +681,71 @@ class BaseROS2DeviceNode(Node, Generic[T]):
|
|||||||
|
|
||||||
def transfer_to_new_resource(
|
def transfer_to_new_resource(
|
||||||
self, plr_resource: "ResourcePLR", tree: ResourceTreeInstance, additional_add_params: Dict[str, Any]
|
self, plr_resource: "ResourcePLR", tree: ResourceTreeInstance, additional_add_params: Dict[str, Any]
|
||||||
):
|
) -> Optional["ResourcePLR"]:
|
||||||
parent_uuid = tree.root_node.res_content.parent_uuid
|
parent_uuid = tree.root_node.res_content.parent_uuid
|
||||||
if parent_uuid:
|
if not parent_uuid:
|
||||||
parent_resource: ResourcePLR = self.resource_tracker.uuid_to_resources.get(parent_uuid)
|
self.lab_logger().warning(
|
||||||
if parent_resource is None:
|
f"物料{plr_resource} parent未知,挂载到当前节点下,额外参数:{additional_add_params}"
|
||||||
|
)
|
||||||
|
return None
|
||||||
|
if parent_uuid == self.uuid:
|
||||||
|
self.lab_logger().warning(
|
||||||
|
f"物料{plr_resource}请求挂载到{self.identifier},额外参数:{additional_add_params}"
|
||||||
|
)
|
||||||
|
return None
|
||||||
|
parent_resource: ResourcePLR = self.resource_tracker.uuid_to_resources.get(parent_uuid)
|
||||||
|
if parent_resource is None:
|
||||||
|
self.lab_logger().warning(
|
||||||
|
f"物料{plr_resource}请求挂载{tree.root_node.res_content.name}的父节点{parent_uuid}不存在"
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
try:
|
||||||
|
# 特殊兼容所有plr的物料的assign方法,和create_resource append_resource后期同步
|
||||||
|
additional_params = {}
|
||||||
|
extra = getattr(plr_resource, "unilabos_extra", {})
|
||||||
|
if len(extra):
|
||||||
|
self.lab_logger().info(f"发现物料{plr_resource}额外参数: " + str(extra))
|
||||||
|
if "update_resource_site" in extra:
|
||||||
|
additional_add_params["site"] = extra["update_resource_site"]
|
||||||
|
site = additional_add_params.get("site", None)
|
||||||
|
spec = inspect.signature(parent_resource.assign_child_resource)
|
||||||
|
if "spot" in spec.parameters:
|
||||||
|
ordering_dict: Dict[str, Any] = getattr(parent_resource, "_ordering")
|
||||||
|
if ordering_dict:
|
||||||
|
site = list(ordering_dict.keys()).index(site)
|
||||||
|
additional_params["spot"] = site
|
||||||
|
old_parent = plr_resource.parent
|
||||||
|
if old_parent is not None:
|
||||||
|
# plr并不支持同一个deck的加载和卸载
|
||||||
|
self.lab_logger().warning(f"物料{plr_resource}请求从{old_parent}卸载")
|
||||||
|
old_parent.unassign_child_resource(plr_resource)
|
||||||
self.lab_logger().warning(
|
self.lab_logger().warning(
|
||||||
f"物料{plr_resource}请求挂载{tree.root_node.res_content.name}的父节点{parent_uuid}不存在"
|
f"物料{plr_resource}请求挂载到{parent_resource},额外参数:{additional_params}"
|
||||||
)
|
)
|
||||||
else:
|
|
||||||
try:
|
|
||||||
# 特殊兼容所有plr的物料的assign方法,和create_resource append_resource后期同步
|
|
||||||
additional_params = {}
|
|
||||||
extra = getattr(plr_resource, "unilabos_extra", {})
|
|
||||||
if len(extra):
|
|
||||||
self.lab_logger().info(f"发现物料{plr_resource}额外参数: " + str(extra))
|
|
||||||
if "update_resource_site" in extra:
|
|
||||||
additional_add_params["site"] = extra["update_resource_site"]
|
|
||||||
site = additional_add_params.get("site", None)
|
|
||||||
spec = inspect.signature(parent_resource.assign_child_resource)
|
|
||||||
if "spot" in spec.parameters:
|
|
||||||
ordering_dict: Dict[str, Any] = getattr(parent_resource, "_ordering")
|
|
||||||
if ordering_dict:
|
|
||||||
site = list(ordering_dict.keys()).index(site)
|
|
||||||
additional_params["spot"] = site
|
|
||||||
old_parent = plr_resource.parent
|
|
||||||
if old_parent is not None:
|
|
||||||
# plr并不支持同一个deck的加载和卸载
|
|
||||||
self.lab_logger().warning(f"物料{plr_resource}请求从{old_parent}卸载")
|
|
||||||
old_parent.unassign_child_resource(plr_resource)
|
|
||||||
self.lab_logger().warning(
|
|
||||||
f"物料{plr_resource}请求挂载到{parent_resource},额外参数:{additional_params}"
|
|
||||||
)
|
|
||||||
|
|
||||||
# ⭐ assign 之前,需要从 resources 列表中移除
|
# ⭐ assign 之前,需要从 resources 列表中移除
|
||||||
# 因为资源将不再是顶级资源,而是成为 parent_resource 的子资源
|
# 因为资源将不再是顶级资源,而是成为 parent_resource 的子资源
|
||||||
# 如果不移除,figure_resource 会找到两次:一次在 resources,一次在 parent 的 children
|
# 如果不移除,figure_resource 会找到两次:一次在 resources,一次在 parent 的 children
|
||||||
resource_id = id(plr_resource)
|
resource_id = id(plr_resource)
|
||||||
for i, r in enumerate(self.resource_tracker.resources):
|
for i, r in enumerate(self.resource_tracker.resources):
|
||||||
if id(r) == resource_id:
|
if id(r) == resource_id:
|
||||||
self.resource_tracker.resources.pop(i)
|
self.resource_tracker.resources.pop(i)
|
||||||
self.lab_logger().debug(
|
self.lab_logger().debug(
|
||||||
f"从顶级资源列表中移除 {plr_resource.name}(即将成为 {parent_resource.name} 的子资源)"
|
f"从顶级资源列表中移除 {plr_resource.name}(即将成为 {parent_resource.name} 的子资源)"
|
||||||
)
|
)
|
||||||
break
|
break
|
||||||
|
|
||||||
parent_resource.assign_child_resource(plr_resource, location=None, **additional_params)
|
parent_resource.assign_child_resource(plr_resource, location=None, **additional_params)
|
||||||
|
|
||||||
func = getattr(self.driver_instance, "resource_tree_transfer", None)
|
func = getattr(self.driver_instance, "resource_tree_transfer", None)
|
||||||
if callable(func):
|
if callable(func):
|
||||||
# 分别是 物料的原来父节点,当前物料的状态,物料的新父节点(此时物料已经重新assign了)
|
# 分别是 物料的原来父节点,当前物料的状态,物料的新父节点(此时物料已经重新assign了)
|
||||||
func(old_parent, plr_resource, parent_resource)
|
func(old_parent, plr_resource, parent_resource)
|
||||||
except Exception as e:
|
return parent_resource
|
||||||
self.lab_logger().warning(
|
except Exception as e:
|
||||||
f"物料{plr_resource}请求挂载{tree.root_node.res_content.name}的父节点{parent_resource}[{parent_uuid}]失败!\n{traceback.format_exc()}"
|
self.lab_logger().warning(
|
||||||
)
|
f"物料{plr_resource}请求挂载{tree.root_node.res_content.name}的父节点{parent_resource}[{parent_uuid}]失败!\n{traceback.format_exc()}"
|
||||||
|
)
|
||||||
|
|
||||||
async def s2c_resource_tree(self, req: SerialCommand_Request, res: SerialCommand_Response):
|
async def s2c_resource_tree(self, req: SerialCommand_Request, res: SerialCommand_Response):
|
||||||
"""
|
"""
|
||||||
@@ -724,7 +760,7 @@ class BaseROS2DeviceNode(Node, Generic[T]):

        def _handle_add(
            plr_resources: List[ResourcePLR], tree_set: ResourceTreeSet, additional_add_params: Dict[str, Any]
-        ) -> Dict[str, Any]:
+        ) -> Tuple[Dict[str, Any], List[ResourcePLR]]:
            """
            处理资源添加操作的内部函数

@@ -736,15 +772,20 @@ class BaseROS2DeviceNode(Node, Generic[T]):
            Returns:
                操作结果字典
            """
+            parents = []  # 放的是被变更的物料 / 被变更的物料父级
            for plr_resource, tree in zip(plr_resources, tree_set.trees):
                self.resource_tracker.add_resource(plr_resource)
-                self.transfer_to_new_resource(plr_resource, tree, additional_add_params)
+                parent = self.transfer_to_new_resource(plr_resource, tree, additional_add_params)
+                if parent is not None:
+                    parents.append(parent)
+                else:
+                    parents.append(plr_resource)

            func = getattr(self.driver_instance, "resource_tree_add", None)
            if callable(func):
                func(plr_resources)

-            return {"success": True, "action": "add"}
+            return {"success": True, "action": "add"}, parents

        def _handle_remove(resources_uuid: List[str]) -> Dict[str, Any]:
            """
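# Hedged sketch: the new tuple return of _handle_add, as consumed later in this diff.
# `transfer` below is a stand-in for transfer_to_new_resource, which returns the new parent
# or None when the resource stays on (or is mounted to) the current node.
from typing import Any, List, Optional, Tuple

def handle_add_sketch(resources: List[Any], transfer) -> Tuple[dict, List[Any]]:
    parents: List[Any] = []
    for res in resources:
        parent: Optional[Any] = transfer(res)
        # Fall back to the resource itself so callers always get something to report upstream.
        parents.append(parent if parent is not None else res)
    return {"success": True, "action": "add"}, parents

if __name__ == "__main__":
    result, parents = handle_add_sketch(["plate_1", "tip_rack_1"], transfer=lambda r: None)
    print(result, parents)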
@@ -779,11 +820,11 @@ class BaseROS2DeviceNode(Node, Generic[T]):
                if plr_resource.parent is not None:
                    plr_resource.parent.unassign_child_resource(plr_resource)
                self.resource_tracker.remove_resource(plr_resource)
-                self.lab_logger().info(f"移除物料 {plr_resource} 及其子节点")
+                self.lab_logger().info(f"[资源同步] 移除物料 {plr_resource} 及其子节点")

                for other_plr_resource in other_plr_resources:
                    self.resource_tracker.remove_resource(other_plr_resource)
-                    self.lab_logger().info(f"移除物料 {other_plr_resource} 及其子节点")
+                    self.lab_logger().info(f"[资源同步] 移除物料 {other_plr_resource} 及其子节点")

                return {
                    "success": True,
@@ -793,8 +834,10 @@ class BaseROS2DeviceNode(Node, Generic[T]):
            }

        def _handle_update(
-            plr_resources: List[ResourcePLR], tree_set: ResourceTreeSet, additional_add_params: Dict[str, Any]
-        ) -> Dict[str, Any]:
+            plr_resources: List[Union[ResourcePLR, ResourceDictInstance]],
+            tree_set: ResourceTreeSet,
+            additional_add_params: Dict[str, Any],
+        ) -> Tuple[Dict[str, Any], List[ResourcePLR]]:
            """
            处理资源更新操作的内部函数

@@ -806,16 +849,28 @@ class BaseROS2DeviceNode(Node, Generic[T]):
|
|||||||
Returns:
|
Returns:
|
||||||
操作结果字典
|
操作结果字典
|
||||||
"""
|
"""
|
||||||
|
original_instances = []
|
||||||
for plr_resource, tree in zip(plr_resources, tree_set.trees):
|
for plr_resource, tree in zip(plr_resources, tree_set.trees):
|
||||||
|
if isinstance(plr_resource, ResourceDictInstance):
|
||||||
|
self._lab_logger.info(f"跳过 非资源{plr_resource.res_content.name} 的更新")
|
||||||
|
continue
|
||||||
states = plr_resource.serialize_all_state()
|
states = plr_resource.serialize_all_state()
|
||||||
original_instance: ResourcePLR = self.resource_tracker.figure_resource(
|
original_instance: ResourcePLR = self.resource_tracker.figure_resource(
|
||||||
{"uuid": tree.root_node.res_content.uuid}, try_mode=False
|
{"uuid": tree.root_node.res_content.uuid}, try_mode=False
|
||||||
)
|
)
|
||||||
|
original_parent_resource = original_instance.parent
|
||||||
|
original_parent_resource_uuid = getattr(original_parent_resource, "unilabos_uuid", None)
|
||||||
|
target_parent_resource_uuid = tree.root_node.res_content.uuid_parent
|
||||||
|
not_same_parent = (
|
||||||
|
original_parent_resource_uuid != target_parent_resource_uuid
|
||||||
|
and original_parent_resource is not None
|
||||||
|
)
|
||||||
|
old_name = original_instance.name
|
||||||
|
new_name = plr_resource.name
|
||||||
|
parent_appended = False
|
||||||
|
|
||||||
# Update操作中包含改名:需要先remove再add
|
# Update操作中包含改名:需要先remove再add,这里更新父节点即可
|
||||||
if original_instance.name != plr_resource.name:
|
if not not_same_parent and old_name != new_name:
|
||||||
old_name = original_instance.name
|
|
||||||
new_name = plr_resource.name
|
|
||||||
self.lab_logger().info(f"物料改名操作:{old_name} -> {new_name}")
|
self.lab_logger().info(f"物料改名操作:{old_name} -> {new_name}")
|
||||||
|
|
||||||
# 收集所有相关的uuid(包括子节点)
|
# 收集所有相关的uuid(包括子节点)
|
||||||
@@ -824,12 +879,10 @@ class BaseROS2DeviceNode(Node, Generic[T]):
|
|||||||
_handle_add([original_instance], tree_set, additional_add_params)
|
_handle_add([original_instance], tree_set, additional_add_params)
|
||||||
|
|
||||||
self.lab_logger().info(f"物料改名完成:{old_name} -> {new_name}")
|
self.lab_logger().info(f"物料改名完成:{old_name} -> {new_name}")
|
||||||
|
original_instances.append(original_parent_resource)
|
||||||
|
parent_appended = True
|
||||||
|
|
||||||
# 常规更新:不涉及改名
|
# 常规更新:不涉及改名
|
||||||
original_parent_resource = original_instance.parent
|
|
||||||
original_parent_resource_uuid = getattr(original_parent_resource, "unilabos_uuid", None)
|
|
||||||
target_parent_resource_uuid = tree.root_node.res_content.uuid_parent
|
|
||||||
|
|
||||||
self.lab_logger().info(
|
self.lab_logger().info(
|
||||||
f"物料{original_instance} 原始父节点{original_parent_resource_uuid} "
|
f"物料{original_instance} 原始父节点{original_parent_resource_uuid} "
|
||||||
f"目标父节点{target_parent_resource_uuid} 更新"
|
f"目标父节点{target_parent_resource_uuid} 更新"
|
||||||
@@ -840,25 +893,50 @@ class BaseROS2DeviceNode(Node, Generic[T]):
|
|||||||
original_instance.unilabos_extra = getattr(plr_resource, "unilabos_extra") # type: ignore # noqa: E501
|
original_instance.unilabos_extra = getattr(plr_resource, "unilabos_extra") # type: ignore # noqa: E501
|
||||||
|
|
||||||
# 如果父节点变化,需要重新挂载
|
# 如果父节点变化,需要重新挂载
|
||||||
if (
|
if not_same_parent:
|
||||||
original_parent_resource_uuid != target_parent_resource_uuid
|
parent = self.transfer_to_new_resource(original_instance, tree, additional_add_params)
|
||||||
and original_parent_resource is not None
|
original_instances.append(parent)
|
||||||
):
|
parent_appended = True
|
||||||
self.transfer_to_new_resource(original_instance, tree, additional_add_params)
|
else:
|
||||||
|
# 判断是否变更了resource_site,重新登记
|
||||||
|
target_site = original_instance.unilabos_extra.get("update_resource_site")
|
||||||
|
sites = (
|
||||||
|
original_instance.parent.sites
|
||||||
|
if original_instance.parent is not None and hasattr(original_instance.parent, "sites")
|
||||||
|
else None
|
||||||
|
)
|
||||||
|
site_names = (
|
||||||
|
list(original_instance.parent._ordering.keys())
|
||||||
|
if original_instance.parent is not None and hasattr(original_instance.parent, "sites")
|
||||||
|
else []
|
||||||
|
)
|
||||||
|
if target_site is not None and sites is not None and site_names is not None:
|
||||||
|
site_index = sites.index(original_instance)
|
||||||
|
site_name = site_names[site_index]
|
||||||
|
if site_name != target_site:
|
||||||
|
parent = self.transfer_to_new_resource(original_instance, tree, additional_add_params)
|
||||||
|
if parent is not None:
|
||||||
|
original_instances.append(parent)
|
||||||
|
parent_appended = True
|
||||||
|
|
||||||
# 加载状态
|
# 加载状态
|
||||||
|
original_instance.location = plr_resource.location
|
||||||
|
original_instance.rotation = plr_resource.rotation
|
||||||
|
original_instance.barcode = plr_resource.barcode
|
||||||
original_instance.load_all_state(states)
|
original_instance.load_all_state(states)
|
||||||
child_count = len(original_instance.get_all_children())
|
child_count = len(original_instance.get_all_children())
|
||||||
self.lab_logger().info(
|
self.lab_logger().info(
|
||||||
f"更新了资源属性 {plr_resource}[{tree.root_node.res_content.uuid}] " f"及其子节点 {child_count} 个"
|
f"更新了资源属性 {plr_resource}[{tree.root_node.res_content.uuid}] " f"及其子节点 {child_count} 个"
|
||||||
)
|
)
|
||||||
|
if not parent_appended:
|
||||||
|
original_instances.append(original_instance)
|
||||||
|
|
||||||
# 调用driver的update回调
|
# 调用driver的update回调
|
||||||
func = getattr(self.driver_instance, "resource_tree_update", None)
|
func = getattr(self.driver_instance, "resource_tree_update", None)
|
||||||
if callable(func):
|
if callable(func):
|
||||||
func(plr_resources)
|
func(original_instances)
|
||||||
|
|
||||||
return {"success": True, "action": "update"}
|
return {"success": True, "action": "update"}, original_instances
|
||||||
|
|
||||||
try:
|
try:
|
||||||
data = json.loads(req.command)
|
data = json.loads(req.command)
|
||||||
@@ -868,9 +946,7 @@ class BaseROS2DeviceNode(Node, Generic[T]):
                action = i.get("action")  # remove, add, update
                resources_uuid: List[str] = i.get("data")  # 资源数据
                additional_add_params = i.get("additional_add_params", {})  # 额外参数
-                self.lab_logger().info(
-                    f"[Resource Tree Update] Processing {action} operation, " f"resources count: {len(resources_uuid)}"
-                )
+                self.lab_logger().trace(f"[资源同步] 处理 {action}, " f"resources count: {len(resources_uuid)}")
                tree_set = None
                if action in ["add", "update"]:
                    tree_set = await self.get_resource(
@@ -881,13 +957,56 @@ class BaseROS2DeviceNode(Node, Generic[T]):
|
|||||||
if tree_set is None:
|
if tree_set is None:
|
||||||
raise ValueError("tree_set不能为None")
|
raise ValueError("tree_set不能为None")
|
||||||
plr_resources = tree_set.to_plr_resources()
|
plr_resources = tree_set.to_plr_resources()
|
||||||
result = _handle_add(plr_resources, tree_set, additional_add_params)
|
result, parents = _handle_add(plr_resources, tree_set, additional_add_params)
|
||||||
|
parents: List[Optional["ResourcePLR"]] = [i for i in parents if i is not None]
|
||||||
|
# de_dupe_parents = list(set(parents))
|
||||||
|
# Fix unhashable type error for WareHouse
|
||||||
|
de_dupe_parents = []
|
||||||
|
_seen_ids = set()
|
||||||
|
for p in parents:
|
||||||
|
if id(p) not in _seen_ids:
|
||||||
|
_seen_ids.add(id(p))
|
||||||
|
de_dupe_parents.append(p)
|
||||||
|
new_tree_set = ResourceTreeSet.from_plr_resources(de_dupe_parents) # 去重
|
||||||
|
for tree in new_tree_set.trees:
|
||||||
|
if tree.root_node.res_content.uuid_parent is None and self.node_name != "host_node":
|
||||||
|
tree.root_node.res_content.parent_uuid = self.uuid
|
||||||
|
r = SerialCommand.Request()
|
||||||
|
r.command = json.dumps(
|
||||||
|
{"data": {"data": new_tree_set.dump()}, "action": "update"}
|
||||||
|
) # 和Update Resource一致
|
||||||
|
response: SerialCommand_Response = await self._resource_clients[
|
||||||
|
"c2s_update_resource_tree"
|
||||||
|
].call_async(
|
||||||
|
r
|
||||||
|
) # type: ignore
|
||||||
|
self.lab_logger().info(f"确认资源云端 Add 结果: {response.response}")
|
||||||
results.append(result)
|
results.append(result)
|
||||||
elif action == "update":
|
elif action == "update":
|
||||||
if tree_set is None:
|
if tree_set is None:
|
||||||
raise ValueError("tree_set不能为None")
|
raise ValueError("tree_set不能为None")
|
||||||
plr_resources = tree_set.to_plr_resources()
|
plr_resources = []
|
||||||
result = _handle_update(plr_resources, tree_set, additional_add_params)
|
for tree in tree_set.trees:
|
||||||
|
if tree.root_node.res_content.type == "device":
|
||||||
|
plr_resources.append(tree.root_node)
|
||||||
|
else:
|
||||||
|
plr_resources.append(ResourceTreeSet([tree]).to_plr_resources()[0])
|
||||||
|
result, original_instances = _handle_update(plr_resources, tree_set, additional_add_params)
|
||||||
|
if not BasicConfig.no_update_feedback:
|
||||||
|
new_tree_set = ResourceTreeSet.from_plr_resources(original_instances) # 去重
|
||||||
|
for tree in new_tree_set.trees:
|
||||||
|
if tree.root_node.res_content.uuid_parent is None and self.node_name != "host_node":
|
||||||
|
tree.root_node.res_content.parent_uuid = self.uuid
|
||||||
|
r = SerialCommand.Request()
|
||||||
|
r.command = json.dumps(
|
||||||
|
{"data": {"data": new_tree_set.dump()}, "action": "update"}
|
||||||
|
) # 和Update Resource一致
|
||||||
|
response: SerialCommand_Response = await self._resource_clients[
|
||||||
|
"c2s_update_resource_tree"
|
||||||
|
].call_async(
|
||||||
|
r
|
||||||
|
) # type: ignore
|
||||||
|
self.lab_logger().info(f"确认资源云端 Update 结果: {response.response}")
|
||||||
results.append(result)
|
results.append(result)
|
||||||
elif action == "remove":
|
elif action == "remove":
|
||||||
result = _handle_remove(resources_uuid)
|
result = _handle_remove(resources_uuid)
|
||||||
@@ -901,15 +1020,15 @@ class BaseROS2DeviceNode(Node, Generic[T]):
            # 返回处理结果
            result_json = {"results": results, "total": len(data)}
            res.response = json.dumps(result_json, ensure_ascii=False, cls=TypeEncoder)
-            self.lab_logger().info(f"[Resource Tree Update] Completed processing {len(data)} operations")
+            # self.lab_logger().info(f"[Resource Tree Update] Completed processing {len(data)} operations")

        except json.JSONDecodeError as e:
            error_msg = f"Invalid JSON format: {str(e)}"
-            self.lab_logger().error(f"[Resource Tree Update] {error_msg}")
+            self.lab_logger().error(f"[资源同步] {error_msg}")
            res.response = json.dumps({"success": False, "error": error_msg}, ensure_ascii=False)
        except Exception as e:
            error_msg = f"Unexpected error: {str(e)}"
-            self.lab_logger().error(f"[Resource Tree Update] {error_msg}")
+            self.lab_logger().error(f"[资源同步] {error_msg}")
            self.lab_logger().error(traceback.format_exc())
            res.response = json.dumps({"success": False, "error": error_msg}, ensure_ascii=False)

@@ -1230,7 +1349,8 @@ class BaseROS2DeviceNode(Node, Generic[T]):
        ACTION, action_paramtypes = self.get_real_function(self.driver_instance, action_name)

        action_kwargs = convert_from_ros_msg_with_mapping(goal, action_value_mapping["goal"])
-        self.lab_logger().debug(f"任务 {ACTION.__name__} 接收到原始目标: {action_kwargs}")
+        self.lab_logger().debug(f"任务 {ACTION.__name__} 接收到原始目标: {str(action_kwargs)[:1000]}")
+        self.lab_logger().trace(f"任务 {ACTION.__name__} 接收到原始目标: {action_kwargs}")
        error_skip = False
        # 向Host查询物料当前状态,如果是host本身的增加物料的请求,则直接跳过
        if action_name not in ["create_resource_detailed", "create_resource"]:
@@ -1244,21 +1364,41 @@ class BaseROS2DeviceNode(Node, Generic[T]):
|
|||||||
resource_inputs = action_kwargs[k] if is_sequence else [action_kwargs[k]]
|
resource_inputs = action_kwargs[k] if is_sequence else [action_kwargs[k]]
|
||||||
|
|
||||||
# 批量查询资源
|
# 批量查询资源
|
||||||
queried_resources = []
|
queried_resources: list = [None] * len(resource_inputs)
|
||||||
for resource_data in resource_inputs:
|
uuid_indices: list[tuple[int, str, dict]] = [] # (index, uuid, resource_data)
|
||||||
plr_resource = await self.get_resource_with_dir(
|
|
||||||
resource_id=resource_data["id"], with_children=True
|
# 第一遍:处理没有uuid的资源,收集有uuid的资源信息
|
||||||
)
|
for idx, resource_data in enumerate(resource_inputs):
|
||||||
if "sample_id" in resource_data:
|
unilabos_uuid = resource_data.get("data", {}).get("unilabos_uuid")
|
||||||
plr_resource.unilabos_extra["sample_uuid"] = resource_data["sample_id"]
|
if unilabos_uuid is None:
|
||||||
queried_resources.append(plr_resource)
|
plr_resource = await self.get_resource_with_dir(
|
||||||
|
resource_id=resource_data["id"], with_children=True
|
||||||
|
)
|
||||||
|
if "sample_id" in resource_data:
|
||||||
|
plr_resource.unilabos_extra[EXTRA_SAMPLE_UUID] = resource_data["sample_id"]
|
||||||
|
queried_resources[idx] = plr_resource
|
||||||
|
else:
|
||||||
|
uuid_indices.append((idx, unilabos_uuid, resource_data))
|
||||||
|
|
||||||
|
# 第二遍:批量查询有uuid的资源
|
||||||
|
if uuid_indices:
|
||||||
|
uuids = [item[1] for item in uuid_indices]
|
||||||
|
resource_tree = await self.get_resource(uuids)
|
||||||
|
plr_resources = resource_tree.to_plr_resources()
|
||||||
|
for i, (idx, _, resource_data) in enumerate(uuid_indices):
|
||||||
|
plr_resource = plr_resources[i]
|
||||||
|
if "sample_id" in resource_data:
|
||||||
|
plr_resource.unilabos_extra[EXTRA_SAMPLE_UUID] = resource_data["sample_id"]
|
||||||
|
queried_resources[idx] = plr_resource
|
||||||
|
|
||||||
self.lab_logger().debug(f"资源查询结果: 共 {len(queried_resources)} 个资源")
|
self.lab_logger().debug(f"资源查询结果: 共 {len(queried_resources)} 个资源")
|
||||||
|
|
||||||
# 通过资源跟踪器获取本地实例
|
# 通过资源跟踪器获取本地实例
|
||||||
final_resources = queried_resources if is_sequence else queried_resources[0]
|
final_resources = queried_resources if is_sequence else queried_resources[0]
|
||||||
if not is_sequence:
|
if not is_sequence:
|
||||||
plr = self.resource_tracker.figure_resource({"name": final_resources.name}, try_mode=False)
|
plr = self.resource_tracker.figure_resource(
|
||||||
|
{"name": final_resources.name}, try_mode=False
|
||||||
|
)
|
||||||
# 保留unilabos_extra
|
# 保留unilabos_extra
|
||||||
if hasattr(final_resources, "unilabos_extra") and hasattr(plr, "unilabos_extra"):
|
if hasattr(final_resources, "unilabos_extra") and hasattr(plr, "unilabos_extra"):
|
||||||
plr.unilabos_extra = getattr(final_resources, "unilabos_extra", {}).copy()
|
plr.unilabos_extra = getattr(final_resources, "unilabos_extra", {}).copy()
|
||||||
@@ -1298,6 +1438,9 @@ class BaseROS2DeviceNode(Node, Generic[T]):
                except Exception as _:
                    execution_error = traceback.format_exc()
                    error(
+                        f"异步任务 {ACTION.__name__} 报错了\n{traceback.format_exc()}\n原始输入:{str(action_kwargs)[:1000]}"
+                    )
+                    trace(
                        f"异步任务 {ACTION.__name__} 报错了\n{traceback.format_exc()}\n原始输入:{action_kwargs}"
                    )

@@ -1319,6 +1462,9 @@ class BaseROS2DeviceNode(Node, Generic[T]):
                except Exception as _:
                    execution_error = traceback.format_exc()
                    error(
+                        f"同步任务 {ACTION.__name__} 报错了\n{traceback.format_exc()}\n原始输入:{str(action_kwargs)[:1000]}"
+                    )
+                    trace(
                        f"同步任务 {ACTION.__name__} 报错了\n{traceback.format_exc()}\n原始输入:{action_kwargs}"
                    )

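# Hedged sketch: the logging split introduced above - a truncated message at error level so
# huge goal payloads do not flood the log, and the full payload at trace level. `error` and
# `trace` stand in for the unilabos logging helpers.
def log_failure(error, trace, task_name: str, exc_text: str, kwargs: dict, limit: int = 1000) -> None:
    error(f"任务 {task_name} 报错了\n{exc_text}\n原始输入:{str(kwargs)[:limit]}")
    trace(f"任务 {task_name} 报错了\n{exc_text}\n原始输入:{kwargs}")

if __name__ == "__main__":
    log_failure(print, print, "demo", "Traceback (most recent call last): ...", {"volume": 100})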
@@ -1387,11 +1533,18 @@ class BaseROS2DeviceNode(Node, Generic[T]):
            if isinstance(rs, list):
                for r in rs:
                    res = self.resource_tracker.parent_resource(r)  # 获取 resource 对象
+                    if res is None:
+                        res = rs
+                    if id(res) not in seen:
+                        seen.add(id(res))
+                        unique_resources.append(res)
            else:
-                res = self.resource_tracker.parent_resource(r)
-                if id(res) not in seen:
-                    seen.add(id(res))
-                    unique_resources.append(res)
+                res = self.resource_tracker.parent_resource(rs)
+                if res is None:
+                    res = rs
+                if id(res) not in seen:
+                    seen.add(id(res))
+                    unique_resources.append(res)

        # 使用新的资源树接口
        if unique_resources:
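# Hedged sketch: de-duplicating resources by object identity, as the code above does.
# Some PLR resources (the diff mentions WareHouse) are not hashable, so set(resources) raises
# TypeError; tracking id(obj) keeps one entry per instance without requiring __hash__.
from typing import Any, List

def dedupe_by_identity(resources: List[Any]) -> List[Any]:
    seen: set = set()
    unique: List[Any] = []
    for res in resources:
        if id(res) not in seen:
            seen.add(id(res))
            unique.append(res)
    return unique

if __name__ == "__main__":
    a, b = {"name": "deck"}, {"name": "deck"}  # dicts are unhashable, like the resources here
    print(len(dedupe_by_identity([a, a, b])))  # 2 - same content, but different instances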
@@ -1443,20 +1596,39 @@ class BaseROS2DeviceNode(Node, Generic[T]):
|
|||||||
try:
|
try:
|
||||||
function_name = target["function_name"]
|
function_name = target["function_name"]
|
||||||
function_args = target["function_args"]
|
function_args = target["function_args"]
|
||||||
|
# 获取 unilabos 系统参数
|
||||||
|
unilabos_param: Dict[str, Any] = target[JSON_UNILABOS_PARAM]
|
||||||
|
|
||||||
assert isinstance(function_args, dict), "执行动作时JSON必须为dict类型\n原JSON: {string}"
|
assert isinstance(function_args, dict), "执行动作时JSON必须为dict类型\n原JSON: {string}"
|
||||||
function = getattr(self.driver_instance, function_name)
|
function = getattr(self.driver_instance, function_name)
|
||||||
assert callable(
|
assert callable(
|
||||||
function
|
function
|
||||||
), f"执行动作时JSON中的function_name对应的函数不可调用: {function_name}\n原JSON: {string}"
|
), f"执行动作时JSON中的function_name对应的函数不可调用: {function_name}\n原JSON: {string}"
|
||||||
|
|
||||||
# 处理 ResourceSlot 类型参数
|
# 处理参数(包含 unilabos 系统参数如 sample_uuids)
|
||||||
args_list = default_manager._analyze_method_signature(function)["args"]
|
args_list = default_manager._analyze_method_signature(function, skip_unilabos_params=False)["args"]
|
||||||
for arg in args_list:
|
for arg in args_list:
|
||||||
arg_name = arg["name"]
|
arg_name = arg["name"]
|
||||||
arg_type = arg["type"]
|
arg_type = arg["type"]
|
||||||
|
|
||||||
# 跳过不在 function_args 中的参数
|
# 跳过不在 function_args 中的参数
|
||||||
if arg_name not in function_args:
|
if arg_name not in function_args:
|
||||||
|
# 处理 sample_uuids 参数注入
|
||||||
|
if arg_name == PARAM_SAMPLE_UUIDS:
|
||||||
|
raw_sample_uuids = unilabos_param.get(PARAM_SAMPLE_UUIDS, {})
|
||||||
|
# 将 material uuid 转换为 resource 实例
|
||||||
|
# key: sample_uuid, value: material_uuid -> resource 实例
|
||||||
|
resolved_sample_uuids: Dict[str, Any] = {}
|
||||||
|
for sample_uuid, material_uuid in raw_sample_uuids.items():
|
||||||
|
if material_uuid and self.resource_tracker:
|
||||||
|
resource = self.resource_tracker.uuid_to_resources.get(material_uuid)
|
||||||
|
resolved_sample_uuids[sample_uuid] = resource if resource else material_uuid
|
||||||
|
else:
|
||||||
|
resolved_sample_uuids[sample_uuid] = material_uuid
|
||||||
|
function_args[PARAM_SAMPLE_UUIDS] = resolved_sample_uuids
|
||||||
|
self.lab_logger().debug(
|
||||||
|
f"[JsonCommand] 注入 {PARAM_SAMPLE_UUIDS}: {resolved_sample_uuids}"
|
||||||
|
)
|
||||||
continue
|
continue
|
||||||
|
|
||||||
# 处理单个 ResourceSlot
|
# 处理单个 ResourceSlot
|
||||||
@@ -1464,8 +1636,7 @@ class BaseROS2DeviceNode(Node, Generic[T]):
|
|||||||
resource_data = function_args[arg_name]
|
resource_data = function_args[arg_name]
|
||||||
if isinstance(resource_data, dict) and "id" in resource_data:
|
if isinstance(resource_data, dict) and "id" in resource_data:
|
||||||
try:
|
try:
|
||||||
converted_resource = self._convert_resource_sync(resource_data)
|
function_args[arg_name] = self._convert_resources_sync(resource_data["uuid"])[0]
|
||||||
function_args[arg_name] = converted_resource
|
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
self.lab_logger().error(
|
self.lab_logger().error(
|
||||||
f"转换ResourceSlot参数 {arg_name} 失败: {e}\n{traceback.format_exc()}"
|
f"转换ResourceSlot参数 {arg_name} 失败: {e}\n{traceback.format_exc()}"
|
||||||
@@ -1479,68 +1650,87 @@ class BaseROS2DeviceNode(Node, Generic[T]):
|
|||||||
resource_list = function_args[arg_name]
|
resource_list = function_args[arg_name]
|
||||||
if isinstance(resource_list, list):
|
if isinstance(resource_list, list):
|
||||||
try:
|
try:
|
||||||
converted_resources = []
|
uuids = [r["uuid"] for r in resource_list if isinstance(r, dict) and "id" in r]
|
||||||
for resource_data in resource_list:
|
function_args[arg_name] = self._convert_resources_sync(*uuids) if uuids else []
|
||||||
if isinstance(resource_data, dict) and "id" in resource_data:
|
|
||||||
converted_resource = self._convert_resource_sync(resource_data)
|
|
||||||
converted_resources.append(converted_resource)
|
|
||||||
function_args[arg_name] = converted_resources
|
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
self.lab_logger().error(
|
self.lab_logger().error(
|
||||||
f"转换ResourceSlot列表参数 {arg_name} 失败: {e}\n{traceback.format_exc()}"
|
f"转换ResourceSlot列表参数 {arg_name} 失败: {e}\n{traceback.format_exc()}"
|
||||||
)
|
)
|
||||||
raise JsonCommandInitError(f"ResourceSlot列表参数转换失败: {arg_name}")
|
raise JsonCommandInitError(f"ResourceSlot列表参数转换失败: {arg_name}")
|
||||||
|
|
||||||
|
# todo: 默认反报送
|
||||||
return function(**function_args)
|
return function(**function_args)
|
||||||
except KeyError as ex:
|
except KeyError as ex:
|
||||||
raise JsonCommandInitError(
|
raise JsonCommandInitError(
|
||||||
f"执行动作时JSON缺少function_name或function_args: {ex}\n原JSON: {string}\n{traceback.format_exc()}"
|
f"执行动作时JSON缺少function_name或function_args: {ex}\n原JSON: {string}\n{traceback.format_exc()}"
|
||||||
)
|
)
|
||||||
|
|
||||||
def _convert_resource_sync(self, resource_data: Dict[str, Any]):
|
def _convert_resources_sync(self, *uuids: str) -> List["ResourcePLR"]:
|
||||||
"""同步转换资源数据为实例"""
|
"""同步转换资源 UUID 为实例
|
||||||
# 创建资源查询请求
|
|
||||||
r = SerialCommand.Request()
|
|
||||||
r.command = json.dumps(
|
|
||||||
{
|
|
||||||
"id": resource_data.get("id", None),
|
|
||||||
"uuid": resource_data.get("uuid", None),
|
|
||||||
"with_children": True,
|
|
||||||
}
|
|
||||||
)
|
|
||||||
|
|
||||||
# 同步调用资源查询服务
|
Args:
|
||||||
future = self._resource_clients["resource_get"].call_async(r)
|
*uuids: 一个或多个资源 UUID
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
单个 UUID 时返回单个资源实例,多个 UUID 时返回资源实例列表
|
||||||
|
"""
|
||||||
|
if not uuids:
|
||||||
|
raise ValueError("至少需要提供一个 UUID")
|
||||||
|
|
||||||
|
uuids_list = list(uuids)
|
||||||
|
future = self._resource_clients["c2s_update_resource_tree"].call_async(
|
||||||
|
SerialCommand.Request(
|
||||||
|
command=json.dumps(
|
||||||
|
{
|
||||||
|
"data": {"data": uuids_list, "with_children": True},
|
||||||
|
"action": "get",
|
||||||
|
}
|
||||||
|
)
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
# 等待结果(使用while循环,每次sleep 0.05秒,最多等待30秒)
|
# 等待结果(使用while循环,每次sleep 0.05秒,最多等待30秒)
|
||||||
timeout = 30.0
|
timeout = 30.0
|
||||||
elapsed = 0.0
|
elapsed = 0.0
|
||||||
while not future.done() and elapsed < timeout:
|
while not future.done() and elapsed < timeout:
|
||||||
time.sleep(0.05)
|
time.sleep(0.02)
|
||||||
elapsed += 0.05
|
elapsed += 0.02
|
||||||
|
|
||||||
if not future.done():
|
if not future.done():
|
||||||
raise Exception(f"资源查询超时: {resource_data}")
|
raise Exception(f"资源查询超时: {uuids_list}")
|
||||||
|
|
||||||
response = future.result()
|
response = future.result()
|
||||||
if response is None:
|
if response is None:
|
||||||
raise Exception(f"资源查询返回空结果: {resource_data}")
|
raise Exception(f"资源查询返回空结果: {uuids_list}")
|
||||||
|
|
||||||
raw_data = json.loads(response.response)
|
raw_data = json.loads(response.response)
|
||||||
|
|
||||||
# 转换为 PLR 资源
|
# 转换为 PLR 资源
|
||||||
tree_set = ResourceTreeSet.from_raw_dict_list(raw_data)
|
tree_set = ResourceTreeSet.from_raw_dict_list(raw_data)
|
||||||
plr_resource = tree_set.to_plr_resources()[0]
|
if not len(tree_set.trees):
|
||||||
|
raise Exception(f"资源查询返回空树: {raw_data}")
|
||||||
|
plr_resources = tree_set.to_plr_resources()
|
||||||
|
|
||||||
# 通过资源跟踪器获取本地实例
|
# 通过资源跟踪器获取本地实例
|
||||||
res = self.resource_tracker.figure_resource(plr_resource, try_mode=True)
|
figured_resources: List[ResourcePLR] = []
|
||||||
if len(res) == 0:
|
for plr_resource, tree in zip(plr_resources, tree_set.trees):
|
||||||
self.lab_logger().warning(f"资源转换未能索引到实例: {resource_data},返回新建实例")
|
res = self.resource_tracker.figure_resource(plr_resource, try_mode=True)
|
||||||
return plr_resource
|
if len(res) == 0:
|
||||||
elif len(res) == 1:
|
self.lab_logger().warning(f"资源转换未能索引到实例: {tree.root_node.res_content},返回新建实例")
|
||||||
return res[0]
|
figured_resources.append(plr_resource)
|
||||||
else:
|
elif len(res) == 1:
|
||||||
raise ValueError(f"资源转换得到多个实例: {res}")
|
figured_resources.append(res[0])
|
||||||
|
else:
|
||||||
|
raise ValueError(f"资源转换得到多个实例: {res}")
|
||||||
|
|
||||||
|
mapped_plr_resources = []
|
||||||
|
for uuid in uuids_list:
|
||||||
|
for plr_resource in figured_resources:
|
||||||
|
r = self.resource_tracker.loop_find_with_uuid(plr_resource, uuid)
|
||||||
|
mapped_plr_resources.append(r)
|
||||||
|
break
|
||||||
|
|
||||||
|
return mapped_plr_resources
|
||||||
|
|
||||||
async def _execute_driver_command_async(self, string: str):
|
async def _execute_driver_command_async(self, string: str):
|
||||||
try:
|
try:
|
||||||
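# Hedged sketch: the bounded busy-wait used by the rewritten _convert_resources_sync above.
# rclpy's call_async returns a future-like object exposing .done() and .result(); here a
# concurrent.futures.Future stands in so the sketch runs without ROS 2.
import time
from concurrent.futures import Future

def wait_for_future(future: Future, timeout: float = 30.0, poll: float = 0.02):
    elapsed = 0.0
    while not future.done() and elapsed < timeout:
        time.sleep(poll)
        elapsed += poll
    if not future.done():
        raise TimeoutError(f"资源查询超时 after {timeout}s")
    return future.result()

if __name__ == "__main__":
    f: Future = Future()
    f.set_result({"response": "[]"})
    print(wait_for_future(f, timeout=1.0))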
@@ -1555,6 +1745,9 @@ class BaseROS2DeviceNode(Node, Generic[T]):
|
|||||||
try:
|
try:
|
||||||
function_name = target["function_name"]
|
function_name = target["function_name"]
|
||||||
function_args = target["function_args"]
|
function_args = target["function_args"]
|
||||||
|
# 获取 unilabos 系统参数
|
||||||
|
unilabos_param: Dict[str, Any] = target.get(JSON_UNILABOS_PARAM, {})
|
||||||
|
|
||||||
assert isinstance(function_args, dict), "执行动作时JSON必须为dict类型\n原JSON: {string}"
|
assert isinstance(function_args, dict), "执行动作时JSON必须为dict类型\n原JSON: {string}"
|
||||||
function = getattr(self.driver_instance, function_name)
|
function = getattr(self.driver_instance, function_name)
|
||||||
assert callable(
|
assert callable(
|
||||||
@@ -1564,14 +1757,30 @@ class BaseROS2DeviceNode(Node, Generic[T]):
|
|||||||
function
|
function
|
||||||
), f"执行动作时JSON中的function并非异步: {function_name}\n原JSON: {string}"
|
), f"执行动作时JSON中的function并非异步: {function_name}\n原JSON: {string}"
|
||||||
|
|
||||||
# 处理 ResourceSlot 类型参数
|
# 处理参数(包含 unilabos 系统参数如 sample_uuids)
|
||||||
args_list = default_manager._analyze_method_signature(function)["args"]
|
args_list = default_manager._analyze_method_signature(function, skip_unilabos_params=False)["args"]
|
||||||
for arg in args_list:
|
for arg in args_list:
|
||||||
arg_name = arg["name"]
|
arg_name = arg["name"]
|
||||||
arg_type = arg["type"]
|
arg_type = arg["type"]
|
||||||
|
|
||||||
# 跳过不在 function_args 中的参数
|
# 跳过不在 function_args 中的参数
|
||||||
if arg_name not in function_args:
|
if arg_name not in function_args:
|
||||||
|
# 处理 sample_uuids 参数注入
|
||||||
|
if arg_name == PARAM_SAMPLE_UUIDS:
|
||||||
|
raw_sample_uuids = unilabos_param.get(PARAM_SAMPLE_UUIDS, {})
|
||||||
|
# 将 material uuid 转换为 resource 实例
|
||||||
|
# key: sample_uuid, value: material_uuid -> resource 实例
|
||||||
|
resolved_sample_uuids: Dict[str, Any] = {}
|
||||||
|
for sample_uuid, material_uuid in raw_sample_uuids.items():
|
||||||
|
if material_uuid and self.resource_tracker:
|
||||||
|
resource = self.resource_tracker.uuid_to_resources.get(material_uuid)
|
||||||
|
resolved_sample_uuids[sample_uuid] = resource if resource else material_uuid
|
||||||
|
else:
|
||||||
|
resolved_sample_uuids[sample_uuid] = material_uuid
|
||||||
|
function_args[PARAM_SAMPLE_UUIDS] = resolved_sample_uuids
|
||||||
|
self.lab_logger().debug(
|
||||||
|
f"[JsonCommandAsync] 注入 {PARAM_SAMPLE_UUIDS}: {resolved_sample_uuids}"
|
||||||
|
)
|
||||||
continue
|
continue
|
||||||
|
|
||||||
# 处理单个 ResourceSlot
|
# 处理单个 ResourceSlot
|
||||||
@@ -1661,6 +1870,15 @@ class ROS2DeviceNode:
    它不继承设备类,而是通过代理模式访问设备类的属性和方法。
    """

+    # 类变量,用于循环管理
+    _asyncio_loop = None
+    _asyncio_loop_running = False
+    _asyncio_loop_thread = None
+
+    @classmethod
+    def get_asyncio_loop(cls):
+        return cls._asyncio_loop
+
    @staticmethod
    async def safe_task_wrapper(trace_callback, func, **kwargs):
        try:
@@ -1737,6 +1955,11 @@ class ROS2DeviceNode:
            print_publish: 是否打印发布信息
            driver_is_ros:
        """
+        # 在初始化时检查循环状态
+        if ROS2DeviceNode._asyncio_loop_running and ROS2DeviceNode._asyncio_loop_thread is not None:
+            pass
+        elif ROS2DeviceNode._asyncio_loop_thread is None:
+            self._start_loop()
+
        # 保存设备类是否支持异步上下文
        self._has_async_context = hasattr(driver_class, "__aenter__") and hasattr(driver_class, "__aexit__")
@@ -1754,6 +1977,7 @@ class ROS2DeviceNode:
            or driver_class.__name__ == "LiquidHandlerBiomek"
            or driver_class.__name__ == "PRCXI9300Handler"
            or driver_class.__name__ == "TransformXYZHandler"
+            or driver_class.__name__ == "OpcUaClient"
        )

        # 创建设备类实例
@@ -1827,6 +2051,19 @@ class ROS2DeviceNode:
        except Exception as e:
            self._ros_node.lab_logger().error(f"设备后初始化失败: {e}")

+    def _start_loop(self):
+        def run_event_loop():
+            loop = asyncio.new_event_loop()
+            ROS2DeviceNode._asyncio_loop = loop
+            asyncio.set_event_loop(loop)
+            loop.run_forever()
+
+        ROS2DeviceNode._asyncio_loop_thread = threading.Thread(
+            target=run_event_loop, daemon=True, name="ROS2DeviceNode"
+        )
+        ROS2DeviceNode._asyncio_loop_thread.start()
+        logger.info(f"循环线程已启动")


 class DeviceInfoType(TypedDict):
     id: str
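# Hedged sketch: how a driver might hand a coroutine to the background loop that _start_loop
# creates above. asyncio.run_coroutine_threadsafe is the standard way to submit work to a
# loop running in another thread; the names below are illustrative, not the project API.
import asyncio
import threading

def start_background_loop() -> asyncio.AbstractEventLoop:
    loop = asyncio.new_event_loop()
    threading.Thread(target=loop.run_forever, daemon=True, name="demo-loop").start()
    return loop

async def read_sensor() -> float:
    await asyncio.sleep(0.01)
    return 42.0

if __name__ == "__main__":
    loop = start_background_loop()
    value = asyncio.run_coroutine_threadsafe(read_sensor(), loop).result(timeout=1.0)
    print(value)
    loop.call_soon_threadsafe(loop.stop)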
@@ -1,17 +1,17 @@
 import collections
-from dataclasses import dataclass, field
 import json
 import threading
 import time
 import traceback
 import uuid
-from typing import TYPE_CHECKING, Optional, Dict, Any, List, ClassVar, Set, TypedDict, Union
+from dataclasses import dataclass, field
+from typing import TYPE_CHECKING, Optional, Dict, Any, List, ClassVar, Set, Union

 from action_msgs.msg import GoalStatus
 from geometry_msgs.msg import Point
 from rclpy.action import ActionClient, get_action_server_names_and_types_by_node
-from rclpy.callback_groups import ReentrantCallbackGroup
 from rclpy.service import Service
+from typing_extensions import TypedDict
 from unilabos_msgs.msg import Resource  # type: ignore
 from unilabos_msgs.srv import (
     ResourceAdd,
@@ -19,14 +19,24 @@ from unilabos_msgs.srv import (
|
|||||||
ResourceUpdate,
|
ResourceUpdate,
|
||||||
ResourceList,
|
ResourceList,
|
||||||
SerialCommand,
|
SerialCommand,
|
||||||
ResourceGet,
|
|
||||||
) # type: ignore
|
) # type: ignore
|
||||||
from unilabos_msgs.srv._serial_command import SerialCommand_Request, SerialCommand_Response
|
from unilabos_msgs.srv._serial_command import SerialCommand_Request, SerialCommand_Response
|
||||||
from unique_identifier_msgs.msg import UUID
|
from unique_identifier_msgs.msg import UUID
|
||||||
|
|
||||||
|
from unilabos.registry.placeholder_type import ResourceSlot, DeviceSlot
|
||||||
from unilabos.registry.registry import lab_registry
|
from unilabos.registry.registry import lab_registry
|
||||||
|
from unilabos.resources.container import RegularContainer
|
||||||
from unilabos.resources.graphio import initialize_resource
|
from unilabos.resources.graphio import initialize_resource
|
||||||
from unilabos.resources.registry import add_schema
|
from unilabos.resources.registry import add_schema
|
||||||
|
from unilabos.resources.resource_tracker import (
|
||||||
|
ResourceDict,
|
||||||
|
ResourceDictInstance,
|
||||||
|
ResourceTreeSet,
|
||||||
|
ResourceTreeInstance,
|
||||||
|
RETURN_UNILABOS_SAMPLES,
|
||||||
|
JSON_UNILABOS_PARAM,
|
||||||
|
PARAM_SAMPLE_UUIDS,
|
||||||
|
)
|
||||||
from unilabos.ros.initialize_device import initialize_device_from_dict
|
from unilabos.ros.initialize_device import initialize_device_from_dict
|
||||||
from unilabos.ros.msgs.message_converter import (
|
from unilabos.ros.msgs.message_converter import (
|
||||||
get_msg_type,
|
get_msg_type,
|
||||||
@@ -37,17 +47,10 @@ from unilabos.ros.msgs.message_converter import (
|
|||||||
)
|
)
|
||||||
from unilabos.ros.nodes.base_device_node import BaseROS2DeviceNode, ROS2DeviceNode, DeviceNodeResourceTracker
|
from unilabos.ros.nodes.base_device_node import BaseROS2DeviceNode, ROS2DeviceNode, DeviceNodeResourceTracker
|
||||||
from unilabos.ros.nodes.presets.controller_node import ControllerNode
|
from unilabos.ros.nodes.presets.controller_node import ControllerNode
|
||||||
from unilabos.ros.nodes.resource_tracker import (
|
|
||||||
ResourceDict,
|
|
||||||
ResourceDictInstance,
|
|
||||||
ResourceTreeSet,
|
|
||||||
ResourceTreeInstance,
|
|
||||||
)
|
|
||||||
from unilabos.utils import logger
|
from unilabos.utils import logger
|
||||||
from unilabos.utils.exception import DeviceClassInvalid
|
from unilabos.utils.exception import DeviceClassInvalid
|
||||||
from unilabos.utils.log import warning
|
from unilabos.utils.log import warning
|
||||||
from unilabos.utils.type_check import serialize_result_info
|
from unilabos.utils.type_check import serialize_result_info
|
||||||
from unilabos.registry.placeholder_type import ResourceSlot, DeviceSlot
|
|
||||||
|
|
||||||
if TYPE_CHECKING:
|
if TYPE_CHECKING:
|
||||||
from unilabos.app.ws_client import QueueItem
|
from unilabos.app.ws_client import QueueItem
|
||||||
@@ -63,6 +66,18 @@ class TestResourceReturn(TypedDict):
     devices: List[DeviceSlot]


+class TestLatencyReturn(TypedDict):
+    """test_latency方法的返回值类型"""
+
+    avg_rtt_ms: float
+    avg_time_diff_ms: float
+    max_time_error_ms: float
+    task_delay_ms: float
+    raw_delay_ms: float
+    test_count: int
+    status: str
+
+
 class HostNode(BaseROS2DeviceNode):
     """
     主机节点类,负责管理设备、资源和控制器
@@ -72,6 +87,8 @@ class HostNode(BaseROS2DeviceNode):

    _instance: ClassVar[Optional["HostNode"]] = None
    _ready_event: ClassVar[threading.Event] = threading.Event()
+    _shutting_down: ClassVar[bool] = False  # Flag to signal shutdown to background threads
+    _background_threads: ClassVar[List[threading.Thread]] = []  # Track all background threads for cleanup
    _device_action_status: ClassVar[collections.defaultdict[str, DeviceActionStatus]] = collections.defaultdict(
        DeviceActionStatus
    )
@@ -83,6 +100,48 @@ class HostNode(BaseROS2DeviceNode):
            return cls._instance
        return None

+    @classmethod
+    def shutdown_background_threads(cls, timeout: float = 5.0) -> None:
+        """
+        Gracefully shutdown all background threads for clean exit or restart.
+
+        This method:
+        1. Sets shutdown flag to stop background operations
+        2. Waits for background threads to finish with timeout
+        3. Cleans up finished threads from tracking list
+
+        Args:
+            timeout: Maximum time to wait for each thread (seconds)
+        """
+        cls._shutting_down = True
+
+        # Wait for background threads to finish
+        active_threads = []
+        for t in cls._background_threads:
+            if t.is_alive():
+                t.join(timeout=timeout)
+                if t.is_alive():
+                    active_threads.append(t.name)
+
+        if active_threads:
+            logger.warning(f"[Host Node] Some background threads still running: {active_threads}")
+
+        # Clear the thread list
+        cls._background_threads.clear()
+        logger.info(f"[Host Node] Background threads shutdown complete")
+
+    @classmethod
+    def reset_state(cls) -> None:
+        """
+        Reset the HostNode singleton state for restart or clean exit.
+        Call this after destroying the instance.
+        """
+        cls._instance = None
+        cls._ready_event.clear()
+        cls._shutting_down = False
+        cls._background_threads.clear()
+        logger.info("[Host Node] State reset complete")
+
    def __init__(
        self,
        device_id: str,
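# Hedged sketch: the lifecycle the two classmethods above enable - track a worker thread,
# then shut it down with a timeout before a restart. HostNode itself is not imported here; a
# tiny stand-in class reproduces only the class-level bookkeeping.
import threading
import time

class MiniHost:
    _shutting_down = False
    _background_threads: list = []

    @classmethod
    def shutdown_background_threads(cls, timeout: float = 1.0) -> None:
        cls._shutting_down = True
        for t in cls._background_threads:
            t.join(timeout=timeout)
        cls._background_threads.clear()

def worker():
    # Background loop that polls the shutdown flag, as the host's discovery threads do.
    while not MiniHost._shutting_down:
        time.sleep(0.05)

if __name__ == "__main__":
    t = threading.Thread(target=worker, daemon=True)
    MiniHost._background_threads.append(t)
    t.start()
    MiniHost.shutdown_background_threads()
    print("threads left:", len(MiniHost._background_threads))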
@@ -296,12 +355,36 @@ class HostNode(BaseROS2DeviceNode):
|
|||||||
bridge.publish_host_ready()
|
bridge.publish_host_ready()
|
||||||
self.lab_logger().debug(f"Host ready signal sent via {bridge.__class__.__name__}")
|
self.lab_logger().debug(f"Host ready signal sent via {bridge.__class__.__name__}")
|
||||||
|
|
||||||
def _send_re_register(self, sclient):
|
def _send_re_register(self, sclient, device_namespace: str):
|
||||||
sclient.wait_for_service()
|
"""
|
||||||
request = SerialCommand.Request()
|
Send re-register command to a device. This is a one-time operation.
|
||||||
request.command = ""
|
|
||||||
future = sclient.call_async(request)
|
Args:
|
||||||
response = future.result()
|
sclient: The service client
|
||||||
|
device_namespace: The device namespace for logging
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
# Use timeout to prevent indefinite blocking
|
||||||
|
if not sclient.wait_for_service(timeout_sec=10.0):
|
||||||
|
self.lab_logger().debug(f"[Host Node] Re-register timeout for {device_namespace}")
|
||||||
|
return
|
||||||
|
|
||||||
|
# Check shutdown flag after wait
|
||||||
|
if self._shutting_down:
|
||||||
|
self.lab_logger().debug(f"[Host Node] Re-register aborted for {device_namespace} (shutdown)")
|
||||||
|
return
|
||||||
|
|
||||||
|
request = SerialCommand.Request()
|
||||||
|
request.command = ""
|
||||||
|
future = sclient.call_async(request)
|
||||||
|
# Use timeout for result as well
|
||||||
|
future.result()
|
||||||
|
except Exception as e:
|
||||||
|
# Gracefully handle destruction during shutdown
|
||||||
|
if "destruction was requested" in str(e) or self._shutting_down:
|
||||||
|
self.lab_logger().debug(f"[Host Node] Re-register aborted for {device_namespace} (cleanup)")
|
||||||
|
else:
|
||||||
|
self.lab_logger().warning(f"[Host Node] Re-register failed for {device_namespace}: {e}")
|
||||||
|
|
||||||
def _discover_devices(self) -> None:
|
def _discover_devices(self) -> None:
|
||||||
"""
|
"""
|
||||||
@@ -333,23 +416,27 @@ class HostNode(BaseROS2DeviceNode):
                 self._create_action_clients_for_device(device_id, namespace)
                 self._online_devices.add(device_key)
                 sclient = self.create_client(SerialCommand, f"/srv{namespace}/re_register_device")
-                threading.Thread(
+                t = threading.Thread(
                     target=self._send_re_register,
-                    args=(sclient,),
+                    args=(sclient, namespace),
                     daemon=True,
                     name=f"ROSDevice{self.device_id}_re_register_device_{namespace}",
-                ).start()
+                )
+                self._background_threads.append(t)
+                t.start()
             elif device_key not in self._online_devices:
                 # Device came back online
                 self.lab_logger().info(f"[Host Node] Device reconnected: {device_key}")
                 self._online_devices.add(device_key)
                 sclient = self.create_client(SerialCommand, f"/srv{namespace}/re_register_device")
-                threading.Thread(
+                t = threading.Thread(
                     target=self._send_re_register,
-                    args=(sclient,),
+                    args=(sclient, namespace),
                     daemon=True,
                     name=f"ROSDevice{self.device_id}_re_register_device_{namespace}",
-                ).start()
+                )
+                self._background_threads.append(t)
+                t.start()

         # Detect offline devices
         offline_devices = self._online_devices - current_devices
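The hunk above stops using fire-and-forget `threading.Thread(...).start()` and instead records the handle in `self._background_threads` before starting it, so shutdown can find and join the workers. A self-contained sketch of that pattern; the registry class name and the join timeout are hypothetical.

import threading
import time


class BackgroundThreadRegistry:
    """Minimal sketch of the keep-a-handle-then-start pattern used above."""

    def __init__(self) -> None:
        self._background_threads: list[threading.Thread] = []
        self._shutting_down = False

    def launch(self, target, *args, name: str) -> threading.Thread:
        t = threading.Thread(target=target, args=args, daemon=True, name=name)
        # Record the handle before starting so shutdown can always find it.
        self._background_threads.append(t)
        t.start()
        return t

    def shutdown(self, join_timeout: float = 2.0) -> None:
        self._shutting_down = True
        for t in self._background_threads:
            t.join(timeout=join_timeout)


if __name__ == "__main__":
    reg = BackgroundThreadRegistry()
    reg.launch(time.sleep, 0.1, name="demo_sleep")
    reg.shutdown()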
@@ -515,11 +602,10 @@ class HostNode(BaseROS2DeviceNode):
             )

             try:
-                new_li = []
+                assert len(response) == 1, "Create Resource should return exactly one result"
                 for i in response:
                     res = json.loads(i)
-                    new_li.append(res)
-                return {"resources": new_li, "liquid_input_resources": new_li}
+                    return res
             except Exception as ex:
                 pass
             _n = "\n"
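The new branch asserts that the create-resource reply carries exactly one JSON payload and returns it directly instead of wrapping it in a list. A tiny sketch of that contract; the helper name and the sample payload are hypothetical.

import json


def parse_single_resource(response):
    """Parse a create-resource reply that must contain exactly one JSON string."""
    assert len(response) == 1, "Create Resource should return exactly one result"
    return json.loads(response[0])


print(parse_single_resource(['{"id": "flask_1", "type": "container"}']))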
@@ -665,13 +751,14 @@ class HostNode(BaseROS2DeviceNode):
         if bCreate:
             self.lab_logger().trace(f"Status created: {device_id}.{property_name} = {msg.data}")
         else:
-            self.lab_logger().debug(f"Status updated: {device_id}.{property_name} = {msg.data}")
+            self.lab_logger().trace(f"Status updated: {device_id}.{property_name} = {msg.data}")

     def send_goal(
         self,
         item: "QueueItem",
         action_type: str,
         action_kwargs: Dict[str, Any],
+        sample_material: Dict[str, str],
         server_info: Optional[Dict[str, Any]] = None,
     ) -> None:
         """
@@ -689,14 +776,14 @@ class HostNode(BaseROS2DeviceNode):
             if action_name.startswith("auto-"):
                 action_name = action_name[5:]
             action_id = f"/devices/{device_id}/_execute_driver_command"
-            action_kwargs = {
-                "string": json.dumps(
-                    {
-                        "function_name": action_name,
-                        "function_args": action_kwargs,
-                    }
-                )
-            }
+            json_command: Dict[str, Any] = {
+                "function_name": action_name,
+                "function_args": action_kwargs,
+                JSON_UNILABOS_PARAM: {
+                    PARAM_SAMPLE_UUIDS: sample_material,
+                },
+            }
+            action_kwargs = {"string": json.dumps(json_command)}
             if action_type.startswith("UniLabJsonCommandAsync"):
                 action_id = f"/devices/{device_id}/_execute_driver_command_async"
             else:
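`send_goal` now wraps the driver call plus the sample mapping into one JSON envelope and serializes it into the goal's single string field. A sketch of that envelope, assuming placeholder string values for `JSON_UNILABOS_PARAM` and `PARAM_SAMPLE_UUIDS` (the patch references these constants but does not show their definitions).

import json
from typing import Any, Dict

# Placeholder values: the patch references these constants without showing them.
JSON_UNILABOS_PARAM = "unilabos_param"
PARAM_SAMPLE_UUIDS = "sample_uuids"


def build_driver_command(action_name: str, action_kwargs: Dict[str, Any],
                         sample_material: Dict[str, str]) -> Dict[str, str]:
    """Wrap a driver call and its sample mapping into a single-string goal payload."""
    json_command: Dict[str, Any] = {
        "function_name": action_name,
        "function_args": action_kwargs,
        JSON_UNILABOS_PARAM: {PARAM_SAMPLE_UUIDS: sample_material},
    }
    # The action goal carries one string field, so the whole envelope is serialized once.
    return {"string": json.dumps(json_command)}


if __name__ == "__main__":
    goal = build_driver_command("transfer", {"volume": 10.0}, {"sample_A": "uuid-123"})
    print(goal["string"])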
@@ -707,23 +794,11 @@ class HostNode(BaseROS2DeviceNode):
             raise ValueError(f"ActionClient {action_id} not found.")

         action_client: ActionClient = self._action_clients[action_id]
-        # Walk every sub-dict of action_kwargs and copy the value of "sample_uuid" into "sample_id"
-        def assign_sample_id(obj):
-            if isinstance(obj, dict):
-                if "sample_uuid" in obj:
-                    obj["sample_id"] = obj["sample_uuid"]
-                    obj.pop("sample_uuid")
-                for k, v in obj.items():
-                    if k != "unilabos_extra":
-                        assign_sample_id(v)
-            elif isinstance(obj, list):
-                for item in obj:
-                    assign_sample_id(item)
-
-        assign_sample_id(action_kwargs)
         goal_msg = convert_to_ros_msg(action_client._action_type.Goal(), action_kwargs)

-        self.lab_logger().info(f"[Host Node] Sending goal for {action_id}: {goal_msg}")
+        # self.lab_logger().trace(f"[Host Node] Sending goal for {action_id}: {str(goal_msg)[:1000]}")
+        self.lab_logger().trace(f"[Host Node] Sending goal for {action_id}: {action_kwargs}")
+        self.lab_logger().trace(f"[Host Node] Sending goal for {action_id}: {goal_msg}")
         action_client.wait_for_server()
         goal_uuid_obj = UUID(uuid=list(u.bytes))

@@ -744,9 +819,7 @@ class HostNode(BaseROS2DeviceNode):
         self.lab_logger().info(f"[Host Node] Goal {action_id} ({item.job_id}) accepted")
         self._goals[item.job_id] = goal_handle
         goal_future = goal_handle.get_result_async()
-        goal_future.add_done_callback(
-            lambda f: self.get_result_callback(item, action_id, f)
-        )
+        goal_future.add_done_callback(lambda f: self.get_result_callback(item, action_id, f))
        goal_future.result()

     def feedback_callback(self, item: "QueueItem", action_id: str, feedback_msg) -> None:
@@ -783,9 +856,14 @@ class HostNode(BaseROS2DeviceNode):
         # Extra handling to adapt to the backend
         return_value = return_info.get("return_value")
         if isinstance(return_value, dict):
-            unilabos_samples = return_info.get("unilabos_samples")
-            if isinstance(unilabos_samples, list):
-                return_info["unilabos_samples"] = unilabos_samples
+            unilabos_samples = return_value.pop(RETURN_UNILABOS_SAMPLES, None)
+            if isinstance(unilabos_samples, list) and unilabos_samples:
+                self.lab_logger().info(
+                    f"[Host Node] Job {job_id[:8]} returned {len(unilabos_samples)} sample(s): "
+                    f"{[s.get('name', s.get('id', 'unknown')) if isinstance(s, dict) else str(s)[:20] for s in unilabos_samples[:5]]}"
+                    f"{'...' if len(unilabos_samples) > 5 else ''}"
+                )
+                return_info["samples"] = unilabos_samples
         suc = return_info.get("suc", False)
         if not suc:
             status = "failed"
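The added log line prints at most five sample names and appends an ellipsis when more were returned. The same expression pulled out into a standalone helper; the function name and the demo data are hypothetical.

def summarize_samples(samples: list) -> str:
    """Render at most five sample names for a log line, mirroring the truncation above."""
    names = [
        s.get("name", s.get("id", "unknown")) if isinstance(s, dict) else str(s)[:20]
        for s in samples[:5]
    ]
    return f"{len(samples)} sample(s): {names}{'...' if len(samples) > 5 else ''}"


print(summarize_samples([{"name": f"well_{i}"} for i in range(7)]))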
@@ -811,7 +889,7 @@ class HostNode(BaseROS2DeviceNode):
         # Clean up the record in _goals
         if job_id in self._goals:
             del self._goals[job_id]
-            self.lab_logger().debug(f"[Host Node] Removed goal {job_id[:8]} from _goals")
+            self.lab_logger().trace(f"[Host Node] Removed goal {job_id[:8]} from _goals")

         # Store the result for HTTP API queries
         try:
@@ -1063,11 +1141,11 @@ class HostNode(BaseROS2DeviceNode):

         Receive serialized ResourceTreeSet data and process it
         """
-        self.lab_logger().info(f"[Host Node-Resource] Resource tree add request received")
         try:
             # Parse the request data
             data = json.loads(request.command)
             action = data["action"]
+            self.lab_logger().info(f"[Host Node-Resource] Resource tree {action} request received")
             data = data["data"]
             if action == "add":
                 await self._resource_tree_action_add_callback(data, response)
|
|||||||
"""
|
"""
|
||||||
更新节点信息回调
|
更新节点信息回调
|
||||||
"""
|
"""
|
||||||
# self.lab_logger().info(f"[Host Node] Node info update request received: {request}")
|
self.lab_logger().trace(f"[Host Node] Node info update request received: {request}")
|
||||||
try:
|
try:
|
||||||
from unilabos.app.communication import get_communication_client
|
from unilabos.app.communication import get_communication_client
|
||||||
from unilabos.app.web.client import HTTPClient, http_client
|
from unilabos.app.web.client import HTTPClient, http_client
|
||||||
@@ -1169,10 +1247,11 @@ class HostNode(BaseROS2DeviceNode):
         """
         try:
             from unilabos.app.web import http_client
+
             data = json.loads(request.command)
             if "uuid" in data and data["uuid"] is not None:
                 http_req = http_client.resource_tree_get([data["uuid"]], data["with_children"])
-            elif "id" in data and data["id"].startswith("/"):
+            elif "id" in data:
                 http_req = http_client.resource_get(data["id"], data["with_children"])
             else:
                 raise ValueError("No valid resource id or uuid was provided")
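The handler now accepts any `id` (the old code additionally required a leading `/`) and prefers a `uuid`-based tree lookup when a uuid is present. A compact sketch of that branching, decoupled from the HTTP client; the function name and the sample command are hypothetical.

import json


def build_resource_query(command: str):
    """Choose between a uuid-based tree lookup and an id-based lookup."""
    data = json.loads(command)
    with_children = data.get("with_children", False)
    if data.get("uuid") is not None:
        return "resource_tree_get", [data["uuid"]], with_children
    if "id" in data:
        return "resource_get", data["id"], with_children
    raise ValueError("No valid resource id or uuid was provided")


print(build_resource_query('{"id": "PRCXI_Deck", "with_children": true}'))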
@@ -1255,10 +1334,20 @@ class HostNode(BaseROS2DeviceNode):
         self.lab_logger().debug(f"[Host Node-Resource] List parameters: {request}")
         return response

-    def test_latency(self):
+    def test_latency(self) -> TestLatencyReturn:
         """
         Action implementation for testing network latency
         Calibrates the clock offset via 5 ping-pong rounds and computes the actual latency
+
+        Returns:
+            TestLatencyReturn: dict containing the latency test results, including:
+            - avg_rtt_ms: average round-trip time (ms)
+            - avg_time_diff_ms: average time difference (ms)
+            - max_time_error_ms: maximum time error (ms)
+            - task_delay_ms: actual task delay (ms); -1 means it could not be computed
+            - raw_delay_ms: raw time difference (ms); -1 means it could not be computed
+            - test_count: number of valid test rounds
+            - status: "success" on success, "all_timeout" if every round timed out
         """
         import uuid as uuid_module

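The docstring above describes a 5-round ping-pong that calibrates clock error and derives the effective latency. As background, a hedged sketch of a standard single-timestamp (NTP-style) estimate for RTT and clock offset; the exact timestamps the bridge exchanges are not shown in this diff, so the formula and the numbers below are assumptions for illustration only.

def estimate_latency(t_send: float, t_server: float, t_recv: float):
    """Single-round estimate assuming the server stamps t_server between our send and receive."""
    rtt_ms = (t_recv - t_send) * 1000.0
    # Clock-offset estimate: server time compared with the midpoint of the round trip.
    offset_ms = (t_server - (t_send + t_recv) / 2.0) * 1000.0
    return rtt_ms, offset_ms


rounds = [(0.000, 0.013, 0.020), (0.100, 0.112, 0.121)]  # synthetic timestamps in seconds
results = [estimate_latency(*r) for r in rounds]
avg_rtt_ms = sum(r[0] for r in results) / len(results)
avg_time_diff_ms = sum(r[1] for r in results) / len(results)
print(avg_rtt_ms, avg_time_diff_ms)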
@@ -1321,7 +1410,15 @@ class HostNode(BaseROS2DeviceNode):

         if not ping_results:
             self.lab_logger().error("❌ All ping-pong tests failed")
-            return {"status": "all_timeout"}
+            return {
+                "avg_rtt_ms": -1.0,
+                "avg_time_diff_ms": -1.0,
+                "max_time_error_ms": -1.0,
+                "task_delay_ms": -1.0,
+                "raw_delay_ms": -1.0,
+                "test_count": 0,
+                "status": "all_timeout",
+            }

         # Statistical analysis
         rtts = [r["rtt_ms"] for r in ping_results]
@@ -1329,7 +1426,7 @@ class HostNode(BaseROS2DeviceNode):

         avg_rtt_ms = sum(rtts) / len(rtts)
         avg_time_diff_ms = sum(time_diffs) / len(time_diffs)
-        max_time_diff_error_ms = max(abs(min(time_diffs)), abs(max(time_diffs)))
+        max_time_diff_error_ms: float = max(abs(min(time_diffs)), abs(max(time_diffs)))

         self.lab_logger().info("-" * 50)
         self.lab_logger().info("[Test statistics]")
@@ -1369,7 +1466,7 @@ class HostNode(BaseROS2DeviceNode):

         self.lab_logger().info("=" * 60)

-        return {
+        res: TestLatencyReturn = {
             "avg_rtt_ms": avg_rtt_ms,
             "avg_time_diff_ms": avg_time_diff_ms,
             "max_time_error_ms": max_time_diff_error_ms,
@@ -1380,12 +1477,23 @@ class HostNode(BaseROS2DeviceNode):
             "test_count": len(ping_results),
             "status": "success",
         }
+        return res

     def test_resource(
-        self, resource: ResourceSlot, resources: List[ResourceSlot], device: DeviceSlot, devices: List[DeviceSlot]
+        self,
+        resource: ResourceSlot = None,
+        resources: List[ResourceSlot] = None,
+        device: DeviceSlot = None,
+        devices: List[DeviceSlot] = None,
     ) -> TestResourceReturn:
+        if resources is None:
+            resources = []
+        if devices is None:
+            devices = []
+        if resource is None:
+            resource = RegularContainer("test_resource passed None")
         return {
-            "resources": ResourceTreeSet.from_plr_resources([resource, *resources]).dump(),
+            "resources": ResourceTreeSet.from_plr_resources([resource, *resources], known_newly_created=True).dump(),
             "devices": [device, *devices],
         }

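The new signature defaults every slot to `None` and normalizes inside the body; a mutable default such as `resources=[]` would be shared across calls. A short sketch spelling out the equivalent `Optional` typing; the function name and the demo values are hypothetical.

from typing import List, Optional


def test_resource_defaults(resources: Optional[List[str]] = None,
                           devices: Optional[List[str]] = None) -> dict:
    """None-defaults avoid the shared-mutable-default pitfall of `resources=[]`."""
    if resources is None:
        resources = []
    if devices is None:
        devices = []
    resources.append("local_only")
    return {"resources": resources, "devices": devices}


# Each call gets its own fresh list, so repeated calls do not accumulate state.
print(test_resource_defaults())
print(test_resource_defaults())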
@@ -1437,7 +1545,9 @@ class HostNode(BaseROS2DeviceNode):

         # Build the service address
         srv_address = f"/srv{namespace}/s2c_resource_tree"
-        self.lab_logger().info(f"[Host Node-Resource] Notifying {device_id} for resource tree {action} operation")
+        self.lab_logger().trace(
+            f"[Host Node-Resource] Host -> {device_id} ResourceTree {action} operation started -------"
+        )

         # Create the service client
         sclient = self.create_client(SerialCommand, srv_address)
@@ -1472,8 +1582,8 @@ class HostNode(BaseROS2DeviceNode):
             time.sleep(0.05)

         response = future.result()
-        self.lab_logger().info(
-            f"[Host Node-Resource] Resource tree {action} notification completed for {device_id}"
+        self.lab_logger().trace(
+            f"[Host Node-Resource] Host -> {device_id} ResourceTree {action} operation completed -------"
         )
         return True

@@ -6,17 +6,13 @@ from typing import List, Dict, Any, Optional, TYPE_CHECKING

 import rclpy
 from rosidl_runtime_py import message_to_ordereddict
-from unilabos_msgs.msg import Resource
-from unilabos_msgs.srv import ResourceUpdate

 from unilabos.messages import *  # type: ignore # protocol names
 from rclpy.action import ActionServer, ActionClient
 from rclpy.action.server import ServerGoalHandle
-from rclpy.callback_groups import ReentrantCallbackGroup
 from unilabos_msgs.srv._serial_command import SerialCommand_Request, SerialCommand_Response

 from unilabos.compile import action_protocol_generators
-from unilabos.resources.graphio import list_to_nested_dict, nested_dict_to_list
 from unilabos.ros.initialize_device import initialize_device_from_dict
 from unilabos.ros.msgs.message_converter import (
     get_action_type,
@@ -24,7 +20,7 @@ from unilabos.ros.msgs.message_converter import (
     convert_from_ros_msg_with_mapping,
 )
 from unilabos.ros.nodes.base_device_node import BaseROS2DeviceNode, DeviceNodeResourceTracker, ROS2DeviceNode
-from unilabos.ros.nodes.resource_tracker import ResourceTreeSet, ResourceDictInstance
+from unilabos.resources.resource_tracker import ResourceTreeSet, ResourceDictInstance
 from unilabos.utils.type_check import get_result_info_str

 if TYPE_CHECKING:
@@ -232,15 +228,15 @@ class ROS2WorkstationNode(BaseROS2DeviceNode):
                 try:
                     # Handle single or multiple resources uniformly
                     resource_id = (
-                        protocol_kwargs[k]["id"] if v == "unilabos_msgs/Resource" else protocol_kwargs[k][0]["id"]
+                        protocol_kwargs[k]["id"]
+                        if v == "unilabos_msgs/Resource"
+                        else protocol_kwargs[k][0]["id"]
                     )
                     resource_uuid = protocol_kwargs[k].get("uuid", None)
                     r = SerialCommand_Request()
                     r.command = json.dumps({"id": resource_id, "uuid": resource_uuid, "with_children": True})
                     # Send the request and wait for the response
-                    response: SerialCommand_Response = await self._resource_clients[
-                        "resource_get"
-                    ].call_async(
+                    response: SerialCommand_Response = await self._resource_clients["resource_get"].call_async(
                         r
                     )  # type: ignore
                     raw_data = json.loads(response.response)
@@ -308,12 +304,54 @@ class ROS2WorkstationNode(BaseROS2DeviceNode):

         # Report the current resource state back to the Host
         for k, v in goal.get_fields_and_field_types().items():
-            if v in ["unilabos_msgs/Resource", "sequence<unilabos_msgs/Resource>"]:
-                r = ResourceUpdate.Request()
-                r.resources = [
-                    convert_to_ros_msg(Resource, rs) for rs in nested_dict_to_list(protocol_kwargs[k])
-                ]
-                response = await self._resource_clients["resource_update"].call_async(r)
+            if v not in ["unilabos_msgs/Resource", "sequence<unilabos_msgs/Resource>"]:
+                continue
+            self.lab_logger().info(f"Updating resource state: {k}")
+            try:
+                # Deduplicate: use a seen set to collect unique resource objects
+                seen = set()
+                unique_resources = []
+
+                # Fetch the resource data and normalize it to a list
+                resource_data = protocol_kwargs[k]
+                is_sequence = v != "unilabos_msgs/Resource"
+                if not is_sequence:
+                    resource_list = [resource_data] if isinstance(resource_data, dict) else resource_data
+                else:
+                    # Handle sequence types, which may be nested lists
+                    resource_list = []
+                    if isinstance(resource_data, list):
+                        for item in resource_data:
+                            if isinstance(item, list):
+                                resource_list.extend(item)
+                            else:
+                                resource_list.append(item)
+                    else:
+                        resource_list = [resource_data]
+
+                for res_data in resource_list:
+                    if not isinstance(res_data, dict):
+                        continue
+                    res_name = res_data.get("id") or res_data.get("name")
+                    if not res_name:
+                        continue
+
+                    # Use resource_tracker to get the local PLR instance
+                    plr = self.resource_tracker.figure_resource({"name": res_name}, try_mode=False)
+                    # Get the parent resource
+                    res = self.resource_tracker.parent_resource(plr)
+                    if res is None:
+                        res = plr
+                    if id(res) not in seen:
+                        seen.add(id(res))
+                        unique_resources.append(res)
+
+                # Update via the new resource tree interface
+                if unique_resources:
+                    await self.update_resource(unique_resources)
+            except Exception as e:
+                self.lab_logger().error(f"Resource update failed: {e}")
+                self.lab_logger().error(traceback.format_exc())

         # Set the success status and return value
         execution_success = True
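The rewritten update loop deduplicates by object identity: each resource is mapped to its parent (falling back to itself) and membership of `id(obj)` in a `seen` set keeps one entry per distinct object. A stripped-down sketch of that pattern; the `Node` class and the `parent_of` callable are hypothetical stand-ins for the PLR resources and `resource_tracker.parent_resource`.

def unique_parents(objects, parent_of):
    """Collapse a batch of updates to one entry per distinct parent object.

    id() keeps equal-but-distinct objects apart, as in the hunk above.
    """
    seen = set()
    unique = []
    for obj in objects:
        root = parent_of(obj) or obj
        if id(root) not in seen:
            seen.add(id(root))
            unique.append(root)
    return unique


class Node:
    def __init__(self, name, parent=None):
        self.name, self.parent = name, parent


deck = Node("deck")
wells = [Node(f"well_{i}", deck) for i in range(3)]
print([n.name for n in unique_parents(wells, lambda n: n.parent)])  # -> ['deck']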
@@ -11,10 +11,9 @@ import traceback
 from abc import abstractmethod
 from typing import Type, Any, Dict, Optional, TypeVar, Generic, List

-from unilabos.resources.graphio import nested_dict_to_list, resource_ulab_to_plr
-from unilabos.ros.nodes.resource_tracker import DeviceNodeResourceTracker, ResourceTreeSet, ResourceDictInstance, \
+from unilabos.resources.resource_tracker import DeviceNodeResourceTracker, ResourceTreeSet, ResourceDictInstance, \
     ResourceTreeInstance
-from unilabos.utils import logger, import_manager
+from unilabos.utils import logger
 from unilabos.utils.cls_creator import create_instance_from_config

 # Define the generic type variable
@@ -53,7 +52,8 @@ class DeviceClassCreator(Generic[T]):
         if self.device_instance is not None:
             for c in self.children:
                 if c.res_content.type != "device":
-                    self.resource_tracker.add_resource(c.get_plr_nested_dict())
+                    res = ResourceTreeSet([ResourceTreeInstance(c)]).to_plr_resources()[0]
+                    self.resource_tracker.add_resource(res)

     def create_instance(self, data: Dict[str, Any]) -> T:
         """
@@ -120,7 +120,7 @@ class PyLabRobotCreator(DeviceClassCreator[T]):
         # return resource, source_type

     def _process_resource_references(
-        self, data: Any, to_dict=False, states=None, prefix_path="", name_to_uuid=None
+        self, data: Any, processed_child_names: Optional[Dict[str, Any]], to_dict=False, states=None, prefix_path="", name_to_uuid=None
     ) -> Any:
         """
         Recursively process resource references, replacing the resources that _resource_child_name points to
@@ -135,7 +135,7 @@ class PyLabRobotCreator(DeviceClassCreator[T]):
         Returns:
             The processed data
         """
-        from pylabrobot.resources import Deck, Resource
+        from pylabrobot.resources import Resource

         if states is None:
             states = {}
@@ -165,6 +165,7 @@ class PyLabRobotCreator(DeviceClassCreator[T]):
                     states[prefix_path] = resource_instance.serialize_all_state()
                     return serialized
                 else:
+                    processed_child_names[child_name] = resource_instance
                     self.resource_tracker.add_resource(resource_instance)
                     # Set the UUID immediately; the state was already handled in resource_ulab_to_plr
                     if name_to_uuid:
@@ -183,12 +184,12 @@ class PyLabRobotCreator(DeviceClassCreator[T]):
             result = {}
             for key, value in data.items():
                 new_prefix = f"{prefix_path}.{key}" if prefix_path else key
-                result[key] = self._process_resource_references(value, to_dict, states, new_prefix, name_to_uuid)
+                result[key] = self._process_resource_references(value, processed_child_names, to_dict, states, new_prefix, name_to_uuid)
             return result

         elif isinstance(data, list):
             return [
-                self._process_resource_references(item, to_dict, states, f"{prefix_path}[{i}]", name_to_uuid)
+                self._process_resource_references(item, processed_child_names, to_dict, states, f"{prefix_path}[{i}]", name_to_uuid)
                 for i, item in enumerate(data)
             ]

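`_process_resource_references` now threads a shared `processed_child_names` dict through every recursive call so the caller can see which child resources were resolved. A simplified sketch of threading one accumulator through a dict/list traversal; the `_resource_child_name` key comes from the patch, while the replace-with-name behaviour here is a simplification.

from typing import Any, Dict


def walk(data: Any, collected: Dict[str, Any], path: str = "") -> Any:
    """Recursive traversal that threads one shared accumulator through every level."""
    if isinstance(data, dict):
        if "_resource_child_name" in data:
            collected[data["_resource_child_name"]] = path  # record where the reference sat
            return data["_resource_child_name"]
        return {k: walk(v, collected, f"{path}.{k}" if path else k) for k, v in data.items()}
    if isinstance(data, list):
        return [walk(item, collected, f"{path}[{i}]") for i, item in enumerate(data)]
    return data


found: Dict[str, Any] = {}
cfg = {"deck": {"_resource_child_name": "PRCXI_Deck"}, "ports": [1, 2]}
print(walk(cfg, found), found)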
@@ -235,7 +236,7 @@ class PyLabRobotCreator(DeviceClassCreator[T]):
         # Process resource references first
         states = {}
         processed_data = self._process_resource_references(
-            data, to_dict=True, states=states, name_to_uuid=name_to_uuid
+            data, {}, to_dict=True, states=states, name_to_uuid=name_to_uuid
         )

         try:
@@ -271,7 +272,12 @@ class PyLabRobotCreator(DeviceClassCreator[T]):
                     arg_value = spec_args[param_name].annotation
                     data[param_name]["_resource_type"] = self.device_cls.__module__ + ":" + arg_value
                     logger.debug(f"Auto-filled _resource_type: {data[param_name]['_resource_type']}")
-            processed_data = self._process_resource_references(data, to_dict=False, name_to_uuid=name_to_uuid)
+            processed_child_names = {}
+            processed_data = self._process_resource_references(data, processed_child_names, to_dict=False, name_to_uuid=name_to_uuid)
+            for child_name, resource_instance in processed_data.items():
+                for ind, name in enumerate([child.res_content.name for child in self.children]):
+                    if name == child_name:
+                        self.children.pop(ind)
             self.device_instance = super(PyLabRobotCreator, self).create_instance(processed_data)  # Call directly once the variables are filled in; this invokes its own attach_resource
         except Exception as e:
             logger.error(f"PyLabRobot failed to create instance: {e}")
@@ -343,9 +349,10 @@ class WorkstationNodeCreator(DeviceClassCreator[T]):
         try:
             # Create the instance; also fill in an extra field for the protocol node (may be removed later)
             data["children"] = self.children
-            for child in self.children:
-                if child.res_content.type != "device":
-                    self.resource_tracker.add_resource(child.get_plr_nested_dict())
+            # super(WorkstationNodeCreator, self).create_instance(data) will attach these
+            # for child in self.children:
+            #     if child.res_content.type != "device":
+            #         self.resource_tracker.add_resource(child.get_plr_nested_dict())
             deck_dict = data.get("deck")
             if deck_dict:
                 from pylabrobot.resources import Deck, Resource
@@ -339,13 +339,8 @@
           "z": 0
         },
         "config": {
-          "max_volume": 500.0,
           "type": "RegularContainer",
-          "category": "container",
-          "max_temp": 200.0,
-          "min_temp": -20.0,
-          "has_stirrer": true,
-          "has_heater": true
+          "category": "container"
         },
         "data": {
           "liquids": [],
@@ -769,9 +764,7 @@
           "size_y": 250,
           "size_z": 0,
           "type": "RegularContainer",
-          "category": "container",
-          "reagent": "sodium_chloride",
-          "physical_state": "solid"
+          "category": "container"
         },
         "data": {
           "current_mass": 500.0,
@@ -792,14 +785,11 @@
           "z": 0
         },
         "config": {
-          "volume": 500.0,
           "size_x": 600,
           "size_y": 250,
           "size_z": 0,
           "type": "RegularContainer",
-          "category": "container",
-          "reagent": "sodium_carbonate",
-          "physical_state": "solid"
+          "category": "container"
         },
         "data": {
           "current_mass": 500.0,
@@ -820,14 +810,11 @@
           "z": 0
         },
         "config": {
-          "volume": 500.0,
           "size_x": 650,
           "size_y": 250,
           "size_z": 0,
           "type": "RegularContainer",
-          "category": "container",
-          "reagent": "magnesium_chloride",
-          "physical_state": "solid"
+          "category": "container"
         },
         "data": {
           "current_mass": 500.0,
unilabos/test/experiments/prcxi_9320_no_res.json (new file, 837 lines)
@@ -0,0 +1,837 @@
|
|||||||
|
{
|
||||||
|
"nodes": [
|
||||||
|
{
|
||||||
|
"id": "PRCXI",
|
||||||
|
"name": "PRCXI",
|
||||||
|
"type": "device",
|
||||||
|
"class": "liquid_handler.prcxi",
|
||||||
|
"parent": "",
|
||||||
|
"pose": {
|
||||||
|
"size": {
|
||||||
|
"width": 550,
|
||||||
|
"height": 400,
|
||||||
|
"depth": 0
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"config": {
|
||||||
|
"axis": "Left",
|
||||||
|
"deck": {
|
||||||
|
"_resource_type": "unilabos.devices.liquid_handling.prcxi.prcxi:PRCXI9300Deck",
|
||||||
|
"_resource_child_name": "PRCXI_Deck"
|
||||||
|
},
|
||||||
|
"host": "10.20.30.184",
|
||||||
|
"port": 9999,
|
||||||
|
"debug": false,
|
||||||
|
"setup": true,
|
||||||
|
"is_9320": true,
|
||||||
|
"timeout": 10,
|
||||||
|
"matrix_id": "5de524d0-3f95-406c-86dd-f83626ebc7cb",
|
||||||
|
"simulator": false,
|
||||||
|
"step_mode": false,
|
||||||
|
"channel_num": 2
|
||||||
|
},
|
||||||
|
"data": {
|
||||||
|
"reset_ok": true
|
||||||
|
},
|
||||||
|
"schema": {},
|
||||||
|
"description": "",
|
||||||
|
"model": null,
|
||||||
|
"position": {
|
||||||
|
"x": 0,
|
||||||
|
"y": 700,
|
||||||
|
"z": 0
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": "PRCXI_Deck",
|
||||||
|
"name": "PRCXI_Deck",
|
||||||
|
|
||||||
|
"children": [],
|
||||||
|
"parent": "PRCXI",
|
||||||
|
"type": "deck",
|
||||||
|
"class": "",
|
||||||
|
"position": {
|
||||||
|
"x": 0,
|
||||||
|
"y": 0,
|
||||||
|
"z": 0
|
||||||
|
},
|
||||||
|
"config": {
|
||||||
|
"type": "PRCXI9300Deck",
|
||||||
|
"size_x": 550,
|
||||||
|
"size_y": 400,
|
||||||
|
"size_z": 17,
|
||||||
|
"rotation": {
|
||||||
|
"x": 0,
|
||||||
|
"y": 0,
|
||||||
|
"z": 0,
|
||||||
|
"type": "Rotation"
|
||||||
|
},
|
||||||
|
"category": "deck",
|
||||||
|
"barcode": null
|
||||||
|
},
|
||||||
|
"data": {}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": "T1",
|
||||||
|
"name": "T1",
|
||||||
|
"children": [],
|
||||||
|
"parent": "PRCXI_Deck",
|
||||||
|
"type": "plate",
|
||||||
|
"class": "",
|
||||||
|
"position": {
|
||||||
|
"x": 5,
|
||||||
|
"y": 301,
|
||||||
|
"z": 0
|
||||||
|
},
|
||||||
|
"config": {
|
||||||
|
"type": "PRCXI9300PlateAdapterSite",
|
||||||
|
"size_x": 127.5,
|
||||||
|
"size_y": 86,
|
||||||
|
"size_z": 28,
|
||||||
|
"rotation": {
|
||||||
|
"x": 0,
|
||||||
|
"y": 0,
|
||||||
|
"z": 0,
|
||||||
|
"type": "Rotation"
|
||||||
|
},
|
||||||
|
"category": "plate",
|
||||||
|
"model": null,
|
||||||
|
"barcode": null,
|
||||||
|
|
||||||
|
"sites": [
|
||||||
|
{
|
||||||
|
"label": "T1",
|
||||||
|
"visible": true,
|
||||||
|
"position": { "x": 0, "y": 0, "z": 0 },
|
||||||
|
"size": { "width": 128.0, "height": 86, "depth": 0 },
|
||||||
|
"content_type": [
|
||||||
|
"plate",
|
||||||
|
"tip_rack",
|
||||||
|
"plates",
|
||||||
|
"tip_racks",
|
||||||
|
"tube_rack"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"data": {}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": "T2",
|
||||||
|
"name": "T2",
|
||||||
|
"children": [],
|
||||||
|
"parent": "PRCXI_Deck",
|
||||||
|
"type": "plate",
|
||||||
|
"class": "",
|
||||||
|
"position": {
|
||||||
|
"x": 142.5,
|
||||||
|
"y": 301,
|
||||||
|
"z": 0
|
||||||
|
},
|
||||||
|
"config": {
|
||||||
|
"type": "PRCXI9300PlateAdapterSite",
|
||||||
|
"size_x": 127.5,
|
||||||
|
"size_y": 86,
|
||||||
|
"size_z": 28,
|
||||||
|
"rotation": {
|
||||||
|
"x": 0,
|
||||||
|
"y": 0,
|
||||||
|
"z": 0,
|
||||||
|
"type": "Rotation"
|
||||||
|
},
|
||||||
|
"category": "plate",
|
||||||
|
"model": null,
|
||||||
|
"barcode": null,
|
||||||
|
|
||||||
|
"sites": [
|
||||||
|
{
|
||||||
|
"label": "T2",
|
||||||
|
"visible": true,
|
||||||
|
"position": { "x": 0, "y": 0, "z": 0 },
|
||||||
|
"size": { "width": 128.0, "height": 86, "depth": 0 },
|
||||||
|
"content_type": [
|
||||||
|
"plate",
|
||||||
|
"tip_rack",
|
||||||
|
"plates",
|
||||||
|
"tip_racks",
|
||||||
|
"tube_rack"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"data": {}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": "T3",
|
||||||
|
"name": "T3",
|
||||||
|
"children": [],
|
||||||
|
"parent": "PRCXI_Deck",
|
||||||
|
"type": "plate",
|
||||||
|
"class": "",
|
||||||
|
"position": {
|
||||||
|
"x": 280,
|
||||||
|
"y": 301,
|
||||||
|
"z": 0
|
||||||
|
},
|
||||||
|
"config": {
|
||||||
|
"type": "PRCXI9300PlateAdapterSite",
|
||||||
|
"size_x": 127.5,
|
||||||
|
"size_y": 86,
|
||||||
|
"size_z": 28,
|
||||||
|
"rotation": {
|
||||||
|
"x": 0,
|
||||||
|
"y": 0,
|
||||||
|
"z": 0,
|
||||||
|
"type": "Rotation"
|
||||||
|
},
|
||||||
|
"category": "plate",
|
||||||
|
"model": null,
|
||||||
|
"barcode": null,
|
||||||
|
|
||||||
|
"sites": [
|
||||||
|
{
|
||||||
|
"label": "T3",
|
||||||
|
"visible": true,
|
||||||
|
"position": { "x": 0, "y": 0, "z": 0 },
|
||||||
|
"size": { "width": 128.0, "height": 86, "depth": 0 },
|
||||||
|
"content_type": [
|
||||||
|
"plate",
|
||||||
|
"tip_rack",
|
||||||
|
"plates",
|
||||||
|
"tip_racks",
|
||||||
|
"tube_rack"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"data": {}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": "T4",
|
||||||
|
"name": "T4",
|
||||||
|
"children": [],
|
||||||
|
"parent": "PRCXI_Deck",
|
||||||
|
"type": "plate",
|
||||||
|
"class": "",
|
||||||
|
"position": {
|
||||||
|
"x": 417.5,
|
||||||
|
"y": 301,
|
||||||
|
"z": 0
|
||||||
|
},
|
||||||
|
"config": {
|
||||||
|
"type": "PRCXI9300PlateAdapterSite",
|
||||||
|
"size_x": 127.5,
|
||||||
|
"size_y": 86,
|
||||||
|
"size_z": 94,
|
||||||
|
"rotation": {
|
||||||
|
"x": 0,
|
||||||
|
"y": 0,
|
||||||
|
"z": 0,
|
||||||
|
"type": "Rotation"
|
||||||
|
},
|
||||||
|
"category": "plate",
|
||||||
|
"model": null,
|
||||||
|
"barcode": null,
|
||||||
|
|
||||||
|
"sites": [
|
||||||
|
{
|
||||||
|
"label": "T4",
|
||||||
|
"visible": true,
|
||||||
|
"position": { "x": 0, "y": 0, "z": 0 },
|
||||||
|
"size": { "width": 128.0, "height": 86, "depth": 0 },
|
||||||
|
"content_type": [
|
||||||
|
"plate",
|
||||||
|
"tip_rack",
|
||||||
|
"plates",
|
||||||
|
"tip_racks",
|
||||||
|
"tube_rack"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"data": {}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": "T5",
|
||||||
|
"name": "T5",
|
||||||
|
"children": [],
|
||||||
|
"parent": "PRCXI_Deck",
|
||||||
|
"type": "plate",
|
||||||
|
"class": "",
|
||||||
|
"position": {
|
||||||
|
"x": 5,
|
||||||
|
"y": 205,
|
||||||
|
"z": 0
|
||||||
|
},
|
||||||
|
"config": {
|
||||||
|
"type": "PRCXI9300PlateAdapterSite",
|
||||||
|
"size_x": 127.5,
|
||||||
|
"size_y": 86,
|
||||||
|
"size_z": 28,
|
||||||
|
"rotation": {
|
||||||
|
"x": 0,
|
||||||
|
"y": 0,
|
||||||
|
"z": 0,
|
||||||
|
"type": "Rotation"
|
||||||
|
},
|
||||||
|
"category": "plate",
|
||||||
|
"model": null,
|
||||||
|
"barcode": null,
|
||||||
|
|
||||||
|
"sites": [
|
||||||
|
{
|
||||||
|
"label": "T5",
|
||||||
|
"visible": true,
|
||||||
|
"position": { "x": 0, "y": 0, "z": 0 },
|
||||||
|
"size": { "width": 128.0, "height": 86, "depth": 0 },
|
||||||
|
"content_type": [
|
||||||
|
"plate",
|
||||||
|
"tip_rack",
|
||||||
|
"plates",
|
||||||
|
"tip_racks",
|
||||||
|
"tube_rack"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"data": {}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": "T6",
|
||||||
|
"name": "T6",
|
||||||
|
"children": [],
|
||||||
|
"parent": "PRCXI_Deck",
|
||||||
|
"type": "plate",
|
||||||
|
"class": "",
|
||||||
|
"position": {
|
||||||
|
"x": 142.5,
|
||||||
|
"y": 205,
|
||||||
|
"z": 0
|
||||||
|
},
|
||||||
|
"config": {
|
||||||
|
"type": "PRCXI9300PlateAdapterSite",
|
||||||
|
"size_x": 127.5,
|
||||||
|
"size_y": 86,
|
||||||
|
"size_z": 28,
|
||||||
|
"rotation": {
|
||||||
|
"x": 0,
|
||||||
|
"y": 0,
|
||||||
|
"z": 0,
|
||||||
|
"type": "Rotation"
|
||||||
|
},
|
||||||
|
"category": "plate",
|
||||||
|
"model": null,
|
||||||
|
"barcode": null,
|
||||||
|
|
||||||
|
"sites": [
|
||||||
|
{
|
||||||
|
"label": "T6",
|
||||||
|
"visible": true,
|
||||||
|
"position": { "x": 0, "y": 0, "z": 0 },
|
||||||
|
"size": { "width": 128.0, "height": 86, "depth": 0 },
|
||||||
|
"content_type": [
|
||||||
|
"plate",
|
||||||
|
"tip_rack",
|
||||||
|
"plates",
|
||||||
|
"tip_racks",
|
||||||
|
"tube_rack"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"data": {}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": "T7",
|
||||||
|
"name": "T7",
|
||||||
|
"children": [],
|
||||||
|
"parent": "PRCXI_Deck",
|
||||||
|
"type": "plate",
|
||||||
|
"class": "",
|
||||||
|
"position": {
|
||||||
|
"x": 280,
|
||||||
|
"y": 205,
|
||||||
|
"z": 0
|
||||||
|
},
|
||||||
|
"config": {
|
||||||
|
"type": "PRCXI9300PlateAdapterSite",
|
||||||
|
"size_x": 127.5,
|
||||||
|
"size_y": 86,
|
||||||
|
"size_z": 28,
|
||||||
|
"rotation": {
|
||||||
|
"x": 0,
|
||||||
|
"y": 0,
|
||||||
|
"z": 0,
|
||||||
|
"type": "Rotation"
|
||||||
|
},
|
||||||
|
"category": "plate",
|
||||||
|
"model": null,
|
||||||
|
"barcode": null,
|
||||||
|
|
||||||
|
"sites": [
|
||||||
|
{
|
||||||
|
"label": "T7",
|
||||||
|
"visible": true,
|
||||||
|
"position": { "x": 0, "y": 0, "z": 0 },
|
||||||
|
"size": { "width": 128.0, "height": 86, "depth": 0 },
|
||||||
|
"content_type": [
|
||||||
|
"plate",
|
||||||
|
"tip_rack",
|
||||||
|
"plates",
|
||||||
|
"tip_racks",
|
||||||
|
"tube_rack"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"data": {}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": "T8",
|
||||||
|
"name": "T8",
|
||||||
|
"children": [],
|
||||||
|
"parent": "PRCXI_Deck",
|
||||||
|
"type": "plate",
|
||||||
|
"class": "",
|
||||||
|
"position": {
|
||||||
|
"x": 417.5,
|
||||||
|
"y": 205,
|
||||||
|
"z": 0
|
||||||
|
},
|
||||||
|
"config": {
|
||||||
|
"type": "PRCXI9300PlateAdapterSite",
|
||||||
|
"size_x": 127.5,
|
||||||
|
"size_y": 86,
|
||||||
|
"size_z": 28,
|
||||||
|
"rotation": {
|
||||||
|
"x": 0,
|
||||||
|
"y": 0,
|
||||||
|
"z": 0,
|
||||||
|
"type": "Rotation"
|
||||||
|
},
|
||||||
|
"category": "plate",
|
||||||
|
"model": null,
|
||||||
|
"barcode": null,
|
||||||
|
|
||||||
|
"sites": [
|
||||||
|
{
|
||||||
|
"label": "T8",
|
||||||
|
"visible": true,
|
||||||
|
"position": { "x": 0, "y": 0, "z": 0 },
|
||||||
|
"size": { "width": 128.0, "height": 86, "depth": 0 },
|
||||||
|
"content_type": [
|
||||||
|
"plate",
|
||||||
|
"tip_rack",
|
||||||
|
"plates",
|
||||||
|
"tip_racks",
|
||||||
|
"tube_rack"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"data": {}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": "T9",
|
||||||
|
"name": "T9",
|
||||||
|
"children": [],
|
||||||
|
"parent": "PRCXI_Deck",
|
||||||
|
"type": "plate",
|
||||||
|
"class": "",
|
||||||
|
"position": {
|
||||||
|
"x": 5,
|
||||||
|
"y": 109,
|
||||||
|
"z": 0
|
||||||
|
},
|
||||||
|
"config": {
|
||||||
|
"type": "PRCXI9300PlateAdapterSite",
|
||||||
|
"size_x": 127.5,
|
||||||
|
"size_y": 86,
|
||||||
|
"size_z": 28,
|
||||||
|
"rotation": {
|
||||||
|
"x": 0,
|
||||||
|
"y": 0,
|
||||||
|
"z": 0,
|
||||||
|
"type": "Rotation"
|
||||||
|
},
|
||||||
|
"category": "plate",
|
||||||
|
"model": null,
|
||||||
|
"barcode": null,
|
||||||
|
|
||||||
|
"sites": [
|
||||||
|
{
|
||||||
|
"label": "T9",
|
||||||
|
"visible": true,
|
||||||
|
"position": { "x": 0, "y": 0, "z": 0 },
|
||||||
|
"size": { "width": 128.0, "height": 86, "depth": 0 },
|
||||||
|
"content_type": [
|
||||||
|
"plate",
|
||||||
|
"tip_rack",
|
||||||
|
"plates",
|
||||||
|
"tip_racks",
|
||||||
|
"tube_rack"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"data": {}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": "T10",
|
||||||
|
"name": "T10",
|
||||||
|
"children": [],
|
||||||
|
"parent": "PRCXI_Deck",
|
||||||
|
"type": "plate",
|
||||||
|
"class": "",
|
||||||
|
"position": {
|
||||||
|
"x": 142.5,
|
||||||
|
"y": 109,
|
||||||
|
"z": 0
|
||||||
|
},
|
||||||
|
"config": {
|
||||||
|
"type": "PRCXI9300PlateAdapterSite",
|
||||||
|
"size_x": 127.5,
|
||||||
|
"size_y": 86,
|
||||||
|
"size_z": 28,
|
||||||
|
"rotation": {
|
||||||
|
"x": 0,
|
||||||
|
"y": 0,
|
||||||
|
"z": 0,
|
||||||
|
"type": "Rotation"
|
||||||
|
},
|
||||||
|
"category": "plate",
|
||||||
|
"model": null,
|
||||||
|
"barcode": null,
|
||||||
|
|
||||||
|
"sites": [
|
||||||
|
{
|
||||||
|
"label": "T10",
|
||||||
|
"visible": true,
|
||||||
|
"position": { "x": 0, "y": 0, "z": 0 },
|
||||||
|
"size": { "width": 128.0, "height": 86, "depth": 0 },
|
||||||
|
"content_type": [
|
||||||
|
"plate",
|
||||||
|
"tip_rack",
|
||||||
|
"plates",
|
||||||
|
"tip_racks",
|
||||||
|
"tube_rack"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"data": {}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": "T11",
|
||||||
|
"name": "T11",
|
||||||
|
"children": [],
|
||||||
|
"parent": "PRCXI_Deck",
|
||||||
|
"type": "plate",
|
||||||
|
"class": "",
|
||||||
|
"position": {
|
||||||
|
"x": 280,
|
||||||
|
"y": 109,
|
||||||
|
"z": 0
|
||||||
|
},
|
||||||
|
"config": {
|
||||||
|
"type": "PRCXI9300PlateAdapterSite",
|
||||||
|
"size_x": 127.5,
|
||||||
|
"size_y": 86,
|
||||||
|
"size_z": 28,
|
||||||
|
"rotation": {
|
||||||
|
"x": 0,
|
||||||
|
"y": 0,
|
||||||
|
"z": 0,
|
||||||
|
"type": "Rotation"
|
||||||
|
},
|
||||||
|
"category": "plate",
|
||||||
|
"model": null,
|
||||||
|
"barcode": null,
|
||||||
|
|
||||||
|
"sites": [
|
||||||
|
{
|
||||||
|
"label": "T11",
|
||||||
|
"visible": true,
|
||||||
|
"position": { "x": 0, "y": 0, "z": 0 },
|
||||||
|
"size": { "width": 128.0, "height": 86, "depth": 0 },
|
||||||
|
"content_type": [
|
||||||
|
"plate",
|
||||||
|
"tip_rack",
|
||||||
|
"plates",
|
||||||
|
"tip_racks",
|
||||||
|
"tube_rack"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"data": {}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": "T12",
|
||||||
|
"name": "T12",
|
||||||
|
"children": [],
|
||||||
|
"parent": "PRCXI_Deck",
|
||||||
|
"type": "plate",
|
||||||
|
"class": "",
|
||||||
|
"position": {
|
||||||
|
"x": 417.5,
|
||||||
|
"y": 109,
|
||||||
|
"z": 0
|
||||||
|
},
|
||||||
|
"config": {
|
||||||
|
"type": "PRCXI9300PlateAdapterSite",
|
||||||
|
"size_x": 127.5,
|
||||||
|
"size_y": 86,
|
||||||
|
"size_z": 28,
|
||||||
|
"rotation": {
|
||||||
|
"x": 0,
|
||||||
|
"y": 0,
|
||||||
|
"z": 0,
|
||||||
|
"type": "Rotation"
|
||||||
|
},
|
||||||
|
"category": "plate",
|
||||||
|
"model": null,
|
||||||
|
"barcode": null,
|
||||||
|
|
||||||
|
"sites": [
|
||||||
|
{
|
||||||
|
"label": "T12",
|
||||||
|
"visible": true,
|
||||||
|
"position": { "x": 0, "y": 0, "z": 0 },
|
||||||
|
"size": { "width": 128.0, "height": 86, "depth": 0 },
|
||||||
|
"content_type": [
|
||||||
|
"plate",
|
||||||
|
"tip_rack",
|
||||||
|
"plates",
|
||||||
|
"tip_racks",
|
||||||
|
"tube_rack"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"data": {}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": "T13",
|
||||||
|
"name": "T13",
|
||||||
|
"children": [],
|
||||||
|
"parent": "PRCXI_Deck",
|
||||||
|
"type": "plate",
|
||||||
|
"class": "",
|
||||||
|
"position": {
|
||||||
|
"x": 5,
|
||||||
|
"y": 13,
|
||||||
|
"z": 0
|
||||||
|
},
|
||||||
|
"config": {
|
||||||
|
"type": "PRCXI9300PlateAdapterSite",
|
||||||
|
"size_x": 127.5,
|
||||||
|
"size_y": 86,
|
||||||
|
"size_z": 28,
|
||||||
|
"rotation": {
|
||||||
|
"x": 0,
|
||||||
|
"y": 0,
|
||||||
|
"z": 0,
|
||||||
|
"type": "Rotation"
|
||||||
|
},
|
||||||
|
"category": "plate",
|
||||||
|
"model": null,
|
||||||
|
"barcode": null,
|
||||||
|
|
||||||
|
"sites": [
|
||||||
|
{
|
||||||
|
"label": "T13",
|
||||||
|
"visible": true,
|
||||||
|
"position": { "x": 0, "y": 0, "z": 0 },
|
||||||
|
"size": { "width": 128.0, "height": 86, "depth": 0 },
|
||||||
|
"content_type": [
|
||||||
|
"plate",
|
||||||
|
"tip_rack",
|
||||||
|
"plates",
|
||||||
|
"tip_racks",
|
||||||
|
"tube_rack"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"data": {}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": "T14",
|
||||||
|
"name": "T14",
|
||||||
|
"children": [],
|
||||||
|
"parent": "PRCXI_Deck",
|
||||||
|
"type": "plate",
|
||||||
|
"class": "",
|
||||||
|
"position": {
|
||||||
|
"x": 142.5,
|
||||||
|
"y": 13,
|
||||||
|
"z": 0
|
||||||
|
},
|
||||||
|
"config": {
|
||||||
|
"type": "PRCXI9300PlateAdapterSite",
|
||||||
|
"size_x": 127.5,
|
||||||
|
"size_y": 86,
|
||||||
|
"size_z": 28,
|
||||||
|
"rotation": {
|
||||||
|
"x": 0,
|
||||||
|
"y": 0,
|
||||||
|
"z": 0,
|
||||||
|
"type": "Rotation"
|
||||||
|
},
|
||||||
|
"category": "plate",
|
||||||
|
"model": null,
|
||||||
|
"barcode": null,
|
||||||
|
|
||||||
|
"sites": [
|
||||||
|
{
|
||||||
|
"label": "T14",
|
||||||
|
"visible": true,
|
||||||
|
"position": { "x": 0, "y": 0, "z": 0 },
|
||||||
|
"size": { "width": 128.0, "height": 86, "depth": 0 },
|
||||||
|
"content_type": [
|
||||||
|
"plate",
|
||||||
|
"tip_rack",
|
||||||
|
"plates",
|
||||||
|
"tip_racks",
|
||||||
|
"tube_rack"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"data": {}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": "T15",
|
||||||
|
"name": "T15",
|
||||||
|
"children": [],
|
||||||
|
"parent": "PRCXI_Deck",
|
||||||
|
"type": "plate",
|
||||||
|
"class": "",
|
||||||
|
"position": {
|
||||||
|
"x": 280,
|
||||||
|
"y": 13,
|
||||||
|
"z": 0
|
||||||
|
},
|
||||||
|
"config": {
|
||||||
|
"type": "PRCXI9300PlateAdapterSite",
|
||||||
|
"size_x": 127.5,
|
||||||
|
"size_y": 86,
|
||||||
|
"size_z": 28,
|
||||||
|
"rotation": {
|
||||||
|
"x": 0,
|
||||||
|
"y": 0,
|
||||||
|
"z": 0,
|
||||||
|
"type": "Rotation"
|
||||||
|
},
|
||||||
|
"category": "plate",
|
||||||
|
"model": null,
|
||||||
|
"barcode": null,
|
||||||
|
|
||||||
|
"sites": [
|
||||||
|
{
|
||||||
|
"label": "T15",
|
||||||
|
"visible": true,
|
||||||
|
"position": { "x": 0, "y": 0, "z": 0 },
|
||||||
|
"size": { "width": 128.0, "height": 86, "depth": 0 },
|
||||||
|
"content_type": [
|
||||||
|
"plate",
|
||||||
|
"tip_rack",
|
||||||
|
"plates",
|
||||||
|
"tip_racks",
|
||||||
|
"tube_rack"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"data": {}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": "T16",
|
||||||
|
"name": "T16",
|
||||||
|
"children": [],
|
||||||
|
"parent": "PRCXI_Deck",
|
||||||
|
"type": "plate",
|
||||||
|
"class": "",
|
||||||
|
"position": {
|
||||||
|
"x": 417.5,
|
||||||
|
"y": 13,
|
||||||
|
"z": 0
|
||||||
|
},
|
||||||
|
"config": {
|
||||||
|
"type": "PRCXI9300PlateAdapterSite",
|
||||||
|
"size_x": 127.5,
|
||||||
|
"size_y": 86,
|
||||||
|
"size_z": 28,
|
||||||
|
"rotation": {
|
||||||
|
"x": 0,
|
||||||
|
"y": 0,
|
||||||
|
"z": 0,
|
||||||
|
"type": "Rotation"
|
||||||
|
},
|
||||||
|
"category": "plate",
|
||||||
|
"model": null,
|
||||||
|
"barcode": null,
|
||||||
|
|
||||||
|
"sites": [
|
||||||
|
{
|
||||||
|
"label": "T16",
|
||||||
|
"visible": true,
|
||||||
|
"position": { "x": 0, "y": 0, "z": 0 },
|
||||||
|
"size": { "width": 128.0, "height": 86, "depth": 0 },
|
||||||
|
"content_type": [
|
||||||
|
"plate",
|
||||||
|
"tip_rack",
|
||||||
|
"plates",
|
||||||
|
"tip_racks",
|
||||||
|
"tube_rack"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"data": {}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": "trash",
|
||||||
|
"name": "trash",
|
||||||
|
|
||||||
|
"children": [],
|
||||||
|
"parent": "T16",
|
||||||
|
"type": "trash",
|
||||||
|
"class": "",
|
||||||
|
"position": {
|
||||||
|
"x": 0,
|
||||||
|
"y": 0,
|
||||||
|
"z": 0
|
||||||
|
},
|
||||||
|
"config": {
|
||||||
|
"type": "PRCXI9300Trash",
|
||||||
|
"size_x": 127.5,
|
||||||
|
"size_y": 86,
|
||||||
|
"size_z": 10,
|
||||||
|
"rotation": {
|
||||||
|
"x": 0,
|
||||||
|
"y": 0,
|
||||||
|
"z": 0,
|
||||||
|
"type": "Rotation"
|
||||||
|
},
|
||||||
|
"category": "trash",
|
||||||
|
"model": null,
|
||||||
|
"barcode": null,
|
||||||
|
"max_volume": "Infinity",
|
||||||
|
"material_z_thickness": 0,
|
||||||
|
"compute_volume_from_height": null,
|
||||||
|
"compute_height_from_volume": null
|
||||||
|
},
|
||||||
|
"data": {
|
||||||
|
"liquids": [],
|
||||||
|
"pending_liquids": [],
|
||||||
|
"liquid_history": [],
|
||||||
|
"Material": {
|
||||||
|
"uuid": "730067cf07ae43849ddf4034299030e9"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"edges": []
|
||||||
|
}
|
unilabos/test/experiments/prcxi_9320_slim.json (new file, 795 lines)
@@ -0,0 +1,795 @@
|
{
|
||||||
|
"nodes": [
|
||||||
|
{
|
||||||
|
"id": "PRCXI",
|
||||||
|
"name": "PRCXI",
|
||||||
|
"type": "device",
|
||||||
|
"class": "liquid_handler.prcxi",
|
||||||
|
"parent": "",
|
||||||
|
"pose": {
|
||||||
|
"size": {
|
||||||
|
"width": 562,
|
||||||
|
"height": 394,
|
||||||
|
"depth": 0
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"config": {
|
||||||
|
"axis": "Left",
|
||||||
|
"deck": {
|
||||||
|
"_resource_type": "unilabos.devices.liquid_handling.prcxi.prcxi:PRCXI9300Deck",
|
||||||
|
"_resource_child_name": "PRCXI_Deck"
|
||||||
|
},
|
||||||
|
"host": "10.20.30.184",
|
||||||
|
"port": 9999,
|
||||||
|
"debug": true,
|
||||||
|
"setup": true,
|
||||||
|
"is_9320": true,
|
||||||
|
"timeout": 10,
|
||||||
|
"matrix_id": "5de524d0-3f95-406c-86dd-f83626ebc7cb",
|
||||||
|
"simulator": true,
|
||||||
|
"channel_num": 2
|
||||||
|
},
|
||||||
|
"data": {
|
||||||
|
"reset_ok": true
|
||||||
|
},
|
||||||
|
"schema": {},
|
||||||
|
"description": "",
|
||||||
|
"model": null,
|
||||||
|
"position": {
|
||||||
|
"x": 0,
|
||||||
|
"y": 240,
|
||||||
|
"z": 0
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": "PRCXI_Deck",
|
||||||
|
"name": "PRCXI_Deck",
|
||||||
|
|
||||||
|
"children": [],
|
||||||
|
"parent": "PRCXI",
|
||||||
|
"type": "deck",
|
||||||
|
"class": "",
|
||||||
|
"position": {
|
||||||
|
"x": 10,
|
||||||
|
"y": 10,
|
||||||
|
"z": 0
|
||||||
|
},
|
||||||
|
"config": {
|
||||||
|
"type": "PRCXI9300Deck",
|
||||||
|
"size_x": 542,
|
||||||
|
"size_y": 374,
|
||||||
|
"size_z": 0,
|
||||||
|
"rotation": {
|
||||||
|
"x": 0,
|
||||||
|
"y": 0,
|
||||||
|
"z": 0,
|
||||||
|
"type": "Rotation"
|
||||||
|
},
|
||||||
|
"category": "deck",
|
||||||
|
"barcode": null
|
||||||
|
},
|
||||||
|
"data": {}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": "T1",
|
||||||
|
"name": "T1",
|
||||||
|
"children": [],
|
||||||
|
"parent": "PRCXI_Deck",
|
||||||
|
"type": "plate",
|
||||||
|
"class": "",
|
||||||
|
"position": {
|
||||||
|
"x": 0,
|
||||||
|
"y": 288,
|
||||||
|
"z": 0
|
||||||
|
},
|
||||||
|
"config": {
|
||||||
|
"type": "PRCXI9300Container",
|
||||||
|
"size_x": 127,
|
||||||
|
"size_y": 85.5,
|
||||||
|
"size_z": 10,
|
||||||
|
"rotation": {
|
||||||
|
"x": 0,
|
||||||
|
"y": 0,
|
||||||
|
"z": 0,
|
||||||
|
"type": "Rotation"
|
||||||
|
},
|
||||||
|
"category": "plate",
|
||||||
|
"model": null,
|
||||||
|
"barcode": null,
|
||||||
|
"ordering": {},
|
||||||
|
"sites": [
|
||||||
|
{
|
||||||
|
"label": "T1",
|
||||||
|
"visible": true,
|
||||||
|
"position": { "x": 0, "y": 0, "z": 0 },
|
||||||
|
"size": { "width": 128.0, "height": 86, "depth": 0 },
|
||||||
|
"content_type": [
|
||||||
|
"plate",
|
||||||
|
"tip_rack",
|
||||||
|
"plates",
|
||||||
|
"tip_racks",
|
||||||
|
"tube_rack"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"data": {}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": "T2",
|
||||||
|
"name": "T2",
|
||||||
|
"children": [],
|
||||||
|
"parent": "PRCXI_Deck",
|
||||||
|
"type": "plate",
|
||||||
|
"class": "",
|
||||||
|
"position": {
|
||||||
|
"x": 138,
|
||||||
|
"y": 288,
|
||||||
|
"z": 0
|
||||||
|
},
|
||||||
|
"config": {
|
||||||
|
"type": "PRCXI9300Container",
|
||||||
|
"size_x": 127,
|
||||||
|
"size_y": 85.5,
|
||||||
|
"size_z": 10,
|
||||||
|
"rotation": {
|
||||||
|
"x": 0,
|
||||||
|
"y": 0,
|
||||||
|
"z": 0,
|
        "type": "Rotation"
      },
      "category": "plate", "model": null, "barcode": null, "ordering": {},
      "sites": [{
        "label": "T2", "visible": true,
        "position": { "x": 0, "y": 0, "z": 0 },
        "size": { "width": 128.0, "height": 86, "depth": 0 },
        "content_type": ["plate", "tip_rack", "plates", "tip_racks", "tube_rack"]
      }]
    },
    "data": {}
  },
  {
    "id": "T3", "name": "T3", "children": [], "parent": "PRCXI_Deck", "type": "plate", "class": "",
    "position": { "x": 276, "y": 288, "z": 0 },
    "config": {
      "type": "PRCXI9300Container", "size_x": 127, "size_y": 85.5, "size_z": 10,
      "rotation": { "x": 0, "y": 0, "z": 0, "type": "Rotation" },
      "category": "plate", "model": null, "barcode": null, "ordering": {},
      "sites": [{
        "label": "T3", "visible": true,
        "position": { "x": 0, "y": 0, "z": 0 },
        "size": { "width": 128.0, "height": 86, "depth": 0 },
        "content_type": ["plate", "tip_rack", "plates", "tip_racks", "tube_rack"]
      }]
    },
    "data": {}
  },
  {
    "id": "T4", "name": "T4", "children": [], "parent": "PRCXI_Deck", "type": "plate", "class": "",
    "position": { "x": 414, "y": 288, "z": 0 },
    "config": {
      "type": "PRCXI9300Container", "size_x": 127, "size_y": 85.5, "size_z": 10,
      "rotation": { "x": 0, "y": 0, "z": 0, "type": "Rotation" },
      "category": "plate", "model": null, "barcode": null, "ordering": {},
      "sites": [{
        "label": "T4", "visible": true,
        "position": { "x": 0, "y": 0, "z": 0 },
        "size": { "width": 128.0, "height": 86, "depth": 0 },
        "content_type": ["plate", "tip_rack", "plates", "tip_racks", "tube_rack"]
      }]
    },
    "data": {}
  },
  {
    "id": "T5", "name": "T5", "children": [], "parent": "PRCXI_Deck", "type": "plate", "class": "",
    "position": { "x": 0, "y": 192, "z": 0 },
    "config": {
      "type": "PRCXI9300Container", "size_x": 127, "size_y": 85.5, "size_z": 10,
      "rotation": { "x": 0, "y": 0, "z": 0, "type": "Rotation" },
      "category": "plate", "model": null, "barcode": null, "ordering": {},
      "sites": [{
        "label": "T5", "visible": true,
        "position": { "x": 0, "y": 0, "z": 0 },
        "size": { "width": 128.0, "height": 86, "depth": 0 },
        "content_type": ["plate", "tip_rack", "plates", "tip_racks", "tube_rack"]
      }]
    },
    "data": {}
  },
  {
    "id": "T6", "name": "T6", "children": [], "parent": "PRCXI_Deck", "type": "plate", "class": "",
    "position": { "x": 138, "y": 192, "z": 0 },
    "config": {
      "type": "PRCXI9300Container", "size_x": 127, "size_y": 85.5, "size_z": 10,
      "rotation": { "x": 0, "y": 0, "z": 0, "type": "Rotation" },
      "category": "plate", "model": null, "barcode": null, "ordering": {},
      "sites": [{
        "label": "T6", "visible": true,
        "position": { "x": 0, "y": 0, "z": 0 },
        "size": { "width": 128.0, "height": 86, "depth": 0 },
        "content_type": ["plate", "tip_rack", "plates", "tip_racks", "tube_rack"]
      }]
    },
    "data": {}
  },
  {
    "id": "T7", "name": "T7", "children": [], "parent": "PRCXI_Deck", "type": "plate", "class": "",
    "position": { "x": 276, "y": 192, "z": 0 },
    "config": {
      "type": "PRCXI9300Container", "size_x": 127, "size_y": 85.5, "size_z": 10,
      "rotation": { "x": 0, "y": 0, "z": 0, "type": "Rotation" },
      "category": "plate", "model": null, "barcode": null, "ordering": {},
      "sites": [{
        "label": "T7", "visible": true,
        "position": { "x": 0, "y": 0, "z": 0 },
        "size": { "width": 128.0, "height": 86, "depth": 0 },
        "content_type": ["plate", "tip_rack", "plates", "tip_racks", "tube_rack"]
      }]
    },
    "data": {}
  },
  {
    "id": "T8", "name": "T8", "children": [], "parent": "PRCXI_Deck", "type": "plate", "class": "",
    "position": { "x": 414, "y": 192, "z": 0 },
    "config": {
      "type": "PRCXI9300Container", "size_x": 127, "size_y": 85.5, "size_z": 10,
      "rotation": { "x": 0, "y": 0, "z": 0, "type": "Rotation" },
      "category": "plate", "model": null, "barcode": null, "ordering": {},
      "sites": [{
        "label": "T8", "visible": true,
        "position": { "x": 0, "y": 0, "z": 0 },
        "size": { "width": 128.0, "height": 86, "depth": 0 },
        "content_type": ["plate", "tip_rack", "plates", "tip_racks", "tube_rack"]
      }]
    },
    "data": {}
  },
  {
    "id": "T9", "name": "T9", "children": [], "parent": "PRCXI_Deck", "type": "plate", "class": "",
    "position": { "x": 0, "y": 96, "z": 0 },
    "config": {
      "type": "PRCXI9300Container", "size_x": 127, "size_y": 85.5, "size_z": 10,
      "rotation": { "x": 0, "y": 0, "z": 0, "type": "Rotation" },
      "category": "plate", "model": null, "barcode": null, "ordering": {},
      "sites": [{
        "label": "T9", "visible": true,
        "position": { "x": 0, "y": 0, "z": 0 },
        "size": { "width": 128.0, "height": 86, "depth": 0 },
        "content_type": ["plate", "tip_rack", "plates", "tip_racks", "tube_rack"]
      }]
    },
    "data": {}
  },
  {
    "id": "T10", "name": "T10", "children": [], "parent": "PRCXI_Deck", "type": "plate", "class": "",
    "position": { "x": 138, "y": 96, "z": 0 },
    "config": {
      "type": "PRCXI9300Container", "size_x": 127, "size_y": 85.5, "size_z": 10,
      "rotation": { "x": 0, "y": 0, "z": 0, "type": "Rotation" },
      "category": "plate", "model": null, "barcode": null, "ordering": {},
      "sites": [{
        "label": "T10", "visible": true,
        "position": { "x": 0, "y": 0, "z": 0 },
        "size": { "width": 128.0, "height": 86, "depth": 0 },
        "content_type": ["plate", "tip_rack", "plates", "tip_racks", "tube_rack"]
      }]
    },
    "data": {}
  },
  {
    "id": "T11", "name": "T11", "children": [], "parent": "PRCXI_Deck", "type": "plate", "class": "",
    "position": { "x": 276, "y": 96, "z": 0 },
    "config": {
      "type": "PRCXI9300Container", "size_x": 127, "size_y": 85.5, "size_z": 10,
      "rotation": { "x": 0, "y": 0, "z": 0, "type": "Rotation" },
      "category": "plate", "model": null, "barcode": null, "ordering": {},
      "sites": [{
        "label": "T11", "visible": true,
        "position": { "x": 0, "y": 0, "z": 0 },
        "size": { "width": 128.0, "height": 86, "depth": 0 },
        "content_type": ["plate", "tip_rack", "plates", "tip_racks", "tube_rack"]
      }]
    },
    "data": {}
  },
  {
    "id": "T12", "name": "T12", "children": [], "parent": "PRCXI_Deck", "type": "plate", "class": "",
    "position": { "x": 414, "y": 96, "z": 0 },
    "config": {
      "type": "PRCXI9300Container", "size_x": 127, "size_y": 85.5, "size_z": 10,
      "rotation": { "x": 0, "y": 0, "z": 0, "type": "Rotation" },
      "category": "plate", "model": null, "barcode": null, "ordering": {},
      "sites": [{
        "label": "T12", "visible": true,
        "position": { "x": 0, "y": 0, "z": 0 },
        "size": { "width": 128.0, "height": 86, "depth": 0 },
        "content_type": ["plate", "tip_rack", "plates", "tip_racks", "tube_rack"]
      }]
    },
    "data": {}
  },
  {
    "id": "T13", "name": "T13", "children": [], "parent": "PRCXI_Deck", "type": "plate", "class": "",
    "position": { "x": 0, "y": 0, "z": 0 },
    "config": {
      "type": "PRCXI9300Container", "size_x": 127, "size_y": 85.5, "size_z": 10,
      "rotation": { "x": 0, "y": 0, "z": 0, "type": "Rotation" },
      "category": "plate", "model": null, "barcode": null, "ordering": {},
      "sites": [{
        "label": "T13", "visible": true,
        "position": { "x": 0, "y": 0, "z": 0 },
        "size": { "width": 128.0, "height": 86, "depth": 0 },
        "content_type": ["plate", "tip_rack", "plates", "tip_racks", "tube_rack"]
      }]
    },
    "data": {}
  },
  {
    "id": "T14", "name": "T14", "children": [], "parent": "PRCXI_Deck", "type": "plate", "class": "",
    "position": { "x": 138, "y": 0, "z": 0 },
    "config": {
      "type": "PRCXI9300Container", "size_x": 127, "size_y": 85.5, "size_z": 10,
      "rotation": { "x": 0, "y": 0, "z": 0, "type": "Rotation" },
      "category": "plate", "model": null, "barcode": null, "ordering": {},
      "sites": [{
        "label": "T14", "visible": true,
        "position": { "x": 0, "y": 0, "z": 0 },
        "size": { "width": 128.0, "height": 86, "depth": 0 },
        "content_type": ["plate", "tip_rack", "plates", "tip_racks", "tube_rack"]
      }]
    },
    "data": {}
  },
  {
    "id": "T15", "name": "T15", "children": [], "parent": "PRCXI_Deck", "type": "plate", "class": "",
    "position": { "x": 276, "y": 0, "z": 0 },
    "config": {
      "type": "PRCXI9300Container", "size_x": 127, "size_y": 85.5, "size_z": 10,
      "rotation": { "x": 0, "y": 0, "z": 0, "type": "Rotation" },
      "category": "plate", "model": null, "barcode": null, "ordering": {},
      "sites": [{
        "label": "T15", "visible": true,
        "position": { "x": 0, "y": 0, "z": 0 },
        "size": { "width": 128.0, "height": 86, "depth": 0 },
        "content_type": ["plate", "tip_rack", "plates", "tip_racks", "tube_rack"]
      }]
    },
    "data": {}
  },
  {
    "id": "T16", "name": "T16", "children": [], "parent": "PRCXI_Deck", "type": "plate", "class": "",
    "position": { "x": 414, "y": 0, "z": 0 },
    "config": {
      "type": "PRCXI9300Container", "size_x": 127, "size_y": 85.5, "size_z": 10,
      "rotation": { "x": 0, "y": 0, "z": 0, "type": "Rotation" },
      "category": "plate", "model": null, "barcode": null, "ordering": {},
      "sites": [{
        "label": "T16", "visible": true,
        "position": { "x": 0, "y": 0, "z": 0 },
        "size": { "width": 128.0, "height": 86, "depth": 0 },
        "content_type": ["plate", "tip_rack", "plates", "tip_racks", "tube_rack"]
      }]
    },
    "data": {}
  }
  ],
  "edges": []
}
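The deck entries above follow a regular 4 x 4 slot grid (x pitch 138 mm, y pitch 96 mm). A small sketch that reproduces the slot coordinates seen in the JSON; the function name and structure are illustrative only, not part of the repository:

def prcxi_slot_positions(columns: int = 4, rows: int = 4, x_pitch: float = 138.0, y_pitch: float = 96.0) -> dict:
    """Map slot labels T1..T16 to deck coordinates matching the layout in the JSON above."""
    positions = {}
    for index in range(columns * rows):
        row, col = divmod(index, columns)
        positions[f"T{index + 1}"] = {"x": col * x_pitch, "y": (rows - 1 - row) * y_pitch, "z": 0}
    return positions

print(prcxi_slot_positions()["T3"])   # {'x': 276.0, 'y': 288.0, 'z': 0}
print(prcxi_slot_positions()["T16"])  # {'x': 414.0, 'y': 0.0, 'z': 0}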
File diff suppressed because it is too large
@@ -24,6 +24,7 @@ class EnvironmentChecker:
         "msgcenterpy": "msgcenterpy",
         "opentrons_shared_data": "opentrons_shared_data",
         "typing_extensions": "typing_extensions",
+        "crcmod": "crcmod-plus",
     }

     # Special install packages (packages that need special handling)
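The mapping above pairs an import name with the pip package that provides it (crcmod is now installed from crcmod-plus). A minimal sketch, assuming nothing about the project's actual EnvironmentChecker API, of how such a mapping is typically used to report missing packages:

import importlib.util

IMPORT_TO_PIP = {  # hypothetical local copy of the mapping patched above
    "msgcenterpy": "msgcenterpy",
    "crcmod": "crcmod-plus",  # import name differs from the PyPI package name
}

def missing_pip_packages(mapping: dict) -> list:
    """Return the pip package names whose import cannot be resolved."""
    return [pip_name for import_name, pip_name in mapping.items()
            if importlib.util.find_spec(import_name) is None]

print(missing_pip_packages(IMPORT_TO_PIP))  # e.g. ['crcmod-plus'] if crcmod is absent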
@@ -27,6 +27,7 @@ __all__ = [

 from ast import Constant

+from unilabos.resources.resource_tracker import PARAM_SAMPLE_UUIDS
 from unilabos.utils import logger


@@ -334,13 +335,18 @@ class ImportManager:
             result["action_methods"][method_name] = method_info
         return result

-    def _analyze_method_signature(self, method) -> Dict[str, Any]:
+    def _analyze_method_signature(self, method, skip_unilabos_params: bool = True) -> Dict[str, Any]:
         """
         Analyze a method signature and extract its concrete named parameters.

         Note: this method skips *args and **kwargs and extracts only concrete named parameters,
         which keeps argument passing via **dict accurate.

+        Args:
+            method: the method to analyze
+            skip_unilabos_params: whether to skip unilabos system parameters (e.g. sample_uuids);
+                True during registry completion, False during JsonCommand execution
+
         Example usage:
             method_info = self._analyze_method_signature(some_method)
             params = {"param1": "value1", "param2": "value2"}
@@ -361,6 +367,10 @@ class ImportManager:
             if param.kind == param.VAR_KEYWORD:  # **kwargs
                 continue

+            # Skip the sample_uuids parameter (injected automatically by the system; skipped during registry completion)
+            if skip_unilabos_params and param_name == PARAM_SAMPLE_UUIDS:
+                continue
+
             is_required = param.default == inspect.Parameter.empty
             if is_required:
                 num_required += 1
@@ -549,6 +559,9 @@ class ImportManager:
         for i, arg in enumerate(node.args.args):
             if arg.arg == "self":
                 continue
+            # Skip the sample_uuids parameter (injected automatically by the system)
+            if arg.arg == PARAM_SAMPLE_UUIDS:
+                continue
             arg_info = {
                 "name": arg.arg,
                 "type": None,
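A simplified, standalone sketch of the signature analysis the hunks above modify; the real ImportManager method returns richer metadata, and PARAM_SAMPLE_UUIDS is assumed here to be the literal parameter name "sample_uuids":

import inspect

PARAM_SAMPLE_UUIDS = "sample_uuids"

def named_parameters(method, skip_unilabos_params: bool = True) -> list:
    """Collect concrete named parameters, skipping self, *args/**kwargs and sample_uuids."""
    names = []
    for name, param in inspect.signature(method).parameters.items():
        if name == "self":
            continue
        if param.kind in (param.VAR_POSITIONAL, param.VAR_KEYWORD):  # *args / **kwargs
            continue
        if skip_unilabos_params and name == PARAM_SAMPLE_UUIDS:
            continue
        names.append(name)
    return names

def transfer_liquid(self, sources, targets, asp_vols, sample_uuids=None, **kwargs):
    ...

print(named_parameters(transfer_liquid))  # ['sources', 'targets', 'asp_vols']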
18  unilabos/utils/requirements.txt  Normal file
@@ -0,0 +1,18 @@
+networkx
+typing_extensions
+websockets
+msgcenterpy>=0.1.5
+opentrons_shared_data
+pint
+fastapi
+jinja2
+requests
+uvicorn
+pyautogui
+opcua
+pyserial
+pandas
+crcmod-plus
+pymodbus
+matplotlib
+pylibftdi
@@ -1,3 +1,104 @@
+"""
+Workflow conversion module - the JSON -> WorkflowGraph conversion flow
+
+==================== Input format (JSON) ====================
+
+{
+    "workflow": [
+        {"action": "transfer_liquid", "action_args": {"sources": "cell_lines", "targets": "Liquid_1", "asp_vol": 100.0, "dis_vol": 74.75, ...}},
+        ...
+    ],
+    "reagent": {
+        "cell_lines": {"slot": 4, "well": ["A1", "A3", "A5"], "labware": "DRUG + YOYO-MEDIA"},
+        "Liquid_1": {"slot": 1, "well": ["A4", "A7", "A10"], "labware": "rep 1"},
+        ...
+    }
+}
+
+==================== Conversion steps ====================
+
+Step 1: create create_resource nodes deduplicated by slot (create the plates)
+--------------------------------------------------------------------------------
+- First create a Group node (type="Group", minimized=true) that contains all create_resource nodes
+- Iterate over all reagents, deduplicate by slot, and create one plate per unique slot
+- Every create_resource node has parent_uuid pointing at the Group node, minimized=true
+- Generated parameters:
+    res_id: plate_slot_{slot}
+    device_id: /PRCXI
+    class_name: PRCXI_BioER_96_wellplate
+    parent: /PRCXI/PRCXI_Deck/T{slot}
+    slot_on_deck: "{slot}"
+- Output port: labware (used to connect to set_liquid_from_plate)
+- Control flow: create_resource nodes are chained through the ready port
+
+Example: slot=1, slot=4 -> creates 1 Group + 2 create_resource nodes
+
+Step 2: create a set_liquid_from_plate node for every reagent (set the liquids)
+--------------------------------------------------------------------------------
+- First create a Group node (type="Group", minimized=true) that contains all set_liquid_from_plate nodes
+- Iterate over all reagents and create one set_liquid_from_plate node per reagent
+- Every set_liquid_from_plate node has parent_uuid pointing at the Group node, minimized=true
+- Generated parameters:
+    plate: [] (passed via the connection, from create_resource's labware)
+    well_names: ["A1", "A3", "A5"] (from the reagent's well array)
+    liquid_names: ["cell_lines", "cell_lines", "cell_lines"] (same length as the wells)
+    volumes: [1e5, 1e5, 1e5] (same length as the wells, default volume)
+- Input connection: create_resource (labware) -> set_liquid_from_plate (input_plate)
+- Output port: output_wells (used to connect to transfer_liquid)
+- Control flow: set_liquid_from_plate nodes come after all create_resource nodes, chained through the ready port
+
+Step 3: parse the workflow and create transfer_liquid and other action nodes
+--------------------------------------------------------------------------------
+- Iterate over the workflow array and create one step node per action
+- Parameter renaming: asp_vol -> asp_vols, dis_vol -> dis_vols, asp_flow_rate -> asp_flow_rates, dis_flow_rate -> dis_flow_rates
+- Parameter expansion: single values are expanded into arrays according to the number of target wells
+    e.g. asp_vol=100.0 with 3 target wells -> asp_vols=[100.0, 100.0, 100.0]
+- Connection handling: if sources/targets are already connected through set_liquid_from_plate, the parameter value becomes []
+- Input connections: set_liquid_from_plate (output_wells) -> transfer_liquid (sources_identifier / targets_identifier)
+- Output ports: sources_out, targets_out (used to connect to the next transfer_liquid)
+
+==================== Connection graph ====================
+
+Control flow (chained through the ready port):
+- between create_resource nodes: no ready connections
+- between set_liquid_from_plate nodes: no ready connections
+- between create_resource and set_liquid_from_plate: no ready connections
+- between transfer_liquid nodes: chained through the ready port
+    transfer_liquid_1 -> transfer_liquid_2 -> transfer_liquid_3 -> ...
+
+Material flow:
+[create_resource] --labware--> [set_liquid_from_plate] --output_wells--> [transfer_liquid] --sources_out/targets_out--> [next transfer_liquid]
+   (slot=1)           (cell_lines)        (input_plate)       (sources_identifier)            (sources_identifier)
+   (slot=4)           (Liquid_1)                              (targets_identifier)            (targets_identifier)
+
+==================== Port mapping ====================
+
+create_resource:
+    output: labware
+
+set_liquid_from_plate:
+    input: input_plate
+    output: output_plate, output_wells
+
+transfer_liquid:
+    input: sources -> sources_identifier, targets -> targets_identifier
+    output: sources -> sources_out, targets -> targets_out
+
+==================== Device name configuration (device_name) ====================
+
+Every node carries a device_name field that specifies which device executes it:
+- create_resource: device_name = "host_node" (fixed)
+- set_liquid_from_plate: device_name = "PRCXI" (configurable, see DEVICE_NAME_DEFAULT)
+- transfer_liquid and other actions: device_name = "PRCXI" (configurable, see DEVICE_NAME_DEFAULT)
+
+==================== Validation rules ====================
+
+- Check that sources/targets are defined in reagent
+- Check that the numbers of source and target wells match
+- Check that parameter array lengths match the number of wells
+- On problems, add a [WARN: ...] marker to the footer
+"""
+
 import re
 import uuid

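A minimal end-to-end sketch of the flow documented above. The import path of convert_json_to_node_link is assumed (it is defined later in this change set); adjust it to where the module actually lives in the package:

from unilabos.workflow.convert_from_json import convert_json_to_node_link  # assumed path

payload = {
    "workflow": [
        {"action": "transfer_liquid",
         "action_args": {"sources": "cell_lines", "targets": "Liquid_1",
                         "asp_vol": 100.0, "dis_vol": 74.75}},
    ],
    "reagent": {
        "cell_lines": {"slot": 4, "well": ["A1", "A3", "A5"], "labware": "DRUG + YOYO-MEDIA"},
        "Liquid_1": {"slot": 1, "well": ["A4", "A7", "A10"], "labware": "rep 1"},
    },
}

# Expected outcome per the docstring above: two create_resource nodes (slots 1 and 4) plus their
# Group, two set_liquid_from_plate nodes plus their Group, and one transfer_liquid node with
# asp_vol expanded to asp_vols=[100.0, 100.0, 100.0].
node_link = convert_json_to_node_link(payload, workstation_name="PRCXI")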
|
|
||||||
Json = Dict[str, Any]
|
Json = Dict[str, Any]
|
||||||
|
|
||||||
|
|
||||||
|
# ==================== 默认配置 ====================
|
||||||
|
|
||||||
|
# 设备名配置
|
||||||
|
DEVICE_NAME_HOST = "host_node" # create_resource 固定在 host_node 上执行
|
||||||
|
DEVICE_NAME_DEFAULT = "PRCXI" # transfer_liquid, set_liquid_from_plate 等动作的默认设备名
|
||||||
|
|
||||||
|
# 节点类型
|
||||||
|
NODE_TYPE_DEFAULT = "ILab" # 所有节点的默认类型
|
||||||
|
|
||||||
|
# create_resource 节点默认参数
|
||||||
|
CREATE_RESOURCE_DEFAULTS = {
|
||||||
|
"device_id": "/PRCXI",
|
||||||
|
"parent_template": "/PRCXI/PRCXI_Deck/T{slot}", # {slot} 会被替换为实际的 slot 值
|
||||||
|
"class_name": "PRCXI_BioER_96_wellplate",
|
||||||
|
}
|
||||||
|
|
||||||
|
# 默认液体体积 (uL)
|
||||||
|
DEFAULT_LIQUID_VOLUME = 1e5
|
||||||
|
|
||||||
|
# 参数重命名映射:单数 -> 复数(用于 transfer_liquid 等动作)
|
||||||
|
PARAM_RENAME_MAPPING = {
|
||||||
|
"asp_vol": "asp_vols",
|
||||||
|
"dis_vol": "dis_vols",
|
||||||
|
"asp_flow_rate": "asp_flow_rates",
|
||||||
|
"dis_flow_rate": "dis_flow_rates",
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
# ---------------- Graph ----------------
|
# ---------------- Graph ----------------
|
||||||
|
|
||||||
|
|
||||||
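A standalone sketch of the singular-to-plural rename plus well-count expansion configured above (the real logic lives inside build_protocol_graph; this is only an illustration):

PARAM_RENAME_MAPPING = {"asp_vol": "asp_vols", "dis_vol": "dis_vols",
                        "asp_flow_rate": "asp_flow_rates", "dis_flow_rate": "dis_flow_rates"}

def rename_and_expand(action_args: dict, wells_count: int) -> dict:
    """Rename singular keys to their plural forms and expand scalars to per-well arrays."""
    params = dict(action_args)
    for old, new in PARAM_RENAME_MAPPING.items():
        if old in params:
            params[new] = params.pop(old)
    for key in ("asp_vols", "dis_vols", "asp_flow_rates", "dis_flow_rates"):
        if key in params and not isinstance(params[key], list):
            params[key] = [params[key]] * wells_count
    return params

print(rename_and_expand({"asp_vol": 100.0, "dis_vol": 74.75}, wells_count=3))
# {'asp_vols': [100.0, 100.0, 100.0], 'dis_vols': [74.75, 74.75, 74.75]}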
@@ -228,7 +358,7 @@ def refactor_data(


 def build_protocol_graph(
-    labware_info: List[Dict[str, Any]],
+    labware_info: Dict[str, Dict[str, Any]],
     protocol_steps: List[Dict[str, Any]],
     workstation_name: str,
     action_resource_mapping: Optional[Dict[str, str]] = None,
@@ -236,112 +366,260 @@ def build_protocol_graph(
     """Unified protocol-graph builder; selects the build logic automatically by device type

     Args:
-        labware_info: labware information dict
+        labware_info: labware information dict, formatted as {name: {slot, well, labware, ...}, ...}
         protocol_steps: list of protocol steps
         workstation_name: workstation name
         action_resource_mapping: optional mapping from action to resource_name
     """
     G = WorkflowGraph()
-    resource_last_writer = {}
+    resource_last_writer = {}  # reagent_name -> "node_id:port"
+    slot_to_create_resource = {}  # slot -> create_resource node_id

     protocol_steps = refactor_data(protocol_steps, action_resource_mapping)
-    # Organic chemistry & liquid-handling station protocol-graph construction
-    WORKSTATION_ID = workstation_name

-    # Create a resource node for every labware
-    res_index = 0
+    # ==================== Step 1: create create_resource nodes deduplicated by slot ====================
+    # Collect all unique slots
+    slots_info = {}  # slot -> {labware, res_id}
     for labware_id, item in labware_info.items():
-        # item_id = item.get("id") or item.get("name", f"item_{uuid.uuid4()}")
-        node_id = str(uuid.uuid4())
+        slot = str(item.get("slot", ""))
+        if slot and slot not in slots_info:
+            res_id = f"plate_slot_{slot}"
+            slots_info[slot] = {
+                "labware": item.get("labware", ""),
+                "res_id": res_id,
+            }

-        # Decide the node type
-        if "Rack" in str(labware_id) or "Tip" in str(labware_id):
-            lab_node_type = "Labware"
-            description = f"Prepare Labware: {labware_id}"
-            liquid_type = []
-            liquid_volume = []
-        elif item.get("type") == "hardware" or "reactor" in str(labware_id).lower():
-            if "reactor" not in str(labware_id).lower():
-                continue
-            lab_node_type = "Sample"
-            description = f"Prepare Reactor: {labware_id}"
-            liquid_type = []
-            liquid_volume = []
-        else:
-            lab_node_type = "Reagent"
-            description = f"Add Reagent to Flask: {labware_id}"
-            liquid_type = [labware_id]
-            liquid_volume = [1e5]
+    # Create a Group node that contains all create_resource nodes
+    group_node_id = str(uuid.uuid4())
+    G.add_node(
+        group_node_id,
+        name="Resources Group",
+        type="Group",
+        parent_uuid="",
+        lab_node_type="Device",
+        template_name="",
+        resource_name="",
+        footer="",
+        minimized=True,
+        param=None,
+    )
+
+    # Create one create_resource node per unique slot
+    res_index = 0
+    for slot, info in slots_info.items():
+        node_id = str(uuid.uuid4())
+        res_id = info["res_id"]

         res_index += 1
         G.add_node(
             node_id,
             template_name="create_resource",
             resource_name="host_node",
-            name=f"Res {res_index}",
-            description=description,
-            lab_node_type=lab_node_type,
+            name=f"Plate {res_index}",
+            description=f"Create plate on slot {slot}",
+            lab_node_type="Labware",
             footer="create_resource-host_node",
+            device_name=DEVICE_NAME_HOST,
+            type=NODE_TYPE_DEFAULT,
+            parent_uuid=group_node_id,  # points at the Group node
+            minimized=True,  # collapsed display
             param={
-                "res_id": labware_id,
-                "device_id": WORKSTATION_ID,
-                "class_name": "container",
-                "parent": WORKSTATION_ID,
+                "res_id": res_id,
+                "device_id": CREATE_RESOURCE_DEFAULTS["device_id"],
+                "class_name": CREATE_RESOURCE_DEFAULTS["class_name"],
+                "parent": CREATE_RESOURCE_DEFAULTS["parent_template"].format(slot=slot),
                 "bind_locations": {"x": 0.0, "y": 0.0, "z": 0.0},
-                "liquid_input_slot": [-1],
-                "liquid_type": liquid_type,
-                "liquid_volume": liquid_volume,
-                "slot_on_deck": "",
+                "slot_on_deck": slot,
             },
         )
-        resource_last_writer[labware_id] = f"{node_id}:labware"
+        slot_to_create_resource[slot] = node_id

+    # No ready connections are needed between create_resource nodes
+
+    # ==================== Step 2: create a set_liquid_from_plate node for every reagent ====================
+    # Create a Group node that contains all set_liquid_from_plate nodes
+    set_liquid_group_id = str(uuid.uuid4())
+    G.add_node(
+        set_liquid_group_id,
+        name="SetLiquid Group",
+        type="Group",
+        parent_uuid="",
+        lab_node_type="Device",
+        template_name="",
+        resource_name="",
+        footer="",
+        minimized=True,
+        param=None,
+    )
+
+    set_liquid_index = 0
+
+    for labware_id, item in labware_info.items():
+        # Skip Tip/Rack types
+        if "Rack" in str(labware_id) or "Tip" in str(labware_id):
+            continue
+        if item.get("type") == "hardware":
+            continue
+
+        slot = str(item.get("slot", ""))
+        wells = item.get("well", [])
+        if not wells or not slot:
+            continue
+
+        # res_id must not contain spaces
+        res_id = str(labware_id).replace(" ", "_")
+        well_count = len(wells)
+
+        node_id = str(uuid.uuid4())
+        set_liquid_index += 1
+
+        G.add_node(
+            node_id,
+            template_name="set_liquid_from_plate",
+            resource_name="liquid_handler.prcxi",
+            name=f"SetLiquid {set_liquid_index}",
+            description=f"Set liquid: {labware_id}",
+            lab_node_type="Reagent",
+            footer="set_liquid_from_plate-liquid_handler.prcxi",
+            device_name=DEVICE_NAME_DEFAULT,
+            type=NODE_TYPE_DEFAULT,
+            parent_uuid=set_liquid_group_id,  # points at the Group node
+            minimized=True,  # collapsed display
+            param={
+                "plate": [],  # passed via the connection
+                "well_names": wells,  # well name array, e.g. ["A1", "A3", "A5"]
+                "liquid_names": [res_id] * well_count,
+                "volumes": [DEFAULT_LIQUID_VOLUME] * well_count,
+            },
+        )
+
+        # No ready connections are needed between set_liquid_from_plate nodes
+
+        # Material flow: create_resource labware -> set_liquid_from_plate input_plate
+        create_res_node_id = slot_to_create_resource.get(slot)
+        if create_res_node_id:
+            G.add_edge(create_res_node_id, node_id, source_port="labware", target_port="input_plate")
+
+        # The output_wells output of set_liquid_from_plate connects to transfer_liquid
+        resource_last_writer[labware_id] = f"{node_id}:output_wells"
+
+    # transfer_liquid nodes are chained through ready, starting from None
     last_control_node_id = None

+    # Port name mapping: JSON field name -> actual handle key
+    INPUT_PORT_MAPPING = {
+        "sources": "sources_identifier",
+        "targets": "targets_identifier",
+        "vessel": "vessel",
+        "to_vessel": "to_vessel",
+        "from_vessel": "from_vessel",
+        "reagent": "reagent",
+        "solvent": "solvent",
+        "compound": "compound",
+    }
+
+    OUTPUT_PORT_MAPPING = {
+        "sources": "sources_out",  # output ports are xxx_out
+        "targets": "targets_out",  # output ports are xxx_out
+        "vessel": "vessel_out",
+        "to_vessel": "to_vessel_out",
+        "from_vessel": "from_vessel_out",
+        "filtrate_vessel": "filtrate_out",
+        "reagent": "reagent",
+        "solvent": "solvent",
+        "compound": "compound",
+    }
+
+    # Parameters that are expanded to the number of wells (plural forms)
+    EXPAND_BY_WELLS_PARAMS = ["asp_vols", "dis_vols", "asp_flow_rates", "dis_flow_rates"]
+
     # Process the protocol steps
     for step in protocol_steps:
         node_id = str(uuid.uuid4())
-        G.add_node(node_id, **step)
+        params = step.get("param", {}).copy()  # copy to avoid mutating the original data
+        connected_params = set()  # parameters satisfied via connections
+        warnings = []  # collected warnings
+
+        # Parameter renaming: singular -> plural
+        for old_name, new_name in PARAM_RENAME_MAPPING.items():
+            if old_name in params:
+                params[new_name] = params.pop(old_name)
+
+        # Handle input connections
+        for param_key, target_port in INPUT_PORT_MAPPING.items():
+            resource_name = params.get(param_key)
+            if resource_name and resource_name in resource_last_writer:
+                source_node, source_port = resource_last_writer[resource_name].split(":")
+                G.add_edge(source_node, node_id, source_port=source_port, target_port=target_port)
+                connected_params.add(param_key)
+            elif resource_name and resource_name not in resource_last_writer:
+                # The resource name does not exist in labware_info
+                warnings.append(f"{param_key}={resource_name} 未找到")
+
+        # Get the number of target wells, used to expand parameters
+        targets_name = params.get("targets")
+        sources_name = params.get("sources")
+        targets_wells_count = 1
+        sources_wells_count = 1
+
+        if targets_name and targets_name in labware_info:
+            target_wells = labware_info[targets_name].get("well", [])
+            targets_wells_count = len(target_wells) if target_wells else 1
+        elif targets_name:
+            warnings.append(f"targets={targets_name} 未在 reagent 中定义")
+
+        if sources_name and sources_name in labware_info:
+            source_wells = labware_info[sources_name].get("well", [])
+            sources_wells_count = len(source_wells) if source_wells else 1
+        elif sources_name:
+            warnings.append(f"sources={sources_name} 未在 reagent 中定义")
+
+        # Check that the numbers of source and target wells match
+        if targets_wells_count != sources_wells_count and targets_name and sources_name:
+            warnings.append(f"wells 数量不匹配: sources={sources_wells_count}, targets={targets_wells_count}")
+
+        # Use the number of target wells to expand parameters
+        wells_count = targets_wells_count
+
+        # Expand single-valued parameters into arrays (by the number of target wells)
+        for expand_param in EXPAND_BY_WELLS_PARAMS:
+            if expand_param in params:
+                value = params[expand_param]
+                # A single value is expanded into an array
+                if not isinstance(value, list):
+                    params[expand_param] = [value] * wells_count
+                # An array of the wrong length only produces a warning
+                elif len(value) != wells_count:
+                    warnings.append(f"{expand_param} 数量({len(value)})与 wells({wells_count})不匹配")
+
+        # If sources/targets are passed via connections, reset the parameter value to an empty array
+        for param_key in connected_params:
+            if param_key in params:
+                params[param_key] = []
+
+        # Update the step's param, footer, device_name and type
+        step_copy = step.copy()
+        step_copy["param"] = params
+        step_copy["device_name"] = DEVICE_NAME_DEFAULT  # action nodes use the default device name
+        step_copy["type"] = NODE_TYPE_DEFAULT  # node type
+
+        # If there are warnings, prepend a warning marker to the footer
+        if warnings:
+            original_footer = step.get("footer", "")
+            step_copy["footer"] = f"[WARN: {'; '.join(warnings)}] {original_footer}"
+
+        G.add_node(node_id, **step_copy)

         # Control flow
         if last_control_node_id is not None:
             G.add_edge(last_control_node_id, node_id, source_port="ready", target_port="ready")
         last_control_node_id = node_id

-        # Material flow
-        params = step.get("param", {})
-        input_resources_possible_names = [
-            "vessel",
-            "to_vessel",
-            "from_vessel",
-            "reagent",
-            "solvent",
-            "compound",
-            "sources",
-            "targets",
-        ]
-
-        for target_port in input_resources_possible_names:
-            resource_name = params.get(target_port)
-            if resource_name and resource_name in resource_last_writer:
-                source_node, source_port = resource_last_writer[resource_name].split(":")
-                G.add_edge(source_node, node_id, source_port=source_port, target_port=target_port)
-
-        output_resources = {
-            "vessel_out": params.get("vessel"),
-            "from_vessel_out": params.get("from_vessel"),
-            "to_vessel_out": params.get("to_vessel"),
-            "filtrate_out": params.get("filtrate_vessel"),
-            "reagent": params.get("reagent"),
-            "solvent": params.get("solvent"),
-            "compound": params.get("compound"),
-            "sources_out": params.get("sources"),
-            "targets_out": params.get("targets"),
-        }
-
-        for source_port, resource_name in output_resources.items():
+        # Handle outputs: update resource_last_writer
+        for param_key, output_port in OUTPUT_PORT_MAPPING.items():
+            resource_name = step.get("param", {}).get(param_key)  # use the original parameter value
             if resource_name:
-                resource_last_writer[resource_name] = f"{node_id}:{source_port}"
+                resource_last_writer[resource_name] = f"{node_id}:{output_port}"

     return G

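A standalone sketch of the resource_last_writer bookkeeping used in the function above: the last node that produced a reagent is remembered as "node_id:port", and the next consumer connects to it. The node ids here are illustrative placeholders:

resource_last_writer = {}
edges = []

def remember(reagent: str, node_id: str, port: str) -> None:
    # Record which node/port last wrote this reagent.
    resource_last_writer[reagent] = f"{node_id}:{port}"

def connect(reagent: str, node_id: str, target_port: str) -> None:
    # Wire the current consumer to the last writer, if any.
    if reagent in resource_last_writer:
        source_node, source_port = resource_last_writer[reagent].split(":")
        edges.append((source_node, source_port, node_id, target_port))

remember("cell_lines", "setliquid-1", "output_wells")
connect("cell_lines", "transfer-1", "sources_identifier")
remember("cell_lines", "transfer-1", "sources_out")   # the transfer step becomes the new writer
connect("cell_lines", "transfer-2", "sources_identifier")
print(edges)
# [('setliquid-1', 'output_wells', 'transfer-1', 'sources_identifier'),
#  ('transfer-1', 'sources_out', 'transfer-2', 'sources_identifier')]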
@@ -1,21 +1,68 @@
 """
 JSON workflow conversion module

-Provides conversion from multiple JSON formats into the unified workflow format.
-Supported formats:
-1. workflow/reagent format
-2. steps_info/labware_info format
+Converts workflow/reagent-format JSON into the unified workflow format.
+
+Input format:
+{
+    "workflow": [
+        {"action": "...", "action_args": {...}},
+        ...
+    ],
+    "reagent": {
+        "reagent_name": {"slot": int, "well": [...], "labware": "..."},
+        ...
+    }
+}
 """

 import json
 from os import PathLike
 from pathlib import Path
-from typing import Any, Dict, List, Optional, Set, Tuple, Union
+from typing import Any, Dict, List, Optional, Tuple, Union

 from unilabos.workflow.common import WorkflowGraph, build_protocol_graph
 from unilabos.registry.registry import lab_registry
+
+
+# ==================== Field mapping configuration ====================
+
+# action -> resource_name mapping
+ACTION_RESOURCE_MAPPING: Dict[str, str] = {
+    # Biology operations
+    "transfer_liquid": "liquid_handler.prcxi",
+    "transfer": "liquid_handler.prcxi",
+    "incubation": "incubator.prcxi",
+    "move_labware": "labware_mover.prcxi",
+    "oscillation": "shaker.prcxi",
+    # Organic chemistry operations
+    "HeatChillToTemp": "heatchill.chemputer",
+    "StopHeatChill": "heatchill.chemputer",
+    "StartHeatChill": "heatchill.chemputer",
+    "HeatChill": "heatchill.chemputer",
+    "Dissolve": "stirrer.chemputer",
+    "Transfer": "liquid_handler.chemputer",
+    "Evaporate": "rotavap.chemputer",
+    "Recrystallize": "reactor.chemputer",
+    "Filter": "filter.chemputer",
+    "Dry": "dryer.chemputer",
+    "Add": "liquid_handler.chemputer",
+}
+
+# Mapping from action_args fields to parameters fields
+# Format: {"old_key": "new_key"}; only fields that need renaming are listed
+ARGS_FIELD_MAPPING: Dict[str, str] = {
+    # Configure field renames here if needed
+    # "old_field_name": "new_field_name",
+}
+
+# Default workstation name
+DEFAULT_WORKSTATION = "PRCXI"
+
+
+# ==================== Core conversion functions ====================
+
+
 def get_action_handles(resource_name: str, template_name: str) -> Dict[str, List[str]]:
     """
     Fetch the handles configuration for the given device and action from the registry
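An illustration of how the action-to-resource mapping above is typically consumed when a step node is built (simplified; the real lookup happens inside build_protocol_graph / refactor_data, and the "host_node" fallback is an assumption, not confirmed by this diff):

ACTION_RESOURCE_MAPPING = {"transfer_liquid": "liquid_handler.prcxi", "Add": "liquid_handler.chemputer"}

def resolve_step(action: str) -> dict:
    # The footer format "action-resource" matches the footers seen elsewhere in this change set,
    # e.g. "create_resource-host_node" and "set_liquid_from_plate-liquid_handler.prcxi".
    resource = ACTION_RESOURCE_MAPPING.get(action, "host_node")  # fallback is assumed
    return {"template_name": action, "resource_name": resource, "footer": f"{action}-{resource}"}

print(resolve_step("transfer_liquid"))
# {'template_name': 'transfer_liquid', 'resource_name': 'liquid_handler.prcxi',
#  'footer': 'transfer_liquid-liquid_handler.prcxi'}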
@@ -39,12 +86,10 @@ def get_action_handles(resource_name: str, template_name: str) -> Dict[str, List
     handles = action_config.get("handles", {})

     if isinstance(handles, dict):
-        # Process input handles (used as target)
         for handle in handles.get("input", []):
             handler_key = handle.get("handler_key", "")
             if handler_key:
                 result["source"].append(handler_key)
-        # Process output handles (used as source)
         for handle in handles.get("output", []):
             handler_key = handle.get("handler_key", "")
             if handler_key:
@@ -69,12 +114,9 @@ def validate_workflow_handles(graph: WorkflowGraph) -> Tuple[bool, List[str]]:
     for edge in graph.edges:
         left_uuid = edge.get("source")
         right_uuid = edge.get("target")
-        # target_handle_key is the target side, i.e. the input port of the right node
-        # source_handle_key is the source side, i.e. the output port of the left node
         right_source_conn_key = edge.get("target_handle_key", "")
         left_target_conn_key = edge.get("source_handle_key", "")

-        # Fetch the source and target node info
         left_node = nodes.get(left_uuid, {})
         right_node = nodes.get(right_uuid, {})

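A short usage sketch for the handle validation touched above; the import path is assumed, and the payload is only a minimal workflow/reagent structure accepted by convert_from_json:

from unilabos.workflow.convert_from_json import convert_from_json, validate_workflow_handles  # assumed path

payload = {
    "workflow": [{"action": "transfer_liquid",
                  "action_args": {"sources": "cell_lines", "targets": "Liquid_1", "asp_vol": 50.0}}],
    "reagent": {
        "cell_lines": {"slot": 4, "well": ["A1"], "labware": "stock"},
        "Liquid_1": {"slot": 1, "well": ["A1"], "labware": "rep 1"},
    },
}

graph = convert_from_json(payload, workstation_name="PRCXI", validate=False)
is_valid, errors = validate_workflow_handles(graph)
for err in errors:
    print("handle error:", err)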
@@ -83,164 +125,93 @@ def validate_workflow_handles(graph: WorkflowGraph) -> Tuple[bool, List[str]]:
|
|||||||
right_res_name = right_node.get("resource_name", "")
|
right_res_name = right_node.get("resource_name", "")
|
||||||
right_template_name = right_node.get("template_name", "")
|
right_template_name = right_node.get("template_name", "")
|
||||||
|
|
||||||
# 获取源节点的 output handles
|
|
||||||
left_node_handles = get_action_handles(left_res_name, left_template_name)
|
left_node_handles = get_action_handles(left_res_name, left_template_name)
|
||||||
target_valid_keys = left_node_handles.get("target", [])
|
target_valid_keys = left_node_handles.get("target", [])
|
||||||
target_valid_keys.append("ready")
|
target_valid_keys.append("ready")
|
||||||
|
|
||||||
# 获取目标节点的 input handles
|
|
||||||
right_node_handles = get_action_handles(right_res_name, right_template_name)
|
right_node_handles = get_action_handles(right_res_name, right_template_name)
|
||||||
source_valid_keys = right_node_handles.get("source", [])
|
source_valid_keys = right_node_handles.get("source", [])
|
||||||
source_valid_keys.append("ready")
|
source_valid_keys.append("ready")
|
||||||
|
|
||||||
# 如果节点配置了 output handles,则 source_port 必须有效
|
# 验证目标节点(right)的输入端口
|
||||||
if not right_source_conn_key:
|
if not right_source_conn_key:
|
||||||
node_name = left_node.get("name", left_uuid[:8])
|
node_name = right_node.get("name", right_uuid[:8])
|
||||||
errors.append(f"源节点 '{node_name}' 的 source_handle_key 为空," f"应设置为: {source_valid_keys}")
|
errors.append(f"目标节点 '{node_name}' 的输入端口 (target_handle_key) 为空,应设置为: {source_valid_keys}")
|
||||||
elif right_source_conn_key not in source_valid_keys:
|
elif right_source_conn_key not in source_valid_keys:
|
||||||
node_name = left_node.get("name", left_uuid[:8])
|
node_name = right_node.get("name", right_uuid[:8])
|
||||||
errors.append(
|
errors.append(
|
||||||
f"源节点 '{node_name}' 的 source 端点 '{right_source_conn_key}' 不存在," f"支持的端点: {source_valid_keys}"
|
f"目标节点 '{node_name}' 的输入端口 '{right_source_conn_key}' 不存在,支持的输入端口: {source_valid_keys}"
|
||||||
)
|
)
|
||||||
|
|
||||||
# 如果节点配置了 input handles,则 target_port 必须有效
|
# 验证源节点(left)的输出端口
|
||||||
if not left_target_conn_key:
|
if not left_target_conn_key:
|
||||||
node_name = right_node.get("name", right_uuid[:8])
|
node_name = left_node.get("name", left_uuid[:8])
|
||||||
errors.append(f"目标节点 '{node_name}' 的 target_handle_key 为空," f"应设置为: {target_valid_keys}")
|
errors.append(f"源节点 '{node_name}' 的输出端口 (source_handle_key) 为空,应设置为: {target_valid_keys}")
|
||||||
elif left_target_conn_key not in target_valid_keys:
|
elif left_target_conn_key not in target_valid_keys:
|
||||||
node_name = right_node.get("name", right_uuid[:8])
|
node_name = left_node.get("name", left_uuid[:8])
|
||||||
errors.append(
|
errors.append(
|
||||||
f"目标节点 '{node_name}' 的 target 端点 '{left_target_conn_key}' 不存在,"
|
f"源节点 '{node_name}' 的输出端口 '{left_target_conn_key}' 不存在,支持的输出端口: {target_valid_keys}"
|
||||||
f"支持的端点: {target_valid_keys}"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
return len(errors) == 0, errors
|
return len(errors) == 0, errors
|
||||||
|
|
||||||
|
|
||||||
# action 到 resource_name 的映射
|
def normalize_workflow_steps(workflow: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
|
||||||
ACTION_RESOURCE_MAPPING: Dict[str, str] = {
|
|
||||||
# 生物实验操作
|
|
||||||
"transfer_liquid": "liquid_handler.prcxi",
|
|
||||||
"transfer": "liquid_handler.prcxi",
|
|
||||||
"incubation": "incubator.prcxi",
|
|
||||||
"move_labware": "labware_mover.prcxi",
|
|
||||||
"oscillation": "shaker.prcxi",
|
|
||||||
# 有机化学操作
|
|
||||||
"HeatChillToTemp": "heatchill.chemputer",
|
|
||||||
"StopHeatChill": "heatchill.chemputer",
|
|
||||||
"StartHeatChill": "heatchill.chemputer",
|
|
||||||
"HeatChill": "heatchill.chemputer",
|
|
||||||
"Dissolve": "stirrer.chemputer",
|
|
||||||
"Transfer": "liquid_handler.chemputer",
|
|
||||||
"Evaporate": "rotavap.chemputer",
|
|
||||||
"Recrystallize": "reactor.chemputer",
|
|
||||||
"Filter": "filter.chemputer",
|
|
||||||
"Dry": "dryer.chemputer",
|
|
||||||
"Add": "liquid_handler.chemputer",
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
def normalize_steps(data: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
|
|
||||||
"""
|
"""
|
||||||
将不同格式的步骤数据规范化为统一格式
|
将 workflow 格式的步骤数据规范化
|
||||||
|
|
||||||
支持的输入格式:
|
输入格式:
|
||||||
- action + parameters
|
[{"action": "...", "action_args": {...}}, ...]
|
||||||
- action + action_args
|
|
||||||
- operation + parameters
|
输出格式:
|
||||||
|
[{"action": "...", "parameters": {...}, "step_number": int}, ...]
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
data: 原始步骤数据列表
|
workflow: workflow 数组
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
规范化后的步骤列表,格式为 [{"action": str, "parameters": dict, "description": str?, "step_number": int?}, ...]
|
规范化后的步骤列表
|
||||||
"""
|
"""
|
||||||
normalized = []
|
normalized = []
|
||||||
for idx, step in enumerate(data):
|
for idx, step in enumerate(workflow):
|
||||||
# 获取动作名称(支持 action 或 operation 字段)
|
action = step.get("action")
|
||||||
action = step.get("action") or step.get("operation")
|
|
||||||
if not action:
|
if not action:
|
||||||
continue
|
continue
|
||||||
|
|
||||||
# 获取参数(支持 parameters 或 action_args 字段)
|
# 获取参数: action_args
|
||||||
raw_params = step.get("parameters") or step.get("action_args") or {}
|
raw_params = step.get("action_args", {})
|
||||||
params = dict(raw_params)
|
params = {}
|
||||||
|
|
||||||
# 规范化 source/target -> sources/targets
|
# 应用字段映射
|
||||||
if "source" in raw_params and "sources" not in raw_params:
|
for key, value in raw_params.items():
|
||||||
params["sources"] = raw_params["source"]
|
mapped_key = ARGS_FIELD_MAPPING.get(key, key)
|
||||||
if "target" in raw_params and "targets" not in raw_params:
|
params[mapped_key] = value
|
||||||
params["targets"] = raw_params["target"]
|
|
||||||
|
|
||||||
# 获取描述(支持 description 或 purpose 字段)
|
step_dict = {
|
||||||
description = step.get("description") or step.get("purpose")
|
"action": action,
|
||||||
|
"parameters": params,
|
||||||
|
"step_number": idx + 1,
|
||||||
|
}
|
||||||
|
|
||||||
# 获取步骤编号(优先使用原始数据中的 step_number,否则使用索引+1)
|
# 保留描述字段
|
||||||
step_number = step.get("step_number", idx + 1)
|
if "description" in step:
|
||||||
|
step_dict["description"] = step["description"]
|
||||||
step_dict = {"action": action, "parameters": params, "step_number": step_number}
|
|
||||||
if description:
|
|
||||||
step_dict["description"] = description
|
|
||||||
|
|
||||||
normalized.append(step_dict)
|
normalized.append(step_dict)
|
||||||
|
|
||||||
return normalized
|
return normalized
|
||||||
|
|
||||||
|
|
||||||
def normalize_labware(data: List[Dict[str, Any]]) -> Dict[str, Dict[str, Any]]:
|
|
||||||
"""
|
|
||||||
将不同格式的 labware 数据规范化为统一的字典格式
|
|
||||||
|
|
||||||
支持的输入格式:
|
|
||||||
- reagent_name + material_name + positions
|
|
||||||
- name + labware + slot
|
|
||||||
|
|
||||||
Args:
|
|
||||||
data: 原始 labware 数据列表
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
规范化后的 labware 字典,格式为 {name: {"slot": int, "labware": str, "well": list, "type": str, "role": str, "name": str}, ...}
|
|
||||||
"""
|
|
||||||
labware = {}
|
|
||||||
for item in data:
|
|
||||||
# 获取 key 名称(优先使用 reagent_name,其次是 material_name 或 name)
|
|
||||||
reagent_name = item.get("reagent_name")
|
|
||||||
key = reagent_name or item.get("material_name") or item.get("name")
|
|
||||||
if not key:
|
|
||||||
continue
|
|
||||||
|
|
||||||
key = str(key)
|
|
||||||
|
|
||||||
# 处理重复 key,自动添加后缀
|
|
||||||
idx = 1
|
|
||||||
original_key = key
|
|
||||||
while key in labware:
|
|
||||||
idx += 1
|
|
||||||
key = f"{original_key}_{idx}"
|
|
||||||
|
|
||||||
labware[key] = {
|
|
||||||
"slot": item.get("positions") or item.get("slot"),
|
|
||||||
"labware": item.get("material_name") or item.get("labware"),
|
|
||||||
"well": item.get("well", []),
|
|
||||||
"type": item.get("type", "reagent"),
|
|
||||||
"role": item.get("role", ""),
|
|
||||||
"name": key,
|
|
||||||
}
|
|
||||||
|
|
||||||
return labware
|
|
||||||
|
|
||||||
|
|
||||||
def convert_from_json(
|
def convert_from_json(
|
||||||
data: Union[str, PathLike, Dict[str, Any]],
|
data: Union[str, PathLike, Dict[str, Any]],
|
||||||
workstation_name: str = "PRCXi",
|
workstation_name: str = DEFAULT_WORKSTATION,
|
||||||
validate: bool = True,
|
validate: bool = True,
|
||||||
) -> WorkflowGraph:
|
) -> WorkflowGraph:
|
||||||
"""
|
"""
|
||||||
从 JSON 数据或文件转换为 WorkflowGraph
|
从 JSON 数据或文件转换为 WorkflowGraph
|
||||||
|
|
||||||
支持的 JSON 格式:
|
JSON 格式:
|
||||||
1. {"workflow": [...], "reagent": {...}} - 直接格式
|
{"workflow": [...], "reagent": {...}}
|
||||||
2. {"steps_info": [...], "labware_info": [...]} - 需要规范化的格式
|
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
data: JSON 文件路径、字典数据、或 JSON 字符串
|
data: JSON 文件路径、字典数据、或 JSON 字符串
|
||||||
@@ -251,7 +222,7 @@ def convert_from_json(
|
|||||||
WorkflowGraph: 构建好的工作流图
|
WorkflowGraph: 构建好的工作流图
|
||||||
|
|
||||||
Raises:
|
Raises:
|
||||||
ValueError: 不支持的 JSON 格式 或 句柄校验失败
|
ValueError: 不支持的 JSON 格式
|
||||||
FileNotFoundError: 文件不存在
|
FileNotFoundError: 文件不存在
|
||||||
json.JSONDecodeError: JSON 解析失败
|
json.JSONDecodeError: JSON 解析失败
|
||||||
"""
|
"""
|
||||||
@@ -262,7 +233,6 @@ def convert_from_json(
|
|||||||
with path.open("r", encoding="utf-8") as fp:
|
with path.open("r", encoding="utf-8") as fp:
|
||||||
json_data = json.load(fp)
|
json_data = json.load(fp)
|
||||||
elif isinstance(data, str):
|
elif isinstance(data, str):
|
||||||
# 尝试作为 JSON 字符串解析
|
|
||||||
json_data = json.loads(data)
|
json_data = json.loads(data)
|
||||||
else:
|
else:
|
||||||
raise FileNotFoundError(f"文件不存在: {data}")
|
raise FileNotFoundError(f"文件不存在: {data}")
|
||||||
@@ -271,30 +241,24 @@ def convert_from_json(
|
|||||||
else:
|
else:
|
||||||
raise TypeError(f"不支持的数据类型: {type(data)}")
|
raise TypeError(f"不支持的数据类型: {type(data)}")
|
||||||
|
|
||||||
# 根据格式解析数据
|
# 校验格式
|
||||||
if "workflow" in json_data and "reagent" in json_data:
|
if "workflow" not in json_data or "reagent" not in json_data:
|
||||||
# 格式1: workflow/reagent(已经是规范格式)
|
|
||||||
protocol_steps = json_data["workflow"]
|
|
||||||
labware_info = json_data["reagent"]
|
|
||||||
elif "steps_info" in json_data and "labware_info" in json_data:
|
|
||||||
# 格式2: steps_info/labware_info(需要规范化)
|
|
||||||
protocol_steps = normalize_steps(json_data["steps_info"])
|
|
||||||
labware_info = normalize_labware(json_data["labware_info"])
|
|
||||||
elif "steps" in json_data and "labware" in json_data:
|
|
||||||
# 格式3: steps/labware(另一种常见格式)
|
|
||||||
protocol_steps = normalize_steps(json_data["steps"])
|
|
||||||
if isinstance(json_data["labware"], list):
|
|
||||||
labware_info = normalize_labware(json_data["labware"])
|
|
||||||
else:
|
|
||||||
labware_info = json_data["labware"]
|
|
||||||
else:
|
|
||||||
raise ValueError(
|
raise ValueError(
|
||||||
"不支持的 JSON 格式。支持的格式:\n"
|
"不支持的 JSON 格式。请使用标准格式:\n"
|
||||||
"1. {'workflow': [...], 'reagent': {...}}\n"
|
'{"workflow": [{"action": "...", "action_args": {...}}, ...], '
|
||||||
"2. {'steps_info': [...], 'labware_info': [...]}\n"
|
'"reagent": {"name": {"slot": int, "well": [...], "labware": "..."}, ...}}'
|
||||||
"3. {'steps': [...], 'labware': [...]}"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
|
# 提取数据
|
||||||
|
workflow = json_data["workflow"]
|
||||||
|
reagent = json_data["reagent"]
|
||||||
|
|
||||||
|
# 规范化步骤数据
|
||||||
|
protocol_steps = normalize_workflow_steps(workflow)
|
||||||
|
|
||||||
|
# reagent 已经是字典格式,直接使用
|
||||||
|
labware_info = reagent
|
||||||
|
|
||||||
# 构建工作流图
|
# 构建工作流图
|
||||||
graph = build_protocol_graph(
|
graph = build_protocol_graph(
|
||||||
labware_info=labware_info,
|
labware_info=labware_info,
|
||||||
@@ -317,7 +281,7 @@ def convert_from_json(
|
|||||||
|
|
||||||
def convert_json_to_node_link(
|
def convert_json_to_node_link(
|
||||||
data: Union[str, PathLike, Dict[str, Any]],
|
data: Union[str, PathLike, Dict[str, Any]],
|
||||||
workstation_name: str = "PRCXi",
|
workstation_name: str = DEFAULT_WORKSTATION,
|
||||||
) -> Dict[str, Any]:
|
) -> Dict[str, Any]:
|
||||||
"""
|
"""
|
||||||
将 JSON 数据转换为 node-link 格式的字典
|
将 JSON 数据转换为 node-link 格式的字典
|
||||||
@@ -335,7 +299,7 @@ def convert_json_to_node_link(
|
|||||||
|
|
||||||
def convert_json_to_workflow_list(
|
def convert_json_to_workflow_list(
|
||||||
data: Union[str, PathLike, Dict[str, Any]],
|
data: Union[str, PathLike, Dict[str, Any]],
|
||||||
workstation_name: str = "PRCXi",
|
workstation_name: str = DEFAULT_WORKSTATION,
|
||||||
) -> List[Dict[str, Any]]:
|
) -> List[Dict[str, Any]]:
|
||||||
"""
|
"""
|
||||||
将 JSON 数据转换为工作流列表格式
|
将 JSON 数据转换为工作流列表格式
|
||||||
@@ -349,8 +313,3 @@ def convert_json_to_workflow_list(
|
|||||||
"""
|
"""
|
||||||
graph = convert_from_json(data, workstation_name)
|
graph = convert_from_json(data, workstation_name)
|
||||||
return graph.to_dict()
|
return graph.to_dict()
|
||||||
|
|
||||||
|
|
||||||
# 为了向后兼容,保留下划线前缀的别名
|
|
||||||
_normalize_steps = normalize_steps
|
|
||||||
_normalize_labware = normalize_labware
|
|
||||||
|
|||||||
356
unilabos/workflow/legacy/convert_from_json_legacy.py
Normal file
356
unilabos/workflow/legacy/convert_from_json_legacy.py
Normal file
@@ -0,0 +1,356 @@
|
|||||||
|
"""
|
||||||
|
JSON 工作流转换模块
|
||||||
|
|
||||||
|
提供从多种 JSON 格式转换为统一工作流格式的功能。
|
||||||
|
支持的格式:
|
||||||
|
1. workflow/reagent 格式
|
||||||
|
2. steps_info/labware_info 格式
|
||||||
|
"""
|
||||||
|
|
||||||
|
import json
|
||||||
|
from os import PathLike
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Any, Dict, List, Optional, Set, Tuple, Union
|
||||||
|
|
||||||
|
from unilabos.workflow.common import WorkflowGraph, build_protocol_graph
|
||||||
|
from unilabos.registry.registry import lab_registry
|
||||||
|
|
||||||
|
|
||||||
|
def get_action_handles(resource_name: str, template_name: str) -> Dict[str, List[str]]:
|
||||||
|
"""
|
||||||
|
从 registry 获取指定设备和动作的 handles 配置
|
||||||
|
|
||||||
|
Args:
|
||||||
|
resource_name: 设备资源名称,如 "liquid_handler.prcxi"
|
||||||
|
template_name: 动作模板名称,如 "transfer_liquid"
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
包含 source 和 target handler_keys 的字典:
|
||||||
|
{"source": ["sources_out", "targets_out", ...], "target": ["sources", "targets", ...]}
|
||||||
|
"""
|
||||||
|
result = {"source": [], "target": []}
|
||||||
|
|
||||||
|
device_info = lab_registry.device_type_registry.get(resource_name, {})
|
||||||
|
if not device_info:
|
||||||
|
return result
|
||||||
|
|
||||||
|
action_mappings = device_info.get("class", {}).get("action_value_mappings", {})
|
||||||
|
action_config = action_mappings.get(template_name, {})
|
||||||
|
handles = action_config.get("handles", {})
|
||||||
|
|
||||||
|
if isinstance(handles, dict):
|
||||||
|
# 处理 input handles (作为 target)
|
||||||
|
for handle in handles.get("input", []):
|
||||||
|
handler_key = handle.get("handler_key", "")
|
||||||
|
if handler_key:
|
||||||
|
result["source"].append(handler_key)
|
||||||
|
# 处理 output handles (作为 source)
|
||||||
|
for handle in handles.get("output", []):
|
||||||
|
handler_key = handle.get("handler_key", "")
|
||||||
|
if handler_key:
|
||||||
|
result["target"].append(handler_key)
|
||||||
|
|
||||||
|
return result


def validate_workflow_handles(graph: WorkflowGraph) -> Tuple[bool, List[str]]:
    """
    Validate that the handle configuration of every edge in the workflow graph is correct.

    Args:
        graph: The workflow graph object

    Returns:
        (is_valid, errors): whether the graph is valid, and a list of error messages
    """
    errors = []
    nodes = graph.nodes

    for edge in graph.edges:
        left_uuid = edge.get("source")
        right_uuid = edge.get("target")
        # target_handle_key belongs to the target: the input (incoming) handle on the right node
        # source_handle_key belongs to the source: the output (outgoing) handle on the left node
        right_source_conn_key = edge.get("target_handle_key", "")
        left_target_conn_key = edge.get("source_handle_key", "")

        # Fetch source and target node information
        left_node = nodes.get(left_uuid, {})
        right_node = nodes.get(right_uuid, {})

        left_res_name = left_node.get("resource_name", "")
        left_template_name = left_node.get("template_name", "")
        right_res_name = right_node.get("resource_name", "")
        right_template_name = right_node.get("template_name", "")

        # Output handles of the source (left) node
        left_node_handles = get_action_handles(left_res_name, left_template_name)
        target_valid_keys = left_node_handles.get("target", [])
        target_valid_keys.append("ready")

        # Input handles of the target (right) node
        right_node_handles = get_action_handles(right_res_name, right_template_name)
        source_valid_keys = right_node_handles.get("source", [])
        source_valid_keys.append("ready")

        # If the node declares output handles, the source port must be valid
        if not right_source_conn_key:
            node_name = left_node.get("name", left_uuid[:8])
            errors.append(f"Source node '{node_name}' has an empty source_handle_key; it should be one of: {source_valid_keys}")
        elif right_source_conn_key not in source_valid_keys:
            node_name = left_node.get("name", left_uuid[:8])
            errors.append(
                f"Source node '{node_name}' has no source handle '{right_source_conn_key}'; supported handles: {source_valid_keys}"
            )

        # If the node declares input handles, the target port must be valid
        if not left_target_conn_key:
            node_name = right_node.get("name", right_uuid[:8])
            errors.append(f"Target node '{node_name}' has an empty target_handle_key; it should be one of: {target_valid_keys}")
        elif left_target_conn_key not in target_valid_keys:
            node_name = right_node.get("name", right_uuid[:8])
            errors.append(
                f"Target node '{node_name}' has no target handle '{left_target_conn_key}'; "
                f"supported handles: {target_valid_keys}"
            )

    return len(errors) == 0, errors
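
# Illustrative sketch: surfacing validation problems for an already-built graph
# (graph is assumed to be a WorkflowGraph produced by build_protocol_graph or
# by convert_from_json below).
#
#     is_valid, errors = validate_workflow_handles(graph)
#     if not is_valid:
#         for msg in errors:
#             print(f"handle validation: {msg}")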


# Mapping from action name to resource_name
ACTION_RESOURCE_MAPPING: Dict[str, str] = {
    # Biological experiment operations
    "transfer_liquid": "liquid_handler.prcxi",
    "transfer": "liquid_handler.prcxi",
    "incubation": "incubator.prcxi",
    "move_labware": "labware_mover.prcxi",
    "oscillation": "shaker.prcxi",
    # Organic chemistry operations
    "HeatChillToTemp": "heatchill.chemputer",
    "StopHeatChill": "heatchill.chemputer",
    "StartHeatChill": "heatchill.chemputer",
    "HeatChill": "heatchill.chemputer",
    "Dissolve": "stirrer.chemputer",
    "Transfer": "liquid_handler.chemputer",
    "Evaporate": "rotavap.chemputer",
    "Recrystallize": "reactor.chemputer",
    "Filter": "filter.chemputer",
    "Dry": "dryer.chemputer",
    "Add": "liquid_handler.chemputer",
}


def normalize_steps(data: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
    """
    Normalize step data from different formats into a single unified format.

    Supported input formats:
    - action + parameters
    - action + action_args
    - operation + parameters

    Args:
        data: The raw list of step records

    Returns:
        The normalized step list, in the form
        [{"action": str, "parameters": dict, "description": str?, "step_number": int?}, ...]
    """
    normalized = []
    for idx, step in enumerate(data):
        # Action name (either the "action" or the "operation" field)
        action = step.get("action") or step.get("operation")
        if not action:
            continue

        # Parameters (either the "parameters" or the "action_args" field)
        raw_params = step.get("parameters") or step.get("action_args") or {}
        params = dict(raw_params)

        # Normalize source/target -> sources/targets
        if "source" in raw_params and "sources" not in raw_params:
            params["sources"] = raw_params["source"]
        if "target" in raw_params and "targets" not in raw_params:
            params["targets"] = raw_params["target"]

        # Description (either the "description" or the "purpose" field)
        description = step.get("description") or step.get("purpose")

        # Step number (prefer step_number from the raw data, otherwise index + 1)
        step_number = step.get("step_number", idx + 1)

        step_dict = {"action": action, "parameters": params, "step_number": step_number}
        if description:
            step_dict["description"] = description

        normalized.append(step_dict)

    return normalized
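
# Illustrative sketch (hypothetical step payload): a "steps_info"-style record
# using operation/action_args and bare source/target fields.
#
#     normalize_steps([
#         {"operation": "transfer_liquid",
#          "action_args": {"source": "buffer", "target": "plate_1", "volume": 50},
#          "purpose": "add buffer"},
#     ])
#     # -> [{"action": "transfer_liquid",
#     #      "parameters": {"source": "buffer", "target": "plate_1", "volume": 50,
#     #                     "sources": "buffer", "targets": "plate_1"},
#     #      "step_number": 1, "description": "add buffer"}]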


def normalize_labware(data: List[Dict[str, Any]]) -> Dict[str, Dict[str, Any]]:
    """
    Normalize labware data from different formats into a unified dict format.

    Supported input formats:
    - reagent_name + material_name + positions
    - name + labware + slot

    Args:
        data: The raw list of labware records

    Returns:
        The normalized labware dict, in the form
        {name: {"slot": int, "labware": str, "well": list, "type": str, "role": str, "name": str}, ...}
    """
    labware = {}
    for item in data:
        # Key name (prefer reagent_name, then material_name, then name)
        reagent_name = item.get("reagent_name")
        key = reagent_name or item.get("material_name") or item.get("name")
        if not key:
            continue

        key = str(key)

        # Handle duplicate keys by appending a numeric suffix
        idx = 1
        original_key = key
        while key in labware:
            idx += 1
            key = f"{original_key}_{idx}"

        labware[key] = {
            "slot": item.get("positions") or item.get("slot"),
            "labware": item.get("material_name") or item.get("labware"),
            "well": item.get("well", []),
            "type": item.get("type", "reagent"),
            "role": item.get("role", ""),
            "name": key,
        }

    return labware
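
# Illustrative sketch (hypothetical labware record; the material name is made up):
#
#     normalize_labware([
#         {"reagent_name": "buffer", "material_name": "corning_96_wellplate", "positions": 3},
#     ])
#     # -> {"buffer": {"slot": 3, "labware": "corning_96_wellplate", "well": [],
#     #                "type": "reagent", "role": "", "name": "buffer"}}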


def convert_from_json(
    data: Union[str, PathLike, Dict[str, Any]],
    workstation_name: str = "PRCXi",
    validate: bool = True,
) -> WorkflowGraph:
    """
    Convert JSON data or a JSON file into a WorkflowGraph.

    Supported JSON formats:
    1. {"workflow": [...], "reagent": {...}} - direct format
    2. {"steps_info": [...], "labware_info": [...]} - format that needs normalization

    Args:
        data: A JSON file path, a dict, or a JSON string
        workstation_name: Workstation name, defaults to "PRCXi"
        validate: Whether to validate the handle configuration, defaults to True

    Returns:
        WorkflowGraph: The constructed workflow graph

    Raises:
        ValueError: Unsupported JSON format or handle validation failure
        FileNotFoundError: File does not exist
        json.JSONDecodeError: JSON parsing failure
    """
    # Resolve the input data
    if isinstance(data, (str, PathLike)):
        path = Path(data)
        if path.exists():
            with path.open("r", encoding="utf-8") as fp:
                json_data = json.load(fp)
        elif isinstance(data, str):
            # Try to parse it as a JSON string
            json_data = json.loads(data)
        else:
            raise FileNotFoundError(f"File does not exist: {data}")
    elif isinstance(data, dict):
        json_data = data
    else:
        raise TypeError(f"Unsupported data type: {type(data)}")

    # Parse the data according to its format
    if "workflow" in json_data and "reagent" in json_data:
        # Format 1: workflow/reagent (already in canonical form)
        protocol_steps = json_data["workflow"]
        labware_info = json_data["reagent"]
    elif "steps_info" in json_data and "labware_info" in json_data:
        # Format 2: steps_info/labware_info (needs normalization)
        protocol_steps = normalize_steps(json_data["steps_info"])
        labware_info = normalize_labware(json_data["labware_info"])
    elif "steps" in json_data and "labware" in json_data:
        # Format 3: steps/labware (another common format)
        protocol_steps = normalize_steps(json_data["steps"])
        if isinstance(json_data["labware"], list):
            labware_info = normalize_labware(json_data["labware"])
        else:
            labware_info = json_data["labware"]
    else:
        raise ValueError(
            "Unsupported JSON format. Supported formats:\n"
            "1. {'workflow': [...], 'reagent': {...}}\n"
            "2. {'steps_info': [...], 'labware_info': [...]}\n"
            "3. {'steps': [...], 'labware': [...]}"
        )

    # Build the workflow graph
    graph = build_protocol_graph(
        labware_info=labware_info,
        protocol_steps=protocol_steps,
        workstation_name=workstation_name,
        action_resource_mapping=ACTION_RESOURCE_MAPPING,
    )

    # Validate the handle configuration
    if validate:
        is_valid, errors = validate_workflow_handles(graph)
        if not is_valid:
            import warnings

            for error in errors:
                warnings.warn(f"Handle validation warning: {error}")

    return graph
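
# Illustrative usage sketch (hypothetical payload; reagent and material names
# are made up for the example):
#
#     payload = {
#         "steps_info": [
#             {"action": "transfer_liquid",
#              "parameters": {"sources": "buffer", "targets": "plate_1", "volume": 100}},
#         ],
#         "labware_info": [
#             {"reagent_name": "buffer", "material_name": "reservoir_12_channel", "positions": 1},
#             {"reagent_name": "plate_1", "material_name": "corning_96_wellplate", "positions": 2},
#         ],
#     }
#     graph = convert_from_json(payload, workstation_name="PRCXi")
#     # or, from a file on disk: convert_from_json("protocol.json")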


def convert_json_to_node_link(
    data: Union[str, PathLike, Dict[str, Any]],
    workstation_name: str = "PRCXi",
) -> Dict[str, Any]:
    """
    Convert JSON data into a node-link-format dict.

    Args:
        data: A JSON file path, a dict, or a JSON string
        workstation_name: Workstation name, defaults to "PRCXi"

    Returns:
        Dict: The workflow data in node-link format
    """
    graph = convert_from_json(data, workstation_name)
    return graph.to_node_link_dict()


def convert_json_to_workflow_list(
    data: Union[str, PathLike, Dict[str, Any]],
    workstation_name: str = "PRCXi",
) -> List[Dict[str, Any]]:
    """
    Convert JSON data into a workflow-list format.

    Args:
        data: A JSON file path, a dict, or a JSON string
        workstation_name: Workstation name, defaults to "PRCXi"

    Returns:
        List: The list of workflow nodes
    """
    graph = convert_from_json(data, workstation_name)
    return graph.to_dict()


# Underscore-prefixed aliases kept for backward compatibility
_normalize_steps = normalize_steps
_normalize_labware = normalize_labware
@@ -2,7 +2,7 @@
 <?xml-model href="http://download.ros.org/schema/package_format3.xsd" schematypens="http://www.w3.org/2001/XMLSchema"?>
 <package format="3">
   <name>unilabos_msgs</name>
-  <version>0.10.13</version>
+  <version>0.10.17</version>
   <description>ROS2 Messages package for unilabos devices</description>
   <maintainer email="changjh@pku.edu.cn">Junhan Chang</maintainer>
   <maintainer email="18435084+Xuwznln@users.noreply.github.com">Xuwznln</maintainer>