mirror of
https://github.com/dptech-corp/Uni-Lab-OS.git
synced 2026-02-06 06:25:06 +00:00
Compare commits
8 Commits
aeeb36d075
...
v0.10.14
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
266366cc25 | ||
|
|
121c3985cc | ||
|
|
6ca5c72fc6 | ||
|
|
bc8c49ddda | ||
|
|
28f93737ac | ||
|
|
5dc81ec9be | ||
|
|
13a6795657 | ||
|
|
53219d8b04 |
@@ -1,60 +0,0 @@
|
|||||||
# unilabos: Production package (depends on unilabos-env + pip unilabos)
|
|
||||||
# For production deployment
|
|
||||||
|
|
||||||
package:
|
|
||||||
name: unilabos
|
|
||||||
version: 0.10.17
|
|
||||||
|
|
||||||
source:
|
|
||||||
path: ../../unilabos
|
|
||||||
target_directory: unilabos
|
|
||||||
|
|
||||||
build:
|
|
||||||
python:
|
|
||||||
entry_points:
|
|
||||||
- unilab = unilabos.app.main:main
|
|
||||||
script:
|
|
||||||
- set PIP_NO_INDEX=
|
|
||||||
- if: win
|
|
||||||
then:
|
|
||||||
- copy %RECIPE_DIR%\..\..\MANIFEST.in %SRC_DIR%
|
|
||||||
- copy %RECIPE_DIR%\..\..\setup.cfg %SRC_DIR%
|
|
||||||
- copy %RECIPE_DIR%\..\..\setup.py %SRC_DIR%
|
|
||||||
- pip install %SRC_DIR%
|
|
||||||
- if: unix
|
|
||||||
then:
|
|
||||||
- cp $RECIPE_DIR/../../MANIFEST.in $SRC_DIR
|
|
||||||
- cp $RECIPE_DIR/../../setup.cfg $SRC_DIR
|
|
||||||
- cp $RECIPE_DIR/../../setup.py $SRC_DIR
|
|
||||||
- pip install $SRC_DIR
|
|
||||||
|
|
||||||
requirements:
|
|
||||||
host:
|
|
||||||
- python ==3.11.14
|
|
||||||
- pip
|
|
||||||
- setuptools
|
|
||||||
- zstd
|
|
||||||
- zstandard
|
|
||||||
run:
|
|
||||||
- zstd
|
|
||||||
- zstandard
|
|
||||||
- networkx
|
|
||||||
- typing_extensions
|
|
||||||
- websockets
|
|
||||||
- pint
|
|
||||||
- fastapi
|
|
||||||
- jinja2
|
|
||||||
- requests
|
|
||||||
- uvicorn
|
|
||||||
- opcua
|
|
||||||
- pyserial
|
|
||||||
- pandas
|
|
||||||
- pymodbus
|
|
||||||
- matplotlib
|
|
||||||
- pylibftdi
|
|
||||||
- uni-lab::unilabos-env ==0.10.17
|
|
||||||
|
|
||||||
about:
|
|
||||||
repository: https://github.com/deepmodeling/Uni-Lab-OS
|
|
||||||
license: GPL-3.0-only
|
|
||||||
description: "UniLabOS - Production package with minimal ROS2 dependencies"
|
|
||||||
@@ -1,39 +0,0 @@
|
|||||||
# unilabos-env: conda environment dependencies (ROS2 + conda packages)
|
|
||||||
|
|
||||||
package:
|
|
||||||
name: unilabos-env
|
|
||||||
version: 0.10.17
|
|
||||||
|
|
||||||
build:
|
|
||||||
noarch: generic
|
|
||||||
|
|
||||||
requirements:
|
|
||||||
run:
|
|
||||||
# Python
|
|
||||||
- zstd
|
|
||||||
- zstandard
|
|
||||||
- conda-forge::python ==3.11.14
|
|
||||||
- conda-forge::opencv
|
|
||||||
# ROS2 dependencies (from ci-check.yml)
|
|
||||||
- robostack-staging::ros-humble-ros-core
|
|
||||||
- robostack-staging::ros-humble-action-msgs
|
|
||||||
- robostack-staging::ros-humble-std-msgs
|
|
||||||
- robostack-staging::ros-humble-geometry-msgs
|
|
||||||
- robostack-staging::ros-humble-control-msgs
|
|
||||||
- robostack-staging::ros-humble-nav2-msgs
|
|
||||||
- robostack-staging::ros-humble-cv-bridge
|
|
||||||
- robostack-staging::ros-humble-vision-opencv
|
|
||||||
- robostack-staging::ros-humble-tf-transformations
|
|
||||||
- robostack-staging::ros-humble-moveit-msgs
|
|
||||||
- robostack-staging::ros-humble-tf2-ros
|
|
||||||
- robostack-staging::ros-humble-tf2-ros-py
|
|
||||||
- conda-forge::transforms3d
|
|
||||||
- conda-forge::uv
|
|
||||||
|
|
||||||
# UniLabOS custom messages
|
|
||||||
- uni-lab::ros-humble-unilabos-msgs
|
|
||||||
|
|
||||||
about:
|
|
||||||
repository: https://github.com/deepmodeling/Uni-Lab-OS
|
|
||||||
license: GPL-3.0-only
|
|
||||||
description: "UniLabOS Environment - ROS2 and conda dependencies"
|
|
||||||
@@ -1,42 +0,0 @@
|
|||||||
# unilabos-full: Full package with all features
|
|
||||||
# Depends on unilabos + complete ROS2 desktop + dev tools
|
|
||||||
|
|
||||||
package:
|
|
||||||
name: unilabos-full
|
|
||||||
version: 0.10.17
|
|
||||||
|
|
||||||
build:
|
|
||||||
noarch: generic
|
|
||||||
|
|
||||||
requirements:
|
|
||||||
run:
|
|
||||||
# Base unilabos package (includes unilabos-env)
|
|
||||||
- uni-lab::unilabos ==0.10.17
|
|
||||||
# Documentation tools
|
|
||||||
- sphinx
|
|
||||||
- sphinx_rtd_theme
|
|
||||||
# Web UI
|
|
||||||
- gradio
|
|
||||||
- flask
|
|
||||||
# Interactive development
|
|
||||||
- ipython
|
|
||||||
- jupyter
|
|
||||||
- jupyros
|
|
||||||
- colcon-common-extensions
|
|
||||||
# ROS2 full desktop (includes rviz2, gazebo, etc.)
|
|
||||||
- robostack-staging::ros-humble-desktop-full
|
|
||||||
# Navigation and motion control
|
|
||||||
- ros-humble-navigation2
|
|
||||||
- ros-humble-ros2-control
|
|
||||||
- ros-humble-robot-state-publisher
|
|
||||||
- ros-humble-joint-state-publisher
|
|
||||||
# MoveIt motion planning
|
|
||||||
- ros-humble-moveit
|
|
||||||
- ros-humble-moveit-servo
|
|
||||||
# Simulation
|
|
||||||
- ros-humble-simulation
|
|
||||||
|
|
||||||
about:
|
|
||||||
repository: https://github.com/deepmodeling/Uni-Lab-OS
|
|
||||||
license: GPL-3.0-only
|
|
||||||
description: "UniLabOS Full - Complete package with ROS2 Desktop, MoveIt, Navigation2, Gazebo, Jupyter"
|
|
||||||
91
.conda/recipe.yaml
Normal file
91
.conda/recipe.yaml
Normal file
@@ -0,0 +1,91 @@
|
|||||||
|
package:
|
||||||
|
name: unilabos
|
||||||
|
version: 0.10.14
|
||||||
|
|
||||||
|
source:
|
||||||
|
path: ../unilabos
|
||||||
|
target_directory: unilabos
|
||||||
|
|
||||||
|
build:
|
||||||
|
python:
|
||||||
|
entry_points:
|
||||||
|
- unilab = unilabos.app.main:main
|
||||||
|
script:
|
||||||
|
- set PIP_NO_INDEX=
|
||||||
|
- if: win
|
||||||
|
then:
|
||||||
|
- copy %RECIPE_DIR%\..\MANIFEST.in %SRC_DIR%
|
||||||
|
- copy %RECIPE_DIR%\..\setup.cfg %SRC_DIR%
|
||||||
|
- copy %RECIPE_DIR%\..\setup.py %SRC_DIR%
|
||||||
|
- call %PYTHON% -m pip install %SRC_DIR%
|
||||||
|
- if: unix
|
||||||
|
then:
|
||||||
|
- cp $RECIPE_DIR/../MANIFEST.in $SRC_DIR
|
||||||
|
- cp $RECIPE_DIR/../setup.cfg $SRC_DIR
|
||||||
|
- cp $RECIPE_DIR/../setup.py $SRC_DIR
|
||||||
|
- $PYTHON -m pip install $SRC_DIR
|
||||||
|
|
||||||
|
requirements:
|
||||||
|
host:
|
||||||
|
- python ==3.11.11
|
||||||
|
- pip
|
||||||
|
- setuptools
|
||||||
|
- zstd
|
||||||
|
- zstandard
|
||||||
|
run:
|
||||||
|
- conda-forge::python ==3.11.11
|
||||||
|
- compilers
|
||||||
|
- cmake
|
||||||
|
- zstd
|
||||||
|
- zstandard
|
||||||
|
- ninja
|
||||||
|
- if: unix
|
||||||
|
then:
|
||||||
|
- make
|
||||||
|
- sphinx
|
||||||
|
- sphinx_rtd_theme
|
||||||
|
- numpy
|
||||||
|
- scipy
|
||||||
|
- pandas
|
||||||
|
- networkx
|
||||||
|
- matplotlib
|
||||||
|
- pint
|
||||||
|
- pyserial
|
||||||
|
- pyusb
|
||||||
|
- pylibftdi
|
||||||
|
- pymodbus
|
||||||
|
- python-can
|
||||||
|
- pyvisa
|
||||||
|
- opencv
|
||||||
|
- pydantic
|
||||||
|
- fastapi
|
||||||
|
- uvicorn
|
||||||
|
- gradio
|
||||||
|
- flask
|
||||||
|
- websockets
|
||||||
|
- ipython
|
||||||
|
- jupyter
|
||||||
|
- jupyros
|
||||||
|
- colcon-common-extensions
|
||||||
|
- robostack-staging::ros-humble-desktop-full
|
||||||
|
- robostack-staging::ros-humble-control-msgs
|
||||||
|
- robostack-staging::ros-humble-sensor-msgs
|
||||||
|
- robostack-staging::ros-humble-trajectory-msgs
|
||||||
|
- ros-humble-navigation2
|
||||||
|
- ros-humble-ros2-control
|
||||||
|
- ros-humble-robot-state-publisher
|
||||||
|
- ros-humble-joint-state-publisher
|
||||||
|
- ros-humble-rosbridge-server
|
||||||
|
- ros-humble-cv-bridge
|
||||||
|
- ros-humble-tf2
|
||||||
|
- ros-humble-moveit
|
||||||
|
- ros-humble-moveit-servo
|
||||||
|
- ros-humble-simulation
|
||||||
|
- ros-humble-tf-transformations
|
||||||
|
- transforms3d
|
||||||
|
- uni-lab::ros-humble-unilabos-msgs
|
||||||
|
|
||||||
|
about:
|
||||||
|
repository: https://github.com/deepmodeling/Uni-Lab-OS
|
||||||
|
license: GPL-3.0-only
|
||||||
|
description: "Uni-Lab-OS"
|
||||||
67
.github/workflows/ci-check.yml
vendored
67
.github/workflows/ci-check.yml
vendored
@@ -1,67 +0,0 @@
|
|||||||
name: CI Check
|
|
||||||
|
|
||||||
on:
|
|
||||||
push:
|
|
||||||
branches: [main, dev]
|
|
||||||
pull_request:
|
|
||||||
branches: [main, dev]
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
registry-check:
|
|
||||||
runs-on: windows-latest
|
|
||||||
|
|
||||||
env:
|
|
||||||
# Fix Unicode encoding issue on Windows runner (cp1252 -> utf-8)
|
|
||||||
PYTHONIOENCODING: utf-8
|
|
||||||
PYTHONUTF8: 1
|
|
||||||
|
|
||||||
defaults:
|
|
||||||
run:
|
|
||||||
shell: cmd
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v6
|
|
||||||
with:
|
|
||||||
fetch-depth: 0
|
|
||||||
|
|
||||||
- name: Setup Miniforge
|
|
||||||
uses: conda-incubator/setup-miniconda@v3
|
|
||||||
with:
|
|
||||||
miniforge-version: latest
|
|
||||||
use-mamba: true
|
|
||||||
channels: robostack-staging,conda-forge,uni-lab
|
|
||||||
channel-priority: flexible
|
|
||||||
activate-environment: check-env
|
|
||||||
auto-update-conda: false
|
|
||||||
show-channel-urls: true
|
|
||||||
|
|
||||||
- name: Install ROS dependencies, uv and unilabos-msgs
|
|
||||||
run: |
|
|
||||||
echo Installing ROS dependencies...
|
|
||||||
mamba install -n check-env conda-forge::uv conda-forge::opencv robostack-staging::ros-humble-ros-core robostack-staging::ros-humble-action-msgs robostack-staging::ros-humble-std-msgs robostack-staging::ros-humble-geometry-msgs robostack-staging::ros-humble-control-msgs robostack-staging::ros-humble-nav2-msgs uni-lab::ros-humble-unilabos-msgs robostack-staging::ros-humble-cv-bridge robostack-staging::ros-humble-vision-opencv robostack-staging::ros-humble-tf-transformations robostack-staging::ros-humble-moveit-msgs robostack-staging::ros-humble-tf2-ros robostack-staging::ros-humble-tf2-ros-py conda-forge::transforms3d -c robostack-staging -c conda-forge -c uni-lab -y
|
|
||||||
|
|
||||||
- name: Install pip dependencies and unilabos
|
|
||||||
run: |
|
|
||||||
call conda activate check-env
|
|
||||||
echo Installing pip dependencies...
|
|
||||||
uv pip install -r unilabos/utils/requirements.txt
|
|
||||||
uv pip install pywinauto git+https://github.com/Xuwznln/pylabrobot.git
|
|
||||||
uv pip uninstall enum34 || echo enum34 not installed, skipping
|
|
||||||
uv pip install .
|
|
||||||
|
|
||||||
- name: Run check mode (complete_registry)
|
|
||||||
run: |
|
|
||||||
call conda activate check-env
|
|
||||||
echo Running check mode...
|
|
||||||
python -m unilabos --check_mode --skip_env_check
|
|
||||||
|
|
||||||
- name: Check for uncommitted changes
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
if ! git diff --exit-code; then
|
|
||||||
echo "::error::检测到文件变化!请先在本地运行 'python -m unilabos --complete_registry' 并提交变更"
|
|
||||||
echo "变化的文件:"
|
|
||||||
git diff --name-only
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
echo "检查通过:无文件变化"
|
|
||||||
45
.github/workflows/conda-pack-build.yml
vendored
45
.github/workflows/conda-pack-build.yml
vendored
@@ -13,11 +13,6 @@ on:
|
|||||||
required: false
|
required: false
|
||||||
default: 'win-64'
|
default: 'win-64'
|
||||||
type: string
|
type: string
|
||||||
build_full:
|
|
||||||
description: '是否构建完整版 unilabos-full (默认构建轻量版 unilabos)'
|
|
||||||
required: false
|
|
||||||
default: false
|
|
||||||
type: boolean
|
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
build-conda-pack:
|
build-conda-pack:
|
||||||
@@ -29,7 +24,7 @@ jobs:
|
|||||||
platform: linux-64
|
platform: linux-64
|
||||||
env_file: unilabos-linux-64.yaml
|
env_file: unilabos-linux-64.yaml
|
||||||
script_ext: sh
|
script_ext: sh
|
||||||
- os: macos-15 # Intel (via Rosetta)
|
- os: macos-13 # Intel
|
||||||
platform: osx-64
|
platform: osx-64
|
||||||
env_file: unilabos-osx-64.yaml
|
env_file: unilabos-osx-64.yaml
|
||||||
script_ext: sh
|
script_ext: sh
|
||||||
@@ -62,7 +57,7 @@ jobs:
|
|||||||
echo "should_build=false" >> $GITHUB_OUTPUT
|
echo "should_build=false" >> $GITHUB_OUTPUT
|
||||||
fi
|
fi
|
||||||
|
|
||||||
- uses: actions/checkout@v6
|
- uses: actions/checkout@v4
|
||||||
if: steps.should_build.outputs.should_build == 'true'
|
if: steps.should_build.outputs.should_build == 'true'
|
||||||
with:
|
with:
|
||||||
ref: ${{ github.event.inputs.branch }}
|
ref: ${{ github.event.inputs.branch }}
|
||||||
@@ -74,7 +69,7 @@ jobs:
|
|||||||
with:
|
with:
|
||||||
miniforge-version: latest
|
miniforge-version: latest
|
||||||
use-mamba: true
|
use-mamba: true
|
||||||
python-version: '3.11.14'
|
python-version: '3.11.11'
|
||||||
channels: conda-forge,robostack-staging,uni-lab,defaults
|
channels: conda-forge,robostack-staging,uni-lab,defaults
|
||||||
channel-priority: flexible
|
channel-priority: flexible
|
||||||
activate-environment: unilab
|
activate-environment: unilab
|
||||||
@@ -86,14 +81,7 @@ jobs:
|
|||||||
run: |
|
run: |
|
||||||
echo Installing unilabos and dependencies to unilab environment...
|
echo Installing unilabos and dependencies to unilab environment...
|
||||||
echo Using mamba for faster and more reliable dependency resolution...
|
echo Using mamba for faster and more reliable dependency resolution...
|
||||||
echo Build full: ${{ github.event.inputs.build_full }}
|
mamba install -n unilab uni-lab::unilabos conda-pack -c uni-lab -c robostack-staging -c conda-forge -y
|
||||||
if "${{ github.event.inputs.build_full }}"=="true" (
|
|
||||||
echo Installing unilabos-full ^(complete package^)...
|
|
||||||
mamba install -n unilab uni-lab::unilabos-full conda-pack -c uni-lab -c robostack-staging -c conda-forge -y
|
|
||||||
) else (
|
|
||||||
echo Installing unilabos ^(minimal package^)...
|
|
||||||
mamba install -n unilab uni-lab::unilabos conda-pack -c uni-lab -c robostack-staging -c conda-forge -y
|
|
||||||
)
|
|
||||||
|
|
||||||
- name: Install conda-pack, unilabos and dependencies (Unix)
|
- name: Install conda-pack, unilabos and dependencies (Unix)
|
||||||
if: steps.should_build.outputs.should_build == 'true' && matrix.platform != 'win-64'
|
if: steps.should_build.outputs.should_build == 'true' && matrix.platform != 'win-64'
|
||||||
@@ -101,14 +89,7 @@ jobs:
|
|||||||
run: |
|
run: |
|
||||||
echo "Installing unilabos and dependencies to unilab environment..."
|
echo "Installing unilabos and dependencies to unilab environment..."
|
||||||
echo "Using mamba for faster and more reliable dependency resolution..."
|
echo "Using mamba for faster and more reliable dependency resolution..."
|
||||||
echo "Build full: ${{ github.event.inputs.build_full }}"
|
mamba install -n unilab uni-lab::unilabos conda-pack -c uni-lab -c robostack-staging -c conda-forge -y
|
||||||
if [[ "${{ github.event.inputs.build_full }}" == "true" ]]; then
|
|
||||||
echo "Installing unilabos-full (complete package)..."
|
|
||||||
mamba install -n unilab uni-lab::unilabos-full conda-pack -c uni-lab -c robostack-staging -c conda-forge -y
|
|
||||||
else
|
|
||||||
echo "Installing unilabos (minimal package)..."
|
|
||||||
mamba install -n unilab uni-lab::unilabos conda-pack -c uni-lab -c robostack-staging -c conda-forge -y
|
|
||||||
fi
|
|
||||||
|
|
||||||
- name: Get latest ros-humble-unilabos-msgs version (Windows)
|
- name: Get latest ros-humble-unilabos-msgs version (Windows)
|
||||||
if: steps.should_build.outputs.should_build == 'true' && matrix.platform == 'win-64'
|
if: steps.should_build.outputs.should_build == 'true' && matrix.platform == 'win-64'
|
||||||
@@ -312,7 +293,7 @@ jobs:
|
|||||||
|
|
||||||
- name: Upload distribution package
|
- name: Upload distribution package
|
||||||
if: steps.should_build.outputs.should_build == 'true'
|
if: steps.should_build.outputs.should_build == 'true'
|
||||||
uses: actions/upload-artifact@v6
|
uses: actions/upload-artifact@v4
|
||||||
with:
|
with:
|
||||||
name: unilab-pack-${{ matrix.platform }}-${{ github.event.inputs.branch }}
|
name: unilab-pack-${{ matrix.platform }}-${{ github.event.inputs.branch }}
|
||||||
path: dist-package/
|
path: dist-package/
|
||||||
@@ -327,12 +308,7 @@ jobs:
|
|||||||
echo ==========================================
|
echo ==========================================
|
||||||
echo Platform: ${{ matrix.platform }}
|
echo Platform: ${{ matrix.platform }}
|
||||||
echo Branch: ${{ github.event.inputs.branch }}
|
echo Branch: ${{ github.event.inputs.branch }}
|
||||||
echo Python version: 3.11.14
|
echo Python version: 3.11.11
|
||||||
if "${{ github.event.inputs.build_full }}"=="true" (
|
|
||||||
echo Package: unilabos-full ^(complete^)
|
|
||||||
) else (
|
|
||||||
echo Package: unilabos ^(minimal^)
|
|
||||||
)
|
|
||||||
echo.
|
echo.
|
||||||
echo Distribution package contents:
|
echo Distribution package contents:
|
||||||
dir dist-package
|
dir dist-package
|
||||||
@@ -352,12 +328,7 @@ jobs:
|
|||||||
echo "=========================================="
|
echo "=========================================="
|
||||||
echo "Platform: ${{ matrix.platform }}"
|
echo "Platform: ${{ matrix.platform }}"
|
||||||
echo "Branch: ${{ github.event.inputs.branch }}"
|
echo "Branch: ${{ github.event.inputs.branch }}"
|
||||||
echo "Python version: 3.11.14"
|
echo "Python version: 3.11.11"
|
||||||
if [[ "${{ github.event.inputs.build_full }}" == "true" ]]; then
|
|
||||||
echo "Package: unilabos-full (complete)"
|
|
||||||
else
|
|
||||||
echo "Package: unilabos (minimal)"
|
|
||||||
fi
|
|
||||||
echo ""
|
echo ""
|
||||||
echo "Distribution package contents:"
|
echo "Distribution package contents:"
|
||||||
ls -lh dist-package/
|
ls -lh dist-package/
|
||||||
|
|||||||
37
.github/workflows/deploy-docs.yml
vendored
37
.github/workflows/deploy-docs.yml
vendored
@@ -1,12 +1,10 @@
|
|||||||
name: Deploy Docs
|
name: Deploy Docs
|
||||||
|
|
||||||
on:
|
on:
|
||||||
# 在 CI Check 成功后自动触发(仅 main 分支)
|
push:
|
||||||
workflow_run:
|
branches: [main]
|
||||||
workflows: ["CI Check"]
|
pull_request:
|
||||||
types: [completed]
|
|
||||||
branches: [main]
|
branches: [main]
|
||||||
# 手动触发
|
|
||||||
workflow_dispatch:
|
workflow_dispatch:
|
||||||
inputs:
|
inputs:
|
||||||
branch:
|
branch:
|
||||||
@@ -35,19 +33,12 @@ concurrency:
|
|||||||
jobs:
|
jobs:
|
||||||
# Build documentation
|
# Build documentation
|
||||||
build:
|
build:
|
||||||
# 只在以下情况运行:
|
|
||||||
# 1. workflow_run 触发且 CI Check 成功
|
|
||||||
# 2. 手动触发
|
|
||||||
if: |
|
|
||||||
github.event_name == 'workflow_dispatch' ||
|
|
||||||
(github.event_name == 'workflow_run' && github.event.workflow_run.conclusion == 'success')
|
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout code
|
- name: Checkout code
|
||||||
uses: actions/checkout@v6
|
uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
# workflow_run 时使用触发工作流的分支,手动触发时使用输入的分支
|
ref: ${{ github.event.inputs.branch || github.ref }}
|
||||||
ref: ${{ github.event.workflow_run.head_branch || github.event.inputs.branch || github.ref }}
|
|
||||||
fetch-depth: 0
|
fetch-depth: 0
|
||||||
|
|
||||||
- name: Setup Miniforge (with mamba)
|
- name: Setup Miniforge (with mamba)
|
||||||
@@ -55,7 +46,7 @@ jobs:
|
|||||||
with:
|
with:
|
||||||
miniforge-version: latest
|
miniforge-version: latest
|
||||||
use-mamba: true
|
use-mamba: true
|
||||||
python-version: '3.11.14'
|
python-version: '3.11.11'
|
||||||
channels: conda-forge,robostack-staging,uni-lab,defaults
|
channels: conda-forge,robostack-staging,uni-lab,defaults
|
||||||
channel-priority: flexible
|
channel-priority: flexible
|
||||||
activate-environment: unilab
|
activate-environment: unilab
|
||||||
@@ -84,10 +75,8 @@ jobs:
|
|||||||
|
|
||||||
- name: Setup Pages
|
- name: Setup Pages
|
||||||
id: pages
|
id: pages
|
||||||
uses: actions/configure-pages@v5
|
uses: actions/configure-pages@v4
|
||||||
if: |
|
if: github.ref == 'refs/heads/main' || (github.event_name == 'workflow_dispatch' && github.event.inputs.deploy_to_pages == 'true')
|
||||||
github.event.workflow_run.head_branch == 'main' ||
|
|
||||||
(github.event_name == 'workflow_dispatch' && github.event.inputs.deploy_to_pages == 'true')
|
|
||||||
|
|
||||||
- name: Build Sphinx documentation
|
- name: Build Sphinx documentation
|
||||||
run: |
|
run: |
|
||||||
@@ -105,18 +94,14 @@ jobs:
|
|||||||
test -f docs/_build/html/index.html && echo "✓ index.html exists" || echo "✗ index.html missing"
|
test -f docs/_build/html/index.html && echo "✓ index.html exists" || echo "✗ index.html missing"
|
||||||
|
|
||||||
- name: Upload build artifacts
|
- name: Upload build artifacts
|
||||||
uses: actions/upload-pages-artifact@v4
|
uses: actions/upload-pages-artifact@v3
|
||||||
if: |
|
if: github.ref == 'refs/heads/main' || (github.event_name == 'workflow_dispatch' && github.event.inputs.deploy_to_pages == 'true')
|
||||||
github.event.workflow_run.head_branch == 'main' ||
|
|
||||||
(github.event_name == 'workflow_dispatch' && github.event.inputs.deploy_to_pages == 'true')
|
|
||||||
with:
|
with:
|
||||||
path: docs/_build/html
|
path: docs/_build/html
|
||||||
|
|
||||||
# Deploy to GitHub Pages
|
# Deploy to GitHub Pages
|
||||||
deploy:
|
deploy:
|
||||||
if: |
|
if: github.ref == 'refs/heads/main' || (github.event_name == 'workflow_dispatch' && github.event.inputs.deploy_to_pages == 'true')
|
||||||
github.event.workflow_run.head_branch == 'main' ||
|
|
||||||
(github.event_name == 'workflow_dispatch' && github.event.inputs.deploy_to_pages == 'true')
|
|
||||||
environment:
|
environment:
|
||||||
name: github-pages
|
name: github-pages
|
||||||
url: ${{ steps.deployment.outputs.page_url }}
|
url: ${{ steps.deployment.outputs.page_url }}
|
||||||
|
|||||||
48
.github/workflows/multi-platform-build.yml
vendored
48
.github/workflows/multi-platform-build.yml
vendored
@@ -1,16 +1,11 @@
|
|||||||
name: Multi-Platform Conda Build
|
name: Multi-Platform Conda Build
|
||||||
|
|
||||||
on:
|
on:
|
||||||
# 在 CI Check 工作流完成后触发(仅限 main/dev 分支)
|
|
||||||
workflow_run:
|
|
||||||
workflows: ["CI Check"]
|
|
||||||
types:
|
|
||||||
- completed
|
|
||||||
branches: [main, dev]
|
|
||||||
# 支持 tag 推送(不依赖 CI Check)
|
|
||||||
push:
|
push:
|
||||||
|
branches: [main, dev]
|
||||||
tags: ['v*']
|
tags: ['v*']
|
||||||
# 手动触发
|
pull_request:
|
||||||
|
branches: [main, dev]
|
||||||
workflow_dispatch:
|
workflow_dispatch:
|
||||||
inputs:
|
inputs:
|
||||||
platforms:
|
platforms:
|
||||||
@@ -22,37 +17,9 @@ on:
|
|||||||
required: false
|
required: false
|
||||||
default: false
|
default: false
|
||||||
type: boolean
|
type: boolean
|
||||||
skip_ci_check:
|
|
||||||
description: '跳过等待 CI Check (手动触发时可选)'
|
|
||||||
required: false
|
|
||||||
default: false
|
|
||||||
type: boolean
|
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
# 等待 CI Check 完成的 job (仅用于 workflow_run 触发)
|
|
||||||
wait-for-ci:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
if: github.event_name == 'workflow_run'
|
|
||||||
outputs:
|
|
||||||
should_continue: ${{ steps.check.outputs.should_continue }}
|
|
||||||
steps:
|
|
||||||
- name: Check CI status
|
|
||||||
id: check
|
|
||||||
run: |
|
|
||||||
if [[ "${{ github.event.workflow_run.conclusion }}" == "success" ]]; then
|
|
||||||
echo "should_continue=true" >> $GITHUB_OUTPUT
|
|
||||||
echo "CI Check passed, proceeding with build"
|
|
||||||
else
|
|
||||||
echo "should_continue=false" >> $GITHUB_OUTPUT
|
|
||||||
echo "CI Check did not succeed (status: ${{ github.event.workflow_run.conclusion }}), skipping build"
|
|
||||||
fi
|
|
||||||
|
|
||||||
build:
|
build:
|
||||||
needs: [wait-for-ci]
|
|
||||||
# 运行条件:workflow_run 触发且 CI 成功,或者其他触发方式
|
|
||||||
if: |
|
|
||||||
always() &&
|
|
||||||
(needs.wait-for-ci.result == 'skipped' || needs.wait-for-ci.outputs.should_continue == 'true')
|
|
||||||
strategy:
|
strategy:
|
||||||
fail-fast: false
|
fail-fast: false
|
||||||
matrix:
|
matrix:
|
||||||
@@ -60,7 +27,7 @@ jobs:
|
|||||||
- os: ubuntu-latest
|
- os: ubuntu-latest
|
||||||
platform: linux-64
|
platform: linux-64
|
||||||
env_file: unilabos-linux-64.yaml
|
env_file: unilabos-linux-64.yaml
|
||||||
- os: macos-15 # Intel (via Rosetta)
|
- os: macos-13 # Intel
|
||||||
platform: osx-64
|
platform: osx-64
|
||||||
env_file: unilabos-osx-64.yaml
|
env_file: unilabos-osx-64.yaml
|
||||||
- os: macos-latest # ARM64
|
- os: macos-latest # ARM64
|
||||||
@@ -77,10 +44,8 @@ jobs:
|
|||||||
shell: bash -l {0}
|
shell: bash -l {0}
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v6
|
- uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
# 如果是 workflow_run 触发,使用触发 CI Check 的 commit
|
|
||||||
ref: ${{ github.event.workflow_run.head_sha || github.ref }}
|
|
||||||
fetch-depth: 0
|
fetch-depth: 0
|
||||||
|
|
||||||
- name: Check if platform should be built
|
- name: Check if platform should be built
|
||||||
@@ -104,6 +69,7 @@ jobs:
|
|||||||
channels: conda-forge,robostack-staging,defaults
|
channels: conda-forge,robostack-staging,defaults
|
||||||
channel-priority: strict
|
channel-priority: strict
|
||||||
activate-environment: build-env
|
activate-environment: build-env
|
||||||
|
auto-activate-base: false
|
||||||
auto-update-conda: false
|
auto-update-conda: false
|
||||||
show-channel-urls: true
|
show-channel-urls: true
|
||||||
|
|
||||||
@@ -149,7 +115,7 @@ jobs:
|
|||||||
|
|
||||||
- name: Upload conda package artifacts
|
- name: Upload conda package artifacts
|
||||||
if: steps.should_build.outputs.should_build == 'true'
|
if: steps.should_build.outputs.should_build == 'true'
|
||||||
uses: actions/upload-artifact@v6
|
uses: actions/upload-artifact@v4
|
||||||
with:
|
with:
|
||||||
name: conda-package-${{ matrix.platform }}
|
name: conda-package-${{ matrix.platform }}
|
||||||
path: conda-packages-temp
|
path: conda-packages-temp
|
||||||
|
|||||||
115
.github/workflows/unilabos-conda-build.yml
vendored
115
.github/workflows/unilabos-conda-build.yml
vendored
@@ -1,69 +1,32 @@
|
|||||||
name: UniLabOS Conda Build
|
name: UniLabOS Conda Build
|
||||||
|
|
||||||
on:
|
on:
|
||||||
# 在 CI Check 成功后自动触发
|
|
||||||
workflow_run:
|
|
||||||
workflows: ["CI Check"]
|
|
||||||
types: [completed]
|
|
||||||
branches: [main, dev]
|
|
||||||
# 标签推送时直接触发(发布版本)
|
|
||||||
push:
|
push:
|
||||||
|
branches: [main, dev]
|
||||||
tags: ['v*']
|
tags: ['v*']
|
||||||
# 手动触发
|
pull_request:
|
||||||
|
branches: [main, dev]
|
||||||
workflow_dispatch:
|
workflow_dispatch:
|
||||||
inputs:
|
inputs:
|
||||||
platforms:
|
platforms:
|
||||||
description: '选择构建平台 (逗号分隔): linux-64, osx-64, osx-arm64, win-64'
|
description: '选择构建平台 (逗号分隔): linux-64, osx-64, osx-arm64, win-64'
|
||||||
required: false
|
required: false
|
||||||
default: 'linux-64'
|
default: 'linux-64'
|
||||||
build_full:
|
|
||||||
description: '是否构建 unilabos-full 完整包 (默认只构建 unilabos 基础包)'
|
|
||||||
required: false
|
|
||||||
default: false
|
|
||||||
type: boolean
|
|
||||||
upload_to_anaconda:
|
upload_to_anaconda:
|
||||||
description: '是否上传到Anaconda.org'
|
description: '是否上传到Anaconda.org'
|
||||||
required: false
|
required: false
|
||||||
default: false
|
default: false
|
||||||
type: boolean
|
type: boolean
|
||||||
skip_ci_check:
|
|
||||||
description: '跳过等待 CI Check (手动触发时可选)'
|
|
||||||
required: false
|
|
||||||
default: false
|
|
||||||
type: boolean
|
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
# 等待 CI Check 完成的 job (仅用于 workflow_run 触发)
|
|
||||||
wait-for-ci:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
if: github.event_name == 'workflow_run'
|
|
||||||
outputs:
|
|
||||||
should_continue: ${{ steps.check.outputs.should_continue }}
|
|
||||||
steps:
|
|
||||||
- name: Check CI status
|
|
||||||
id: check
|
|
||||||
run: |
|
|
||||||
if [[ "${{ github.event.workflow_run.conclusion }}" == "success" ]]; then
|
|
||||||
echo "should_continue=true" >> $GITHUB_OUTPUT
|
|
||||||
echo "CI Check passed, proceeding with build"
|
|
||||||
else
|
|
||||||
echo "should_continue=false" >> $GITHUB_OUTPUT
|
|
||||||
echo "CI Check did not succeed (status: ${{ github.event.workflow_run.conclusion }}), skipping build"
|
|
||||||
fi
|
|
||||||
|
|
||||||
build:
|
build:
|
||||||
needs: [wait-for-ci]
|
|
||||||
# 运行条件:workflow_run 触发且 CI 成功,或者其他触发方式
|
|
||||||
if: |
|
|
||||||
always() &&
|
|
||||||
(needs.wait-for-ci.result == 'skipped' || needs.wait-for-ci.outputs.should_continue == 'true')
|
|
||||||
strategy:
|
strategy:
|
||||||
fail-fast: false
|
fail-fast: false
|
||||||
matrix:
|
matrix:
|
||||||
include:
|
include:
|
||||||
- os: ubuntu-latest
|
- os: ubuntu-latest
|
||||||
platform: linux-64
|
platform: linux-64
|
||||||
- os: macos-15 # Intel (via Rosetta)
|
- os: macos-13 # Intel
|
||||||
platform: osx-64
|
platform: osx-64
|
||||||
- os: macos-latest # ARM64
|
- os: macos-latest # ARM64
|
||||||
platform: osx-arm64
|
platform: osx-arm64
|
||||||
@@ -77,10 +40,8 @@ jobs:
|
|||||||
shell: bash -l {0}
|
shell: bash -l {0}
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v6
|
- uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
# 如果是 workflow_run 触发,使用触发 CI Check 的 commit
|
|
||||||
ref: ${{ github.event.workflow_run.head_sha || github.ref }}
|
|
||||||
fetch-depth: 0
|
fetch-depth: 0
|
||||||
|
|
||||||
- name: Check if platform should be built
|
- name: Check if platform should be built
|
||||||
@@ -104,6 +65,7 @@ jobs:
|
|||||||
channels: conda-forge,robostack-staging,uni-lab,defaults
|
channels: conda-forge,robostack-staging,uni-lab,defaults
|
||||||
channel-priority: strict
|
channel-priority: strict
|
||||||
activate-environment: build-env
|
activate-environment: build-env
|
||||||
|
auto-activate-base: false
|
||||||
auto-update-conda: false
|
auto-update-conda: false
|
||||||
show-channel-urls: true
|
show-channel-urls: true
|
||||||
|
|
||||||
@@ -119,61 +81,12 @@ jobs:
|
|||||||
conda list | grep -E "(rattler-build|anaconda-client)"
|
conda list | grep -E "(rattler-build|anaconda-client)"
|
||||||
echo "Platform: ${{ matrix.platform }}"
|
echo "Platform: ${{ matrix.platform }}"
|
||||||
echo "OS: ${{ matrix.os }}"
|
echo "OS: ${{ matrix.os }}"
|
||||||
echo "Build full package: ${{ github.event.inputs.build_full || 'false' }}"
|
echo "Building UniLabOS package"
|
||||||
echo "Building packages:"
|
|
||||||
echo " - unilabos-env (environment dependencies)"
|
|
||||||
echo " - unilabos (with pip package)"
|
|
||||||
if [[ "${{ github.event.inputs.build_full }}" == "true" ]]; then
|
|
||||||
echo " - unilabos-full (complete package)"
|
|
||||||
fi
|
|
||||||
|
|
||||||
- name: Build unilabos-env (conda environment only, noarch)
|
- name: Build conda package
|
||||||
if: steps.should_build.outputs.should_build == 'true'
|
if: steps.should_build.outputs.should_build == 'true'
|
||||||
run: |
|
run: |
|
||||||
echo "Building unilabos-env (conda environment dependencies)..."
|
rattler-build build -r .conda/recipe.yaml -c uni-lab -c robostack-staging -c conda-forge
|
||||||
rattler-build build -r .conda/environment/recipe.yaml -c uni-lab -c robostack-staging -c conda-forge
|
|
||||||
|
|
||||||
- name: Upload unilabos-env to Anaconda.org (if enabled)
|
|
||||||
if: steps.should_build.outputs.should_build == 'true' && github.event.inputs.upload_to_anaconda == 'true'
|
|
||||||
run: |
|
|
||||||
echo "Uploading unilabos-env to uni-lab organization..."
|
|
||||||
for package in $(find ./output -name "unilabos-env*.conda"); do
|
|
||||||
anaconda -t ${{ secrets.ANACONDA_API_TOKEN }} upload --user uni-lab --force "$package"
|
|
||||||
done
|
|
||||||
|
|
||||||
- name: Build unilabos (with pip package)
|
|
||||||
if: steps.should_build.outputs.should_build == 'true'
|
|
||||||
run: |
|
|
||||||
echo "Building unilabos package..."
|
|
||||||
# 如果已上传到 Anaconda,从 uni-lab channel 获取 unilabos-env;否则从本地 output 获取
|
|
||||||
rattler-build build -r .conda/base/recipe.yaml -c uni-lab -c robostack-staging -c conda-forge --channel ./output
|
|
||||||
|
|
||||||
- name: Upload unilabos to Anaconda.org (if enabled)
|
|
||||||
if: steps.should_build.outputs.should_build == 'true' && github.event.inputs.upload_to_anaconda == 'true'
|
|
||||||
run: |
|
|
||||||
echo "Uploading unilabos to uni-lab organization..."
|
|
||||||
for package in $(find ./output -name "unilabos-0*.conda" -o -name "unilabos-[0-9]*.conda"); do
|
|
||||||
anaconda -t ${{ secrets.ANACONDA_API_TOKEN }} upload --user uni-lab --force "$package"
|
|
||||||
done
|
|
||||||
|
|
||||||
- name: Build unilabos-full - Only when explicitly requested
|
|
||||||
if: |
|
|
||||||
steps.should_build.outputs.should_build == 'true' &&
|
|
||||||
github.event.inputs.build_full == 'true'
|
|
||||||
run: |
|
|
||||||
echo "Building unilabos-full package on ${{ matrix.platform }}..."
|
|
||||||
rattler-build build -r .conda/full/recipe.yaml -c uni-lab -c robostack-staging -c conda-forge --channel ./output
|
|
||||||
|
|
||||||
- name: Upload unilabos-full to Anaconda.org (if enabled)
|
|
||||||
if: |
|
|
||||||
steps.should_build.outputs.should_build == 'true' &&
|
|
||||||
github.event.inputs.build_full == 'true' &&
|
|
||||||
github.event.inputs.upload_to_anaconda == 'true'
|
|
||||||
run: |
|
|
||||||
echo "Uploading unilabos-full to uni-lab organization..."
|
|
||||||
for package in $(find ./output -name "unilabos-full*.conda"); do
|
|
||||||
anaconda -t ${{ secrets.ANACONDA_API_TOKEN }} upload --user uni-lab --force "$package"
|
|
||||||
done
|
|
||||||
|
|
||||||
- name: List built packages
|
- name: List built packages
|
||||||
if: steps.should_build.outputs.should_build == 'true'
|
if: steps.should_build.outputs.should_build == 'true'
|
||||||
@@ -195,9 +108,17 @@ jobs:
|
|||||||
|
|
||||||
- name: Upload conda package artifacts
|
- name: Upload conda package artifacts
|
||||||
if: steps.should_build.outputs.should_build == 'true'
|
if: steps.should_build.outputs.should_build == 'true'
|
||||||
uses: actions/upload-artifact@v6
|
uses: actions/upload-artifact@v4
|
||||||
with:
|
with:
|
||||||
name: conda-package-unilabos-${{ matrix.platform }}
|
name: conda-package-unilabos-${{ matrix.platform }}
|
||||||
path: conda-packages-temp
|
path: conda-packages-temp
|
||||||
if-no-files-found: warn
|
if-no-files-found: warn
|
||||||
retention-days: 30
|
retention-days: 30
|
||||||
|
|
||||||
|
- name: Upload to Anaconda.org (uni-lab organization)
|
||||||
|
if: github.event.inputs.upload_to_anaconda == 'true'
|
||||||
|
run: |
|
||||||
|
for package in $(find ./output -name "*.conda"); do
|
||||||
|
echo "Uploading $package to uni-lab organization..."
|
||||||
|
anaconda -t ${{ secrets.ANACONDA_API_TOKEN }} upload --user uni-lab --force "$package"
|
||||||
|
done
|
||||||
|
|||||||
@@ -1,5 +1,4 @@
|
|||||||
recursive-include unilabos/test *
|
recursive-include unilabos/test *
|
||||||
recursive-include unilabos/utils *
|
|
||||||
recursive-include unilabos/registry *.yaml
|
recursive-include unilabos/registry *.yaml
|
||||||
recursive-include unilabos/app/web/static *
|
recursive-include unilabos/app/web/static *
|
||||||
recursive-include unilabos/app/web/templates *
|
recursive-include unilabos/app/web/templates *
|
||||||
|
|||||||
46
README.md
46
README.md
@@ -31,63 +31,37 @@ Detailed documentation can be found at:
|
|||||||
|
|
||||||
## Quick Start
|
## Quick Start
|
||||||
|
|
||||||
### 1. Setup Conda Environment
|
Uni-Lab-OS recommends using `mamba` for environment management. Choose the appropriate environment file for your operating system:
|
||||||
|
|
||||||
Uni-Lab-OS recommends using `mamba` for environment management. Choose the package that fits your needs:
|
|
||||||
|
|
||||||
| Package | Use Case | Contents |
|
|
||||||
|---------|----------|----------|
|
|
||||||
| `unilabos` | **Recommended for most users** | Complete package, ready to use |
|
|
||||||
| `unilabos-env` | Developers (editable install) | Environment only, install unilabos via pip |
|
|
||||||
| `unilabos-full` | Simulation/Visualization | unilabos + ROS2 Desktop + Gazebo + MoveIt |
|
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
# Create new environment
|
# Create new environment
|
||||||
mamba create -n unilab python=3.11.14
|
mamba create -n unilab python=3.11.11
|
||||||
mamba activate unilab
|
mamba activate unilab
|
||||||
|
mamba install -n unilab uni-lab::unilabos -c robostack-staging -c conda-forge
|
||||||
# Option A: Standard installation (recommended for most users)
|
|
||||||
mamba install uni-lab::unilabos -c robostack-staging -c conda-forge
|
|
||||||
|
|
||||||
# Option B: For developers (editable mode development)
|
|
||||||
mamba install uni-lab::unilabos-env -c robostack-staging -c conda-forge
|
|
||||||
# Then install unilabos and dependencies:
|
|
||||||
git clone https://github.com/deepmodeling/Uni-Lab-OS.git && cd Uni-Lab-OS
|
|
||||||
pip install -e .
|
|
||||||
uv pip install -r unilabos/utils/requirements.txt
|
|
||||||
|
|
||||||
# Option C: Full installation (simulation/visualization)
|
|
||||||
mamba install uni-lab::unilabos-full -c robostack-staging -c conda-forge
|
|
||||||
```
|
```
|
||||||
|
|
||||||
**When to use which?**
|
## Install Dev Uni-Lab-OS
|
||||||
- **unilabos**: Standard installation for production deployment and general usage (recommended)
|
|
||||||
- **unilabos-env**: For developers who need `pip install -e .` editable mode, modify source code
|
|
||||||
- **unilabos-full**: For simulation (Gazebo), visualization (rviz2), and Jupyter notebooks
|
|
||||||
|
|
||||||
### 2. Clone Repository (Optional, for developers)
|
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
# Clone the repository (only needed for development or examples)
|
# Clone the repository
|
||||||
git clone https://github.com/deepmodeling/Uni-Lab-OS.git
|
git clone https://github.com/deepmodeling/Uni-Lab-OS.git
|
||||||
cd Uni-Lab-OS
|
cd Uni-Lab-OS
|
||||||
|
|
||||||
|
# Install Uni-Lab-OS
|
||||||
|
pip install .
|
||||||
```
|
```
|
||||||
|
|
||||||
3. Start Uni-Lab System
|
3. Start Uni-Lab System:
|
||||||
|
|
||||||
Please refer to [Documentation - Boot Examples](https://deepmodeling.github.io/Uni-Lab-OS/boot_examples/index.html)
|
Please refer to [Documentation - Boot Examples](https://deepmodeling.github.io/Uni-Lab-OS/boot_examples/index.html)
|
||||||
|
|
||||||
4. Best Practice
|
|
||||||
|
|
||||||
See [Best Practice Guide](https://deepmodeling.github.io/Uni-Lab-OS/user_guide/best_practice.html)
|
|
||||||
|
|
||||||
## Message Format
|
## Message Format
|
||||||
|
|
||||||
Uni-Lab-OS uses pre-built `unilabos_msgs` for system communication. You can find the built versions on the [GitHub Releases](https://github.com/deepmodeling/Uni-Lab-OS/releases) page.
|
Uni-Lab-OS uses pre-built `unilabos_msgs` for system communication. You can find the built versions on the [GitHub Releases](https://github.com/deepmodeling/Uni-Lab-OS/releases) page.
|
||||||
|
|
||||||
## Citation
|
## Citation
|
||||||
|
|
||||||
If you use [Uni-Lab-OS](https://arxiv.org/abs/2512.21766) in academic research, please cite:
|
If you use Uni-Lab-OS in academic research, please cite:
|
||||||
|
|
||||||
```bibtex
|
```bibtex
|
||||||
@article{gao2025unilabos,
|
@article{gao2025unilabos,
|
||||||
|
|||||||
46
README_zh.md
46
README_zh.md
@@ -31,63 +31,39 @@ Uni-Lab-OS 是一个用于实验室自动化的综合平台,旨在连接和控
|
|||||||
|
|
||||||
## 快速开始
|
## 快速开始
|
||||||
|
|
||||||
### 1. 配置 Conda 环境
|
1. 配置 Conda 环境
|
||||||
|
|
||||||
Uni-Lab-OS 建议使用 `mamba` 管理环境。根据您的需求选择合适的安装包:
|
Uni-Lab-OS 建议使用 `mamba` 管理环境。根据您的操作系统选择适当的环境文件:
|
||||||
|
|
||||||
| 安装包 | 适用场景 | 包含内容 |
|
|
||||||
|--------|----------|----------|
|
|
||||||
| `unilabos` | **推荐大多数用户** | 完整安装包,开箱即用 |
|
|
||||||
| `unilabos-env` | 开发者(可编辑安装) | 仅环境依赖,通过 pip 安装 unilabos |
|
|
||||||
| `unilabos-full` | 仿真/可视化 | unilabos + ROS2 桌面版 + Gazebo + MoveIt |
|
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
# 创建新环境
|
# 创建新环境
|
||||||
mamba create -n unilab python=3.11.14
|
mamba create -n unilab python=3.11.11
|
||||||
mamba activate unilab
|
mamba activate unilab
|
||||||
|
mamba install -n unilab uni-lab::unilabos -c robostack-staging -c conda-forge
|
||||||
# 方案 A:标准安装(推荐大多数用户)
|
|
||||||
mamba install uni-lab::unilabos -c robostack-staging -c conda-forge
|
|
||||||
|
|
||||||
# 方案 B:开发者环境(可编辑模式开发)
|
|
||||||
mamba install uni-lab::unilabos-env -c robostack-staging -c conda-forge
|
|
||||||
# 然后安装 unilabos 和依赖:
|
|
||||||
git clone https://github.com/deepmodeling/Uni-Lab-OS.git && cd Uni-Lab-OS
|
|
||||||
pip install -e .
|
|
||||||
uv pip install -r unilabos/utils/requirements.txt
|
|
||||||
|
|
||||||
# 方案 C:完整安装(仿真/可视化)
|
|
||||||
mamba install uni-lab::unilabos-full -c robostack-staging -c conda-forge
|
|
||||||
```
|
```
|
||||||
|
|
||||||
**如何选择?**
|
2. 安装开发版 Uni-Lab-OS:
|
||||||
- **unilabos**:标准安装,适用于生产部署和日常使用(推荐)
|
|
||||||
- **unilabos-env**:开发者使用,支持 `pip install -e .` 可编辑模式,可修改源代码
|
|
||||||
- **unilabos-full**:需要仿真(Gazebo)、可视化(rviz2)或 Jupyter Notebook
|
|
||||||
|
|
||||||
### 2. 克隆仓库(可选,供开发者使用)
|
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
# 克隆仓库(仅开发或查看示例时需要)
|
# 克隆仓库
|
||||||
git clone https://github.com/deepmodeling/Uni-Lab-OS.git
|
git clone https://github.com/deepmodeling/Uni-Lab-OS.git
|
||||||
cd Uni-Lab-OS
|
cd Uni-Lab-OS
|
||||||
|
|
||||||
|
# 安装 Uni-Lab-OS
|
||||||
|
pip install .
|
||||||
```
|
```
|
||||||
|
|
||||||
3. 启动 Uni-Lab 系统
|
3. 启动 Uni-Lab 系统:
|
||||||
|
|
||||||
请见[文档-启动样例](https://deepmodeling.github.io/Uni-Lab-OS/boot_examples/index.html)
|
请见[文档-启动样例](https://deepmodeling.github.io/Uni-Lab-OS/boot_examples/index.html)
|
||||||
|
|
||||||
4. 最佳实践
|
|
||||||
|
|
||||||
请见[最佳实践指南](https://deepmodeling.github.io/Uni-Lab-OS/user_guide/best_practice.html)
|
|
||||||
|
|
||||||
## 消息格式
|
## 消息格式
|
||||||
|
|
||||||
Uni-Lab-OS 使用预构建的 `unilabos_msgs` 进行系统通信。您可以在 [GitHub Releases](https://github.com/deepmodeling/Uni-Lab-OS/releases) 页面找到已构建的版本。
|
Uni-Lab-OS 使用预构建的 `unilabos_msgs` 进行系统通信。您可以在 [GitHub Releases](https://github.com/deepmodeling/Uni-Lab-OS/releases) 页面找到已构建的版本。
|
||||||
|
|
||||||
## 引用
|
## 引用
|
||||||
|
|
||||||
如果您在学术研究中使用 [Uni-Lab-OS](https://arxiv.org/abs/2512.21766),请引用:
|
如果您在学术研究中使用 Uni-Lab-OS,请引用:
|
||||||
|
|
||||||
```bibtex
|
```bibtex
|
||||||
@article{gao2025unilabos,
|
@article{gao2025unilabos,
|
||||||
|
|||||||
@@ -24,7 +24,7 @@ extensions = [
|
|||||||
"sphinx.ext.autodoc",
|
"sphinx.ext.autodoc",
|
||||||
"sphinx.ext.napoleon", # 如果您使用 Google 或 NumPy 风格的 docstrings
|
"sphinx.ext.napoleon", # 如果您使用 Google 或 NumPy 风格的 docstrings
|
||||||
"sphinx_rtd_theme",
|
"sphinx_rtd_theme",
|
||||||
"sphinxcontrib.mermaid",
|
"sphinxcontrib.mermaid"
|
||||||
]
|
]
|
||||||
|
|
||||||
source_suffix = {
|
source_suffix = {
|
||||||
@@ -58,7 +58,7 @@ html_theme = "sphinx_rtd_theme"
|
|||||||
|
|
||||||
# sphinx-book-theme 主题选项
|
# sphinx-book-theme 主题选项
|
||||||
html_theme_options = {
|
html_theme_options = {
|
||||||
"repository_url": "https://github.com/deepmodeling/Uni-Lab-OS",
|
"repository_url": "https://github.com/用户名/Uni-Lab",
|
||||||
"use_repository_button": True,
|
"use_repository_button": True,
|
||||||
"use_issues_button": True,
|
"use_issues_button": True,
|
||||||
"use_edit_page_button": True,
|
"use_edit_page_button": True,
|
||||||
|
|||||||
File diff suppressed because it is too large
Load Diff
@@ -12,7 +12,3 @@ sphinx-copybutton>=0.5.0
|
|||||||
|
|
||||||
# 用于自动摘要生成
|
# 用于自动摘要生成
|
||||||
sphinx-autobuild>=2024.2.4
|
sphinx-autobuild>=2024.2.4
|
||||||
|
|
||||||
# 用于PDF导出 (rinohtype方案,纯Python无需LaTeX)
|
|
||||||
rinohtype>=0.5.4
|
|
||||||
sphinx-simplepdf>=1.6.0
|
|
||||||
@@ -31,14 +31,6 @@
|
|||||||
|
|
||||||
详细的安装步骤请参考 [安装指南](installation.md)。
|
详细的安装步骤请参考 [安装指南](installation.md)。
|
||||||
|
|
||||||
**选择合适的安装包:**
|
|
||||||
|
|
||||||
| 安装包 | 适用场景 | 包含组件 |
|
|
||||||
|--------|----------|----------|
|
|
||||||
| `unilabos` | **推荐大多数用户**,生产部署 | 完整安装包,开箱即用 |
|
|
||||||
| `unilabos-env` | 开发者(可编辑安装) | 仅环境依赖,通过 pip 安装 unilabos |
|
|
||||||
| `unilabos-full` | 仿真/可视化 | unilabos + 完整 ROS2 桌面版 + Gazebo + MoveIt |
|
|
||||||
|
|
||||||
**关键步骤:**
|
**关键步骤:**
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
@@ -46,30 +38,15 @@
|
|||||||
# 下载 Miniforge: https://github.com/conda-forge/miniforge/releases
|
# 下载 Miniforge: https://github.com/conda-forge/miniforge/releases
|
||||||
|
|
||||||
# 2. 创建 Conda 环境
|
# 2. 创建 Conda 环境
|
||||||
mamba create -n unilab python=3.11.14
|
mamba create -n unilab python=3.11.11
|
||||||
|
|
||||||
# 3. 激活环境
|
# 3. 激活环境
|
||||||
mamba activate unilab
|
mamba activate unilab
|
||||||
|
|
||||||
# 4. 安装 Uni-Lab-OS(选择其一)
|
# 4. 安装 Uni-Lab-OS
|
||||||
|
|
||||||
# 方案 A:标准安装(推荐大多数用户)
|
|
||||||
mamba install uni-lab::unilabos -c robostack-staging -c conda-forge
|
mamba install uni-lab::unilabos -c robostack-staging -c conda-forge
|
||||||
|
|
||||||
# 方案 B:开发者环境(可编辑模式开发)
|
|
||||||
mamba install uni-lab::unilabos-env -c robostack-staging -c conda-forge
|
|
||||||
pip install -e /path/to/Uni-Lab-OS # 可编辑安装
|
|
||||||
uv pip install -r unilabos/utils/requirements.txt # 安装 pip 依赖
|
|
||||||
|
|
||||||
# 方案 C:完整版(仿真/可视化)
|
|
||||||
mamba install uni-lab::unilabos-full -c robostack-staging -c conda-forge
|
|
||||||
```
|
```
|
||||||
|
|
||||||
**选择建议:**
|
|
||||||
- **日常使用/生产部署**:使用 `unilabos`(推荐),完整功能,开箱即用
|
|
||||||
- **开发者**:使用 `unilabos-env` + `pip install -e .` + `uv pip install -r unilabos/utils/requirements.txt`,代码修改立即生效
|
|
||||||
- **仿真/可视化**:使用 `unilabos-full`,含 Gazebo、rviz2、MoveIt
|
|
||||||
|
|
||||||
#### 1.2 验证安装
|
#### 1.2 验证安装
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
@@ -791,43 +768,7 @@ Waiting for host service...
|
|||||||
|
|
||||||
详细的设备驱动编写指南请参考 [添加设备驱动](../developer_guide/add_device.md)。
|
详细的设备驱动编写指南请参考 [添加设备驱动](../developer_guide/add_device.md)。
|
||||||
|
|
||||||
#### 9.1 开发环境准备
|
#### 9.1 为什么需要自定义设备?
|
||||||
|
|
||||||
**推荐使用 `unilabos-env` + `pip install -e .` + `uv pip install`** 进行设备开发:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# 1. 创建环境并安装 unilabos-env(ROS2 + conda 依赖 + uv)
|
|
||||||
mamba create -n unilab python=3.11.14
|
|
||||||
conda activate unilab
|
|
||||||
mamba install uni-lab::unilabos-env -c robostack-staging -c conda-forge
|
|
||||||
|
|
||||||
# 2. 克隆代码
|
|
||||||
git clone https://github.com/deepmodeling/Uni-Lab-OS.git
|
|
||||||
cd Uni-Lab-OS
|
|
||||||
|
|
||||||
# 3. 以可编辑模式安装(推荐使用脚本,自动检测中文环境)
|
|
||||||
python scripts/dev_install.py
|
|
||||||
|
|
||||||
# 或手动安装:
|
|
||||||
pip install -e .
|
|
||||||
uv pip install -r unilabos/utils/requirements.txt
|
|
||||||
```
|
|
||||||
|
|
||||||
**为什么使用这种方式?**
|
|
||||||
- `unilabos-env` 提供 ROS2 核心组件和 uv(通过 conda 安装,避免编译)
|
|
||||||
- `unilabos/utils/requirements.txt` 包含所有运行时需要的 pip 依赖
|
|
||||||
- `dev_install.py` 自动检测中文环境,中文系统自动使用清华镜像
|
|
||||||
- 使用 `uv` 替代 `pip`,安装速度更快
|
|
||||||
- 可编辑模式:代码修改**立即生效**,无需重新安装
|
|
||||||
|
|
||||||
**如果安装失败或速度太慢**,可以手动执行(使用清华镜像):
|
|
||||||
|
|
||||||
```bash
|
|
||||||
pip install -e . -i https://mirrors.tuna.tsinghua.edu.cn/pypi/web/simple
|
|
||||||
uv pip install -r unilabos/utils/requirements.txt -i https://mirrors.tuna.tsinghua.edu.cn/pypi/web/simple
|
|
||||||
```
|
|
||||||
|
|
||||||
#### 9.2 为什么需要自定义设备?
|
|
||||||
|
|
||||||
Uni-Lab-OS 内置了常见设备,但您的实验室可能有特殊设备需要集成:
|
Uni-Lab-OS 内置了常见设备,但您的实验室可能有特殊设备需要集成:
|
||||||
|
|
||||||
@@ -836,7 +777,7 @@ Uni-Lab-OS 内置了常见设备,但您的实验室可能有特殊设备需要
|
|||||||
- 特殊的实验流程
|
- 特殊的实验流程
|
||||||
- 第三方设备集成
|
- 第三方设备集成
|
||||||
|
|
||||||
#### 9.3 创建 Python 包
|
#### 9.2 创建 Python 包
|
||||||
|
|
||||||
为了方便开发和管理,建议为您的实验室创建独立的 Python 包。
|
为了方便开发和管理,建议为您的实验室创建独立的 Python 包。
|
||||||
|
|
||||||
@@ -873,7 +814,7 @@ touch my_lab_devices/my_lab_devices/__init__.py
|
|||||||
touch my_lab_devices/my_lab_devices/devices/__init__.py
|
touch my_lab_devices/my_lab_devices/devices/__init__.py
|
||||||
```
|
```
|
||||||
|
|
||||||
#### 9.4 创建 setup.py
|
#### 9.3 创建 setup.py
|
||||||
|
|
||||||
```python
|
```python
|
||||||
# my_lab_devices/setup.py
|
# my_lab_devices/setup.py
|
||||||
@@ -904,7 +845,7 @@ setup(
|
|||||||
)
|
)
|
||||||
```
|
```
|
||||||
|
|
||||||
#### 9.5 开发安装
|
#### 9.4 开发安装
|
||||||
|
|
||||||
使用 `-e` 参数进行可编辑安装,这样代码修改后立即生效:
|
使用 `-e` 参数进行可编辑安装,这样代码修改后立即生效:
|
||||||
|
|
||||||
@@ -919,7 +860,7 @@ pip install -e . -i https://mirrors.tuna.tsinghua.edu.cn/pypi/web/simple
|
|||||||
- 方便调试和测试
|
- 方便调试和测试
|
||||||
- 支持版本控制(git)
|
- 支持版本控制(git)
|
||||||
|
|
||||||
#### 9.6 编写设备驱动
|
#### 9.5 编写设备驱动
|
||||||
|
|
||||||
创建设备驱动文件:
|
创建设备驱动文件:
|
||||||
|
|
||||||
@@ -1060,7 +1001,7 @@ class MyPump:
|
|||||||
- **返回 Dict**:所有动作方法返回字典类型
|
- **返回 Dict**:所有动作方法返回字典类型
|
||||||
- **文档字符串**:详细说明参数和功能
|
- **文档字符串**:详细说明参数和功能
|
||||||
|
|
||||||
#### 9.7 测试设备驱动
|
#### 9.6 测试设备驱动
|
||||||
|
|
||||||
创建简单的测试脚本:
|
创建简单的测试脚本:
|
||||||
|
|
||||||
|
|||||||
@@ -463,7 +463,7 @@ Uni-Lab 使用 `ResourceDictInstance.get_resource_instance_from_dict()` 方法
|
|||||||
### 使用示例
|
### 使用示例
|
||||||
|
|
||||||
```python
|
```python
|
||||||
from unilabos.resources.resource_tracker import ResourceDictInstance
|
from unilabos.ros.nodes.resource_tracker import ResourceDictInstance
|
||||||
|
|
||||||
# 旧格式节点
|
# 旧格式节点
|
||||||
old_format_node = {
|
old_format_node = {
|
||||||
@@ -477,10 +477,10 @@ old_format_node = {
|
|||||||
instance = ResourceDictInstance.get_resource_instance_from_dict(old_format_node)
|
instance = ResourceDictInstance.get_resource_instance_from_dict(old_format_node)
|
||||||
|
|
||||||
# 访问标准化后的数据
|
# 访问标准化后的数据
|
||||||
print(instance.res_content.id) # "pump_1"
|
print(instance.res_content.id) # "pump_1"
|
||||||
print(instance.res_content.uuid) # 自动生成的 UUID
|
print(instance.res_content.uuid) # 自动生成的 UUID
|
||||||
print(instance.res_content.config) # {}
|
print(instance.res_content.config) # {}
|
||||||
print(instance.res_content.data) # {}
|
print(instance.res_content.data) # {}
|
||||||
```
|
```
|
||||||
|
|
||||||
### 格式迁移建议
|
### 格式迁移建议
|
||||||
|
|||||||
@@ -13,26 +13,15 @@
|
|||||||
- 开发者需要 Git 和基本的 Python 开发知识
|
- 开发者需要 Git 和基本的 Python 开发知识
|
||||||
- 自定义 msgs 需要 GitHub 账号
|
- 自定义 msgs 需要 GitHub 账号
|
||||||
|
|
||||||
## 安装包选择
|
|
||||||
|
|
||||||
Uni-Lab-OS 提供三个安装包版本,根据您的需求选择:
|
|
||||||
|
|
||||||
| 安装包 | 适用场景 | 包含组件 | 磁盘占用 |
|
|
||||||
|--------|----------|----------|----------|
|
|
||||||
| **unilabos** | **推荐大多数用户**,生产部署 | 完整安装包,开箱即用 | ~2-3 GB |
|
|
||||||
| **unilabos-env** | 开发者环境(可编辑安装) | 仅环境依赖,通过 pip 安装 unilabos | ~2 GB |
|
|
||||||
| **unilabos-full** | 仿真可视化、完整功能体验 | unilabos + 完整 ROS2 桌面版 + Gazebo + MoveIt | ~8-10 GB |
|
|
||||||
|
|
||||||
## 安装方式选择
|
## 安装方式选择
|
||||||
|
|
||||||
根据您的使用场景,选择合适的安装方式:
|
根据您的使用场景,选择合适的安装方式:
|
||||||
|
|
||||||
| 安装方式 | 适用人群 | 推荐安装包 | 特点 | 安装时间 |
|
| 安装方式 | 适用人群 | 特点 | 安装时间 |
|
||||||
| ---------------------- | -------------------- | ----------------- | ------------------------------ | ---------------------------- |
|
| ---------------------- | -------------------- | ------------------------------ | ---------------------------- |
|
||||||
| **方式一:一键安装** | 快速体验、演示 | 预打包环境 | 离线可用,无需配置 | 5-10 分钟 (网络良好的情况下) |
|
| **方式一:一键安装** | 实验室用户、快速体验 | 预打包环境,离线可用,无需配置 | 5-10 分钟 (网络良好的情况下) |
|
||||||
| **方式二:手动安装** | **大多数用户** | `unilabos` | 完整功能,开箱即用 | 10-20 分钟 |
|
| **方式二:手动安装** | 标准用户、生产环境 | 灵活配置,版本可控 | 10-20 分钟 |
|
||||||
| **方式三:开发者安装** | 开发者、需要修改源码 | `unilabos-env` | 可编辑模式,支持自定义开发 | 20-30 分钟 |
|
| **方式三:开发者安装** | 开发者、需要修改源码 | 可编辑模式,支持自定义 msgs | 20-30 分钟 |
|
||||||
| **仿真/可视化** | 仿真测试、可视化调试 | `unilabos-full` | 含 Gazebo、rviz2、MoveIt | 30-60 分钟 |
|
|
||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
@@ -155,38 +144,17 @@ bash Miniforge3-$(uname)-$(uname -m).sh
|
|||||||
使用以下命令创建 Uni-Lab 专用环境:
|
使用以下命令创建 Uni-Lab 专用环境:
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
mamba create -n unilab python=3.11.14 # 目前ros2组件依赖版本大多为3.11.14
|
mamba create -n unilab python=3.11.11 # 目前ros2组件依赖版本大多为3.11.11
|
||||||
mamba activate unilab
|
mamba activate unilab
|
||||||
|
mamba install -n unilab uni-lab::unilabos -c robostack-staging -c conda-forge
|
||||||
# 选择安装包(三选一):
|
|
||||||
|
|
||||||
# 方案 A:标准安装(推荐大多数用户)
|
|
||||||
mamba install uni-lab::unilabos -c robostack-staging -c conda-forge
|
|
||||||
|
|
||||||
# 方案 B:开发者环境(可编辑模式开发)
|
|
||||||
mamba install uni-lab::unilabos-env -c robostack-staging -c conda-forge
|
|
||||||
# 然后安装 unilabos 和 pip 依赖:
|
|
||||||
git clone https://github.com/deepmodeling/Uni-Lab-OS.git && cd Uni-Lab-OS
|
|
||||||
pip install -e .
|
|
||||||
uv pip install -r unilabos/utils/requirements.txt
|
|
||||||
|
|
||||||
# 方案 C:完整版(含仿真和可视化工具)
|
|
||||||
mamba install uni-lab::unilabos-full -c robostack-staging -c conda-forge
|
|
||||||
```
|
```
|
||||||
|
|
||||||
**参数说明**:
|
**参数说明**:
|
||||||
|
|
||||||
- `-n unilab`: 创建名为 "unilab" 的环境
|
- `-n unilab`: 创建名为 "unilab" 的环境
|
||||||
- `uni-lab::unilabos`: 安装 unilabos 完整包,开箱即用(推荐)
|
- `uni-lab::unilabos`: 从 uni-lab channel 安装 unilabos 包
|
||||||
- `uni-lab::unilabos-env`: 仅安装环境依赖,适合开发者使用 `pip install -e .`
|
|
||||||
- `uni-lab::unilabos-full`: 安装完整包(含 ROS2 Desktop、Gazebo、MoveIt 等)
|
|
||||||
- `-c robostack-staging -c conda-forge`: 添加额外的软件源
|
- `-c robostack-staging -c conda-forge`: 添加额外的软件源
|
||||||
|
|
||||||
**包选择建议**:
|
|
||||||
- **日常使用/生产部署**:安装 `unilabos`(推荐,完整功能,开箱即用)
|
|
||||||
- **开发者**:安装 `unilabos-env`,然后使用 `uv pip install -r unilabos/utils/requirements.txt` 安装依赖,再 `pip install -e .` 进行可编辑安装
|
|
||||||
- **仿真/可视化**:安装 `unilabos-full`(Gazebo、rviz2、MoveIt)
|
|
||||||
|
|
||||||
**如果遇到网络问题**,可以使用清华镜像源加速下载:
|
**如果遇到网络问题**,可以使用清华镜像源加速下载:
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
@@ -195,14 +163,8 @@ mamba config --add channels https://mirrors.tuna.tsinghua.edu.cn/anaconda/pkgs/m
|
|||||||
mamba config --add channels https://mirrors.tuna.tsinghua.edu.cn/anaconda/pkgs/free/
|
mamba config --add channels https://mirrors.tuna.tsinghua.edu.cn/anaconda/pkgs/free/
|
||||||
mamba config --add channels https://mirrors.tuna.tsinghua.edu.cn/anaconda/cloud/conda-forge/
|
mamba config --add channels https://mirrors.tuna.tsinghua.edu.cn/anaconda/cloud/conda-forge/
|
||||||
|
|
||||||
# 然后重新执行安装命令(推荐标准安装)
|
# 然后重新执行安装命令
|
||||||
mamba create -n unilab uni-lab::unilabos -c robostack-staging
|
mamba create -n unilab uni-lab::unilabos -c robostack-staging
|
||||||
|
|
||||||
# 或完整版(仿真/可视化)
|
|
||||||
mamba create -n unilab uni-lab::unilabos-full -c robostack-staging
|
|
||||||
|
|
||||||
# pip 安装时使用清华镜像(开发者安装时使用)
|
|
||||||
uv pip install -r unilabos/utils/requirements.txt -i https://mirrors.tuna.tsinghua.edu.cn/pypi/web/simple
|
|
||||||
```
|
```
|
||||||
|
|
||||||
### 第三步:激活环境
|
### 第三步:激活环境
|
||||||
@@ -241,87 +203,58 @@ cd Uni-Lab-OS
|
|||||||
cd Uni-Lab-OS
|
cd Uni-Lab-OS
|
||||||
```
|
```
|
||||||
|
|
||||||
### 第二步:安装开发环境(unilabos-env)
|
### 第二步:安装基础环境
|
||||||
|
|
||||||
**重要**:开发者请使用 `unilabos-env` 包,它专为开发者设计:
|
**推荐方式**:先通过**方式一(一键安装)**或**方式二(手动安装)**完成基础环境的安装,这将包含所有必需的依赖项(ROS2、msgs 等)。
|
||||||
- 包含 ROS2 核心组件和消息包(ros-humble-ros-core、std-msgs、geometry-msgs 等)
|
|
||||||
- 包含 transforms3d、cv-bridge、tf2 等 conda 依赖
|
#### 选项 A:通过一键安装(推荐)
|
||||||
- 包含 `uv` 工具,用于快速安装 pip 依赖
|
|
||||||
- **不包含** pip 依赖和 unilabos 包(由 `pip install -e .` 和 `uv pip install` 安装)
|
参考上文"方式一:一键安装",完成基础环境的安装后,激活环境:
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
# 创建并激活环境
|
|
||||||
mamba create -n unilab python=3.11.14
|
|
||||||
conda activate unilab
|
conda activate unilab
|
||||||
|
|
||||||
# 安装开发者环境包(ROS2 + conda 依赖 + uv)
|
|
||||||
mamba install uni-lab::unilabos-env -c robostack-staging -c conda-forge
|
|
||||||
```
|
```
|
||||||
|
|
||||||
### 第三步:安装 pip 依赖和可编辑模式安装
|
#### 选项 B:通过手动安装
|
||||||
|
|
||||||
克隆代码并安装依赖:
|
参考上文"方式二:手动安装",创建并安装环境:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
mamba create -n unilab python=3.11.11
|
||||||
|
conda activate unilab
|
||||||
|
mamba install -n unilab uni-lab::unilabos -c robostack-staging -c conda-forge
|
||||||
|
```
|
||||||
|
|
||||||
|
**说明**:这会安装包括 Python 3.11.11、ROS2 Humble、ros-humble-unilabos-msgs 和所有必需依赖
|
||||||
|
|
||||||
|
### 第三步:切换到开发版本
|
||||||
|
|
||||||
|
现在你已经有了一个完整可用的 Uni-Lab 环境,接下来将 unilabos 包切换为开发版本:
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
# 确保环境已激活
|
# 确保环境已激活
|
||||||
conda activate unilab
|
conda activate unilab
|
||||||
|
|
||||||
# 克隆仓库(如果还未克隆)
|
# 卸载 pip 安装的 unilabos(保留所有 conda 依赖)
|
||||||
git clone https://github.com/deepmodeling/Uni-Lab-OS.git
|
pip uninstall unilabos -y
|
||||||
cd Uni-Lab-OS
|
|
||||||
|
|
||||||
# 切换到 dev 分支(可选)
|
# 克隆 dev 分支(如果还未克隆)
|
||||||
|
cd /path/to/your/workspace
|
||||||
|
git clone -b dev https://github.com/deepmodeling/Uni-Lab-OS.git
|
||||||
|
# 或者如果已经克隆,切换到 dev 分支
|
||||||
|
cd Uni-Lab-OS
|
||||||
git checkout dev
|
git checkout dev
|
||||||
git pull
|
git pull
|
||||||
```
|
|
||||||
|
|
||||||
**推荐:使用安装脚本**(自动检测中文环境,使用 uv 加速):
|
# 以可编辑模式安装开发版 unilabos
|
||||||
|
|
||||||
```bash
|
|
||||||
# 自动检测中文环境,如果是中文系统则使用清华镜像
|
|
||||||
python scripts/dev_install.py
|
|
||||||
|
|
||||||
# 或者手动指定:
|
|
||||||
python scripts/dev_install.py --china # 强制使用清华镜像
|
|
||||||
python scripts/dev_install.py --no-mirror # 强制使用 PyPI
|
|
||||||
python scripts/dev_install.py --skip-deps # 跳过 pip 依赖安装
|
|
||||||
python scripts/dev_install.py --use-pip # 使用 pip 而非 uv
|
|
||||||
```
|
|
||||||
|
|
||||||
**手动安装**(如果脚本安装失败或速度太慢):
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# 1. 安装 unilabos(可编辑模式)
|
|
||||||
pip install -e .
|
|
||||||
|
|
||||||
# 2. 使用 uv 安装 pip 依赖(推荐,速度更快)
|
|
||||||
uv pip install -r unilabos/utils/requirements.txt
|
|
||||||
|
|
||||||
# 国内用户使用清华镜像:
|
|
||||||
pip install -e . -i https://mirrors.tuna.tsinghua.edu.cn/pypi/web/simple
|
pip install -e . -i https://mirrors.tuna.tsinghua.edu.cn/pypi/web/simple
|
||||||
uv pip install -r unilabos/utils/requirements.txt -i https://mirrors.tuna.tsinghua.edu.cn/pypi/web/simple
|
|
||||||
```
|
```
|
||||||
|
|
||||||
**注意**:
|
**参数说明**:
|
||||||
- `uv` 已包含在 `unilabos-env` 中,无需单独安装
|
|
||||||
- `unilabos/utils/requirements.txt` 包含运行 unilabos 所需的所有 pip 依赖
|
|
||||||
- 部分特殊包(如 pylabrobot)会在运行时由 unilabos 自动检测并安装
|
|
||||||
|
|
||||||
**为什么使用可编辑模式?**
|
- `-e`: editable mode(可编辑模式),代码修改立即生效,无需重新安装
|
||||||
|
- `-i`: 使用清华镜像源加速下载
|
||||||
- `-e` (editable mode):代码修改**立即生效**,无需重新安装
|
- `pip uninstall unilabos`: 只卸载 pip 安装的 unilabos 包,不影响 conda 安装的其他依赖(如 ROS2、msgs 等)
|
||||||
- 适合开发调试:修改代码后直接运行测试
|
|
||||||
- 与 `unilabos-env` 配合:环境依赖由 conda 管理,unilabos 代码由 pip 管理
|
|
||||||
|
|
||||||
**验证安装**:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# 检查 unilabos 版本
|
|
||||||
python -c "import unilabos; print(unilabos.__version__)"
|
|
||||||
|
|
||||||
# 检查安装位置(应该指向你的代码目录)
|
|
||||||
pip show unilabos | grep Location
|
|
||||||
```
|
|
||||||
|
|
||||||
### 第四步:安装或自定义 ros-humble-unilabos-msgs(可选)
|
### 第四步:安装或自定义 ros-humble-unilabos-msgs(可选)
|
||||||
|
|
||||||
@@ -531,45 +464,7 @@ cd $CONDA_PREFIX/envs/unilab
|
|||||||
|
|
||||||
### 问题 8: 环境很大,有办法减小吗?
|
### 问题 8: 环境很大,有办法减小吗?
|
||||||
|
|
||||||
**解决方案**:
|
**解决方案**: 预打包的环境包含所有依赖,通常较大(压缩后 2-5GB)。这是为了确保离线安装和完整功能。如果空间有限,考虑使用方式二手动安装,只安装需要的组件。
|
||||||
|
|
||||||
1. **使用 `unilabos` 标准版**(推荐大多数用户):
|
|
||||||
```bash
|
|
||||||
mamba install uni-lab::unilabos -c robostack-staging -c conda-forge
|
|
||||||
```
|
|
||||||
标准版包含完整功能,环境大小约 2-3GB(相比完整版的 8-10GB)。
|
|
||||||
|
|
||||||
2. **使用 `unilabos-env` 开发者版**(最小化):
|
|
||||||
```bash
|
|
||||||
mamba install uni-lab::unilabos-env -c robostack-staging -c conda-forge
|
|
||||||
# 然后手动安装依赖
|
|
||||||
pip install -e .
|
|
||||||
uv pip install -r unilabos/utils/requirements.txt
|
|
||||||
```
|
|
||||||
开发者版只包含环境依赖,体积最小约 2GB。
|
|
||||||
|
|
||||||
3. **按需安装额外组件**:
|
|
||||||
如果后续需要特定功能,可以单独安装:
|
|
||||||
```bash
|
|
||||||
# 需要 Jupyter
|
|
||||||
mamba install jupyter jupyros
|
|
||||||
|
|
||||||
# 需要可视化
|
|
||||||
mamba install matplotlib opencv
|
|
||||||
|
|
||||||
# 需要仿真(注意:这会安装大量依赖)
|
|
||||||
mamba install ros-humble-gazebo-ros
|
|
||||||
```
|
|
||||||
|
|
||||||
4. **预打包环境问题**:
|
|
||||||
预打包环境(方式一)包含所有依赖,通常较大(压缩后 2-5GB)。这是为了确保离线安装和完整功能。
|
|
||||||
|
|
||||||
**包选择建议**:
|
|
||||||
| 需求 | 推荐包 | 预估大小 |
|
|
||||||
|------|--------|----------|
|
|
||||||
| 日常使用/生产部署 | `unilabos` | ~2-3 GB |
|
|
||||||
| 开发调试(可编辑模式) | `unilabos-env` | ~2 GB |
|
|
||||||
| 仿真/可视化 | `unilabos-full` | ~8-10 GB |
|
|
||||||
|
|
||||||
### 问题 9: 如何更新到最新版本?
|
### 问题 9: 如何更新到最新版本?
|
||||||
|
|
||||||
@@ -616,7 +511,6 @@ mamba update ros-humble-unilabos-msgs -c uni-lab -c robostack-staging -c conda-f
|
|||||||
|
|
||||||
**提示**:
|
**提示**:
|
||||||
|
|
||||||
- **大多数用户**推荐使用方式二(手动安装)的 `unilabos` 标准版
|
- 生产环境推荐使用方式二(手动安装)的稳定版本
|
||||||
- **开发者**推荐使用方式三(开发者安装),安装 `unilabos-env` 后使用 `uv pip install -r unilabos/utils/requirements.txt` 安装依赖
|
- 开发和测试推荐使用方式三(开发者安装)
|
||||||
- **仿真/可视化**推荐安装 `unilabos-full` 完整版
|
- 快速体验和演示推荐使用方式一(一键安装)
|
||||||
- **快速体验和演示**推荐使用方式一(一键安装)
|
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
package:
|
package:
|
||||||
name: ros-humble-unilabos-msgs
|
name: ros-humble-unilabos-msgs
|
||||||
version: 0.10.17
|
version: 0.10.14
|
||||||
source:
|
source:
|
||||||
path: ../../unilabos_msgs
|
path: ../../unilabos_msgs
|
||||||
target_directory: src
|
target_directory: src
|
||||||
@@ -25,7 +25,7 @@ requirements:
|
|||||||
build:
|
build:
|
||||||
- ${{ compiler('cxx') }}
|
- ${{ compiler('cxx') }}
|
||||||
- ${{ compiler('c') }}
|
- ${{ compiler('c') }}
|
||||||
- python ==3.11.14
|
- python ==3.11.11
|
||||||
- numpy
|
- numpy
|
||||||
- if: build_platform != target_platform
|
- if: build_platform != target_platform
|
||||||
then:
|
then:
|
||||||
@@ -63,14 +63,14 @@ requirements:
|
|||||||
- robostack-staging::ros-humble-rosidl-default-generators
|
- robostack-staging::ros-humble-rosidl-default-generators
|
||||||
- robostack-staging::ros-humble-std-msgs
|
- robostack-staging::ros-humble-std-msgs
|
||||||
- robostack-staging::ros-humble-geometry-msgs
|
- robostack-staging::ros-humble-geometry-msgs
|
||||||
- robostack-staging::ros2-distro-mutex=0.7
|
- robostack-staging::ros2-distro-mutex=0.6
|
||||||
run:
|
run:
|
||||||
- robostack-staging::ros-humble-action-msgs
|
- robostack-staging::ros-humble-action-msgs
|
||||||
- robostack-staging::ros-humble-ros-workspace
|
- robostack-staging::ros-humble-ros-workspace
|
||||||
- robostack-staging::ros-humble-rosidl-default-runtime
|
- robostack-staging::ros-humble-rosidl-default-runtime
|
||||||
- robostack-staging::ros-humble-std-msgs
|
- robostack-staging::ros-humble-std-msgs
|
||||||
- robostack-staging::ros-humble-geometry-msgs
|
- robostack-staging::ros-humble-geometry-msgs
|
||||||
- robostack-staging::ros2-distro-mutex=0.7
|
- robostack-staging::ros2-distro-mutex=0.6
|
||||||
- if: osx and x86_64
|
- if: osx and x86_64
|
||||||
then:
|
then:
|
||||||
- __osx >=${{ MACOSX_DEPLOYMENT_TARGET|default('10.14') }}
|
- __osx >=${{ MACOSX_DEPLOYMENT_TARGET|default('10.14') }}
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
package:
|
package:
|
||||||
name: unilabos
|
name: unilabos
|
||||||
version: "0.10.17"
|
version: "0.10.14"
|
||||||
|
|
||||||
source:
|
source:
|
||||||
path: ../..
|
path: ../..
|
||||||
|
|||||||
@@ -85,7 +85,7 @@ Verification:
|
|||||||
-------------
|
-------------
|
||||||
|
|
||||||
The verify_installation.py script will check:
|
The verify_installation.py script will check:
|
||||||
- Python version (3.11.14)
|
- Python version (3.11.11)
|
||||||
- ROS2 rclpy installation
|
- ROS2 rclpy installation
|
||||||
- UniLabOS installation and dependencies
|
- UniLabOS installation and dependencies
|
||||||
|
|
||||||
@@ -104,7 +104,7 @@ Build Information:
|
|||||||
|
|
||||||
Branch: {branch}
|
Branch: {branch}
|
||||||
Platform: {platform}
|
Platform: {platform}
|
||||||
Python: 3.11.14
|
Python: 3.11.11
|
||||||
Date: {build_date}
|
Date: {build_date}
|
||||||
|
|
||||||
Troubleshooting:
|
Troubleshooting:
|
||||||
|
|||||||
@@ -1,214 +0,0 @@
|
|||||||
#!/usr/bin/env python3
|
|
||||||
"""
|
|
||||||
Development installation script for UniLabOS.
|
|
||||||
Auto-detects Chinese locale and uses appropriate mirror.
|
|
||||||
|
|
||||||
Usage:
|
|
||||||
python scripts/dev_install.py
|
|
||||||
python scripts/dev_install.py --no-mirror # Force no mirror
|
|
||||||
python scripts/dev_install.py --china # Force China mirror
|
|
||||||
python scripts/dev_install.py --skip-deps # Skip pip dependencies installation
|
|
||||||
|
|
||||||
Flow:
|
|
||||||
1. pip install -e . (install unilabos in editable mode)
|
|
||||||
2. Detect Chinese locale
|
|
||||||
3. Use uv to install pip dependencies from requirements.txt
|
|
||||||
4. Special packages (like pylabrobot) are handled by environment_check.py at runtime
|
|
||||||
"""
|
|
||||||
|
|
||||||
import locale
|
|
||||||
import subprocess
|
|
||||||
import sys
|
|
||||||
import argparse
|
|
||||||
from pathlib import Path
|
|
||||||
|
|
||||||
# Tsinghua mirror URL
|
|
||||||
TSINGHUA_MIRROR = "https://mirrors.tuna.tsinghua.edu.cn/pypi/web/simple"
|
|
||||||
|
|
||||||
|
|
||||||
def is_chinese_locale() -> bool:
|
|
||||||
"""
|
|
||||||
Detect if system is in Chinese locale.
|
|
||||||
Same logic as EnvironmentChecker._is_chinese_locale()
|
|
||||||
"""
|
|
||||||
try:
|
|
||||||
lang = locale.getdefaultlocale()[0]
|
|
||||||
if lang and ("zh" in lang.lower() or "chinese" in lang.lower()):
|
|
||||||
return True
|
|
||||||
except Exception:
|
|
||||||
pass
|
|
||||||
return False
|
|
||||||
|
|
||||||
|
|
||||||
def run_command(cmd: list, description: str, retry: int = 2) -> bool:
|
|
||||||
"""Run command with retry support."""
|
|
||||||
print(f"[INFO] {description}")
|
|
||||||
print(f"[CMD] {' '.join(cmd)}")
|
|
||||||
|
|
||||||
for attempt in range(retry + 1):
|
|
||||||
try:
|
|
||||||
result = subprocess.run(cmd, check=True, timeout=600)
|
|
||||||
print(f"[OK] {description}")
|
|
||||||
return True
|
|
||||||
except subprocess.CalledProcessError as e:
|
|
||||||
if attempt < retry:
|
|
||||||
print(f"[WARN] Attempt {attempt + 1} failed, retrying...")
|
|
||||||
else:
|
|
||||||
print(f"[ERROR] {description} failed: {e}")
|
|
||||||
return False
|
|
||||||
except subprocess.TimeoutExpired:
|
|
||||||
print(f"[ERROR] {description} timed out")
|
|
||||||
return False
|
|
||||||
return False
|
|
||||||
|
|
||||||
|
|
||||||
def install_editable(project_root: Path, use_mirror: bool) -> bool:
    """Run ``pip install -e <project_root>`` for the current interpreter.

    When *use_mirror* is true the Tsinghua PyPI mirror is used as index.
    """
    pip_cmd = [sys.executable, "-m", "pip", "install", "-e", str(project_root)]
    if use_mirror:
        pip_cmd += ["-i", TSINGHUA_MIRROR]
    return run_command(pip_cmd, "Installing unilabos in editable mode")
|
|
||||||
|
|
||||||
|
|
||||||
def install_requirements_uv(requirements_file: Path, use_mirror: bool) -> bool:
    """Install the pip requirements with ``uv`` (provided by conda-forge::uv)."""
    uv_cmd = ["uv", "pip", "install", "-r", str(requirements_file)]
    if use_mirror:
        uv_cmd += ["-i", TSINGHUA_MIRROR]
    return run_command(uv_cmd, "Installing pip dependencies with uv", retry=2)
|
|
||||||
|
|
||||||
|
|
||||||
def install_requirements_pip(requirements_file: Path, use_mirror: bool) -> bool:
    """Fallback path: install the pip requirements with plain ``pip``."""
    pip_cmd = [sys.executable, "-m", "pip", "install", "-r", str(requirements_file)]
    if use_mirror:
        pip_cmd += ["-i", TSINGHUA_MIRROR]
    return run_command(pip_cmd, "Installing pip dependencies with pip", retry=2)
|
|
||||||
|
|
||||||
|
|
||||||
def check_uv_available() -> bool:
    """Return True when the ``uv`` binary can be invoked (conda-forge::uv)."""
    try:
        subprocess.run(["uv", "--version"], capture_output=True, check=True)
    except (subprocess.CalledProcessError, FileNotFoundError):
        return False
    return True
|
|
||||||
|
|
||||||
|
|
||||||
def main():
    """Development-install entry point.

    Flow: parse CLI flags, decide whether to use the Tsinghua mirror,
    ``pip install -e`` the project, then install ``requirements.txt``
    with uv (falling back to pip).  Exits with status 1 when the project
    layout is wrong or the editable install fails.
    """
    parser = argparse.ArgumentParser(description="Development installation script for UniLabOS")
    parser.add_argument("--china", action="store_true", help="Force use China mirror (Tsinghua)")
    parser.add_argument("--no-mirror", action="store_true", help="Force use default PyPI (no mirror)")
    parser.add_argument(
        "--skip-deps", action="store_true", help="Skip pip dependencies installation (only install unilabos)"
    )
    parser.add_argument("--use-pip", action="store_true", help="Use pip instead of uv for dependencies")
    args = parser.parse_args()

    # Determine project root: this script lives in <root>/scripts/.
    script_dir = Path(__file__).parent
    project_root = script_dir.parent
    requirements_file = project_root / "unilabos" / "utils" / "requirements.txt"

    # Sanity check that we are actually inside the repository.
    if not (project_root / "setup.py").exists():
        print(f"[ERROR] setup.py not found in {project_root}")
        sys.exit(1)

    print("=" * 60)
    print("UniLabOS Development Installation")
    print("=" * 60)
    print(f"Project root: {project_root}")
    print()

    # Determine mirror usage: explicit flags win, else auto-detect locale.
    if args.no_mirror:
        use_mirror = False
        print("[INFO] Mirror disabled by --no-mirror flag")
    elif args.china:
        use_mirror = True
        print("[INFO] China mirror enabled by --china flag")
    else:
        use_mirror = is_chinese_locale()
        if use_mirror:
            print("[INFO] Chinese locale detected, using Tsinghua mirror")
        else:
            print("[INFO] Non-Chinese locale detected, using default PyPI")

    print()

    # Step 1: Install unilabos in editable mode (hard requirement).
    print("[STEP 1] Installing unilabos in editable mode...")
    if not install_editable(project_root, use_mirror):
        print("[ERROR] Failed to install unilabos")
        print()
        print("Manual fallback:")
        if use_mirror:
            print(f"  pip install -e {project_root} -i {TSINGHUA_MIRROR}")
        else:
            print(f"  pip install -e {project_root}")
        sys.exit(1)

    print()

    # Step 2: Install pip dependencies (best-effort; failures only warn).
    if args.skip_deps:
        print("[INFO] Skipping pip dependencies installation (--skip-deps)")
    else:
        print("[STEP 2] Installing pip dependencies...")

        if not requirements_file.exists():
            print(f"[WARN] Requirements file not found: {requirements_file}")
            print("[INFO] Skipping dependencies installation")
        else:
            # Try uv first (faster), fall back to pip on failure/absence.
            if args.use_pip:
                print("[INFO] Using pip (--use-pip flag)")
                success = install_requirements_pip(requirements_file, use_mirror)
            elif check_uv_available():
                print("[INFO] Using uv (installed via conda-forge::uv)")
                success = install_requirements_uv(requirements_file, use_mirror)
                if not success:
                    print("[WARN] uv failed, falling back to pip...")
                    success = install_requirements_pip(requirements_file, use_mirror)
            else:
                print("[WARN] uv not available (should be installed via: mamba install conda-forge::uv)")
                print("[INFO] Falling back to pip...")
                success = install_requirements_pip(requirements_file, use_mirror)

            if not success:
                # Dependency failure is non-fatal: print manual commands.
                print()
                print("[WARN] Failed to install some dependencies automatically.")
                print("You can manually install them:")
                if use_mirror:
                    print(f"  uv pip install -r {requirements_file} -i {TSINGHUA_MIRROR}")
                    print("  or:")
                    print(f"  pip install -r {requirements_file} -i {TSINGHUA_MIRROR}")
                else:
                    print(f"  uv pip install -r {requirements_file}")
                    print("  or:")
                    print(f"  pip install -r {requirements_file}")

    print()
    print("=" * 60)
    print("Installation complete!")
    print("=" * 60)
    print()
    print("Note: Some special packages (like pylabrobot) are installed")
    print("automatically at runtime by unilabos if needed.")
    print()
    print("Verify installation:")
    print('  python -c "import unilabos; print(unilabos.__version__)"')
    print()
    print("If you encounter issues, you can manually install dependencies:")
    if use_mirror:
        print(f"  uv pip install -r unilabos/utils/requirements.txt -i {TSINGHUA_MIRROR}")
    else:
        print("  uv pip install -r unilabos/utils/requirements.txt")
    print()
|
|
||||||
|
|
||||||
|
|
||||||
# Script entry point: run the development installation flow.
if __name__ == "__main__":
    main()
|
|
||||||
@@ -2,6 +2,7 @@ import json
|
|||||||
import logging
|
import logging
|
||||||
import traceback
|
import traceback
|
||||||
import uuid
|
import uuid
|
||||||
|
import xml.etree.ElementTree as ET
|
||||||
from typing import Any, Dict, List
|
from typing import Any, Dict, List
|
||||||
|
|
||||||
import networkx as nx
|
import networkx as nx
|
||||||
@@ -24,15 +25,7 @@ class SimpleGraph:
|
|||||||
|
|
||||||
def add_edge(self, source, target, **attrs):
|
def add_edge(self, source, target, **attrs):
|
||||||
"""添加边"""
|
"""添加边"""
|
||||||
# edge = {"source": source, "target": target, **attrs}
|
edge = {"source": source, "target": target, **attrs}
|
||||||
edge = {
|
|
||||||
"source": source, "target": target,
|
|
||||||
"source_node_uuid": source,
|
|
||||||
"target_node_uuid": target,
|
|
||||||
"source_handle_io": "source",
|
|
||||||
"target_handle_io": "target",
|
|
||||||
**attrs
|
|
||||||
}
|
|
||||||
self.edges.append(edge)
|
self.edges.append(edge)
|
||||||
|
|
||||||
def to_dict(self):
|
def to_dict(self):
|
||||||
@@ -49,7 +42,6 @@ class SimpleGraph:
|
|||||||
"multigraph": False,
|
"multigraph": False,
|
||||||
"graph": {},
|
"graph": {},
|
||||||
"nodes": nodes_list,
|
"nodes": nodes_list,
|
||||||
"edges": self.edges,
|
|
||||||
"links": self.edges,
|
"links": self.edges,
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -66,8 +58,495 @@ def extract_json_from_markdown(text: str) -> str:
|
|||||||
return text
|
return text
|
||||||
|
|
||||||
|
|
||||||
|
def convert_to_type(val: str) -> Any:
    """Coerce an XDL attribute string into a typed Python value.

    Recognizes boolean literals, the unknown marker ``"?"`` (-> None) and
    a fixed list of unit suffixes (mass, amount, volume, speed,
    temperature, percentage).  Anything unrecognized is returned as-is.
    """
    literals = {"True": True, "False": False, "?": None}
    if val in literals:
        return literals[val]

    # (suffix, parser) pairs checked in order; first match wins.
    # NOTE: "mmol" must precede "mol" so millimoles are scaled correctly.
    suffix_parsers = [
        (" g", lambda s: float(s.split(" ")[0])),
        ("mg", lambda s: float(s.split("mg")[0])),
        ("mmol", lambda s: float(s.split("mmol")[0]) / 1000),
        ("mol", lambda s: float(s.split("mol")[0])),
        ("ml", lambda s: float(s.split("ml")[0])),
        ("RPM", lambda s: float(s.split("RPM")[0])),
        (" °C", lambda s: float(s.split(" ")[0])),
        (" %", lambda s: float(s.split(" ")[0])),
    ]
    for suffix, parse in suffix_parsers:
        if val.endswith(suffix):
            return parse(val)
    return val
|
||||||
|
|
||||||
|
|
||||||
|
def refactor_data(data: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
    """Normalize raw protocol steps into template-addressed step dicts.

    Handles both biology (liquid-handler) and organic-chemistry operations.
    ``Repeat`` steps are expanded in place (recursively) and unsupported
    operations are silently dropped.
    """
    # Known operation -> workflow template mapping.
    operation_templates = {
        # Biology operations
        "transfer_liquid": "SynBioFactory-liquid_handler.prcxi-transfer_liquid",
        "transfer": "SynBioFactory-liquid_handler.biomek-transfer",
        "incubation": "SynBioFactory-liquid_handler.biomek-incubation",
        "move_labware": "SynBioFactory-liquid_handler.biomek-move_labware",
        "oscillation": "SynBioFactory-liquid_handler.biomek-oscillation",
        # Organic chemistry operations
        "HeatChillToTemp": "SynBioFactory-workstation-HeatChillProtocol",
        "StopHeatChill": "SynBioFactory-workstation-HeatChillStopProtocol",
        "StartHeatChill": "SynBioFactory-workstation-HeatChillStartProtocol",
        "HeatChill": "SynBioFactory-workstation-HeatChillProtocol",
        "Dissolve": "SynBioFactory-workstation-DissolveProtocol",
        "Transfer": "SynBioFactory-workstation-TransferProtocol",
        "Evaporate": "SynBioFactory-workstation-EvaporateProtocol",
        "Recrystallize": "SynBioFactory-workstation-RecrystallizeProtocol",
        "Filter": "SynBioFactory-workstation-FilterProtocol",
        "Dry": "SynBioFactory-workstation-DryProtocol",
        "Add": "SynBioFactory-workstation-AddProtocol",
    }
    skipped_operations = {"Purge", "Wait", "Stir", "ResetHandling"}

    result: List[Dict[str, Any]] = []
    for step in data:
        op = step.get("action")
        if not op or op in skipped_operations:
            continue

        # Expand Repeat blocks by recursing over their sub-steps N times.
        if op == "Repeat":
            params = step.get("parameters", {})
            repeat_count = step.get("times", params.get("times", 1))
            nested_steps = step.get("steps", params.get("steps", []))
            for _ in range(int(repeat_count)):
                result.extend(refactor_data(nested_steps))
            continue

        # Fall back to inferring the template from the operation name.
        template = operation_templates.get(op)
        if template is None:
            if op.lower() in ("transfer", "incubation", "move_labware", "oscillation"):
                template = f"SynBioFactory-liquid_handler.biomek-{op}"
            else:
                template = f"SynBioFactory-workstation-{op}Protocol"

        result.append(
            {
                "template": template,
                "description": step.get("description", step.get("purpose", f"{op} operation")),
                "lab_node_type": "Device",
                "parameters": step.get("parameters", step.get("action_args", {})),
            }
        )
    return result
|
||||||
|
|
||||||
|
|
||||||
|
def build_protocol_graph(
    labware_info: Dict[str, Dict[str, Any]], protocol_steps: List[Dict[str, Any]], workstation_name: str
) -> SimpleGraph:
    """Build a unified protocol workflow graph.

    Chooses the construction logic automatically: if any normalized step
    targets a biomek/prcxi template the biology branch is used, otherwise
    the organic-chemistry branch.  Control flow is chained through
    "ready" ports; material flow is tracked via ``resource_last_writer``
    (resource name -> "node_uuid:port" of its latest producer).

    NOTE(review): the code iterates ``labware_info.items()``, so the input
    is a mapping of labware-id -> attribute dict (annotation adjusted
    accordingly).
    """
    G = SimpleGraph()
    # resource name -> "node_uuid:port" of the node that last wrote it.
    resource_last_writer = {}
    LAB_NAME = "SynBioFactory"

    # Normalize raw steps into template-addressed dicts first.
    protocol_steps = refactor_data(protocol_steps)

    # Inspect the templates to decide which protocol family this is.
    has_biomek_template = any(
        ("biomek" in step.get("template", "")) or ("prcxi" in step.get("template", ""))
        for step in protocol_steps
    )

    if has_biomek_template:
        # --- Biology (liquid handler) protocol graph ---
        for labware_id, labware in labware_info.items():
            node_id = str(uuid.uuid4())

            labware_attrs = labware.copy()
            # Prefer the explicit "id" attribute; fall back to "name" / random.
            labware_id = labware_attrs.pop("id", labware_attrs.get("name", f"labware_{uuid.uuid4()}"))
            labware_attrs["description"] = labware_id
            # Classify by naming convention: plates carry reagents, racks are labware.
            labware_attrs["lab_node_type"] = (
                "Reagent" if "Plate" in str(labware_id) else "Labware" if "Rack" in str(labware_id) else "Sample"
            )
            labware_attrs["device_id"] = workstation_name

            G.add_node(node_id, template=f"{LAB_NAME}-host_node-create_resource", **labware_attrs)
            resource_last_writer[labware_id] = f"{node_id}:labware"

        # Process protocol steps: chain them with "ready" control edges.
        prev_node = None
        for i, step in enumerate(protocol_steps):
            node_id = str(uuid.uuid4())
            G.add_node(node_id, **step)

            # Control-flow edge from the previous step.
            if prev_node is not None:
                G.add_edge(prev_node, node_id, source_port="ready", target_port="ready")
            prev_node = node_id

            # Material flow: connect from the last writer of "sources".
            params = step.get("parameters", {})
            if "sources" in params and params["sources"] in resource_last_writer:
                source_node, source_port = resource_last_writer[params["sources"]].split(":")
                G.add_edge(source_node, node_id, source_port=source_port, target_port="labware")

            # This step becomes the latest writer of its "targets" resource.
            if "targets" in params:
                resource_last_writer[params["targets"]] = f"{node_id}:labware"

        # Terminal node that triggers the actual protocol run.
        end_id = str(uuid.uuid4())
        G.add_node(end_id, template=f"{LAB_NAME}-liquid_handler.biomek-run_protocol")
        if prev_node is not None:
            G.add_edge(prev_node, end_id, source_port="ready", target_port="ready")

    else:
        # --- Organic chemistry protocol graph ---
        WORKSTATION_ID = workstation_name

        # Create resource nodes for all labware entries.
        for item_id, item in labware_info.items():
            # item_id = item.get("id") or item.get("name", f"item_{uuid.uuid4()}")
            node_id = str(uuid.uuid4())

            # Classify: hardware entries are skipped unless they are reactors.
            if item.get("type") == "hardware" or "reactor" in str(item_id).lower():
                if "reactor" not in str(item_id).lower():
                    continue
                lab_node_type = "Sample"
                description = f"Prepare Reactor: {item_id}"
                liquid_type = []
                liquid_volume = []
            else:
                lab_node_type = "Reagent"
                description = f"Add Reagent to Flask: {item_id}"
                liquid_type = [item_id]
                # Large default stock volume so reagents never run dry.
                liquid_volume = [1e5]

            G.add_node(
                node_id,
                template=f"{LAB_NAME}-host_node-create_resource",
                description=description,
                lab_node_type=lab_node_type,
                res_id=item_id,
                device_id=WORKSTATION_ID,
                class_name="container",
                parent=WORKSTATION_ID,
                bind_locations={"x": 0.0, "y": 0.0, "z": 0.0},
                liquid_input_slot=[-1],
                liquid_type=liquid_type,
                liquid_volume=liquid_volume,
                slot_on_deck="",
                role=item.get("role", ""),
            )
            resource_last_writer[item_id] = f"{node_id}:labware"

        last_control_node_id = None

        # Process protocol steps.
        for step in protocol_steps:
            node_id = str(uuid.uuid4())
            G.add_node(node_id, **step)

            # Control flow: serialize steps with "ready" edges.
            if last_control_node_id is not None:
                G.add_edge(last_control_node_id, node_id, source_port="ready", target_port="ready")
            last_control_node_id = node_id

            # Material flow: wire every referenced input resource from its
            # last writer into the matching input port of this step.
            params = step.get("parameters", {})
            input_resources = {
                "Vessel": params.get("vessel"),
                "ToVessel": params.get("to_vessel"),
                "FromVessel": params.get("from_vessel"),
                "reagent": params.get("reagent"),
                "solvent": params.get("solvent"),
                "compound": params.get("compound"),
                "sources": params.get("sources"),
                "targets": params.get("targets"),
            }

            for target_port, resource_name in input_resources.items():
                if resource_name and resource_name in resource_last_writer:
                    source_node, source_port = resource_last_writer[resource_name].split(":")
                    G.add_edge(source_node, node_id, source_port=source_port, target_port=target_port)

            # This step becomes the latest writer of every resource it outputs.
            output_resources = {
                "VesselOut": params.get("vessel"),
                "FromVesselOut": params.get("from_vessel"),
                "ToVesselOut": params.get("to_vessel"),
                "FiltrateOut": params.get("filtrate_vessel"),
                "reagent": params.get("reagent"),
                "solvent": params.get("solvent"),
                "compound": params.get("compound"),
                "sources_out": params.get("sources"),
                "targets_out": params.get("targets"),
            }

            for source_port, resource_name in output_resources.items():
                if resource_name:
                    resource_last_writer[resource_name] = f"{node_id}:{source_port}"

    return G
|
||||||
|
|
||||||
|
|
||||||
|
def draw_protocol_graph(protocol_graph: SimpleGraph, output_path: str):
    """
    (Helper) Draw the protocol workflow graph with networkx + matplotlib
    for visualization and save it to *output_path*.

    NOTE(review): relies on `plt` (presumably matplotlib.pyplot) being
    imported at module level — not visible in this chunk; confirm the
    top-of-file imports.
    """
    if not protocol_graph:
        print("Cannot draw graph: Graph object is empty.")
        return

    G = nx.DiGraph()

    for node_id, attrs in protocol_graph.nodes.items():
        # Prefer a human-readable label; fall back to template, then short uuid.
        label = attrs.get("description", attrs.get("template", node_id[:8]))
        G.add_node(node_id, label=label, **attrs)

    for edge in protocol_graph.edges:
        G.add_edge(edge["source"], edge["target"])

    plt.figure(figsize=(20, 15))
    try:
        # Hierarchical layout via Graphviz when pygraphviz is installed.
        pos = nx.nx_agraph.graphviz_layout(G, prog="dot")
    except Exception:
        pos = nx.shell_layout(G)  # Fallback layout

    node_labels = {node: data["label"] for node, data in G.nodes(data=True)}
    nx.draw(
        G,
        pos,
        with_labels=False,
        node_size=2500,
        node_color="skyblue",
        node_shape="o",
        edge_color="gray",
        width=1.5,
        arrowsize=15,
    )
    # Labels drawn separately so the font styling is independent of nx.draw.
    nx.draw_networkx_labels(G, pos, labels=node_labels, font_size=8, font_weight="bold")

    plt.title("Chemical Protocol Workflow Graph", size=15)
    plt.savefig(output_path, dpi=300, bbox_inches="tight")
    plt.close()
    print(f" - Visualization saved to '{output_path}'")
|
||||||
|
|
||||||
|
|
||||||
|
from networkx.drawing.nx_agraph import to_agraph
|
||||||
|
import re
|
||||||
|
|
||||||
|
COMPASS = {"n","e","s","w","ne","nw","se","sw","c"}
|
||||||
|
|
||||||
|
def _is_compass(port: str) -> bool:
|
||||||
|
return isinstance(port, str) and port.lower() in COMPASS
|
||||||
|
|
||||||
|
def draw_protocol_graph_with_ports(protocol_graph, output_path: str, rankdir: str = "LR"):
    """
    Render the protocol workflow graph using Graphviz port syntax.

    - If an edge's source_port/target_port is a compass point (n/e/s/w/...),
      the compass value is used directly.
    - Otherwise the node is given a "record" shape with named <port> slots.
    Rendered via PyGraphviz; the output format follows the suffix of
    *output_path* (.png/.svg/.pdf, ...).
    """
    if not protocol_graph:
        print("Cannot draw graph: Graph object is empty.")
        return

    # 1) Build a networkx digraph first, preserving port attributes.
    G = nx.DiGraph()
    for node_id, attrs in protocol_graph.nodes.items():
        label = attrs.get("description", attrs.get("template", node_id[:8]))
        # Keep a clean "core label" for the middle slot of the record shape.
        G.add_node(node_id, _core_label=str(label), **{k:v for k,v in attrs.items() if k not in ("label",)})

    edges_data = []
    in_ports_by_node = {}   # collected named input ports per node
    out_ports_by_node = {}  # collected named output ports per node

    for edge in protocol_graph.edges:
        u = edge["source"]
        v = edge["target"]
        sp = edge.get("source_port")
        tp = edge.get("target_port")

        # Record into the graph (keeping the raw port info).
        G.add_edge(u, v, source_port=sp, target_port=tp)
        edges_data.append((u, v, sp, tp))

        # Non-compass ports are grouped here; they become record slots below.
        if sp and not _is_compass(sp):
            out_ports_by_node.setdefault(u, set()).add(str(sp))
        if tp and not _is_compass(tp):
            in_ports_by_node.setdefault(v, set()).add(str(tp))

    # 2) Convert to AGraph and render with Graphviz.
    A = to_agraph(G)
    A.graph_attr.update(rankdir=rankdir, splines="true", concentrate="false", fontsize="10")
    A.node_attr.update(shape="box", style="rounded,filled", fillcolor="lightyellow", color="#999999", fontname="Helvetica")
    A.edge_attr.update(arrowsize="0.8", color="#666666")

    # 3) Give nodes with named ports a record shape and label:
    #    left column = input ports; middle = core label; right column = outputs.
    for n in A.nodes():
        node = A.get_node(n)
        core = G.nodes[n].get("_core_label", n)

        in_ports = sorted(in_ports_by_node.get(n, []))
        out_ports = sorted(out_ports_by_node.get(n, []))

        # Use record only when the node has named ports; otherwise keep the box.
        if in_ports or out_ports:
            def port_fields(ports):
                if not ports:
                    return " "  # must keep one empty slot as placeholder
                # One small cell per port: "<p> name"
                return "|".join(f"<{re.sub(r'[^A-Za-z0-9_:.|-]', '_', p)}> {p}" for p in ports)

            left = port_fields(in_ports)
            right = port_fields(out_ports)

            # Three columns: left (in) | middle (node name) | right (out)
            record_label = f"{{ {left} | {core} | {right} }}"
            node.attr.update(shape="record", label=record_label)
        else:
            # No named ports: plain box showing the core label.
            node.attr.update(label=str(core))

    # 4) Set headport / tailport on every edge:
    #    - compass ports are used as-is (e.g. headport="e")
    #    - named ports must match the <port> names defined in the record label
    for (u, v, sp, tp) in edges_data:
        e = A.get_edge(u, v)

        # Graphviz attributes: tail is the source, head is the target.
        if sp:
            if _is_compass(sp):
                e.attr["tailport"] = sp.lower()
            else:
                # Same sanitization used for the record label's <port> names.
                e.attr["tailport"] = re.sub(r'[^A-Za-z0-9_:.|-]', '_', str(sp))

        if tp:
            if _is_compass(tp):
                e.attr["headport"] = tp.lower()
            else:
                e.attr["headport"] = re.sub(r'[^A-Za-z0-9_:.|-]', '_', str(tp))

        # Optional edge styling knobs, e.g.:
        # e.attr["arrowhead"] = "vee"

    # 5) Emit the drawing.
    A.draw(output_path, prog="dot")
    print(f" - Port-aware workflow rendered to '{output_path}'")
|
||||||
|
|
||||||
|
|
||||||
|
def flatten_xdl_procedure(procedure_elem: ET.Element) -> List[ET.Element]:
    """Flatten a nested XDL <Procedure> into a document-order operation list.

    Section wrappers (Prep/Reaction/Workup/Purification/Procedure) and
    temporarily unsupported operations are excluded, but their children
    are still visited.
    """
    SECTION_TAGS = {"Prep", "Reaction", "Workup", "Purification", "Procedure"}
    SKIPPED_TAGS = {"Purge", "Wait", "Stir", "ResetHandling"}

    operations: List[ET.Element] = []
    for section in procedure_elem:
        # Element.iter() walks the subtree (including the element itself)
        # in document order, matching the original pre-order recursion.
        for elem in section.iter():
            if elem.tag in SECTION_TAGS or elem.tag in SKIPPED_TAGS:
                continue
            operations.append(elem)
    return operations
|
||||||
|
|
||||||
|
|
||||||
|
def parse_xdl_content(xdl_content: str) -> tuple:
    """Parse raw XDL XML into (hardware, reagents, flattened_operations).

    Returns (None, None, None) when the document lacks a <Synthesis> or
    <Procedure> section.  Raises ValueError on malformed XML.
    """
    # Strip non-printable characters (including newlines) that sometimes
    # leak into generated XDL text before parsing.
    cleaned = "".join(ch for ch in xdl_content if ch.isprintable())
    try:
        root = ET.fromstring(cleaned)
    except ET.ParseError as exc:
        raise ValueError(f"Invalid XDL format: {exc}")

    synthesis = root.find("Synthesis")
    if synthesis is None:
        return None, None, None

    # Hardware components.
    hardware_elem = synthesis.find("Hardware")
    hardware = (
        [{"id": comp.get("id"), "type": comp.get("type")} for comp in hardware_elem.findall("Component")]
        if hardware_elem is not None
        else []
    )

    # Reagents.
    reagents_elem = synthesis.find("Reagents")
    reagents = (
        [{"name": r.get("name"), "role": r.get("role", "")} for r in reagents_elem.findall("Reagent")]
        if reagents_elem is not None
        else []
    )

    # Procedure.
    procedure = synthesis.find("Procedure")
    if procedure is None:
        return None, None, None

    return hardware, reagents, flatten_xdl_procedure(procedure)
|
||||||
|
|
||||||
|
|
||||||
|
def convert_xdl_to_dict(xdl_content: str) -> Dict[str, Any]:
    """
    Convert an XDL XML document into the standard dict workflow format.

    Args:
        xdl_content: Raw XDL XML text.

    Returns:
        On success: {"success": True, "steps": [...], "labware": [...],
        "message": ...}; on any failure: {"success": False, "error": ...}.
    """
    try:
        hardware, reagents, operations = parse_xdl_content(xdl_content)
        if hardware is None:
            return {"error": "Failed to parse XDL content", "success": False}

        # Convert each XDL element into a step dict with typed parameters.
        steps_data: List[Dict[str, Any]] = []
        for op_elem in operations:
            parameters = {}
            for attr_name, raw_value in op_elem.attrib.items():
                typed_value = convert_to_type(raw_value)
                # "?" converts to None and is dropped entirely.
                if typed_value is not None:
                    parameters[attr_name] = typed_value

            steps_data.append(
                {
                    "operation": op_elem.tag,
                    "parameters": parameters,
                    "description": op_elem.get("purpose", f"Operation: {op_elem.tag}"),
                }
            )

        # Merge hardware and reagents into one unified labware_info list.
        # Note the **hw spread comes last so a component's own "type"
        # attribute overrides the generic "hardware" marker.
        labware_data: List[Dict[str, Any]] = []
        for hw in hardware:
            labware_data.append({"id": hw["id"], "type": "hardware", **hw})
        for reagent in reagents:
            labware_data.append({"name": reagent["name"], "type": "reagent", **reagent})

        return {
            "success": True,
            "steps": steps_data,
            "labware": labware_data,
            "message": f"Successfully converted XDL to dict format. Found {len(steps_data)} steps and {len(labware_data)} labware items.",
        }

    except Exception as e:
        error_msg = f"XDL conversion failed: {str(e)}"
        logger.error(error_msg)
        return {"error": error_msg, "success": False}
|
||||||
|
|
||||||
|
|
||||||
def create_workflow(
|
def create_workflow(
|
||||||
|
|||||||
2
setup.py
2
setup.py
@@ -4,7 +4,7 @@ package_name = 'unilabos'
|
|||||||
|
|
||||||
setup(
|
setup(
|
||||||
name=package_name,
|
name=package_name,
|
||||||
version='0.10.17',
|
version='0.10.14',
|
||||||
packages=find_packages(),
|
packages=find_packages(),
|
||||||
include_package_data=True,
|
include_package_data=True,
|
||||||
install_requires=['setuptools'],
|
install_requires=['setuptools'],
|
||||||
|
|||||||
@@ -1,15 +0,0 @@
|
|||||||
# Liquid handling 集成测试
|
|
||||||
|
|
||||||
`test_transfer_liquid.py` 现在会调用 PRCXI 的 RViz 仿真 backend,运行前请确保:
|
|
||||||
|
|
||||||
1. 已安装包含 `pylabrobot`、`rclpy` 的运行环境;
|
|
||||||
2. 启动 ROS 依赖(`rviz` 可选,但是 `rviz_backend` 会创建 ROS 节点);
|
|
||||||
3. 在 shell 中设置 `UNILAB_SIM_TEST=1`,否则 pytest 会自动跳过这些慢速用例:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
export UNILAB_SIM_TEST=1
|
|
||||||
pytest tests/devices/liquid_handling/test_transfer_liquid.py -m slow
|
|
||||||
```
|
|
||||||
|
|
||||||
如果只需验证逻辑层(不依赖仿真),可以直接运行 `tests/devices/liquid_handling/unit_test.py`,该文件使用 Fake backend,适合作为 CI 的快速测试。
|
|
||||||
|
|
||||||
@@ -1,547 +0,0 @@
|
|||||||
import asyncio
|
|
||||||
from dataclasses import dataclass
|
|
||||||
from typing import Any, Iterable, List, Optional, Sequence, Tuple
|
|
||||||
|
|
||||||
import pytest
|
|
||||||
|
|
||||||
from unilabos.devices.liquid_handling.liquid_handler_abstract import LiquidHandlerAbstract
|
|
||||||
|
|
||||||
|
|
||||||
@dataclass(frozen=True)
class DummyContainer:
    """Immutable stand-in for a labware container; carries only a name."""

    name: str

    def __repr__(self) -> str:  # pragma: no cover
        return "DummyContainer({})".format(self.name)
|
|
||||||
|
|
||||||
|
|
||||||
@dataclass(frozen=True)
class DummyTipSpot:
    """Immutable stand-in for a tip spot; carries only a name."""

    name: str

    def __repr__(self) -> str:  # pragma: no cover
        return "DummyTipSpot({})".format(self.name)
|
|
||||||
|
|
||||||
|
|
||||||
def make_tip_iter(n: int = 256) -> Iterable[List[DummyTipSpot]]:
    """Yield single-element lists so callers can `tip.extend(next(self.current_tip))`."""
    return ([DummyTipSpot(f"tip_{idx}")] for idx in range(n))
|
|
||||||
|
|
||||||
|
|
||||||
class FakeLiquidHandler(LiquidHandlerAbstract):
|
|
||||||
"""不初始化真实 backend/deck;仅用来记录 transfer_liquid 内部调用序列。"""
|
|
||||||
|
|
||||||
def __init__(self, channel_num: int = 8):
|
|
||||||
# 不调用 super().__init__,避免真实硬件/后端依赖
|
|
||||||
self.channel_num = channel_num
|
|
||||||
self.support_touch_tip = True
|
|
||||||
self.current_tip = iter(make_tip_iter())
|
|
||||||
self.calls: List[Tuple[str, Any]] = []
|
|
||||||
|
|
||||||
async def pick_up_tips(self, tip_spots, use_channels=None, offsets=None, **backend_kwargs):
|
|
||||||
self.calls.append(("pick_up_tips", {"tips": list(tip_spots), "use_channels": use_channels}))
|
|
||||||
|
|
||||||
async def aspirate(
|
|
||||||
self,
|
|
||||||
resources: Sequence[Any],
|
|
||||||
vols: List[float],
|
|
||||||
use_channels: Optional[List[int]] = None,
|
|
||||||
flow_rates: Optional[List[Optional[float]]] = None,
|
|
||||||
offsets: Any = None,
|
|
||||||
liquid_height: Any = None,
|
|
||||||
blow_out_air_volume: Any = None,
|
|
||||||
spread: str = "wide",
|
|
||||||
**backend_kwargs,
|
|
||||||
):
|
|
||||||
self.calls.append(
|
|
||||||
(
|
|
||||||
"aspirate",
|
|
||||||
{
|
|
||||||
"resources": list(resources),
|
|
||||||
"vols": list(vols),
|
|
||||||
"use_channels": list(use_channels) if use_channels is not None else None,
|
|
||||||
"flow_rates": list(flow_rates) if flow_rates is not None else None,
|
|
||||||
"offsets": list(offsets) if offsets is not None else None,
|
|
||||||
"liquid_height": list(liquid_height) if liquid_height is not None else None,
|
|
||||||
"blow_out_air_volume": list(blow_out_air_volume) if blow_out_air_volume is not None else None,
|
|
||||||
},
|
|
||||||
)
|
|
||||||
)
|
|
||||||
|
|
||||||
async def dispense(
|
|
||||||
self,
|
|
||||||
resources: Sequence[Any],
|
|
||||||
vols: List[float],
|
|
||||||
use_channels: Optional[List[int]] = None,
|
|
||||||
flow_rates: Optional[List[Optional[float]]] = None,
|
|
||||||
offsets: Any = None,
|
|
||||||
liquid_height: Any = None,
|
|
||||||
blow_out_air_volume: Any = None,
|
|
||||||
spread: str = "wide",
|
|
||||||
**backend_kwargs,
|
|
||||||
):
|
|
||||||
self.calls.append(
|
|
||||||
(
|
|
||||||
"dispense",
|
|
||||||
{
|
|
||||||
"resources": list(resources),
|
|
||||||
"vols": list(vols),
|
|
||||||
"use_channels": list(use_channels) if use_channels is not None else None,
|
|
||||||
"flow_rates": list(flow_rates) if flow_rates is not None else None,
|
|
||||||
"offsets": list(offsets) if offsets is not None else None,
|
|
||||||
"liquid_height": list(liquid_height) if liquid_height is not None else None,
|
|
||||||
"blow_out_air_volume": list(blow_out_air_volume) if blow_out_air_volume is not None else None,
|
|
||||||
},
|
|
||||||
)
|
|
||||||
)
|
|
||||||
|
|
||||||
async def discard_tips(self, use_channels=None, *args, **kwargs):
    """Record a tip-discard request.

    Some call sites use discard_tips(use_channels=[0]); others pass the
    channel list positionally, e.g. discard_tips([0..7]) — both are accepted.
    """
    channels = None if use_channels is None else list(use_channels)
    self.calls.append(("discard_tips", {"use_channels": channels}))
|
|
||||||
|
|
||||||
async def custom_delay(self, seconds=0, msg=None):
    """Record a delay request instead of actually sleeping."""
    entry = ("custom_delay", {"seconds": seconds, "msg": msg})
    self.calls.append(entry)
|
|
||||||
|
|
||||||
async def touch_tip(self, targets):
    """Record a touch-tip request.

    The real implementation inspects targets (e.g. targets.get_size_x());
    this fake only logs the call.
    """
    self.calls.append(("touch_tip", {"targets": targets}))
|
|
||||||
|
|
||||||
def run(coro):
    """Drive *coro* to completion on a fresh event loop and return its result."""
    outcome = asyncio.run(coro)
    return outcome
|
|
||||||
|
|
||||||
|
|
||||||
def test_one_to_one_single_channel_basic_calls():
    """One-to-one, single channel: one pick/aspirate/dispense/discard cycle per source/target pair."""
    lh = FakeLiquidHandler(channel_num=1)
    lh.current_tip = iter(make_tip_iter(64))

    sources = [DummyContainer(f"S{i}") for i in range(3)]
    targets = [DummyContainer(f"T{i}") for i in range(3)]

    run(
        lh.transfer_liquid(
            sources=sources,
            targets=targets,
            tip_racks=[],
            use_channels=[0],
            asp_vols=[1, 2, 3],
            dis_vols=[4, 5, 6],
            mix_times=None,  # should still execute (without mixing)
        )
    )

    assert [c[0] for c in lh.calls].count("pick_up_tips") == 3
    assert [c[0] for c in lh.calls].count("aspirate") == 3
    assert [c[0] for c in lh.calls].count("dispense") == 3
    assert [c[0] for c in lh.calls].count("discard_tips") == 3

    # Every aspirate/dispense is a single-well list.
    aspirates = [payload for name, payload in lh.calls if name == "aspirate"]
    assert aspirates[0]["resources"] == [sources[0]]
    assert aspirates[0]["vols"] == [1.0]

    dispenses = [payload for name, payload in lh.calls if name == "dispense"]
    assert dispenses[2]["resources"] == [targets[2]]
    assert dispenses[2]["vols"] == [6.0]
|
|
||||||
|
|
||||||
|
|
||||||
def test_one_to_one_single_channel_before_stage_mixes_prior_to_aspirate():
    """With mix_stage='before', a mix aspirate on the target happens before the transfer aspirate."""
    lh = FakeLiquidHandler(channel_num=1)
    lh.current_tip = iter(make_tip_iter(16))

    source = DummyContainer("S0")
    target = DummyContainer("T0")

    run(
        lh.transfer_liquid(
            sources=[source],
            targets=[target],
            tip_racks=[],
            use_channels=[0],
            asp_vols=[5],
            dis_vols=[5],
            mix_stage="before",
            mix_times=1,
            mix_vol=3,
        )
    )

    aspirate_calls = [(idx, payload) for idx, (name, payload) in enumerate(lh.calls) if name == "aspirate"]
    assert len(aspirate_calls) >= 2
    # First aspirate is the pre-transfer mix on the target well.
    mix_idx, mix_payload = aspirate_calls[0]
    assert mix_payload["resources"] == [target]
    assert mix_payload["vols"] == [3]
    # Second aspirate is the actual transfer from the source.
    transfer_idx, transfer_payload = aspirate_calls[1]
    assert transfer_payload["resources"] == [source]
    assert mix_idx < transfer_idx
|
|
||||||
|
|
||||||
|
|
||||||
def test_one_to_one_eight_channel_groups_by_8():
    """One-to-one with 8 channels processes tasks in batches of eight wells at a time."""
    lh = FakeLiquidHandler(channel_num=8)
    lh.current_tip = iter(make_tip_iter(256))

    sources = [DummyContainer(f"S{i}") for i in range(16)]
    targets = [DummyContainer(f"T{i}") for i in range(16)]
    asp_vols = list(range(1, 17))
    dis_vols = list(range(101, 117))

    run(
        lh.transfer_liquid(
            sources=sources,
            targets=targets,
            tip_racks=[],
            use_channels=list(range(8)),
            asp_vols=asp_vols,
            dis_vols=dis_vols,
            mix_times=0,  # exercises the mix logic without mixing
        )
    )

    # 16 tasks -> 2 batches, each handled by all 8 channels together.
    assert [c[0] for c in lh.calls].count("pick_up_tips") == 2
    aspirates = [payload for name, payload in lh.calls if name == "aspirate"]
    dispenses = [payload for name, payload in lh.calls if name == "dispense"]
    assert len(aspirates) == 2
    assert len(dispenses) == 2

    assert aspirates[0]["resources"] == sources[0:8]
    assert aspirates[0]["vols"] == [float(v) for v in asp_vols[0:8]]
    assert dispenses[1]["resources"] == targets[8:16]
    assert dispenses[1]["vols"] == [float(v) for v in dis_vols[8:16]]
|
|
||||||
|
|
||||||
|
|
||||||
def test_one_to_one_eight_channel_requires_multiple_of_8_targets():
    """Eight-channel one-to-one rejects a task count that is not a multiple of 8."""
    lh = FakeLiquidHandler(channel_num=8)
    lh.current_tip = iter(make_tip_iter(64))

    sources = [DummyContainer(f"S{i}") for i in range(9)]
    targets = [DummyContainer(f"T{i}") for i in range(9)]

    with pytest.raises(ValueError, match="multiple of 8"):
        run(
            lh.transfer_liquid(
                sources=sources,
                targets=targets,
                tip_racks=[],
                use_channels=list(range(8)),
                asp_vols=[1] * 9,
                dis_vols=[1] * 9,
                mix_times=0,
            )
        )
|
|
||||||
|
|
||||||
|
|
||||||
def test_one_to_one_eight_channel_parameter_lists_are_chunked_per_8():
    """Per-well parameter lists (flow rates, offsets, heights, blow-out) are sliced per 8-task batch."""
    lh = FakeLiquidHandler(channel_num=8)
    lh.current_tip = iter(make_tip_iter(512))

    sources = [DummyContainer(f"S{i}") for i in range(16)]
    targets = [DummyContainer(f"T{i}") for i in range(16)]
    asp_vols = [i + 1 for i in range(16)]
    dis_vols = [200 + i for i in range(16)]
    asp_flow_rates = [0.1 * (i + 1) for i in range(16)]
    dis_flow_rates = [0.2 * (i + 1) for i in range(16)]
    offsets = [f"offset_{i}" for i in range(16)]
    liquid_heights = [i * 0.5 for i in range(16)]
    blow_out_air_volume = [i + 0.05 for i in range(16)]

    run(
        lh.transfer_liquid(
            sources=sources,
            targets=targets,
            tip_racks=[],
            use_channels=list(range(8)),
            asp_vols=asp_vols,
            dis_vols=dis_vols,
            asp_flow_rates=asp_flow_rates,
            dis_flow_rates=dis_flow_rates,
            offsets=offsets,
            liquid_height=liquid_heights,
            blow_out_air_volume=blow_out_air_volume,
            mix_times=0,
        )
    )

    aspirates = [payload for name, payload in lh.calls if name == "aspirate"]
    dispenses = [payload for name, payload in lh.calls if name == "dispense"]
    assert len(aspirates) == len(dispenses) == 2

    # Each batch must receive exactly its own 8-element slice of every list.
    for batch_idx in range(2):
        start = batch_idx * 8
        end = start + 8
        asp_call = aspirates[batch_idx]
        dis_call = dispenses[batch_idx]
        assert asp_call["resources"] == sources[start:end]
        assert asp_call["flow_rates"] == asp_flow_rates[start:end]
        assert asp_call["offsets"] == offsets[start:end]
        assert asp_call["liquid_height"] == liquid_heights[start:end]
        assert asp_call["blow_out_air_volume"] == blow_out_air_volume[start:end]
        assert dis_call["flow_rates"] == dis_flow_rates[start:end]
        assert dis_call["offsets"] == offsets[start:end]
        assert dis_call["liquid_height"] == liquid_heights[start:end]
        assert dis_call["blow_out_air_volume"] == blow_out_air_volume[start:end]
|
|
||||||
|
|
||||||
|
|
||||||
def test_one_to_one_eight_channel_handles_32_tasks_four_batches():
    """32 one-to-one tasks on 8 channels run as four full batches."""
    lh = FakeLiquidHandler(channel_num=8)
    lh.current_tip = iter(make_tip_iter(1024))

    sources = [DummyContainer(f"S{i}") for i in range(32)]
    targets = [DummyContainer(f"T{i}") for i in range(32)]
    asp_vols = [i + 1 for i in range(32)]
    dis_vols = [300 + i for i in range(32)]

    run(
        lh.transfer_liquid(
            sources=sources,
            targets=targets,
            tip_racks=[],
            use_channels=list(range(8)),
            asp_vols=asp_vols,
            dis_vols=dis_vols,
            mix_times=0,
        )
    )

    pick_calls = [name for name, _ in lh.calls if name == "pick_up_tips"]
    aspirates = [payload for name, payload in lh.calls if name == "aspirate"]
    dispenses = [payload for name, payload in lh.calls if name == "dispense"]
    assert len(pick_calls) == 4
    assert len(aspirates) == len(dispenses) == 4
    assert aspirates[0]["resources"] == sources[0:8]
    assert aspirates[-1]["resources"] == sources[24:32]
    assert dispenses[0]["resources"] == targets[0:8]
    assert dispenses[-1]["resources"] == targets[24:32]
|
|
||||||
|
|
||||||
|
|
||||||
def test_one_to_many_single_channel_aspirates_total_when_asp_vol_too_small():
    """One-to-many, single channel: a too-small asp_vol is raised to sum(dis_vols) in one aspirate."""
    lh = FakeLiquidHandler(channel_num=1)
    lh.current_tip = iter(make_tip_iter(64))

    source = DummyContainer("SRC")
    targets = [DummyContainer(f"T{i}") for i in range(3)]
    dis_vols = [10, 20, 30]  # sum = 60

    run(
        lh.transfer_liquid(
            sources=[source],
            targets=targets,
            tip_racks=[],
            use_channels=[0],
            asp_vols=10,  # less than sum(dis_vols) -> should aspirate 60
            dis_vols=dis_vols,
            mix_times=0,
        )
    )

    aspirates = [payload for name, payload in lh.calls if name == "aspirate"]
    assert len(aspirates) == 1
    assert aspirates[0]["resources"] == [source]
    assert aspirates[0]["vols"] == [60.0]
    assert aspirates[0]["use_channels"] == [0]
    dispenses = [payload for name, payload in lh.calls if name == "dispense"]
    assert [d["vols"][0] for d in dispenses] == [10.0, 20.0, 30.0]
|
|
||||||
|
|
||||||
|
|
||||||
def test_one_to_many_eight_channel_basic():
    """One-to-many on 8 channels: every channel aspirates from the same source per dis_vols."""
    lh = FakeLiquidHandler(channel_num=8)
    lh.current_tip = iter(make_tip_iter(128))

    source = DummyContainer("SRC")
    targets = [DummyContainer(f"T{i}") for i in range(8)]
    dis_vols = [i + 1 for i in range(8)]

    run(
        lh.transfer_liquid(
            sources=[source],
            targets=targets,
            tip_racks=[],
            use_channels=list(range(8)),
            asp_vols=999,  # one-to-many 8ch aspirates per dis_vols (each channel its own volume)
            dis_vols=dis_vols,
            mix_times=0,
        )
    )

    aspirates = [payload for name, payload in lh.calls if name == "aspirate"]
    assert aspirates[0]["resources"] == [source] * 8
    assert aspirates[0]["vols"] == [float(v) for v in dis_vols]
    dispenses = [payload for name, payload in lh.calls if name == "dispense"]
    assert dispenses[0]["resources"] == targets
    assert dispenses[0]["vols"] == [float(v) for v in dis_vols]
|
|
||||||
|
|
||||||
|
|
||||||
def test_many_to_one_single_channel_standard_dispense_equals_asp_by_default():
    """Many-to-one, single channel: with scalar dis_vols, each dispense equals the matching asp_vol."""
    lh = FakeLiquidHandler(channel_num=1)
    lh.current_tip = iter(make_tip_iter(128))

    sources = [DummyContainer(f"S{i}") for i in range(3)]
    target = DummyContainer("T")
    asp_vols = [5, 6, 7]

    run(
        lh.transfer_liquid(
            sources=sources,
            targets=[target],
            tip_racks=[],
            use_channels=[0],
            asp_vols=asp_vols,
            dis_vols=1,  # many-to-one accepts a scalar; in non-proportional mode each dispense = matching asp_vol
            mix_times=0,
        )
    )

    dispenses = [payload for name, payload in lh.calls if name == "dispense"]
    assert [d["vols"][0] for d in dispenses] == [float(v) for v in asp_vols]
    assert all(d["resources"] == [target] for d in dispenses)
|
|
||||||
|
|
||||||
|
|
||||||
def test_many_to_one_single_channel_before_stage_mixes_target_once():
    """Many-to-one with mix_stage='before': the target is mixed before sources are aspirated."""
    lh = FakeLiquidHandler(channel_num=1)
    lh.current_tip = iter(make_tip_iter(128))

    sources = [DummyContainer("S0"), DummyContainer("S1")]
    target = DummyContainer("T")

    run(
        lh.transfer_liquid(
            sources=sources,
            targets=[target],
            tip_racks=[],
            use_channels=[0],
            asp_vols=[5, 6],
            dis_vols=1,
            mix_stage="before",
            mix_times=2,
            mix_vol=4,
        )
    )

    aspirate_calls = [(idx, payload) for idx, (name, payload) in enumerate(lh.calls) if name == "aspirate"]
    assert len(aspirate_calls) >= 1
    mix_idx, mix_payload = aspirate_calls[0]
    assert mix_payload["resources"] == [target]
    assert mix_payload["vols"] == [4]
    # After the first mix, real aspiration from the sources begins.
    assert any(call["resources"] == [sources[0]] for _, call in aspirate_calls[1:])
|
|
||||||
|
|
||||||
|
|
||||||
def test_many_to_one_single_channel_proportional_mixing_uses_dis_vols_per_source():
    """Many-to-one proportional mode: per-source dis_vols are used verbatim for each dispense."""
    lh = FakeLiquidHandler(channel_num=1)
    lh.current_tip = iter(make_tip_iter(128))

    sources = [DummyContainer(f"S{i}") for i in range(3)]
    target = DummyContainer("T")
    asp_vols = [5, 6, 7]
    dis_vols = [1, 2, 3]

    run(
        lh.transfer_liquid(
            sources=sources,
            targets=[target],
            tip_racks=[],
            use_channels=[0],
            asp_vols=asp_vols,
            dis_vols=dis_vols,  # proportional mode
            mix_times=0,
        )
    )

    dispenses = [payload for name, payload in lh.calls if name == "dispense"]
    assert [d["vols"][0] for d in dispenses] == [float(v) for v in dis_vols]
|
|
||||||
|
|
||||||
|
|
||||||
def test_many_to_one_eight_channel_basic():
    """Many-to-one on 8 channels: all sources aspirated at once, dispensed into one target."""
    lh = FakeLiquidHandler(channel_num=8)
    lh.current_tip = iter(make_tip_iter(256))

    sources = [DummyContainer(f"S{i}") for i in range(8)]
    target = DummyContainer("T")
    asp_vols = [10 + i for i in range(8)]

    run(
        lh.transfer_liquid(
            sources=sources,
            targets=[target],
            tip_racks=[],
            use_channels=list(range(8)),
            asp_vols=asp_vols,
            dis_vols=999,  # non-proportional mode: each channel dispenses its matching asp_vol
            mix_times=0,
        )
    )

    aspirates = [payload for name, payload in lh.calls if name == "aspirate"]
    dispenses = [payload for name, payload in lh.calls if name == "dispense"]
    assert aspirates[0]["resources"] == sources
    assert aspirates[0]["vols"] == [float(v) for v in asp_vols]
    assert dispenses[0]["resources"] == [target] * 8
    assert dispenses[0]["vols"] == [float(v) for v in asp_vols]
|
|
||||||
|
|
||||||
|
|
||||||
def test_transfer_liquid_mode_detection_unsupported_shape_raises():
    """A source/target count mismatch that fits no transfer mode raises ValueError."""
    lh = FakeLiquidHandler(channel_num=8)
    lh.current_tip = iter(make_tip_iter(64))

    sources = [DummyContainer("S0"), DummyContainer("S1")]
    targets = [DummyContainer("T0"), DummyContainer("T1"), DummyContainer("T2")]

    with pytest.raises(ValueError, match="Unsupported transfer mode"):
        run(
            lh.transfer_liquid(
                sources=sources,
                targets=targets,
                tip_racks=[],
                use_channels=[0],
                asp_vols=[1, 1],
                dis_vols=[1, 1, 1],
                mix_times=0,
            )
        )
|
|
||||||
|
|
||||||
|
|
||||||
def test_mix_single_target_produces_matching_cycles():
    """mix() on a single target produces mix_time aspirate/dispense cycles of mix_vol each."""
    lh = FakeLiquidHandler(channel_num=1)
    target = DummyContainer("T_mix")

    run(lh.mix(targets=[target], mix_time=2, mix_vol=5))

    aspirates = [payload for name, payload in lh.calls if name == "aspirate"]
    dispenses = [payload for name, payload in lh.calls if name == "dispense"]
    assert len(aspirates) == len(dispenses) == 2
    assert all(call["resources"] == [target] for call in aspirates)
    assert all(call["vols"] == [5] for call in aspirates)
    assert all(call["resources"] == [target] for call in dispenses)
    assert all(call["vols"] == [5] for call in dispenses)
|
|
||||||
|
|
||||||
|
|
||||||
def test_mix_multiple_targets_supports_per_target_offsets():
    """mix() over several targets applies the per-target offset, height, and rate lists."""
    lh = FakeLiquidHandler(channel_num=1)
    targets = [DummyContainer("T0"), DummyContainer("T1")]
    offsets = ["left", "right"]
    heights = [0.1, 0.2]
    rates = [0.5, 1.0]

    run(
        lh.mix(
            targets=targets,
            mix_time=1,
            mix_vol=3,
            offsets=offsets,
            height_to_bottom=heights,
            mix_rate=rates,
        )
    )

    aspirates = [payload for name, payload in lh.calls if name == "aspirate"]
    assert len(aspirates) == 2
    assert aspirates[0]["resources"] == [targets[0]]
    assert aspirates[0]["offsets"] == [offsets[0]]
    assert aspirates[0]["liquid_height"] == [heights[0]]
    assert aspirates[0]["flow_rates"] == [rates[0]]
    assert aspirates[1]["resources"] == [targets[1]]
    assert aspirates[1]["offsets"] == [offsets[1]]
    assert aspirates[1]["liquid_height"] == [heights[1]]
    assert aspirates[1]["flow_rates"] == [rates[1]]
|
|
||||||
|
|
||||||
|
|
||||||
@@ -2,8 +2,9 @@ import pytest
|
|||||||
import json
|
import json
|
||||||
import os
|
import os
|
||||||
|
|
||||||
|
from pylabrobot.resources import Resource as ResourcePLR
|
||||||
from unilabos.resources.graphio import resource_bioyond_to_plr
|
from unilabos.resources.graphio import resource_bioyond_to_plr
|
||||||
from unilabos.resources.resource_tracker import ResourceTreeSet
|
from unilabos.ros.nodes.resource_tracker import ResourceTreeSet
|
||||||
from unilabos.registry.registry import lab_registry
|
from unilabos.registry.registry import lab_registry
|
||||||
|
|
||||||
from unilabos.resources.bioyond.decks import BIOYOND_PolymerReactionStation_Deck
|
from unilabos.resources.bioyond.decks import BIOYOND_PolymerReactionStation_Deck
|
||||||
|
|||||||
@@ -1 +1 @@
|
|||||||
__version__ = "0.10.17"
|
__version__ = "0.10.14"
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
import threading
|
import threading
|
||||||
|
|
||||||
from unilabos.resources.resource_tracker import ResourceTreeSet
|
from unilabos.ros.nodes.resource_tracker import ResourceTreeSet
|
||||||
from unilabos.utils import logger
|
from unilabos.utils import logger
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -7,6 +7,7 @@ import sys
|
|||||||
import threading
|
import threading
|
||||||
import time
|
import time
|
||||||
from typing import Dict, Any, List
|
from typing import Dict, Any, List
|
||||||
|
|
||||||
import networkx as nx
|
import networkx as nx
|
||||||
import yaml
|
import yaml
|
||||||
|
|
||||||
@@ -16,15 +17,9 @@ unilabos_dir = os.path.dirname(os.path.dirname(current_dir))
|
|||||||
if unilabos_dir not in sys.path:
|
if unilabos_dir not in sys.path:
|
||||||
sys.path.append(unilabos_dir)
|
sys.path.append(unilabos_dir)
|
||||||
|
|
||||||
from unilabos.app.utils import cleanup_for_restart
|
|
||||||
from unilabos.utils.banner_print import print_status, print_unilab_banner
|
from unilabos.utils.banner_print import print_status, print_unilab_banner
|
||||||
from unilabos.config.config import load_config, BasicConfig, HTTPConfig
|
from unilabos.config.config import load_config, BasicConfig, HTTPConfig
|
||||||
|
|
||||||
# Global restart flags (used by ws_client and web/server)
|
|
||||||
_restart_requested: bool = False
|
|
||||||
_restart_reason: str = ""
|
|
||||||
|
|
||||||
|
|
||||||
def load_config_from_file(config_path):
|
def load_config_from_file(config_path):
|
||||||
if config_path is None:
|
if config_path is None:
|
||||||
config_path = os.environ.get("UNILABOS_BASICCONFIG_CONFIG_PATH", None)
|
config_path = os.environ.get("UNILABOS_BASICCONFIG_CONFIG_PATH", None)
|
||||||
@@ -46,7 +41,7 @@ def convert_argv_dashes_to_underscores(args: argparse.ArgumentParser):
|
|||||||
for i, arg in enumerate(sys.argv):
|
for i, arg in enumerate(sys.argv):
|
||||||
for option_string in option_strings:
|
for option_string in option_strings:
|
||||||
if arg.startswith(option_string):
|
if arg.startswith(option_string):
|
||||||
new_arg = arg[:2] + arg[2 : len(option_string)].replace("-", "_") + arg[len(option_string) :]
|
new_arg = arg[:2] + arg[2:len(option_string)].replace("-", "_") + arg[len(option_string):]
|
||||||
sys.argv[i] = new_arg
|
sys.argv[i] = new_arg
|
||||||
break
|
break
|
||||||
|
|
||||||
@@ -54,8 +49,6 @@ def convert_argv_dashes_to_underscores(args: argparse.ArgumentParser):
|
|||||||
def parse_args():
|
def parse_args():
|
||||||
"""解析命令行参数"""
|
"""解析命令行参数"""
|
||||||
parser = argparse.ArgumentParser(description="Start Uni-Lab Edge server.")
|
parser = argparse.ArgumentParser(description="Start Uni-Lab Edge server.")
|
||||||
subparsers = parser.add_subparsers(title="Valid subcommands", dest="command")
|
|
||||||
|
|
||||||
parser.add_argument("-g", "--graph", help="Physical setup graph file path.")
|
parser.add_argument("-g", "--graph", help="Physical setup graph file path.")
|
||||||
parser.add_argument("-c", "--controllers", default=None, help="Controllers config file path.")
|
parser.add_argument("-c", "--controllers", default=None, help="Controllers config file path.")
|
||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
@@ -160,50 +153,6 @@ def parse_args():
|
|||||||
default=False,
|
default=False,
|
||||||
help="Complete registry information",
|
help="Complete registry information",
|
||||||
)
|
)
|
||||||
parser.add_argument(
|
|
||||||
"--check_mode",
|
|
||||||
action="store_true",
|
|
||||||
default=False,
|
|
||||||
help="Run in check mode for CI: validates registry imports and ensures no file changes",
|
|
||||||
)
|
|
||||||
parser.add_argument(
|
|
||||||
"--no_update_feedback",
|
|
||||||
action="store_true",
|
|
||||||
help="Disable sending update feedback to server",
|
|
||||||
)
|
|
||||||
# workflow upload subcommand
|
|
||||||
workflow_parser = subparsers.add_parser(
|
|
||||||
"workflow_upload",
|
|
||||||
aliases=["wf"],
|
|
||||||
help="Upload workflow from xdl/json/python files",
|
|
||||||
)
|
|
||||||
workflow_parser.add_argument(
|
|
||||||
"-f",
|
|
||||||
"--workflow_file",
|
|
||||||
type=str,
|
|
||||||
required=True,
|
|
||||||
help="Path to the workflow file (JSON format)",
|
|
||||||
)
|
|
||||||
workflow_parser.add_argument(
|
|
||||||
"-n",
|
|
||||||
"--workflow_name",
|
|
||||||
type=str,
|
|
||||||
default=None,
|
|
||||||
help="Workflow name, if not provided will use the name from file or filename",
|
|
||||||
)
|
|
||||||
workflow_parser.add_argument(
|
|
||||||
"--tags",
|
|
||||||
type=str,
|
|
||||||
nargs="*",
|
|
||||||
default=[],
|
|
||||||
help="Tags for the workflow (space-separated)",
|
|
||||||
)
|
|
||||||
workflow_parser.add_argument(
|
|
||||||
"--published",
|
|
||||||
action="store_true",
|
|
||||||
default=False,
|
|
||||||
help="Whether to publish the workflow (default: False)",
|
|
||||||
)
|
|
||||||
return parser
|
return parser
|
||||||
|
|
||||||
|
|
||||||
@@ -216,12 +165,10 @@ def main():
|
|||||||
args_dict = vars(args)
|
args_dict = vars(args)
|
||||||
|
|
||||||
# 环境检查 - 检查并自动安装必需的包 (可选)
|
# 环境检查 - 检查并自动安装必需的包 (可选)
|
||||||
skip_env_check = args_dict.get("skip_env_check", False)
|
if not args_dict.get("skip_env_check", False):
|
||||||
check_mode = args_dict.get("check_mode", False)
|
|
||||||
|
|
||||||
if not skip_env_check:
|
|
||||||
from unilabos.utils.environment_check import check_environment
|
from unilabos.utils.environment_check import check_environment
|
||||||
|
|
||||||
|
print_status("正在进行环境依赖检查...", "info")
|
||||||
if not check_environment(auto_install=True):
|
if not check_environment(auto_install=True):
|
||||||
print_status("环境检查失败,程序退出", "error")
|
print_status("环境检查失败,程序退出", "error")
|
||||||
os._exit(1)
|
os._exit(1)
|
||||||
@@ -230,21 +177,7 @@ def main():
|
|||||||
|
|
||||||
# 加载配置文件,优先加载config,然后从env读取
|
# 加载配置文件,优先加载config,然后从env读取
|
||||||
config_path = args_dict.get("config")
|
config_path = args_dict.get("config")
|
||||||
|
if os.getcwd().endswith("unilabos_data"):
|
||||||
if check_mode:
|
|
||||||
args_dict["working_dir"] = os.path.abspath(os.getcwd())
|
|
||||||
# 当 skip_env_check 时,默认使用当前目录作为 working_dir
|
|
||||||
if skip_env_check and not args_dict.get("working_dir") and not config_path:
|
|
||||||
working_dir = os.path.abspath(os.getcwd())
|
|
||||||
print_status(f"跳过环境检查模式:使用当前目录作为工作目录 {working_dir}", "info")
|
|
||||||
# 检查当前目录是否有 local_config.py
|
|
||||||
local_config_in_cwd = os.path.join(working_dir, "local_config.py")
|
|
||||||
if os.path.exists(local_config_in_cwd):
|
|
||||||
config_path = local_config_in_cwd
|
|
||||||
print_status(f"发现本地配置文件: {config_path}", "info")
|
|
||||||
else:
|
|
||||||
print_status(f"未指定config路径,可通过 --config 传入 local_config.py 文件路径", "info")
|
|
||||||
elif os.getcwd().endswith("unilabos_data"):
|
|
||||||
working_dir = os.path.abspath(os.getcwd())
|
working_dir = os.path.abspath(os.getcwd())
|
||||||
else:
|
else:
|
||||||
working_dir = os.path.abspath(os.path.join(os.getcwd(), "unilabos_data"))
|
working_dir = os.path.abspath(os.path.join(os.getcwd(), "unilabos_data"))
|
||||||
@@ -263,7 +196,7 @@ def main():
|
|||||||
working_dir = os.path.dirname(config_path)
|
working_dir = os.path.dirname(config_path)
|
||||||
elif os.path.exists(working_dir) and os.path.exists(os.path.join(working_dir, "local_config.py")):
|
elif os.path.exists(working_dir) and os.path.exists(os.path.join(working_dir, "local_config.py")):
|
||||||
config_path = os.path.join(working_dir, "local_config.py")
|
config_path = os.path.join(working_dir, "local_config.py")
|
||||||
elif not skip_env_check and not config_path and (
|
elif not config_path and (
|
||||||
not os.path.exists(working_dir) or not os.path.exists(os.path.join(working_dir, "local_config.py"))
|
not os.path.exists(working_dir) or not os.path.exists(os.path.join(working_dir, "local_config.py"))
|
||||||
):
|
):
|
||||||
print_status(f"未指定config路径,可通过 --config 传入 local_config.py 文件路径", "info")
|
print_status(f"未指定config路径,可通过 --config 传入 local_config.py 文件路径", "info")
|
||||||
@@ -277,11 +210,9 @@ def main():
|
|||||||
print_status(f"已创建 local_config.py 路径: {config_path}", "info")
|
print_status(f"已创建 local_config.py 路径: {config_path}", "info")
|
||||||
else:
|
else:
|
||||||
os._exit(1)
|
os._exit(1)
|
||||||
|
# 加载配置文件
|
||||||
# 加载配置文件 (check_mode 跳过)
|
|
||||||
print_status(f"当前工作目录为 {working_dir}", "info")
|
print_status(f"当前工作目录为 {working_dir}", "info")
|
||||||
if not check_mode:
|
load_config_from_file(config_path)
|
||||||
load_config_from_file(config_path)
|
|
||||||
|
|
||||||
# 根据配置重新设置日志级别
|
# 根据配置重新设置日志级别
|
||||||
from unilabos.utils.log import configure_logger, logger
|
from unilabos.utils.log import configure_logger, logger
|
||||||
@@ -310,12 +241,9 @@ def main():
|
|||||||
if args_dict.get("sk", ""):
|
if args_dict.get("sk", ""):
|
||||||
BasicConfig.sk = args_dict.get("sk", "")
|
BasicConfig.sk = args_dict.get("sk", "")
|
||||||
print_status("传入了sk参数,优先采用传入参数!", "info")
|
print_status("传入了sk参数,优先采用传入参数!", "info")
|
||||||
BasicConfig.working_dir = working_dir
|
|
||||||
|
|
||||||
workflow_upload = args_dict.get("command") in ("workflow_upload", "wf")
|
|
||||||
|
|
||||||
# 使用远程资源启动
|
# 使用远程资源启动
|
||||||
if not workflow_upload and args_dict["use_remote_resource"]:
|
if args_dict["use_remote_resource"]:
|
||||||
print_status("使用远程资源启动", "info")
|
print_status("使用远程资源启动", "info")
|
||||||
from unilabos.app.web import http_client
|
from unilabos.app.web import http_client
|
||||||
|
|
||||||
@@ -328,16 +256,15 @@ def main():
|
|||||||
|
|
||||||
BasicConfig.port = args_dict["port"] if args_dict["port"] else BasicConfig.port
|
BasicConfig.port = args_dict["port"] if args_dict["port"] else BasicConfig.port
|
||||||
BasicConfig.disable_browser = args_dict["disable_browser"] or BasicConfig.disable_browser
|
BasicConfig.disable_browser = args_dict["disable_browser"] or BasicConfig.disable_browser
|
||||||
|
BasicConfig.working_dir = working_dir
|
||||||
BasicConfig.is_host_mode = not args_dict.get("is_slave", False)
|
BasicConfig.is_host_mode = not args_dict.get("is_slave", False)
|
||||||
BasicConfig.slave_no_host = args_dict.get("slave_no_host", False)
|
BasicConfig.slave_no_host = args_dict.get("slave_no_host", False)
|
||||||
BasicConfig.upload_registry = args_dict.get("upload_registry", False)
|
BasicConfig.upload_registry = args_dict.get("upload_registry", False)
|
||||||
BasicConfig.no_update_feedback = args_dict.get("no_update_feedback", False)
|
|
||||||
BasicConfig.communication_protocol = "websocket"
|
BasicConfig.communication_protocol = "websocket"
|
||||||
machine_name = os.popen("hostname").read().strip()
|
machine_name = os.popen("hostname").read().strip()
|
||||||
machine_name = "".join([c if c.isalnum() or c == "_" else "_" for c in machine_name])
|
machine_name = "".join([c if c.isalnum() or c == "_" else "_" for c in machine_name])
|
||||||
BasicConfig.machine_name = machine_name
|
BasicConfig.machine_name = machine_name
|
||||||
BasicConfig.vis_2d_enable = args_dict["2d_vis"]
|
BasicConfig.vis_2d_enable = args_dict["2d_vis"]
|
||||||
BasicConfig.check_mode = check_mode
|
|
||||||
|
|
||||||
from unilabos.resources.graphio import (
|
from unilabos.resources.graphio import (
|
||||||
read_node_link_json,
|
read_node_link_json,
|
||||||
@@ -351,41 +278,15 @@ def main():
|
|||||||
from unilabos.app.web import start_server
|
from unilabos.app.web import start_server
|
||||||
from unilabos.app.register import register_devices_and_resources
|
from unilabos.app.register import register_devices_and_resources
|
||||||
from unilabos.resources.graphio import modify_to_backend_format
|
from unilabos.resources.graphio import modify_to_backend_format
|
||||||
from unilabos.resources.resource_tracker import ResourceTreeSet, ResourceDict
|
from unilabos.ros.nodes.resource_tracker import ResourceTreeSet, ResourceDict
|
||||||
|
|
||||||
# 显示启动横幅
|
# 显示启动横幅
|
||||||
print_unilab_banner(args_dict)
|
print_unilab_banner(args_dict)
|
||||||
|
|
||||||
# 注册表 - check_mode 时强制启用 complete_registry
|
# 注册表
|
||||||
complete_registry = args_dict.get("complete_registry", False) or check_mode
|
lab_registry = build_registry(
|
||||||
lab_registry = build_registry(args_dict["registry_path"], complete_registry, BasicConfig.upload_registry)
|
args_dict["registry_path"], args_dict.get("complete_registry", False), args_dict["upload_registry"]
|
||||||
|
)
|
||||||
# Check mode: complete_registry 完成后直接退出,git diff 检测由 CI workflow 执行
|
|
||||||
if check_mode:
|
|
||||||
print_status("Check mode: complete_registry 完成,退出", "info")
|
|
||||||
os._exit(0)
|
|
||||||
|
|
||||||
if BasicConfig.upload_registry:
|
|
||||||
# 设备注册到服务端 - 需要 ak 和 sk
|
|
||||||
if BasicConfig.ak and BasicConfig.sk:
|
|
||||||
print_status("开始注册设备到服务端...", "info")
|
|
||||||
try:
|
|
||||||
register_devices_and_resources(lab_registry)
|
|
||||||
print_status("设备注册完成", "info")
|
|
||||||
except Exception as e:
|
|
||||||
print_status(f"设备注册失败: {e}", "error")
|
|
||||||
else:
|
|
||||||
print_status("未提供 ak 和 sk,跳过设备注册", "info")
|
|
||||||
else:
|
|
||||||
print_status("本次启动注册表不报送云端,如果您需要联网调试,请在启动命令增加--upload_registry", "warning")
|
|
||||||
|
|
||||||
# 处理 workflow_upload 子命令
|
|
||||||
if workflow_upload:
|
|
||||||
from unilabos.workflow.wf_utils import handle_workflow_upload_command
|
|
||||||
|
|
||||||
handle_workflow_upload_command(args_dict)
|
|
||||||
print_status("工作流上传完成,程序退出", "info")
|
|
||||||
os._exit(0)
|
|
||||||
|
|
||||||
if not BasicConfig.ak or not BasicConfig.sk:
|
if not BasicConfig.ak or not BasicConfig.sk:
|
||||||
print_status("后续运行必须拥有一个实验室,请前往 https://uni-lab.bohrium.com 注册实验室!", "warning")
|
print_status("后续运行必须拥有一个实验室,请前往 https://uni-lab.bohrium.com 注册实验室!", "warning")
|
||||||
@@ -467,6 +368,20 @@ def main():
|
|||||||
args_dict["devices_config"] = resource_tree_set
|
args_dict["devices_config"] = resource_tree_set
|
||||||
args_dict["graph"] = graph_res.physical_setup_graph
|
args_dict["graph"] = graph_res.physical_setup_graph
|
||||||
|
|
||||||
|
if BasicConfig.upload_registry:
|
||||||
|
# 设备注册到服务端 - 需要 ak 和 sk
|
||||||
|
if BasicConfig.ak and BasicConfig.sk:
|
||||||
|
print_status("开始注册设备到服务端...", "info")
|
||||||
|
try:
|
||||||
|
register_devices_and_resources(lab_registry)
|
||||||
|
print_status("设备注册完成", "info")
|
||||||
|
except Exception as e:
|
||||||
|
print_status(f"设备注册失败: {e}", "error")
|
||||||
|
else:
|
||||||
|
print_status("未提供 ak 和 sk,跳过设备注册", "info")
|
||||||
|
else:
|
||||||
|
print_status("本次启动注册表不报送云端,如果您需要联网调试,请在启动命令增加--upload_registry", "warning")
|
||||||
|
|
||||||
if args_dict["controllers"] is not None:
|
if args_dict["controllers"] is not None:
|
||||||
args_dict["controllers_config"] = yaml.safe_load(open(args_dict["controllers"], encoding="utf-8"))
|
args_dict["controllers_config"] = yaml.safe_load(open(args_dict["controllers"], encoding="utf-8"))
|
||||||
else:
|
else:
|
||||||
@@ -481,7 +396,6 @@ def main():
|
|||||||
comm_client = get_communication_client()
|
comm_client = get_communication_client()
|
||||||
if "websocket" in args_dict["app_bridges"]:
|
if "websocket" in args_dict["app_bridges"]:
|
||||||
args_dict["bridges"].append(comm_client)
|
args_dict["bridges"].append(comm_client)
|
||||||
|
|
||||||
def _exit(signum, frame):
|
def _exit(signum, frame):
|
||||||
comm_client.stop()
|
comm_client.stop()
|
||||||
sys.exit(0)
|
sys.exit(0)
|
||||||
@@ -523,13 +437,16 @@ def main():
|
|||||||
resource_visualization.start()
|
resource_visualization.start()
|
||||||
except OSError as e:
|
except OSError as e:
|
||||||
if "AMENT_PREFIX_PATH" in str(e):
|
if "AMENT_PREFIX_PATH" in str(e):
|
||||||
print_status(f"ROS 2环境未正确设置,跳过3D可视化启动。错误详情: {e}", "warning")
|
print_status(
|
||||||
|
f"ROS 2环境未正确设置,跳过3D可视化启动。错误详情: {e}",
|
||||||
|
"warning"
|
||||||
|
)
|
||||||
print_status(
|
print_status(
|
||||||
"建议解决方案:\n"
|
"建议解决方案:\n"
|
||||||
"1. 激活Conda环境: conda activate unilab\n"
|
"1. 激活Conda环境: conda activate unilab\n"
|
||||||
"2. 或使用 --backend simple 参数\n"
|
"2. 或使用 --backend simple 参数\n"
|
||||||
"3. 或使用 --visual disable 参数禁用可视化",
|
"3. 或使用 --visual disable 参数禁用可视化",
|
||||||
"info",
|
"info"
|
||||||
)
|
)
|
||||||
else:
|
else:
|
||||||
raise
|
raise
|
||||||
@@ -537,19 +454,13 @@ def main():
|
|||||||
time.sleep(1)
|
time.sleep(1)
|
||||||
else:
|
else:
|
||||||
start_backend(**args_dict)
|
start_backend(**args_dict)
|
||||||
restart_requested = start_server(
|
start_server(
|
||||||
open_browser=not args_dict["disable_browser"],
|
open_browser=not args_dict["disable_browser"],
|
||||||
port=BasicConfig.port,
|
port=BasicConfig.port,
|
||||||
)
|
)
|
||||||
if restart_requested:
|
|
||||||
print_status("[Main] Restart requested, cleaning up...", "info")
|
|
||||||
cleanup_for_restart()
|
|
||||||
return
|
|
||||||
else:
|
else:
|
||||||
start_backend(**args_dict)
|
start_backend(**args_dict)
|
||||||
|
start_server(
|
||||||
# 启动服务器(默认支持WebSocket触发重启)
|
|
||||||
restart_requested = start_server(
|
|
||||||
open_browser=not args_dict["disable_browser"],
|
open_browser=not args_dict["disable_browser"],
|
||||||
port=BasicConfig.port,
|
port=BasicConfig.port,
|
||||||
)
|
)
|
||||||
|
|||||||
@@ -1,176 +0,0 @@
|
|||||||
"""
|
|
||||||
UniLabOS 应用工具函数
|
|
||||||
|
|
||||||
提供清理、重启等工具函数
|
|
||||||
"""
|
|
||||||
|
|
||||||
import glob
|
|
||||||
import os
|
|
||||||
import shutil
|
|
||||||
import sys
|
|
||||||
|
|
||||||
|
|
||||||
def patch_rclpy_dll_windows():
|
|
||||||
"""在 Windows + conda 环境下为 rclpy 打 DLL 加载补丁"""
|
|
||||||
if sys.platform != "win32" or not os.environ.get("CONDA_PREFIX"):
|
|
||||||
return
|
|
||||||
try:
|
|
||||||
import rclpy
|
|
||||||
|
|
||||||
return
|
|
||||||
except ImportError as e:
|
|
||||||
if not str(e).startswith("DLL load failed"):
|
|
||||||
return
|
|
||||||
cp = os.environ["CONDA_PREFIX"]
|
|
||||||
impl = os.path.join(cp, "Lib", "site-packages", "rclpy", "impl", "implementation_singleton.py")
|
|
||||||
pyd = glob.glob(os.path.join(cp, "Lib", "site-packages", "rclpy", "_rclpy_pybind11*.pyd"))
|
|
||||||
if not os.path.exists(impl) or not pyd:
|
|
||||||
return
|
|
||||||
with open(impl, "r", encoding="utf-8") as f:
|
|
||||||
content = f.read()
|
|
||||||
lib_bin = os.path.join(cp, "Library", "bin").replace("\\", "/")
|
|
||||||
patch = f'# UniLabOS DLL Patch\nimport os,ctypes\nos.add_dll_directory("{lib_bin}") if hasattr(os,"add_dll_directory") else None\ntry: ctypes.CDLL("{pyd[0].replace(chr(92),"/")}")\nexcept: pass\n# End Patch\n'
|
|
||||||
shutil.copy2(impl, impl + ".bak")
|
|
||||||
with open(impl, "w", encoding="utf-8") as f:
|
|
||||||
f.write(patch + content)
|
|
||||||
|
|
||||||
|
|
||||||
patch_rclpy_dll_windows()
|
|
||||||
|
|
||||||
import gc
|
|
||||||
import threading
|
|
||||||
import time
|
|
||||||
|
|
||||||
from unilabos.utils.banner_print import print_status
|
|
||||||
|
|
||||||
|
|
||||||
def cleanup_for_restart() -> bool:
|
|
||||||
"""
|
|
||||||
Clean up all resources for restart without exiting the process.
|
|
||||||
|
|
||||||
This function prepares the system for re-initialization by:
|
|
||||||
1. Stopping all communication clients
|
|
||||||
2. Destroying ROS nodes
|
|
||||||
3. Resetting singletons
|
|
||||||
4. Waiting for threads to finish
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
bool: True if cleanup was successful, False otherwise
|
|
||||||
"""
|
|
||||||
print_status("[Restart] Starting cleanup for restart...", "info")
|
|
||||||
|
|
||||||
# Step 1: Stop WebSocket communication client
|
|
||||||
print_status("[Restart] Step 1: Stopping WebSocket client...", "info")
|
|
||||||
try:
|
|
||||||
from unilabos.app.communication import get_communication_client
|
|
||||||
|
|
||||||
comm_client = get_communication_client()
|
|
||||||
if comm_client is not None:
|
|
||||||
comm_client.stop()
|
|
||||||
print_status("[Restart] WebSocket client stopped", "info")
|
|
||||||
except Exception as e:
|
|
||||||
print_status(f"[Restart] Error stopping WebSocket: {e}", "warning")
|
|
||||||
|
|
||||||
# Step 2: Get HostNode and cleanup ROS
|
|
||||||
print_status("[Restart] Step 2: Cleaning up ROS nodes...", "info")
|
|
||||||
try:
|
|
||||||
from unilabos.ros.nodes.presets.host_node import HostNode
|
|
||||||
import rclpy
|
|
||||||
from rclpy.timer import Timer
|
|
||||||
|
|
||||||
host_instance = HostNode.get_instance(timeout=5)
|
|
||||||
if host_instance is not None:
|
|
||||||
print_status(f"[Restart] Found HostNode: {host_instance.device_id}", "info")
|
|
||||||
|
|
||||||
# Gracefully shutdown background threads
|
|
||||||
print_status("[Restart] Shutting down background threads...", "info")
|
|
||||||
HostNode.shutdown_background_threads(timeout=5.0)
|
|
||||||
print_status("[Restart] Background threads shutdown complete", "info")
|
|
||||||
|
|
||||||
# Stop discovery timer
|
|
||||||
if hasattr(host_instance, "_discovery_timer") and isinstance(host_instance._discovery_timer, Timer):
|
|
||||||
host_instance._discovery_timer.cancel()
|
|
||||||
print_status("[Restart] Discovery timer cancelled", "info")
|
|
||||||
|
|
||||||
# Destroy device nodes
|
|
||||||
device_count = len(host_instance.devices_instances)
|
|
||||||
print_status(f"[Restart] Destroying {device_count} device instances...", "info")
|
|
||||||
for device_id, device_node in list(host_instance.devices_instances.items()):
|
|
||||||
try:
|
|
||||||
if hasattr(device_node, "ros_node_instance") and device_node.ros_node_instance is not None:
|
|
||||||
device_node.ros_node_instance.destroy_node()
|
|
||||||
print_status(f"[Restart] Device {device_id} destroyed", "info")
|
|
||||||
except Exception as e:
|
|
||||||
print_status(f"[Restart] Error destroying device {device_id}: {e}", "warning")
|
|
||||||
|
|
||||||
# Clear devices instances
|
|
||||||
host_instance.devices_instances.clear()
|
|
||||||
host_instance.devices_names.clear()
|
|
||||||
|
|
||||||
# Destroy host node
|
|
||||||
try:
|
|
||||||
host_instance.destroy_node()
|
|
||||||
print_status("[Restart] HostNode destroyed", "info")
|
|
||||||
except Exception as e:
|
|
||||||
print_status(f"[Restart] Error destroying HostNode: {e}", "warning")
|
|
||||||
|
|
||||||
# Reset HostNode state
|
|
||||||
HostNode.reset_state()
|
|
||||||
print_status("[Restart] HostNode state reset", "info")
|
|
||||||
|
|
||||||
# Shutdown executor first (to stop executor.spin() gracefully)
|
|
||||||
if hasattr(rclpy, "__executor") and rclpy.__executor is not None:
|
|
||||||
try:
|
|
||||||
rclpy.__executor.shutdown()
|
|
||||||
rclpy.__executor = None # Clear for restart
|
|
||||||
print_status("[Restart] ROS executor shutdown complete", "info")
|
|
||||||
except Exception as e:
|
|
||||||
print_status(f"[Restart] Error shutting down executor: {e}", "warning")
|
|
||||||
|
|
||||||
# Shutdown rclpy
|
|
||||||
if rclpy.ok():
|
|
||||||
rclpy.shutdown()
|
|
||||||
print_status("[Restart] rclpy shutdown complete", "info")
|
|
||||||
|
|
||||||
except ImportError as e:
|
|
||||||
print_status(f"[Restart] ROS modules not available: {e}", "warning")
|
|
||||||
except Exception as e:
|
|
||||||
print_status(f"[Restart] Error in ROS cleanup: {e}", "warning")
|
|
||||||
return False
|
|
||||||
|
|
||||||
# Step 3: Reset communication client singleton
|
|
||||||
print_status("[Restart] Step 3: Resetting singletons...", "info")
|
|
||||||
try:
|
|
||||||
from unilabos.app import communication
|
|
||||||
|
|
||||||
if hasattr(communication, "_communication_client"):
|
|
||||||
communication._communication_client = None
|
|
||||||
print_status("[Restart] Communication client singleton reset", "info")
|
|
||||||
except Exception as e:
|
|
||||||
print_status(f"[Restart] Error resetting communication singleton: {e}", "warning")
|
|
||||||
|
|
||||||
# Step 4: Wait for threads to finish
|
|
||||||
print_status("[Restart] Step 4: Waiting for threads to finish...", "info")
|
|
||||||
time.sleep(3) # Give threads time to finish
|
|
||||||
|
|
||||||
# Check remaining threads
|
|
||||||
remaining_threads = []
|
|
||||||
for t in threading.enumerate():
|
|
||||||
if t.name != "MainThread" and t.is_alive():
|
|
||||||
remaining_threads.append(t.name)
|
|
||||||
|
|
||||||
if remaining_threads:
|
|
||||||
print_status(
|
|
||||||
f"[Restart] Warning: {len(remaining_threads)} threads still running: {remaining_threads}", "warning"
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
print_status("[Restart] All threads stopped", "info")
|
|
||||||
|
|
||||||
# Step 5: Force garbage collection
|
|
||||||
print_status("[Restart] Step 5: Running garbage collection...", "info")
|
|
||||||
gc.collect()
|
|
||||||
gc.collect() # Run twice for weak references
|
|
||||||
print_status("[Restart] Garbage collection complete", "info")
|
|
||||||
|
|
||||||
print_status("[Restart] Cleanup complete. Ready for re-initialization.", "info")
|
|
||||||
return True
|
|
||||||
@@ -6,10 +6,12 @@ HTTP客户端模块
|
|||||||
|
|
||||||
import json
|
import json
|
||||||
import os
|
import os
|
||||||
|
import time
|
||||||
|
from threading import Thread
|
||||||
from typing import List, Dict, Any, Optional
|
from typing import List, Dict, Any, Optional
|
||||||
|
|
||||||
import requests
|
import requests
|
||||||
from unilabos.resources.resource_tracker import ResourceTreeSet
|
from unilabos.ros.nodes.resource_tracker import ResourceTreeSet
|
||||||
from unilabos.utils.log import info
|
from unilabos.utils.log import info
|
||||||
from unilabos.config.config import HTTPConfig, BasicConfig
|
from unilabos.config.config import HTTPConfig, BasicConfig
|
||||||
from unilabos.utils import logger
|
from unilabos.utils import logger
|
||||||
@@ -74,8 +76,7 @@ class HTTPClient:
|
|||||||
Dict[str, str]: 旧UUID到新UUID的映射关系 {old_uuid: new_uuid}
|
Dict[str, str]: 旧UUID到新UUID的映射关系 {old_uuid: new_uuid}
|
||||||
"""
|
"""
|
||||||
with open(os.path.join(BasicConfig.working_dir, "req_resource_tree_add.json"), "w", encoding="utf-8") as f:
|
with open(os.path.join(BasicConfig.working_dir, "req_resource_tree_add.json"), "w", encoding="utf-8") as f:
|
||||||
payload = {"nodes": [x for xs in resources.dump() for x in xs], "mount_uuid": mount_uuid}
|
f.write(json.dumps({"nodes": [x for xs in resources.dump() for x in xs], "mount_uuid": mount_uuid}, indent=4))
|
||||||
f.write(json.dumps(payload, indent=4))
|
|
||||||
# 从序列化数据中提取所有节点的UUID(保存旧UUID)
|
# 从序列化数据中提取所有节点的UUID(保存旧UUID)
|
||||||
old_uuids = {n.res_content.uuid: n for n in resources.all_nodes}
|
old_uuids = {n.res_content.uuid: n for n in resources.all_nodes}
|
||||||
if not self.initialized or first_add:
|
if not self.initialized or first_add:
|
||||||
@@ -334,67 +335,6 @@ class HTTPClient:
|
|||||||
logger.error(f"响应内容: {response.text}")
|
logger.error(f"响应内容: {response.text}")
|
||||||
return None
|
return None
|
||||||
|
|
||||||
def workflow_import(
|
|
||||||
self,
|
|
||||||
name: str,
|
|
||||||
workflow_uuid: str,
|
|
||||||
workflow_name: str,
|
|
||||||
nodes: List[Dict[str, Any]],
|
|
||||||
edges: List[Dict[str, Any]],
|
|
||||||
tags: Optional[List[str]] = None,
|
|
||||||
published: bool = False,
|
|
||||||
) -> Dict[str, Any]:
|
|
||||||
"""
|
|
||||||
导入工作流到服务器
|
|
||||||
|
|
||||||
Args:
|
|
||||||
name: 工作流名称(顶层)
|
|
||||||
workflow_uuid: 工作流UUID
|
|
||||||
workflow_name: 工作流名称(data内部)
|
|
||||||
nodes: 工作流节点列表
|
|
||||||
edges: 工作流边列表
|
|
||||||
tags: 工作流标签列表,默认为空列表
|
|
||||||
published: 是否发布工作流,默认为False
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
Dict: API响应数据,包含 code 和 data (uuid, name)
|
|
||||||
"""
|
|
||||||
# target_lab_uuid 暂时使用默认值,后续由后端根据 ak/sk 获取
|
|
||||||
payload = {
|
|
||||||
"target_lab_uuid": "28c38bb0-63f6-4352-b0d8-b5b8eb1766d5",
|
|
||||||
"name": name,
|
|
||||||
"data": {
|
|
||||||
"workflow_uuid": workflow_uuid,
|
|
||||||
"workflow_name": workflow_name,
|
|
||||||
"nodes": nodes,
|
|
||||||
"edges": edges,
|
|
||||||
"tags": tags if tags is not None else [],
|
|
||||||
"published": published,
|
|
||||||
},
|
|
||||||
}
|
|
||||||
# 保存请求到文件
|
|
||||||
with open(os.path.join(BasicConfig.working_dir, "req_workflow_upload.json"), "w", encoding="utf-8") as f:
|
|
||||||
f.write(json.dumps(payload, indent=4, ensure_ascii=False))
|
|
||||||
|
|
||||||
response = requests.post(
|
|
||||||
f"{self.remote_addr}/lab/workflow/owner/import",
|
|
||||||
json=payload,
|
|
||||||
headers={"Authorization": f"Lab {self.auth}"},
|
|
||||||
timeout=60,
|
|
||||||
)
|
|
||||||
# 保存响应到文件
|
|
||||||
with open(os.path.join(BasicConfig.working_dir, "res_workflow_upload.json"), "w", encoding="utf-8") as f:
|
|
||||||
f.write(f"{response.status_code}" + "\n" + response.text)
|
|
||||||
|
|
||||||
if response.status_code == 200:
|
|
||||||
res = response.json()
|
|
||||||
if "code" in res and res["code"] != 0:
|
|
||||||
logger.error(f"导入工作流失败: {response.text}")
|
|
||||||
return res
|
|
||||||
else:
|
|
||||||
logger.error(f"导入工作流失败: {response.status_code}, {response.text}")
|
|
||||||
return {"code": response.status_code, "message": response.text}
|
|
||||||
|
|
||||||
|
|
||||||
# 创建默认客户端实例
|
# 创建默认客户端实例
|
||||||
http_client = HTTPClient()
|
http_client = HTTPClient()
|
||||||
|
|||||||
@@ -6,6 +6,7 @@ Web服务器模块
|
|||||||
|
|
||||||
import webbrowser
|
import webbrowser
|
||||||
|
|
||||||
|
import uvicorn
|
||||||
from fastapi import FastAPI, Request
|
from fastapi import FastAPI, Request
|
||||||
from fastapi.middleware.cors import CORSMiddleware
|
from fastapi.middleware.cors import CORSMiddleware
|
||||||
from starlette.responses import Response
|
from starlette.responses import Response
|
||||||
@@ -95,7 +96,7 @@ def setup_server() -> FastAPI:
|
|||||||
return app
|
return app
|
||||||
|
|
||||||
|
|
||||||
def start_server(host: str = "0.0.0.0", port: int = 8002, open_browser: bool = True) -> bool:
|
def start_server(host: str = "0.0.0.0", port: int = 8002, open_browser: bool = True) -> None:
|
||||||
"""
|
"""
|
||||||
启动服务器
|
启动服务器
|
||||||
|
|
||||||
@@ -103,14 +104,7 @@ def start_server(host: str = "0.0.0.0", port: int = 8002, open_browser: bool = T
|
|||||||
host: 服务器主机
|
host: 服务器主机
|
||||||
port: 服务器端口
|
port: 服务器端口
|
||||||
open_browser: 是否自动打开浏览器
|
open_browser: 是否自动打开浏览器
|
||||||
|
|
||||||
Returns:
|
|
||||||
bool: True if restart was requested, False otherwise
|
|
||||||
"""
|
"""
|
||||||
import threading
|
|
||||||
import time
|
|
||||||
from uvicorn import Config, Server
|
|
||||||
|
|
||||||
# 设置服务器
|
# 设置服务器
|
||||||
setup_server()
|
setup_server()
|
||||||
|
|
||||||
@@ -129,37 +123,7 @@ def start_server(host: str = "0.0.0.0", port: int = 8002, open_browser: bool = T
|
|||||||
|
|
||||||
# 启动服务器
|
# 启动服务器
|
||||||
info(f"[Web] 启动FastAPI服务器: {host}:{port}")
|
info(f"[Web] 启动FastAPI服务器: {host}:{port}")
|
||||||
|
uvicorn.run(app, host=host, port=port, log_config=log_config)
|
||||||
# 使用支持重启的模式
|
|
||||||
config = Config(app=app, host=host, port=port, log_config=log_config)
|
|
||||||
server = Server(config)
|
|
||||||
|
|
||||||
# 启动服务器线程
|
|
||||||
server_thread = threading.Thread(target=server.run, daemon=True, name="uvicorn_server")
|
|
||||||
server_thread.start()
|
|
||||||
|
|
||||||
info("[Web] Server started, monitoring for restart requests...")
|
|
||||||
|
|
||||||
# 监控重启标志
|
|
||||||
import unilabos.app.main as main_module
|
|
||||||
|
|
||||||
while server_thread.is_alive():
|
|
||||||
if hasattr(main_module, "_restart_requested") and main_module._restart_requested:
|
|
||||||
info(
|
|
||||||
f"[Web] Restart requested via WebSocket, reason: {getattr(main_module, '_restart_reason', 'unknown')}"
|
|
||||||
)
|
|
||||||
main_module._restart_requested = False
|
|
||||||
|
|
||||||
# 停止服务器
|
|
||||||
server.should_exit = True
|
|
||||||
server_thread.join(timeout=5)
|
|
||||||
|
|
||||||
info("[Web] Server stopped, ready for restart")
|
|
||||||
return True
|
|
||||||
|
|
||||||
time.sleep(1)
|
|
||||||
|
|
||||||
return False
|
|
||||||
|
|
||||||
|
|
||||||
# 当脚本直接运行时启动服务器
|
# 当脚本直接运行时启动服务器
|
||||||
|
|||||||
@@ -359,7 +359,7 @@ class MessageProcessor:
|
|||||||
self.device_manager = device_manager
|
self.device_manager = device_manager
|
||||||
self.queue_processor = None # 延迟设置
|
self.queue_processor = None # 延迟设置
|
||||||
self.websocket_client = None # 延迟设置
|
self.websocket_client = None # 延迟设置
|
||||||
self.session_id = str(uuid.uuid4())[:6] # 产生一个随机的session_id
|
self.session_id = ""
|
||||||
|
|
||||||
# WebSocket连接
|
# WebSocket连接
|
||||||
self.websocket = None
|
self.websocket = None
|
||||||
@@ -438,7 +438,7 @@ class MessageProcessor:
|
|||||||
self.connected = True
|
self.connected = True
|
||||||
self.reconnect_count = 0
|
self.reconnect_count = 0
|
||||||
|
|
||||||
logger.trace(f"[MessageProcessor] Connected to {self.websocket_url}")
|
logger.info(f"[MessageProcessor] Connected to {self.websocket_url}")
|
||||||
|
|
||||||
# 启动发送协程
|
# 启动发送协程
|
||||||
send_task = asyncio.create_task(self._send_handler())
|
send_task = asyncio.create_task(self._send_handler())
|
||||||
@@ -488,16 +488,7 @@ class MessageProcessor:
|
|||||||
async for message in self.websocket:
|
async for message in self.websocket:
|
||||||
try:
|
try:
|
||||||
data = json.loads(message)
|
data = json.loads(message)
|
||||||
message_type = data.get("action", "")
|
await self._process_message(data)
|
||||||
message_data = data.get("data")
|
|
||||||
if self.session_id and self.session_id == data.get("edge_session"):
|
|
||||||
await self._process_message(message_type, message_data)
|
|
||||||
else:
|
|
||||||
if message_type.endswith("_material"):
|
|
||||||
logger.trace(f"[MessageProcessor] 收到一条归属 {data.get('edge_session')} 的旧消息:{data}")
|
|
||||||
logger.debug(f"[MessageProcessor] 跳过了一条归属 {data.get('edge_session')} 的旧消息: {data.get('action')}")
|
|
||||||
else:
|
|
||||||
await self._process_message(message_type, message_data)
|
|
||||||
except json.JSONDecodeError:
|
except json.JSONDecodeError:
|
||||||
logger.error(f"[MessageProcessor] Invalid JSON received: {message}")
|
logger.error(f"[MessageProcessor] Invalid JSON received: {message}")
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
@@ -512,7 +503,7 @@ class MessageProcessor:
|
|||||||
|
|
||||||
async def _send_handler(self):
|
async def _send_handler(self):
|
||||||
"""处理发送队列中的消息"""
|
"""处理发送队列中的消息"""
|
||||||
logger.trace("[MessageProcessor] Send handler started")
|
logger.debug("[MessageProcessor] Send handler started")
|
||||||
|
|
||||||
try:
|
try:
|
||||||
while self.connected and self.websocket:
|
while self.connected and self.websocket:
|
||||||
@@ -563,8 +554,11 @@ class MessageProcessor:
|
|||||||
finally:
|
finally:
|
||||||
logger.debug("[MessageProcessor] Send handler stopped")
|
logger.debug("[MessageProcessor] Send handler stopped")
|
||||||
|
|
||||||
async def _process_message(self, message_type: str, message_data: Dict[str, Any]):
|
async def _process_message(self, data: Dict[str, Any]):
|
||||||
"""处理收到的消息"""
|
"""处理收到的消息"""
|
||||||
|
message_type = data.get("action", "")
|
||||||
|
message_data = data.get("data")
|
||||||
|
|
||||||
logger.debug(f"[MessageProcessor] Processing message: {message_type}")
|
logger.debug(f"[MessageProcessor] Processing message: {message_type}")
|
||||||
|
|
||||||
try:
|
try:
|
||||||
@@ -577,19 +571,16 @@ class MessageProcessor:
|
|||||||
elif message_type == "cancel_action" or message_type == "cancel_task":
|
elif message_type == "cancel_action" or message_type == "cancel_task":
|
||||||
await self._handle_cancel_action(message_data)
|
await self._handle_cancel_action(message_data)
|
||||||
elif message_type == "add_material":
|
elif message_type == "add_material":
|
||||||
# noinspection PyTypeChecker
|
|
||||||
await self._handle_resource_tree_update(message_data, "add")
|
await self._handle_resource_tree_update(message_data, "add")
|
||||||
elif message_type == "update_material":
|
elif message_type == "update_material":
|
||||||
# noinspection PyTypeChecker
|
|
||||||
await self._handle_resource_tree_update(message_data, "update")
|
await self._handle_resource_tree_update(message_data, "update")
|
||||||
elif message_type == "remove_material":
|
elif message_type == "remove_material":
|
||||||
# noinspection PyTypeChecker
|
|
||||||
await self._handle_resource_tree_update(message_data, "remove")
|
await self._handle_resource_tree_update(message_data, "remove")
|
||||||
# elif message_type == "session_id":
|
elif message_type == "session_id":
|
||||||
# self.session_id = message_data.get("session_id")
|
self.session_id = message_data.get("session_id")
|
||||||
# logger.info(f"[MessageProcessor] Session ID: {self.session_id}")
|
logger.info(f"[MessageProcessor] Session ID: {self.session_id}")
|
||||||
elif message_type == "request_restart":
|
elif message_type == "request_reload":
|
||||||
await self._handle_request_restart(message_data)
|
await self._handle_request_reload(message_data)
|
||||||
else:
|
else:
|
||||||
logger.debug(f"[MessageProcessor] Unknown message type: {message_type}")
|
logger.debug(f"[MessageProcessor] Unknown message type: {message_type}")
|
||||||
|
|
||||||
@@ -848,7 +839,7 @@ class MessageProcessor:
|
|||||||
device_action_groups[key_add].append(item["uuid"])
|
device_action_groups[key_add].append(item["uuid"])
|
||||||
|
|
||||||
logger.info(
|
logger.info(
|
||||||
f"[资源同步] 跨站Transfer: {item['uuid'][:8]} from {device_old_id} to {device_id}"
|
f"[MessageProcessor] Resource migrated: {item['uuid'][:8]} from {device_old_id} to {device_id}"
|
||||||
)
|
)
|
||||||
else:
|
else:
|
||||||
# 正常update
|
# 正常update
|
||||||
@@ -863,11 +854,11 @@ class MessageProcessor:
|
|||||||
device_action_groups[key] = []
|
device_action_groups[key] = []
|
||||||
device_action_groups[key].append(item["uuid"])
|
device_action_groups[key].append(item["uuid"])
|
||||||
|
|
||||||
logger.trace(f"[资源同步] 动作 {action} 分组数量: {len(device_action_groups)}, 总数量: {len(resource_uuid_list)}")
|
logger.info(f"触发物料更新 {action} 分组数量: {len(device_action_groups)}, 总数量: {len(resource_uuid_list)}")
|
||||||
|
|
||||||
# 为每个(device_id, action)创建独立的更新线程
|
# 为每个(device_id, action)创建独立的更新线程
|
||||||
for (device_id, actual_action), items in device_action_groups.items():
|
for (device_id, actual_action), items in device_action_groups.items():
|
||||||
logger.trace(f"[资源同步] {device_id} 物料动作 {actual_action} 数量: {len(items)}")
|
logger.info(f"设备 {device_id} 物料更新 {actual_action} 数量: {len(items)}")
|
||||||
|
|
||||||
def _notify_resource_tree(dev_id, act, item_list):
|
def _notify_resource_tree(dev_id, act, item_list):
|
||||||
try:
|
try:
|
||||||
@@ -899,48 +890,19 @@ class MessageProcessor:
|
|||||||
)
|
)
|
||||||
thread.start()
|
thread.start()
|
||||||
|
|
||||||
async def _handle_request_restart(self, data: Dict[str, Any]):
|
async def _handle_request_reload(self, data: Dict[str, Any]):
|
||||||
"""
|
"""
|
||||||
处理重启请求
|
处理重载请求
|
||||||
|
|
||||||
当LabGo发送request_restart时,执行清理并触发重启
|
当LabGo发送request_reload时,重新发送设备注册信息
|
||||||
"""
|
"""
|
||||||
reason = data.get("reason", "unknown")
|
reason = data.get("reason", "unknown")
|
||||||
delay = data.get("delay", 2) # 默认延迟2秒
|
logger.info(f"[MessageProcessor] Received reload request, reason: {reason}")
|
||||||
logger.info(f"[MessageProcessor] Received restart request, reason: {reason}, delay: {delay}s")
|
|
||||||
|
|
||||||
# 发送确认消息
|
# 重新发送host_node_ready信息
|
||||||
if self.websocket_client:
|
if self.websocket_client:
|
||||||
await self.websocket_client.send_message({
|
self.websocket_client.publish_host_ready()
|
||||||
"action": "restart_acknowledged",
|
logger.info("[MessageProcessor] Re-sent host_node_ready after reload request")
|
||||||
"data": {"reason": reason, "delay": delay}
|
|
||||||
})
|
|
||||||
|
|
||||||
# 设置全局重启标志
|
|
||||||
import unilabos.app.main as main_module
|
|
||||||
main_module._restart_requested = True
|
|
||||||
main_module._restart_reason = reason
|
|
||||||
|
|
||||||
# 延迟后执行清理
|
|
||||||
await asyncio.sleep(delay)
|
|
||||||
|
|
||||||
# 在新线程中执行清理,避免阻塞当前事件循环
|
|
||||||
def do_cleanup():
|
|
||||||
import time
|
|
||||||
time.sleep(0.5) # 给当前消息处理完成的时间
|
|
||||||
logger.info(f"[MessageProcessor] Starting cleanup for restart, reason: {reason}")
|
|
||||||
try:
|
|
||||||
from unilabos.app.utils import cleanup_for_restart
|
|
||||||
if cleanup_for_restart():
|
|
||||||
logger.info("[MessageProcessor] Cleanup successful, main() will restart")
|
|
||||||
else:
|
|
||||||
logger.error("[MessageProcessor] Cleanup failed")
|
|
||||||
except Exception as e:
|
|
||||||
logger.error(f"[MessageProcessor] Error during cleanup: {e}")
|
|
||||||
|
|
||||||
cleanup_thread = threading.Thread(target=do_cleanup, name="RestartCleanupThread", daemon=True)
|
|
||||||
cleanup_thread.start()
|
|
||||||
logger.info(f"[MessageProcessor] Restart cleanup scheduled")
|
|
||||||
|
|
||||||
async def _send_action_state_response(
|
async def _send_action_state_response(
|
||||||
self, device_id: str, action_name: str, task_id: str, job_id: str, typ: str, free: bool, need_more: int
|
self, device_id: str, action_name: str, task_id: str, job_id: str, typ: str, free: bool, need_more: int
|
||||||
@@ -1019,7 +981,7 @@ class QueueProcessor:
|
|||||||
|
|
||||||
def _run(self):
|
def _run(self):
|
||||||
"""运行队列处理主循环"""
|
"""运行队列处理主循环"""
|
||||||
logger.trace("[QueueProcessor] Queue processor started")
|
logger.debug("[QueueProcessor] Queue processor started")
|
||||||
|
|
||||||
while self.is_running:
|
while self.is_running:
|
||||||
try:
|
try:
|
||||||
@@ -1229,6 +1191,7 @@ class WebSocketClient(BaseCommunicationClient):
|
|||||||
else:
|
else:
|
||||||
url = f"{scheme}://{parsed.netloc}/api/v1/ws/schedule"
|
url = f"{scheme}://{parsed.netloc}/api/v1/ws/schedule"
|
||||||
|
|
||||||
|
logger.debug(f"[WebSocketClient] URL: {url}")
|
||||||
return url
|
return url
|
||||||
|
|
||||||
def start(self) -> None:
|
def start(self) -> None:
|
||||||
@@ -1241,11 +1204,13 @@ class WebSocketClient(BaseCommunicationClient):
|
|||||||
logger.error("[WebSocketClient] WebSocket URL not configured")
|
logger.error("[WebSocketClient] WebSocket URL not configured")
|
||||||
return
|
return
|
||||||
|
|
||||||
|
logger.info(f"[WebSocketClient] Starting connection to {self.websocket_url}")
|
||||||
|
|
||||||
# 启动两个核心线程
|
# 启动两个核心线程
|
||||||
self.message_processor.start()
|
self.message_processor.start()
|
||||||
self.queue_processor.start()
|
self.queue_processor.start()
|
||||||
|
|
||||||
logger.trace("[WebSocketClient] All threads started")
|
logger.info("[WebSocketClient] All threads started")
|
||||||
|
|
||||||
def stop(self) -> None:
|
def stop(self) -> None:
|
||||||
"""停止WebSocket客户端"""
|
"""停止WebSocket客户端"""
|
||||||
|
|||||||
@@ -16,14 +16,12 @@ class BasicConfig:
|
|||||||
upload_registry = False
|
upload_registry = False
|
||||||
machine_name = "undefined"
|
machine_name = "undefined"
|
||||||
vis_2d_enable = False
|
vis_2d_enable = False
|
||||||
no_update_feedback = False
|
|
||||||
enable_resource_load = True
|
enable_resource_load = True
|
||||||
communication_protocol = "websocket"
|
communication_protocol = "websocket"
|
||||||
startup_json_path = None # 填写绝对路径
|
startup_json_path = None # 填写绝对路径
|
||||||
disable_browser = False # 禁止浏览器自动打开
|
disable_browser = False # 禁止浏览器自动打开
|
||||||
port = 8002 # 本地HTTP服务
|
port = 8002 # 本地HTTP服务
|
||||||
# 'TRACE', 'DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'
|
log_level: Literal['TRACE', 'DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'] = "DEBUG" # 'TRACE', 'DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'
|
||||||
log_level: Literal["TRACE", "DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"] = "DEBUG"
|
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def auth_secret(cls):
|
def auth_secret(cls):
|
||||||
@@ -67,14 +65,13 @@ def _update_config_from_module(module):
|
|||||||
if not attr.startswith("_"):
|
if not attr.startswith("_"):
|
||||||
setattr(obj, attr, getattr(getattr(module, name), attr))
|
setattr(obj, attr, getattr(getattr(module, name), attr))
|
||||||
|
|
||||||
|
|
||||||
def _update_config_from_env():
|
def _update_config_from_env():
|
||||||
prefix = "UNILABOS_"
|
prefix = "UNILABOS_"
|
||||||
for env_key, env_value in os.environ.items():
|
for env_key, env_value in os.environ.items():
|
||||||
if not env_key.startswith(prefix):
|
if not env_key.startswith(prefix):
|
||||||
continue
|
continue
|
||||||
try:
|
try:
|
||||||
key_path = env_key[len(prefix) :] # Remove UNILAB_ prefix
|
key_path = env_key[len(prefix):] # Remove UNILAB_ prefix
|
||||||
class_field = key_path.upper().split("_", 1)
|
class_field = key_path.upper().split("_", 1)
|
||||||
if len(class_field) != 2:
|
if len(class_field) != 2:
|
||||||
logger.warning(f"[ENV] 环境变量格式不正确:{env_key}")
|
logger.warning(f"[ENV] 环境变量格式不正确:{env_key}")
|
||||||
|
|||||||
@@ -6,7 +6,7 @@ Coin Cell Assembly Workstation
|
|||||||
"""
|
"""
|
||||||
from typing import Dict, Any, List, Optional, Union
|
from typing import Dict, Any, List, Optional, Union
|
||||||
|
|
||||||
from unilabos.resources.resource_tracker import DeviceNodeResourceTracker
|
from unilabos.ros.nodes.resource_tracker import DeviceNodeResourceTracker
|
||||||
from unilabos.device_comms.workstation_base import WorkstationBase, WorkflowInfo
|
from unilabos.device_comms.workstation_base import WorkstationBase, WorkflowInfo
|
||||||
from unilabos.device_comms.workstation_communication import (
|
from unilabos.device_comms.workstation_communication import (
|
||||||
WorkstationCommunicationBase, CommunicationConfig, CommunicationProtocol, CoinCellCommunication
|
WorkstationCommunicationBase, CommunicationConfig, CommunicationProtocol, CoinCellCommunication
|
||||||
@@ -61,7 +61,7 @@ class CoinCellAssemblyWorkstation(WorkstationBase):
|
|||||||
|
|
||||||
# 创建资源跟踪器(如果没有提供)
|
# 创建资源跟踪器(如果没有提供)
|
||||||
if resource_tracker is None:
|
if resource_tracker is None:
|
||||||
from unilabos.resources.resource_tracker import DeviceNodeResourceTracker
|
from unilabos.ros.nodes.resource_tracker import DeviceNodeResourceTracker
|
||||||
resource_tracker = DeviceNodeResourceTracker()
|
resource_tracker = DeviceNodeResourceTracker()
|
||||||
|
|
||||||
# 初始化基类
|
# 初始化基类
|
||||||
|
|||||||
@@ -13,7 +13,7 @@ from pylabrobot.resources import (
|
|||||||
import copy
|
import copy
|
||||||
from unilabos_msgs.msg import Resource
|
from unilabos_msgs.msg import Resource
|
||||||
|
|
||||||
from unilabos.resources.resource_tracker import DeviceNodeResourceTracker # type: ignore
|
from unilabos.ros.nodes.resource_tracker import DeviceNodeResourceTracker # type: ignore
|
||||||
|
|
||||||
|
|
||||||
class LiquidHandlerBiomek:
|
class LiquidHandlerBiomek:
|
||||||
|
|||||||
@@ -207,14 +207,7 @@ class LiquidHandlerMiddleware(LiquidHandler):
|
|||||||
|
|
||||||
res_samples = []
|
res_samples = []
|
||||||
res_volumes = []
|
res_volumes = []
|
||||||
# 处理 use_channels 为 None 的情况(通常用于单通道操作)
|
for resource, volume, channel in zip(resources, vols, use_channels):
|
||||||
if use_channels is None:
|
|
||||||
# 对于单通道操作,推断通道为 [0]
|
|
||||||
channels_to_use = [0] * len(resources)
|
|
||||||
else:
|
|
||||||
channels_to_use = use_channels
|
|
||||||
|
|
||||||
for resource, volume, channel in zip(resources, vols, channels_to_use):
|
|
||||||
res_samples.append({"name": resource.name, "sample_uuid": resource.unilabos_extra.get("sample_uuid", None)})
|
res_samples.append({"name": resource.name, "sample_uuid": resource.unilabos_extra.get("sample_uuid", None)})
|
||||||
res_volumes.append(volume)
|
res_volumes.append(volume)
|
||||||
self.pending_liquids_dict[channel] = {
|
self.pending_liquids_dict[channel] = {
|
||||||
@@ -927,7 +920,6 @@ class LiquidHandlerAbstract(LiquidHandlerMiddleware):
|
|||||||
offsets=offsets if offsets else None,
|
offsets=offsets if offsets else None,
|
||||||
height_to_bottom=mix_liquid_height if mix_liquid_height else None,
|
height_to_bottom=mix_liquid_height if mix_liquid_height else None,
|
||||||
mix_rate=mix_rate if mix_rate else None,
|
mix_rate=mix_rate if mix_rate else None,
|
||||||
use_channels=use_channels,
|
|
||||||
)
|
)
|
||||||
if delays is not None and len(delays) > 1:
|
if delays is not None and len(delays) > 1:
|
||||||
await self.custom_delay(seconds=delays[1])
|
await self.custom_delay(seconds=delays[1])
|
||||||
@@ -991,7 +983,6 @@ class LiquidHandlerAbstract(LiquidHandlerMiddleware):
|
|||||||
offsets=offsets if offsets else None,
|
offsets=offsets if offsets else None,
|
||||||
height_to_bottom=mix_liquid_height if mix_liquid_height else None,
|
height_to_bottom=mix_liquid_height if mix_liquid_height else None,
|
||||||
mix_rate=mix_rate if mix_rate else None,
|
mix_rate=mix_rate if mix_rate else None,
|
||||||
use_channels=use_channels,
|
|
||||||
)
|
)
|
||||||
if delays is not None and len(delays) > 1:
|
if delays is not None and len(delays) > 1:
|
||||||
await self.custom_delay(seconds=delays[1])
|
await self.custom_delay(seconds=delays[1])
|
||||||
@@ -1174,7 +1165,6 @@ class LiquidHandlerAbstract(LiquidHandlerMiddleware):
|
|||||||
offsets=offsets if offsets else None,
|
offsets=offsets if offsets else None,
|
||||||
height_to_bottom=mix_liquid_height if mix_liquid_height else None,
|
height_to_bottom=mix_liquid_height if mix_liquid_height else None,
|
||||||
mix_rate=mix_rate if mix_rate else None,
|
mix_rate=mix_rate if mix_rate else None,
|
||||||
use_channels=use_channels,
|
|
||||||
)
|
)
|
||||||
|
|
||||||
await self.aspirate(
|
await self.aspirate(
|
||||||
@@ -1209,7 +1199,6 @@ class LiquidHandlerAbstract(LiquidHandlerMiddleware):
|
|||||||
offsets=offsets if offsets else None,
|
offsets=offsets if offsets else None,
|
||||||
height_to_bottom=mix_liquid_height if mix_liquid_height else None,
|
height_to_bottom=mix_liquid_height if mix_liquid_height else None,
|
||||||
mix_rate=mix_rate if mix_rate else None,
|
mix_rate=mix_rate if mix_rate else None,
|
||||||
use_channels=use_channels,
|
|
||||||
)
|
)
|
||||||
if delays is not None and len(delays) > 1:
|
if delays is not None and len(delays) > 1:
|
||||||
await self.custom_delay(seconds=delays[1])
|
await self.custom_delay(seconds=delays[1])
|
||||||
@@ -1246,7 +1235,6 @@ class LiquidHandlerAbstract(LiquidHandlerMiddleware):
|
|||||||
offsets=offsets if offsets else None,
|
offsets=offsets if offsets else None,
|
||||||
height_to_bottom=mix_liquid_height if mix_liquid_height else None,
|
height_to_bottom=mix_liquid_height if mix_liquid_height else None,
|
||||||
mix_rate=mix_rate if mix_rate else None,
|
mix_rate=mix_rate if mix_rate else None,
|
||||||
use_channels=use_channels,
|
|
||||||
)
|
)
|
||||||
|
|
||||||
await self.aspirate(
|
await self.aspirate(
|
||||||
@@ -1283,7 +1271,6 @@ class LiquidHandlerAbstract(LiquidHandlerMiddleware):
|
|||||||
offsets=offsets if offsets else None,
|
offsets=offsets if offsets else None,
|
||||||
height_to_bottom=mix_liquid_height if mix_liquid_height else None,
|
height_to_bottom=mix_liquid_height if mix_liquid_height else None,
|
||||||
mix_rate=mix_rate if mix_rate else None,
|
mix_rate=mix_rate if mix_rate else None,
|
||||||
use_channels=use_channels,
|
|
||||||
)
|
)
|
||||||
if delays is not None and len(delays) > 1:
|
if delays is not None and len(delays) > 1:
|
||||||
await self.custom_delay(seconds=delays[1])
|
await self.custom_delay(seconds=delays[1])
|
||||||
@@ -1340,7 +1327,6 @@ class LiquidHandlerAbstract(LiquidHandlerMiddleware):
|
|||||||
offsets=offsets[idx:idx + 1] if offsets and len(offsets) > idx else None,
|
offsets=offsets[idx:idx + 1] if offsets and len(offsets) > idx else None,
|
||||||
height_to_bottom=mix_liquid_height if mix_liquid_height else None,
|
height_to_bottom=mix_liquid_height if mix_liquid_height else None,
|
||||||
mix_rate=mix_rate if mix_rate else None,
|
mix_rate=mix_rate if mix_rate else None,
|
||||||
use_channels=use_channels,
|
|
||||||
)
|
)
|
||||||
|
|
||||||
# 从源容器吸液(总体积)
|
# 从源容器吸液(总体积)
|
||||||
@@ -1380,7 +1366,6 @@ class LiquidHandlerAbstract(LiquidHandlerMiddleware):
|
|||||||
offsets=offsets[idx:idx+1] if offsets else None,
|
offsets=offsets[idx:idx+1] if offsets else None,
|
||||||
height_to_bottom=mix_liquid_height if mix_liquid_height else None,
|
height_to_bottom=mix_liquid_height if mix_liquid_height else None,
|
||||||
mix_rate=mix_rate if mix_rate else None,
|
mix_rate=mix_rate if mix_rate else None,
|
||||||
use_channels=use_channels,
|
|
||||||
)
|
)
|
||||||
if touch_tip:
|
if touch_tip:
|
||||||
await self.touch_tip([target])
|
await self.touch_tip([target])
|
||||||
@@ -1416,7 +1401,6 @@ class LiquidHandlerAbstract(LiquidHandlerMiddleware):
|
|||||||
offsets=offsets[i:i + 8] if offsets else None,
|
offsets=offsets[i:i + 8] if offsets else None,
|
||||||
height_to_bottom=mix_liquid_height if mix_liquid_height else None,
|
height_to_bottom=mix_liquid_height if mix_liquid_height else None,
|
||||||
mix_rate=mix_rate if mix_rate else None,
|
mix_rate=mix_rate if mix_rate else None,
|
||||||
use_channels=use_channels,
|
|
||||||
)
|
)
|
||||||
|
|
||||||
# 从源容器吸液(8个通道都从同一个源,但每个通道的吸液体积不同)
|
# 从源容器吸液(8个通道都从同一个源,但每个通道的吸液体积不同)
|
||||||
@@ -1462,7 +1446,6 @@ class LiquidHandlerAbstract(LiquidHandlerMiddleware):
|
|||||||
offsets=offsets if offsets else None,
|
offsets=offsets if offsets else None,
|
||||||
height_to_bottom=mix_liquid_height if mix_liquid_height else None,
|
height_to_bottom=mix_liquid_height if mix_liquid_height else None,
|
||||||
mix_rate=mix_rate if mix_rate else None,
|
mix_rate=mix_rate if mix_rate else None,
|
||||||
use_channels=use_channels,
|
|
||||||
)
|
)
|
||||||
|
|
||||||
if touch_tip:
|
if touch_tip:
|
||||||
@@ -1514,19 +1497,10 @@ class LiquidHandlerAbstract(LiquidHandlerMiddleware):
|
|||||||
f"(matching `asp_vols`). Got length {len(dis_vols)}."
|
f"(matching `asp_vols`). Got length {len(dis_vols)}."
|
||||||
)
|
)
|
||||||
|
|
||||||
need_mix_after = mix_stage in ["after", "both"] and mix_times is not None and mix_times > 0
|
|
||||||
defer_final_discard = need_mix_after or touch_tip
|
|
||||||
|
|
||||||
if len(use_channels) == 1:
|
if len(use_channels) == 1:
|
||||||
# 单通道模式:多次吸液,一次分液
|
# 单通道模式:多次吸液,一次分液
|
||||||
|
# 先混合前(如果需要)
|
||||||
# 如果需要 before mix,先 pick up tip 并执行 mix
|
|
||||||
if mix_stage in ["before", "both"] and mix_times is not None and mix_times > 0:
|
if mix_stage in ["before", "both"] and mix_times is not None and mix_times > 0:
|
||||||
tip = []
|
|
||||||
for _ in range(len(use_channels)):
|
|
||||||
tip.extend(next(self.current_tip))
|
|
||||||
await self.pick_up_tips(tip)
|
|
||||||
|
|
||||||
await self.mix(
|
await self.mix(
|
||||||
targets=[target],
|
targets=[target],
|
||||||
mix_time=mix_times,
|
mix_time=mix_times,
|
||||||
@@ -1534,11 +1508,8 @@ class LiquidHandlerAbstract(LiquidHandlerMiddleware):
|
|||||||
offsets=offsets[0:1] if offsets else None,
|
offsets=offsets[0:1] if offsets else None,
|
||||||
height_to_bottom=mix_liquid_height if mix_liquid_height else None,
|
height_to_bottom=mix_liquid_height if mix_liquid_height else None,
|
||||||
mix_rate=mix_rate if mix_rate else None,
|
mix_rate=mix_rate if mix_rate else None,
|
||||||
use_channels=use_channels,
|
|
||||||
)
|
)
|
||||||
|
|
||||||
await self.discard_tips(use_channels=use_channels)
|
|
||||||
|
|
||||||
# 从每个源容器吸液并分液到目标容器
|
# 从每个源容器吸液并分液到目标容器
|
||||||
for idx, source in enumerate(sources):
|
for idx, source in enumerate(sources):
|
||||||
tip = []
|
tip = []
|
||||||
@@ -1556,10 +1527,10 @@ class LiquidHandlerAbstract(LiquidHandlerMiddleware):
|
|||||||
blow_out_air_volume=[blow_out_air_volume[idx]] if blow_out_air_volume and len(blow_out_air_volume) > idx else None,
|
blow_out_air_volume=[blow_out_air_volume[idx]] if blow_out_air_volume and len(blow_out_air_volume) > idx else None,
|
||||||
spread=spread,
|
spread=spread,
|
||||||
)
|
)
|
||||||
|
|
||||||
if delays is not None:
|
if delays is not None:
|
||||||
await self.custom_delay(seconds=delays[0])
|
await self.custom_delay(seconds=delays[0])
|
||||||
|
|
||||||
# 分液到目标容器
|
# 分液到目标容器
|
||||||
if use_proportional_mixing:
|
if use_proportional_mixing:
|
||||||
# 按不同比例混合:使用对应的 dis_vols
|
# 按不同比例混合:使用对应的 dis_vols
|
||||||
@@ -1575,7 +1546,7 @@ class LiquidHandlerAbstract(LiquidHandlerMiddleware):
|
|||||||
dis_offset = offsets[0] if offsets and len(offsets) > 0 else None
|
dis_offset = offsets[0] if offsets and len(offsets) > 0 else None
|
||||||
dis_liquid_height = liquid_height[0] if liquid_height and len(liquid_height) > 0 else None
|
dis_liquid_height = liquid_height[0] if liquid_height and len(liquid_height) > 0 else None
|
||||||
dis_blow_out = blow_out_air_volume[0] if blow_out_air_volume and len(blow_out_air_volume) > 0 else None
|
dis_blow_out = blow_out_air_volume[0] if blow_out_air_volume and len(blow_out_air_volume) > 0 else None
|
||||||
|
|
||||||
await self.dispense(
|
await self.dispense(
|
||||||
resources=[target],
|
resources=[target],
|
||||||
vols=[dis_vol],
|
vols=[dis_vol],
|
||||||
@@ -1586,15 +1557,14 @@ class LiquidHandlerAbstract(LiquidHandlerMiddleware):
|
|||||||
liquid_height=[dis_liquid_height] if dis_liquid_height is not None else None,
|
liquid_height=[dis_liquid_height] if dis_liquid_height is not None else None,
|
||||||
spread=spread,
|
spread=spread,
|
||||||
)
|
)
|
||||||
|
|
||||||
if delays is not None and len(delays) > 1:
|
if delays is not None and len(delays) > 1:
|
||||||
await self.custom_delay(seconds=delays[1])
|
await self.custom_delay(seconds=delays[1])
|
||||||
|
|
||||||
if not (defer_final_discard and idx == len(sources) - 1):
|
await self.discard_tips(use_channels=use_channels)
|
||||||
await self.discard_tips(use_channels=use_channels)
|
|
||||||
|
|
||||||
# 最后在目标容器中混合(如果需要)
|
# 最后在目标容器中混合(如果需要)
|
||||||
if need_mix_after:
|
if mix_stage in ["after", "both"] and mix_times is not None and mix_times > 0:
|
||||||
await self.mix(
|
await self.mix(
|
||||||
targets=[target],
|
targets=[target],
|
||||||
mix_time=mix_times,
|
mix_time=mix_times,
|
||||||
@@ -1602,27 +1572,18 @@ class LiquidHandlerAbstract(LiquidHandlerMiddleware):
|
|||||||
offsets=offsets[0:1] if offsets else None,
|
offsets=offsets[0:1] if offsets else None,
|
||||||
height_to_bottom=mix_liquid_height if mix_liquid_height else None,
|
height_to_bottom=mix_liquid_height if mix_liquid_height else None,
|
||||||
mix_rate=mix_rate if mix_rate else None,
|
mix_rate=mix_rate if mix_rate else None,
|
||||||
use_channels=use_channels,
|
|
||||||
)
|
)
|
||||||
|
|
||||||
if touch_tip:
|
if touch_tip:
|
||||||
await self.touch_tip([target])
|
await self.touch_tip([target])
|
||||||
|
|
||||||
if defer_final_discard:
|
|
||||||
await self.discard_tips(use_channels=use_channels)
|
|
||||||
|
|
||||||
elif len(use_channels) == 8:
|
elif len(use_channels) == 8:
|
||||||
# 8通道模式:需要确保源数量是8的倍数
|
# 8通道模式:需要确保源数量是8的倍数
|
||||||
if len(sources) % 8 != 0:
|
if len(sources) % 8 != 0:
|
||||||
raise ValueError(f"For 8-channel mode, number of sources {len(sources)} must be a multiple of 8.")
|
raise ValueError(f"For 8-channel mode, number of sources {len(sources)} must be a multiple of 8.")
|
||||||
|
|
||||||
# 如果需要 before mix,先 pick up tips 并执行 mix
|
# 每次处理8个源
|
||||||
if mix_stage in ["before", "both"] and mix_times is not None and mix_times > 0:
|
if mix_stage in ["before", "both"] and mix_times is not None and mix_times > 0:
|
||||||
tip = []
|
|
||||||
for _ in range(len(use_channels)):
|
|
||||||
tip.extend(next(self.current_tip))
|
|
||||||
await self.pick_up_tips(tip)
|
|
||||||
|
|
||||||
await self.mix(
|
await self.mix(
|
||||||
targets=[target],
|
targets=[target],
|
||||||
mix_time=mix_times,
|
mix_time=mix_times,
|
||||||
@@ -1630,11 +1591,8 @@ class LiquidHandlerAbstract(LiquidHandlerMiddleware):
|
|||||||
offsets=offsets[0:1] if offsets else None,
|
offsets=offsets[0:1] if offsets else None,
|
||||||
height_to_bottom=mix_liquid_height if mix_liquid_height else None,
|
height_to_bottom=mix_liquid_height if mix_liquid_height else None,
|
||||||
mix_rate=mix_rate if mix_rate else None,
|
mix_rate=mix_rate if mix_rate else None,
|
||||||
use_channels=use_channels,
|
|
||||||
)
|
)
|
||||||
|
|
||||||
await self.discard_tips([0,1,2,3,4,5,6,7])
|
|
||||||
|
|
||||||
for i in range(0, len(sources), 8):
|
for i in range(0, len(sources), 8):
|
||||||
tip = []
|
tip = []
|
||||||
for _ in range(len(use_channels)):
|
for _ in range(len(use_channels)):
|
||||||
@@ -1692,12 +1650,11 @@ class LiquidHandlerAbstract(LiquidHandlerMiddleware):
|
|||||||
|
|
||||||
if delays is not None and len(delays) > 1:
|
if delays is not None and len(delays) > 1:
|
||||||
await self.custom_delay(seconds=delays[1])
|
await self.custom_delay(seconds=delays[1])
|
||||||
|
|
||||||
if not (defer_final_discard and i + 8 >= len(sources)):
|
await self.discard_tips([0,1,2,3,4,5,6,7])
|
||||||
await self.discard_tips([0,1,2,3,4,5,6,7])
|
|
||||||
|
|
||||||
# 最后在目标容器中混合(如果需要)
|
# 最后在目标容器中混合(如果需要)
|
||||||
if need_mix_after:
|
if mix_stage in ["after", "both"] and mix_times is not None and mix_times > 0:
|
||||||
await self.mix(
|
await self.mix(
|
||||||
targets=[target],
|
targets=[target],
|
||||||
mix_time=mix_times,
|
mix_time=mix_times,
|
||||||
@@ -1705,15 +1662,11 @@ class LiquidHandlerAbstract(LiquidHandlerMiddleware):
|
|||||||
offsets=offsets[0:1] if offsets else None,
|
offsets=offsets[0:1] if offsets else None,
|
||||||
height_to_bottom=mix_liquid_height if mix_liquid_height else None,
|
height_to_bottom=mix_liquid_height if mix_liquid_height else None,
|
||||||
mix_rate=mix_rate if mix_rate else None,
|
mix_rate=mix_rate if mix_rate else None,
|
||||||
use_channels=use_channels,
|
|
||||||
)
|
)
|
||||||
|
|
||||||
if touch_tip:
|
if touch_tip:
|
||||||
await self.touch_tip([target])
|
await self.touch_tip([target])
|
||||||
|
|
||||||
if defer_final_discard:
|
|
||||||
await self.discard_tips([0,1,2,3,4,5,6,7])
|
|
||||||
|
|
||||||
# except Exception as e:
|
# except Exception as e:
|
||||||
# traceback.print_exc()
|
# traceback.print_exc()
|
||||||
# raise RuntimeError(f"Liquid addition failed: {e}") from e
|
# raise RuntimeError(f"Liquid addition failed: {e}") from e
|
||||||
@@ -1733,12 +1686,7 @@ class LiquidHandlerAbstract(LiquidHandlerMiddleware):
|
|||||||
print(f"Waiting time: {msg}")
|
print(f"Waiting time: {msg}")
|
||||||
print(f"Current time: {time.strftime('%H:%M:%S')}")
|
print(f"Current time: {time.strftime('%H:%M:%S')}")
|
||||||
print(f"Time to finish: {time.strftime('%H:%M:%S', time.localtime(time.time() + seconds))}")
|
print(f"Time to finish: {time.strftime('%H:%M:%S', time.localtime(time.time() + seconds))}")
|
||||||
# Use ROS node sleep if available, otherwise use asyncio.sleep
|
await self._ros_node.sleep(seconds)
|
||||||
if hasattr(self, '_ros_node') and self._ros_node is not None:
|
|
||||||
await self._ros_node.sleep(seconds)
|
|
||||||
else:
|
|
||||||
import asyncio
|
|
||||||
await asyncio.sleep(seconds)
|
|
||||||
if msg:
|
if msg:
|
||||||
print(f"Done: {msg}")
|
print(f"Done: {msg}")
|
||||||
print(f"Current time: {time.strftime('%H:%M:%S')}")
|
print(f"Current time: {time.strftime('%H:%M:%S')}")
|
||||||
@@ -1777,59 +1725,27 @@ class LiquidHandlerAbstract(LiquidHandlerMiddleware):
|
|||||||
height_to_bottom: Optional[float] = None,
|
height_to_bottom: Optional[float] = None,
|
||||||
offsets: Optional[Coordinate] = None,
|
offsets: Optional[Coordinate] = None,
|
||||||
mix_rate: Optional[float] = None,
|
mix_rate: Optional[float] = None,
|
||||||
use_channels: Optional[List[int]] = None,
|
|
||||||
none_keys: List[str] = [],
|
none_keys: List[str] = [],
|
||||||
):
|
):
|
||||||
if mix_time is None or mix_time <= 0: # No mixing required
|
if mix_time is None: # No mixing required
|
||||||
return
|
return
|
||||||
"""Mix the liquid in the target wells."""
|
"""Mix the liquid in the target wells."""
|
||||||
if mix_vol is None:
|
|
||||||
raise ValueError("`mix_vol` must be provided when `mix_time` is set.")
|
|
||||||
|
|
||||||
targets_list: List[Container] = list(targets)
|
|
||||||
if len(targets_list) == 0:
|
|
||||||
return
|
|
||||||
|
|
||||||
def _expand(value, count: int):
|
|
||||||
if value is None:
|
|
||||||
return [None] * count
|
|
||||||
if isinstance(value, (list, tuple)):
|
|
||||||
if len(value) != count:
|
|
||||||
raise ValueError("Length of per-target parameters must match targets.")
|
|
||||||
return list(value)
|
|
||||||
return [value] * count
|
|
||||||
|
|
||||||
offsets_list = _expand(offsets, len(targets_list))
|
|
||||||
heights_list = _expand(height_to_bottom, len(targets_list))
|
|
||||||
rates_list = _expand(mix_rate, len(targets_list))
|
|
||||||
|
|
||||||
for _ in range(mix_time):
|
for _ in range(mix_time):
|
||||||
for idx, target in enumerate(targets_list):
|
await self.aspirate(
|
||||||
offset_arg = (
|
resources=[targets],
|
||||||
[offsets_list[idx]] if offsets_list[idx] is not None else None
|
vols=[mix_vol],
|
||||||
)
|
flow_rates=[mix_rate] if mix_rate else None,
|
||||||
height_arg = (
|
offsets=[offsets] if offsets else None,
|
||||||
[heights_list[idx]] if heights_list[idx] is not None else None
|
liquid_height=[height_to_bottom] if height_to_bottom else None,
|
||||||
)
|
)
|
||||||
rate_arg = [rates_list[idx]] if rates_list[idx] is not None else None
|
await self.custom_delay(seconds=1)
|
||||||
|
await self.dispense(
|
||||||
await self.aspirate(
|
resources=[targets],
|
||||||
resources=[target],
|
vols=[mix_vol],
|
||||||
vols=[mix_vol],
|
flow_rates=[mix_rate] if mix_rate else None,
|
||||||
use_channels=use_channels,
|
offsets=[offsets] if offsets else None,
|
||||||
flow_rates=rate_arg,
|
liquid_height=[height_to_bottom] if height_to_bottom else None,
|
||||||
offsets=offset_arg,
|
)
|
||||||
liquid_height=height_arg,
|
|
||||||
)
|
|
||||||
await self.custom_delay(seconds=1)
|
|
||||||
await self.dispense(
|
|
||||||
resources=[target],
|
|
||||||
vols=[mix_vol],
|
|
||||||
use_channels=use_channels,
|
|
||||||
flow_rates=rate_arg,
|
|
||||||
offsets=offset_arg,
|
|
||||||
liquid_height=height_arg,
|
|
||||||
)
|
|
||||||
|
|
||||||
def iter_tips(self, tip_racks: Sequence[TipRack]) -> Iterator[Resource]:
|
def iter_tips(self, tip_racks: Sequence[TipRack]) -> Iterator[Resource]:
|
||||||
"""Yield tips from a list of TipRacks one-by-one until depleted."""
|
"""Yield tips from a list of TipRacks one-by-one until depleted."""
|
||||||
|
|||||||
@@ -30,11 +30,11 @@ from pylabrobot.liquid_handling.standard import (
|
|||||||
ResourceMove,
|
ResourceMove,
|
||||||
ResourceDrop,
|
ResourceDrop,
|
||||||
)
|
)
|
||||||
from pylabrobot.resources import ResourceHolder, ResourceStack, Tip, Deck, Plate, Well, TipRack, Resource, Container, Coordinate, TipSpot, Trash, PlateAdapter, TubeRack, create_homogeneous_resources, create_ordered_items_2d
|
from pylabrobot.resources import ResourceHolder, ResourceStack, Tip, Deck, Plate, Well, TipRack, Resource, Container, Coordinate, TipSpot, Trash, PlateAdapter, TubeRack
|
||||||
|
|
||||||
from unilabos.devices.liquid_handling.liquid_handler_abstract import LiquidHandlerAbstract, SimpleReturn
|
from unilabos.devices.liquid_handling.liquid_handler_abstract import LiquidHandlerAbstract, SimpleReturn
|
||||||
from unilabos.resources.itemized_carrier import ItemizedCarrier
|
from unilabos.ros.nodes.base_device_node import BaseROS2DeviceNode
|
||||||
from unilabos.ros.nodes.base_device_node import BaseROS2DeviceNode, ROS2DeviceNode
|
|
||||||
|
|
||||||
class PRCXIError(RuntimeError):
|
class PRCXIError(RuntimeError):
|
||||||
"""Lilith 返回 Success=false 时抛出的业务异常"""
|
"""Lilith 返回 Success=false 时抛出的业务异常"""
|
||||||
@@ -71,10 +71,7 @@ class PRCXI9300Deck(Deck):
|
|||||||
def __init__(self, name: str, size_x: float, size_y: float, size_z: float, **kwargs):
|
def __init__(self, name: str, size_x: float, size_y: float, size_z: float, **kwargs):
|
||||||
super().__init__(name, size_x, size_y, size_z)
|
super().__init__(name, size_x, size_y, size_z)
|
||||||
self.slots = [None] * 16 # PRCXI 9300/9320 最大有 16 个槽位
|
self.slots = [None] * 16 # PRCXI 9300/9320 最大有 16 个槽位
|
||||||
self.slot_locations = []
|
self.slot_locations = [Coordinate(0, 0, 0)] * 16
|
||||||
|
|
||||||
for i in range(0, 16):
|
|
||||||
self.slot_locations.append(Coordinate((i%4)*137.5+5, (3-int(i/4))*96+13, 0))
|
|
||||||
|
|
||||||
def assign_child_at_slot(self, resource: Resource, slot: int, reassign: bool = False) -> None:
|
def assign_child_at_slot(self, resource: Resource, slot: int, reassign: bool = False) -> None:
|
||||||
if self.slots[slot - 1] is not None and not reassign:
|
if self.slots[slot - 1] is not None and not reassign:
|
||||||
@@ -139,31 +136,13 @@ class PRCXI9300Plate(Plate):
|
|||||||
# 使用 ordering 参数,只包含位置信息(键)
|
# 使用 ordering 参数,只包含位置信息(键)
|
||||||
ordering_param = collections.OrderedDict((k, None) for k in ordering.keys())
|
ordering_param = collections.OrderedDict((k, None) for k in ordering.keys())
|
||||||
else:
|
else:
|
||||||
# ordering 的值是对象(可能是 Well 对象),检查是否有有效的 location
|
# ordering 的值已经是对象,可以直接使用
|
||||||
# 如果是反序列化过程,Well 对象可能没有正确的 location,需要让 Plate 重新创建
|
items = ordering
|
||||||
sample_value = next(iter(ordering.values()), None)
|
ordering_param = None
|
||||||
if sample_value is not None and hasattr(sample_value, 'location'):
|
|
||||||
# 如果是 Well 对象但 location 为 None,说明是反序列化过程
|
|
||||||
# 让 Plate 自己创建 Well 对象
|
|
||||||
if sample_value.location is None:
|
|
||||||
items = None
|
|
||||||
ordering_param = collections.OrderedDict((k, None) for k in ordering.keys())
|
|
||||||
else:
|
|
||||||
# Well 对象有有效的 location,可以直接使用
|
|
||||||
items = ordering
|
|
||||||
ordering_param = None
|
|
||||||
elif sample_value is None:
|
|
||||||
# ordering 的值都是 None,让 Plate 自己创建 Well 对象
|
|
||||||
items = None
|
|
||||||
ordering_param = collections.OrderedDict((k, None) for k in ordering.keys())
|
|
||||||
else:
|
|
||||||
# 其他情况,直接使用
|
|
||||||
items = ordering
|
|
||||||
ordering_param = None
|
|
||||||
else:
|
else:
|
||||||
items = None
|
items = None
|
||||||
ordering_param = collections.OrderedDict() # 提供空的 ordering
|
ordering_param = None
|
||||||
|
|
||||||
# 根据情况传递不同的参数
|
# 根据情况传递不同的参数
|
||||||
if items is not None:
|
if items is not None:
|
||||||
super().__init__(name, size_x, size_y, size_z,
|
super().__init__(name, size_x, size_y, size_z,
|
||||||
@@ -240,16 +219,9 @@ class PRCXI9300TipRack(TipRack):
|
|||||||
# 使用 ordering 参数,只包含位置信息(键)
|
# 使用 ordering 参数,只包含位置信息(键)
|
||||||
ordering_param = collections.OrderedDict((k, None) for k in ordering.keys())
|
ordering_param = collections.OrderedDict((k, None) for k in ordering.keys())
|
||||||
else:
|
else:
|
||||||
# ordering 的值已经是对象,需要过滤掉 None 值
|
# ordering 的值已经是对象,可以直接使用
|
||||||
# 只保留有效的对象,用于 ordered_items 参数
|
items = ordering
|
||||||
valid_items = {k: v for k, v in ordering.items() if v is not None}
|
ordering_param = None
|
||||||
if valid_items:
|
|
||||||
items = valid_items
|
|
||||||
ordering_param = None
|
|
||||||
else:
|
|
||||||
# 如果没有有效对象,使用 ordering 参数
|
|
||||||
items = None
|
|
||||||
ordering_param = collections.OrderedDict((k, None) for k in ordering.keys())
|
|
||||||
else:
|
else:
|
||||||
items = None
|
items = None
|
||||||
ordering_param = None
|
ordering_param = None
|
||||||
@@ -312,7 +284,7 @@ class PRCXI9300Trash(Trash):
|
|||||||
"""
|
"""
|
||||||
|
|
||||||
def __init__(self, name: str, size_x: float, size_y: float, size_z: float,
|
def __init__(self, name: str, size_x: float, size_y: float, size_z: float,
|
||||||
category: str = "plate",
|
category: str = "trash",
|
||||||
material_info: Optional[Dict[str, Any]] = None,
|
material_info: Optional[Dict[str, Any]] = None,
|
||||||
**kwargs):
|
**kwargs):
|
||||||
|
|
||||||
@@ -364,8 +336,8 @@ class PRCXI9300TubeRack(TubeRack):
|
|||||||
def __init__(self, name: str, size_x: float, size_y: float, size_z: float,
|
def __init__(self, name: str, size_x: float, size_y: float, size_z: float,
|
||||||
category: str = "tube_rack",
|
category: str = "tube_rack",
|
||||||
items: Optional[Dict[str, Any]] = None,
|
items: Optional[Dict[str, Any]] = None,
|
||||||
ordered_items: collections.OrderedDict = None,
|
ordered_items: Optional[OrderedDict] = None,
|
||||||
ordering: Optional[collections.OrderedDict] = None,
|
ordering: Optional[OrderedDict] = None,
|
||||||
model: Optional[str] = None,
|
model: Optional[str] = None,
|
||||||
material_info: Optional[Dict[str, Any]] = None,
|
material_info: Optional[Dict[str, Any]] = None,
|
||||||
**kwargs):
|
**kwargs):
|
||||||
@@ -376,24 +348,18 @@ class PRCXI9300TubeRack(TubeRack):
|
|||||||
ordering_param = None
|
ordering_param = None
|
||||||
elif ordering is not None:
|
elif ordering is not None:
|
||||||
# 检查 ordering 中的值是否是字符串(从 JSON 反序列化时的情况)
|
# 检查 ordering 中的值是否是字符串(从 JSON 反序列化时的情况)
|
||||||
|
# 如果是字符串,说明这是位置名称,需要让 TubeRack 自己创建 Tube 对象
|
||||||
|
# 我们只传递位置信息(键),不传递值,使用 ordering 参数
|
||||||
if ordering and isinstance(next(iter(ordering.values()), None), str):
|
if ordering and isinstance(next(iter(ordering.values()), None), str):
|
||||||
# ordering 的值是字符串,这种情况下我们让 TubeRack 使用默认行为
|
# ordering 的值是字符串,只使用键(位置信息)创建新的 OrderedDict
|
||||||
# 不在初始化时创建 items,而是在 deserialize 后处理
|
# 传递 ordering 参数而不是 ordered_items,让 TubeRack 自己创建 Tube 对象
|
||||||
items_to_pass = None
|
items_to_pass = None
|
||||||
ordering_param = collections.OrderedDict((k, None) for k in ordering.keys()) # 提供空的 ordering 来满足要求
|
# 使用 ordering 参数,只包含位置信息(键)
|
||||||
# 保存 ordering 信息以便后续处理
|
ordering_param = collections.OrderedDict((k, None) for k in ordering.keys())
|
||||||
self._temp_ordering = ordering
|
|
||||||
else:
|
else:
|
||||||
# ordering 的值已经是对象,需要过滤掉 None 值
|
# ordering 的值已经是对象,可以直接使用
|
||||||
# 只保留有效的对象,用于 ordered_items 参数
|
items_to_pass = ordering
|
||||||
valid_items = {k: v for k, v in ordering.items() if v is not None}
|
ordering_param = None
|
||||||
if valid_items:
|
|
||||||
items_to_pass = valid_items
|
|
||||||
ordering_param = None
|
|
||||||
else:
|
|
||||||
# 如果没有有效对象,创建空的 ordered_items
|
|
||||||
items_to_pass = {}
|
|
||||||
ordering_param = None
|
|
||||||
elif items is not None:
|
elif items is not None:
|
||||||
# 兼容旧的 items 参数
|
# 兼容旧的 items 参数
|
||||||
items_to_pass = items
|
items_to_pass = items
|
||||||
@@ -404,50 +370,25 @@ class PRCXI9300TubeRack(TubeRack):
|
|||||||
|
|
||||||
# 根据情况传递不同的参数
|
# 根据情况传递不同的参数
|
||||||
if items_to_pass is not None:
|
if items_to_pass is not None:
|
||||||
super().__init__(name, size_x, size_y, size_z,
|
super().__init__(name, size_x, size_y, size_z,
|
||||||
ordered_items=items_to_pass,
|
ordered_items=items_to_pass,
|
||||||
model=model,
|
model=model,
|
||||||
**kwargs)
|
**kwargs)
|
||||||
elif ordering_param is not None:
|
elif ordering_param is not None:
|
||||||
# 直接调用 ItemizedResource 的构造函数来处理 ordering
|
# 传递 ordering 参数,让 TubeRack 自己创建 Tube 对象
|
||||||
from pylabrobot.resources import ItemizedResource
|
super().__init__(name, size_x, size_y, size_z,
|
||||||
ItemizedResource.__init__(self, name, size_x, size_y, size_z,
|
ordering=ordering_param,
|
||||||
ordering=ordering_param,
|
model=model,
|
||||||
category=category,
|
|
||||||
model=model,
|
|
||||||
**kwargs)
|
|
||||||
else:
|
|
||||||
super().__init__(name, size_x, size_y, size_z,
|
|
||||||
model=model,
|
|
||||||
**kwargs)
|
**kwargs)
|
||||||
|
else:
|
||||||
|
super().__init__(name, size_x, size_y, size_z,
|
||||||
|
model=model,
|
||||||
|
**kwargs)
|
||||||
|
|
||||||
self._unilabos_state = {}
|
self._unilabos_state = {}
|
||||||
if material_info:
|
if material_info:
|
||||||
self._unilabos_state["Material"] = material_info
|
self._unilabos_state["Material"] = material_info
|
||||||
|
|
||||||
# 如果有临时 ordering 信息,在初始化完成后处理
|
|
||||||
if hasattr(self, '_temp_ordering') and self._temp_ordering:
|
|
||||||
self._process_temp_ordering()
|
|
||||||
|
|
||||||
def _process_temp_ordering(self):
|
|
||||||
"""处理临时的 ordering 信息,创建相应的 Tube 对象"""
|
|
||||||
from pylabrobot.resources import Tube, Coordinate
|
|
||||||
|
|
||||||
for location, item_type in self._temp_ordering.items():
|
|
||||||
if item_type == 'Tube' or item_type == 'tube':
|
|
||||||
# 为每个位置创建 Tube 对象
|
|
||||||
tube = Tube(name=f"{self.name}_{location}", size_x=10, size_y=10, size_z=50, max_volume=2000.0)
|
|
||||||
# 使用 assign_child_resource 添加到 rack 中
|
|
||||||
self.assign_child_resource(tube, location=Coordinate(0, 0, 0))
|
|
||||||
|
|
||||||
# 清理临时数据
|
|
||||||
del self._temp_ordering
|
|
||||||
|
|
||||||
def load_state(self, state: Dict[str, Any]) -> None:
|
|
||||||
"""从给定的状态加载工作台信息。"""
|
|
||||||
# super().load_state(state)
|
|
||||||
self._unilabos_state = state
|
|
||||||
|
|
||||||
def serialize_state(self) -> Dict[str, Dict[str, Any]]:
|
def serialize_state(self) -> Dict[str, Dict[str, Any]]:
|
||||||
try:
|
try:
|
||||||
data = super().serialize_state()
|
data = super().serialize_state()
|
||||||
@@ -474,97 +415,6 @@ class PRCXI9300TubeRack(TubeRack):
|
|||||||
|
|
||||||
data.update(safe_state)
|
data.update(safe_state)
|
||||||
return data
|
return data
|
||||||
class PRCXI9300PlateAdapterSite(ItemizedCarrier):
|
|
||||||
def __init__(self, name: str, size_x: float, size_y: float, size_z: float,
|
|
||||||
material_info: Optional[Dict[str, Any]] = None, **kwargs):
|
|
||||||
# 处理 sites 参数的不同格式
|
|
||||||
|
|
||||||
sites = create_homogeneous_resources(
|
|
||||||
klass=ResourceHolder,
|
|
||||||
locations=[Coordinate(0, 0, 0)],
|
|
||||||
resource_size_x=size_x,
|
|
||||||
resource_size_y=size_y,
|
|
||||||
resource_size_z=size_z,
|
|
||||||
name_prefix=name,
|
|
||||||
)[0]
|
|
||||||
|
|
||||||
# 确保不传递重复的参数
|
|
||||||
kwargs.pop('layout', None)
|
|
||||||
sites_in = kwargs.pop('sites', None)
|
|
||||||
|
|
||||||
# 创建默认的sites字典
|
|
||||||
sites_dict = {name: sites}
|
|
||||||
# 优先从 sites_in 读取 'content_type',否则使用默认值
|
|
||||||
|
|
||||||
content_type = [
|
|
||||||
"plate",
|
|
||||||
"tip_rack",
|
|
||||||
"plates",
|
|
||||||
"tip_racks",
|
|
||||||
"tube_rack"
|
|
||||||
]
|
|
||||||
# 如果提供了sites参数,则用sites_in中的值替换sites_dict中对应的元素
|
|
||||||
if sites_in is not None and isinstance(sites_in, dict):
|
|
||||||
for site_key, site_value in sites_in.items():
|
|
||||||
if site_key in sites_dict:
|
|
||||||
sites_dict[site_key] = site_value
|
|
||||||
|
|
||||||
super().__init__(name, size_x, size_y, size_z,
|
|
||||||
sites=sites_dict,
|
|
||||||
num_items_x=kwargs.pop('num_items_x', 1),
|
|
||||||
num_items_y=kwargs.pop('num_items_y', 1),
|
|
||||||
num_items_z=kwargs.pop('num_items_z', 1),
|
|
||||||
content_type=content_type,
|
|
||||||
**kwargs)
|
|
||||||
self._unilabos_state = {}
|
|
||||||
if material_info:
|
|
||||||
self._unilabos_state["Material"] = material_info
|
|
||||||
|
|
||||||
|
|
||||||
def assign_child_resource(self, resource, location=Coordinate(0, 0, 0), reassign=True, spot=None):
|
|
||||||
"""重写 assign_child_resource 方法,对于适配器位置,不使用索引分配"""
|
|
||||||
# 直接调用 Resource 的 assign_child_resource,避免 ItemizedCarrier 的索引逻辑
|
|
||||||
from pylabrobot.resources.resource import Resource
|
|
||||||
Resource.assign_child_resource(self, resource, location=location, reassign=reassign)
|
|
||||||
|
|
||||||
def unassign_child_resource(self, resource):
|
|
||||||
"""重写 unassign_child_resource 方法,对于适配器位置,不使用 sites 列表"""
|
|
||||||
# 直接调用 Resource 的 unassign_child_resource,避免 ItemizedCarrier 的 sites 逻辑
|
|
||||||
from pylabrobot.resources.resource import Resource
|
|
||||||
Resource.unassign_child_resource(self, resource)
|
|
||||||
|
|
||||||
def serialize_state(self) -> Dict[str, Dict[str, Any]]:
|
|
||||||
try:
|
|
||||||
data = super().serialize_state()
|
|
||||||
except AttributeError:
|
|
||||||
data = {}
|
|
||||||
|
|
||||||
# 包含 sites 配置信息,但避免序列化 ResourceHolder 对象
|
|
||||||
if hasattr(self, 'sites') and self.sites:
|
|
||||||
# 只保存 sites 的基本信息,不保存 ResourceHolder 对象本身
|
|
||||||
sites_info = []
|
|
||||||
for site in self.sites:
|
|
||||||
if hasattr(site, '__class__') and 'pylabrobot' in str(site.__class__.__module__):
|
|
||||||
# 对于 pylabrobot 对象,只保存基本信息
|
|
||||||
sites_info.append({
|
|
||||||
"__pylabrobot_object__": True,
|
|
||||||
"class": site.__class__.__name__,
|
|
||||||
"module": site.__class__.__module__,
|
|
||||||
"name": getattr(site, 'name', str(site))
|
|
||||||
})
|
|
||||||
else:
|
|
||||||
sites_info.append(site)
|
|
||||||
data['sites'] = sites_info
|
|
||||||
|
|
||||||
return data
|
|
||||||
|
|
||||||
def load_state(self, state: Dict[str, Any]) -> None:
|
|
||||||
"""加载状态,包括 sites 配置信息"""
|
|
||||||
super().load_state(state)
|
|
||||||
|
|
||||||
# 从状态中恢复 sites 配置信息
|
|
||||||
if 'sites' in state:
|
|
||||||
self.sites = [state['sites']]
|
|
||||||
|
|
||||||
class PRCXI9300PlateAdapter(PlateAdapter):
|
class PRCXI9300PlateAdapter(PlateAdapter):
|
||||||
"""
|
"""
|
||||||
@@ -660,51 +510,16 @@ class PRCXI9300Handler(LiquidHandlerAbstract):
|
|||||||
step_mode=False,
|
step_mode=False,
|
||||||
matrix_id="",
|
matrix_id="",
|
||||||
is_9320=False,
|
is_9320=False,
|
||||||
start_rail=2,
|
|
||||||
rail_nums=4,
|
|
||||||
rail_interval=0,
|
|
||||||
x_increase = -0.003636,
|
|
||||||
y_increase = -0.003636,
|
|
||||||
x_offset = -0.8,
|
|
||||||
y_offset = -37.98,
|
|
||||||
deck_z = 300,
|
|
||||||
deck_y = 400,
|
|
||||||
rail_width=27.5,
|
|
||||||
xy_coupling = -0.0045,
|
|
||||||
):
|
):
|
||||||
|
tablets_info = []
|
||||||
self.deck_x = (start_rail + rail_nums*5 + (rail_nums-1)*rail_interval) * rail_width
|
count = 0
|
||||||
self.deck_y = deck_y
|
|
||||||
self.deck_z = deck_z
|
|
||||||
self.x_increase = x_increase
|
|
||||||
self.y_increase = y_increase
|
|
||||||
self.x_offset = x_offset
|
|
||||||
self.y_offset = y_offset
|
|
||||||
self.xy_coupling = xy_coupling
|
|
||||||
|
|
||||||
tablets_info = {}
|
|
||||||
plate_positions = []
|
|
||||||
for child in deck.children:
|
for child in deck.children:
|
||||||
number = int(child.name.replace("T", ""))
|
|
||||||
|
|
||||||
if child.children:
|
if child.children:
|
||||||
if "Material" in child.children[0]._unilabos_state:
|
if "Material" in child.children[0]._unilabos_state:
|
||||||
tablets_info[number] = child.children[0]._unilabos_state["Material"].get("uuid", "730067cf07ae43849ddf4034299030e9")
|
number = int(child.name.replace("T", ""))
|
||||||
else:
|
tablets_info.append(
|
||||||
tablets_info[number] = "730067cf07ae43849ddf4034299030e9"
|
WorkTablets(Number=number, Code=f"T{number}", Material=child.children[0]._unilabos_state["Material"])
|
||||||
else:
|
)
|
||||||
tablets_info[number] = "730067cf07ae43849ddf4034299030e9"
|
|
||||||
pos = self.plr_pos_to_prcxi(child)
|
|
||||||
plate_positions.append(
|
|
||||||
{
|
|
||||||
"Number": number,
|
|
||||||
"XPos": pos.x,
|
|
||||||
"YPos": pos.y,
|
|
||||||
"ZPos": pos.z
|
|
||||||
}
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
if is_9320:
|
if is_9320:
|
||||||
print("当前设备是9320")
|
print("当前设备是9320")
|
||||||
# 始终初始化 step_mode 属性
|
# 始终初始化 step_mode 属性
|
||||||
@@ -714,38 +529,10 @@ class PRCXI9300Handler(LiquidHandlerAbstract):
|
|||||||
self.step_mode = step_mode
|
self.step_mode = step_mode
|
||||||
else:
|
else:
|
||||||
print("9300设备不支持 单点动作模式")
|
print("9300设备不支持 单点动作模式")
|
||||||
|
|
||||||
self._unilabos_backend = PRCXI9300Backend(
|
self._unilabos_backend = PRCXI9300Backend(
|
||||||
tablets_info, plate_positions, host, port, timeout, channel_num, axis, setup, debug, matrix_id, is_9320,
|
tablets_info, host, port, timeout, channel_num, axis, setup, debug, matrix_id, is_9320
|
||||||
x_increase, y_increase, x_offset, y_offset,
|
|
||||||
deck_z, deck_x=self.deck_x, deck_y=self.deck_y, xy_coupling=xy_coupling
|
|
||||||
)
|
)
|
||||||
|
|
||||||
super().__init__(backend=self._unilabos_backend, deck=deck, simulator=simulator, channel_num=channel_num)
|
super().__init__(backend=self._unilabos_backend, deck=deck, simulator=simulator, channel_num=channel_num)
|
||||||
|
|
||||||
def plr_pos_to_prcxi(self, resource: Resource):
|
|
||||||
resource_pos = resource.get_absolute_location(x="c",y="c",z="t")
|
|
||||||
x = resource_pos.x
|
|
||||||
y = resource_pos.y
|
|
||||||
z = resource_pos.z
|
|
||||||
# 如果z等于0,则递归resource.parent的高度并向z加,使用get_size_z方法
|
|
||||||
|
|
||||||
parent = resource.parent
|
|
||||||
res_z = resource.location.z
|
|
||||||
while not isinstance(parent, LiquidHandlerAbstract) and (res_z == 0) and parent is not None:
|
|
||||||
z += parent.get_size_z()
|
|
||||||
res_z = parent.location.z
|
|
||||||
parent = getattr(parent, "parent", None)
|
|
||||||
|
|
||||||
prcxi_x = (self.deck_x - x)*(1+self.x_increase) + self.x_offset + self.xy_coupling * (self.deck_y - y)
|
|
||||||
prcxi_y = (self.deck_y - y)*(1+self.y_increase) + self.y_offset
|
|
||||||
prcxi_z = self.deck_z - z
|
|
||||||
|
|
||||||
prcxi_x = min(max(0, prcxi_x),self.deck_x)
|
|
||||||
prcxi_y = min(max(0, prcxi_y),self.deck_y)
|
|
||||||
prcxi_z = min(max(0, prcxi_z),self.deck_z)
|
|
||||||
|
|
||||||
return Coordinate(prcxi_x, prcxi_y, prcxi_z)
|
|
||||||
|
|
||||||
def post_init(self, ros_node: BaseROS2DeviceNode):
|
def post_init(self, ros_node: BaseROS2DeviceNode):
|
||||||
super().post_init(ros_node)
|
super().post_init(ros_node)
|
||||||
@@ -1026,7 +813,7 @@ class PRCXI9300Handler(LiquidHandlerAbstract):
|
|||||||
**backend_kwargs,
|
**backend_kwargs,
|
||||||
):
|
):
|
||||||
|
|
||||||
res = await super().move_plate(
|
return await super().move_plate(
|
||||||
plate,
|
plate,
|
||||||
to,
|
to,
|
||||||
intermediate_locations,
|
intermediate_locations,
|
||||||
@@ -1038,12 +825,6 @@ class PRCXI9300Handler(LiquidHandlerAbstract):
|
|||||||
target_plate_number = to,
|
target_plate_number = to,
|
||||||
**backend_kwargs,
|
**backend_kwargs,
|
||||||
)
|
)
|
||||||
plate.unassign()
|
|
||||||
to.assign_child_resource(plate, location=Coordinate(0, 0, 0))
|
|
||||||
ROS2DeviceNode.run_async_func(self._ros_node.update_resource, True, **{
|
|
||||||
"resources": [self.deck]
|
|
||||||
})
|
|
||||||
return res
|
|
||||||
|
|
||||||
class PRCXI9300Backend(LiquidHandlerBackend):
|
class PRCXI9300Backend(LiquidHandlerBackend):
|
||||||
"""PRCXI 9300 的后端实现,继承自 LiquidHandlerBackend。
|
"""PRCXI 9300 的后端实现,继承自 LiquidHandlerBackend。
|
||||||
@@ -1067,7 +848,6 @@ class PRCXI9300Backend(LiquidHandlerBackend):
|
|||||||
def __init__(
|
def __init__(
|
||||||
self,
|
self,
|
||||||
tablets_info: list[WorkTablets],
|
tablets_info: list[WorkTablets],
|
||||||
plate_positions: dict[int, Coordinate],
|
|
||||||
host: str = "127.0.0.1",
|
host: str = "127.0.0.1",
|
||||||
port: int = 9999,
|
port: int = 9999,
|
||||||
timeout: float = 10.0,
|
timeout: float = 10.0,
|
||||||
@@ -1076,19 +856,10 @@ class PRCXI9300Backend(LiquidHandlerBackend):
|
|||||||
setup=True,
|
setup=True,
|
||||||
debug=False,
|
debug=False,
|
||||||
matrix_id="",
|
matrix_id="",
|
||||||
is_9320=False,
|
is_9320=False,
|
||||||
x_increase = 0,
|
|
||||||
y_increase = 0,
|
|
||||||
x_offset = 0,
|
|
||||||
y_offset = 0,
|
|
||||||
deck_z = 300,
|
|
||||||
deck_x = 0,
|
|
||||||
deck_y = 0,
|
|
||||||
xy_coupling = 0.0,
|
|
||||||
) -> None:
|
) -> None:
|
||||||
super().__init__()
|
super().__init__()
|
||||||
self.tablets_info = tablets_info
|
self.tablets_info = tablets_info
|
||||||
self.plate_positions = plate_positions
|
|
||||||
self.matrix_id = matrix_id
|
self.matrix_id = matrix_id
|
||||||
self.api_client = PRCXI9300Api(host, port, timeout, axis, debug, is_9320)
|
self.api_client = PRCXI9300Api(host, port, timeout, axis, debug, is_9320)
|
||||||
self.host, self.port, self.timeout = host, port, timeout
|
self.host, self.port, self.timeout = host, port, timeout
|
||||||
@@ -1096,15 +867,6 @@ class PRCXI9300Backend(LiquidHandlerBackend):
|
|||||||
self._execute_setup = setup
|
self._execute_setup = setup
|
||||||
self.debug = debug
|
self.debug = debug
|
||||||
self.axis = "Left"
|
self.axis = "Left"
|
||||||
self.x_increase = x_increase
|
|
||||||
self.y_increase = y_increase
|
|
||||||
self.xy_coupling = xy_coupling
|
|
||||||
self.x_offset = x_offset
|
|
||||||
self.y_offset = y_offset
|
|
||||||
self.deck_x = deck_x
|
|
||||||
self.deck_y = deck_y
|
|
||||||
self.deck_z = deck_z
|
|
||||||
self.tip_length = 0
|
|
||||||
|
|
||||||
async def shaker_action(self, time: int, module_no: int, amplitude: int, is_wait: bool):
|
async def shaker_action(self, time: int, module_no: int, amplitude: int, is_wait: bool):
|
||||||
step = self.api_client.shaker_action(
|
step = self.api_client.shaker_action(
|
||||||
@@ -1134,11 +896,13 @@ class PRCXI9300Backend(LiquidHandlerBackend):
|
|||||||
|
|
||||||
async def drop_resource(self, drop: ResourceDrop, **backend_kwargs):
|
async def drop_resource(self, drop: ResourceDrop, **backend_kwargs):
|
||||||
|
|
||||||
|
|
||||||
plate_number = None
|
plate_number = None
|
||||||
target_plate_number = backend_kwargs.get("target_plate_number", None)
|
target_plate_number = backend_kwargs.get("target_plate_number", None)
|
||||||
if target_plate_number is not None:
|
if target_plate_number is not None:
|
||||||
plate_number = int(target_plate_number.name.replace("T", ""))
|
plate_number = int(target_plate_number.name.replace("T", ""))
|
||||||
|
|
||||||
|
|
||||||
is_whole_plate = True
|
is_whole_plate = True
|
||||||
balance_height = 0
|
balance_height = 0
|
||||||
if plate_number is None:
|
if plate_number is None:
|
||||||
@@ -1156,42 +920,29 @@ class PRCXI9300Backend(LiquidHandlerBackend):
|
|||||||
self._ros_node = ros_node
|
self._ros_node = ros_node
|
||||||
|
|
||||||
def create_protocol(self, protocol_name):
|
def create_protocol(self, protocol_name):
|
||||||
if protocol_name == "":
|
|
||||||
protocol_name = f"protocol_{time.time()}"
|
|
||||||
self.protocol_name = protocol_name
|
self.protocol_name = protocol_name
|
||||||
self.steps_todo_list = []
|
self.steps_todo_list = []
|
||||||
|
|
||||||
if not len(self.matrix_id):
|
|
||||||
self.matrix_id = str(uuid.uuid4())
|
|
||||||
|
|
||||||
material_list = self.api_client.get_all_materials()
|
|
||||||
material_dict = {material["uuid"]: material for material in material_list}
|
|
||||||
|
|
||||||
work_tablets = []
|
|
||||||
for num, material_id in self.tablets_info.items():
|
|
||||||
work_tablets.append({
|
|
||||||
"Number": num,
|
|
||||||
"Material": material_dict[material_id]
|
|
||||||
})
|
|
||||||
|
|
||||||
self.matrix_info = {
|
|
||||||
"MatrixId": self.matrix_id,
|
|
||||||
"MatrixName": self.matrix_id,
|
|
||||||
"WorkTablets": work_tablets,
|
|
||||||
}
|
|
||||||
# print(json.dumps(self.matrix_info, indent=2))
|
|
||||||
res = self.api_client.add_WorkTablet_Matrix(self.matrix_info)
|
|
||||||
if not res["Success"]:
|
|
||||||
self.matrix_id = ""
|
|
||||||
raise AssertionError(f"Failed to create matrix: {res.get('Message', 'Unknown error')}")
|
|
||||||
print(f"PRCXI9300Backend created matrix with ID: {self.matrix_info['MatrixId']}, result: {res}")
|
|
||||||
|
|
||||||
def run_protocol(self):
|
def run_protocol(self):
|
||||||
assert self.is_reset_ok, "PRCXI9300Backend is not reset successfully. Please call setup() first."
|
assert self.is_reset_ok, "PRCXI9300Backend is not reset successfully. Please call setup() first."
|
||||||
run_time = time.time()
|
run_time = time.time()
|
||||||
solution_id = self.api_client.add_solution(
|
self.matrix_info = MatrixInfo(
|
||||||
f"protocol_{run_time}", self.matrix_id, self.steps_todo_list
|
MatrixId=f"{int(run_time)}",
|
||||||
|
MatrixName=f"protocol_{run_time}",
|
||||||
|
MatrixCount=len(self.tablets_info),
|
||||||
|
WorkTablets=self.tablets_info,
|
||||||
)
|
)
|
||||||
|
# print(json.dumps(self.matrix_info, indent=2))
|
||||||
|
if not len(self.matrix_id):
|
||||||
|
res = self.api_client.add_WorkTablet_Matrix(self.matrix_info)
|
||||||
|
assert res["Success"], f"Failed to create matrix: {res.get('Message', 'Unknown error')}"
|
||||||
|
print(f"PRCXI9300Backend created matrix with ID: {self.matrix_info['MatrixId']}, result: {res}")
|
||||||
|
solution_id = self.api_client.add_solution(
|
||||||
|
f"protocol_{run_time}", self.matrix_info["MatrixId"], self.steps_todo_list
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
print(f"PRCXI9300Backend using predefined worktable {self.matrix_id}, skipping matrix creation.")
|
||||||
|
solution_id = self.api_client.add_solution(f"protocol_{run_time}", self.matrix_id, self.steps_todo_list)
|
||||||
print(f"PRCXI9300Backend created solution with ID: {solution_id}")
|
print(f"PRCXI9300Backend created solution with ID: {solution_id}")
|
||||||
self.api_client.load_solution(solution_id)
|
self.api_client.load_solution(solution_id)
|
||||||
print(json.dumps(self.steps_todo_list, indent=2))
|
print(json.dumps(self.steps_todo_list, indent=2))
|
||||||
@@ -1234,9 +985,6 @@ class PRCXI9300Backend(LiquidHandlerBackend):
|
|||||||
else:
|
else:
|
||||||
await asyncio.sleep(1)
|
await asyncio.sleep(1)
|
||||||
print("PRCXI9300 reset successfully.")
|
print("PRCXI9300 reset successfully.")
|
||||||
|
|
||||||
self.api_client.update_clamp_jaw_position(self.matrix_id, self.plate_positions)
|
|
||||||
|
|
||||||
except ConnectionRefusedError as e:
|
except ConnectionRefusedError as e:
|
||||||
raise RuntimeError(
|
raise RuntimeError(
|
||||||
f"Failed to connect to PRCXI9300 API at {self.host}:{self.port}. "
|
f"Failed to connect to PRCXI9300 API at {self.host}:{self.port}. "
|
||||||
@@ -1285,7 +1033,7 @@ class PRCXI9300Backend(LiquidHandlerBackend):
|
|||||||
PlateNo = plate_indexes[0] + 1
|
PlateNo = plate_indexes[0] + 1
|
||||||
hole_col = tip_columns[0] + 1
|
hole_col = tip_columns[0] + 1
|
||||||
hole_row = 1
|
hole_row = 1
|
||||||
if self.num_channels != 8:
|
if self._num_channels == 1:
|
||||||
hole_row = tipspot_index % 8 + 1
|
hole_row = tipspot_index % 8 + 1
|
||||||
|
|
||||||
step = self.api_client.Load(
|
step = self.api_client.Load(
|
||||||
@@ -1298,7 +1046,7 @@ class PRCXI9300Backend(LiquidHandlerBackend):
|
|||||||
blending_times=0,
|
blending_times=0,
|
||||||
balance_height=0,
|
balance_height=0,
|
||||||
plate_or_hole=f"H{hole_col}-8,T{PlateNo}",
|
plate_or_hole=f"H{hole_col}-8,T{PlateNo}",
|
||||||
hole_numbers=f"{(hole_col - 1) * 8 + hole_row}" if self._num_channels != 8 else "1,2,3,4,5",
|
hole_numbers=f"{(hole_col - 1) * 8 + hole_row}" if self._num_channels == 1 else "1,2,3,4,5",
|
||||||
)
|
)
|
||||||
self.steps_todo_list.append(step)
|
self.steps_todo_list.append(step)
|
||||||
|
|
||||||
@@ -1359,7 +1107,7 @@ class PRCXI9300Backend(LiquidHandlerBackend):
|
|||||||
PlateNo = plate_indexes[0] + 1
|
PlateNo = plate_indexes[0] + 1
|
||||||
hole_col = tip_columns[0] + 1
|
hole_col = tip_columns[0] + 1
|
||||||
|
|
||||||
if self.num_channels != 8:
|
if self.channel_num == 1:
|
||||||
hole_row = tipspot_index % 8 + 1
|
hole_row = tipspot_index % 8 + 1
|
||||||
|
|
||||||
step = self.api_client.UnLoad(
|
step = self.api_client.UnLoad(
|
||||||
@@ -1411,7 +1159,7 @@ class PRCXI9300Backend(LiquidHandlerBackend):
|
|||||||
PlateNo = plate_indexes[0] + 1
|
PlateNo = plate_indexes[0] + 1
|
||||||
hole_col = tip_columns[0] + 1
|
hole_col = tip_columns[0] + 1
|
||||||
hole_row = 1
|
hole_row = 1
|
||||||
if self.num_channels != 8:
|
if self.num_channels == 1:
|
||||||
hole_row = tipspot_index % 8 + 1
|
hole_row = tipspot_index % 8 + 1
|
||||||
|
|
||||||
assert mix_time > 0
|
assert mix_time > 0
|
||||||
@@ -1468,7 +1216,7 @@ class PRCXI9300Backend(LiquidHandlerBackend):
|
|||||||
PlateNo = plate_indexes[0] + 1
|
PlateNo = plate_indexes[0] + 1
|
||||||
hole_col = tip_columns[0] + 1
|
hole_col = tip_columns[0] + 1
|
||||||
hole_row = 1
|
hole_row = 1
|
||||||
if self.num_channels != 8:
|
if self.num_channels == 1:
|
||||||
hole_row = tipspot_index % 8 + 1
|
hole_row = tipspot_index % 8 + 1
|
||||||
|
|
||||||
step = self.api_client.Imbibing(
|
step = self.api_client.Imbibing(
|
||||||
@@ -1526,7 +1274,7 @@ class PRCXI9300Backend(LiquidHandlerBackend):
|
|||||||
hole_col = tip_columns[0] + 1
|
hole_col = tip_columns[0] + 1
|
||||||
|
|
||||||
hole_row = 1
|
hole_row = 1
|
||||||
if self.num_channels != 8:
|
if self.num_channels == 1:
|
||||||
hole_row = tipspot_index % 8 + 1
|
hole_row = tipspot_index % 8 + 1
|
||||||
|
|
||||||
step = self.api_client.Tapping(
|
step = self.api_client.Tapping(
|
||||||
@@ -1652,10 +1400,10 @@ class PRCXI9300Api:
|
|||||||
start = False
|
start = False
|
||||||
while not success:
|
while not success:
|
||||||
status = self.step_state_list()
|
status = self.step_state_list()
|
||||||
if status is None:
|
|
||||||
break
|
|
||||||
if len(status) == 1:
|
if len(status) == 1:
|
||||||
start = True
|
start = True
|
||||||
|
if status is None:
|
||||||
|
break
|
||||||
if len(status) == 0:
|
if len(status) == 0:
|
||||||
break
|
break
|
||||||
if status[-1]["State"] == 2 and start:
|
if status[-1]["State"] == 2 and start:
|
||||||
@@ -1735,13 +1483,6 @@ class PRCXI9300Api:
|
|||||||
"""GetWorkTabletMatrixById"""
|
"""GetWorkTabletMatrixById"""
|
||||||
return self.call("IMatrix", "GetWorkTabletMatrixById", [matrix_id])
|
return self.call("IMatrix", "GetWorkTabletMatrixById", [matrix_id])
|
||||||
|
|
||||||
def update_clamp_jaw_position(self, target_matrix_id: str, plate_positions: List[Dict[str, Any]]):
|
|
||||||
position_params = {
|
|
||||||
"MatrixId": target_matrix_id,
|
|
||||||
"WorkTablets": plate_positions
|
|
||||||
}
|
|
||||||
return self.call("IMatrix", "UpdateClampJawPosition", [position_params])
|
|
||||||
|
|
||||||
def add_WorkTablet_Matrix(self, matrix: MatrixInfo):
|
def add_WorkTablet_Matrix(self, matrix: MatrixInfo):
|
||||||
return self.call("IMatrix", "AddWorkTabletMatrix2" if self.is_9320 else "AddWorkTabletMatrix", [matrix])
|
return self.call("IMatrix", "AddWorkTabletMatrix2" if self.is_9320 else "AddWorkTabletMatrix", [matrix])
|
||||||
|
|
||||||
@@ -2015,82 +1756,82 @@ class DefaultLayout:
|
|||||||
{
|
{
|
||||||
"Number": 1,
|
"Number": 1,
|
||||||
"Code": "T1",
|
"Code": "T1",
|
||||||
"Material": {"uuid": "57b1e4711e9e4a32b529f3132fc5931f"},
|
"Material": {"uuid": "57b1e4711e9e4a32b529f3132fc5931f", "materialEnum": 0},
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"Number": 2,
|
"Number": 2,
|
||||||
"Code": "T2",
|
"Code": "T2",
|
||||||
"Material": {"uuid": "57b1e4711e9e4a32b529f3132fc5931f"},
|
"Material": {"uuid": "57b1e4711e9e4a32b529f3132fc5931f", "materialEnum": 0},
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"Number": 3,
|
"Number": 3,
|
||||||
"Code": "T3",
|
"Code": "T3",
|
||||||
"Material": {"uuid": "57b1e4711e9e4a32b529f3132fc5931f"},
|
"Material": {"uuid": "57b1e4711e9e4a32b529f3132fc5931f", "materialEnum": 0},
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"Number": 4,
|
"Number": 4,
|
||||||
"Code": "T4",
|
"Code": "T4",
|
||||||
"Material": {"uuid": "57b1e4711e9e4a32b529f3132fc5931f"},
|
"Material": {"uuid": "57b1e4711e9e4a32b529f3132fc5931f", "materialEnum": 0},
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"Number": 5,
|
"Number": 5,
|
||||||
"Code": "T5",
|
"Code": "T5",
|
||||||
"Material": {"uuid": "57b1e4711e9e4a32b529f3132fc5931f"},
|
"Material": {"uuid": "57b1e4711e9e4a32b529f3132fc5931f", "materialEnum": 0},
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"Number": 6,
|
"Number": 6,
|
||||||
"Code": "T6",
|
"Code": "T6",
|
||||||
"Material": {"uuid": "57b1e4711e9e4a32b529f3132fc5931f"},
|
"Material": {"uuid": "57b1e4711e9e4a32b529f3132fc5931f", "materialEnum": 0},
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"Number": 7,
|
"Number": 7,
|
||||||
"Code": "T7",
|
"Code": "T7",
|
||||||
"Material": {"uuid": "57b1e4711e9e4a32b529f3132fc5931f"},
|
"Material": {"uuid": "57b1e4711e9e4a32b529f3132fc5931f", "materialEnum": 0},
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"Number": 8,
|
"Number": 8,
|
||||||
"Code": "T8",
|
"Code": "T8",
|
||||||
"Material": {"uuid": "57b1e4711e9e4a32b529f3132fc5931f"},
|
"Material": {"uuid": "57b1e4711e9e4a32b529f3132fc5931f", "materialEnum": 0},
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"Number": 9,
|
"Number": 9,
|
||||||
"Code": "T9",
|
"Code": "T9",
|
||||||
"Material": {"uuid": "57b1e4711e9e4a32b529f3132fc5931f"},
|
"Material": {"uuid": "57b1e4711e9e4a32b529f3132fc5931f", "materialEnum": 0},
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"Number": 10,
|
"Number": 10,
|
||||||
"Code": "T10",
|
"Code": "T10",
|
||||||
"Material": {"uuid": "57b1e4711e9e4a32b529f3132fc5931f"},
|
"Material": {"uuid": "57b1e4711e9e4a32b529f3132fc5931f", "materialEnum": 0},
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"Number": 11,
|
"Number": 11,
|
||||||
"Code": "T11",
|
"Code": "T11",
|
||||||
"Material": {"uuid": "57b1e4711e9e4a32b529f3132fc5931f"},
|
"Material": {"uuid": "57b1e4711e9e4a32b529f3132fc5931f", "materialEnum": 0},
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"Number": 12,
|
"Number": 12,
|
||||||
"Code": "T12",
|
"Code": "T12",
|
||||||
"Material": {"uuid": "730067cf07ae43849ddf4034299030e9"},
|
"Material": {"uuid": "730067cf07ae43849ddf4034299030e9", "materialEnum": 0},
|
||||||
}, # 这个设置成废液槽,用储液槽表示
|
}, # 这个设置成废液槽,用储液槽表示
|
||||||
{
|
{
|
||||||
"Number": 13,
|
"Number": 13,
|
||||||
"Code": "T13",
|
"Code": "T13",
|
||||||
"Material": {"uuid": "57b1e4711e9e4a32b529f3132fc5931f"},
|
"Material": {"uuid": "57b1e4711e9e4a32b529f3132fc5931f", "materialEnum": 0},
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"Number": 14,
|
"Number": 14,
|
||||||
"Code": "T14",
|
"Code": "T14",
|
||||||
"Material": {"uuid": "57b1e4711e9e4a32b529f3132fc5931f"},
|
"Material": {"uuid": "57b1e4711e9e4a32b529f3132fc5931f", "materialEnum": 0},
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"Number": 15,
|
"Number": 15,
|
||||||
"Code": "T15",
|
"Code": "T15",
|
||||||
"Material": {"uuid": "57b1e4711e9e4a32b529f3132fc5931f"},
|
"Material": {"uuid": "57b1e4711e9e4a32b529f3132fc5931f", "materialEnum": 0},
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"Number": 16,
|
"Number": 16,
|
||||||
"Code": "T16",
|
"Code": "T16",
|
||||||
"Material": {"uuid": "730067cf07ae43849ddf4034299030e9"},
|
"Material": {"uuid": "730067cf07ae43849ddf4034299030e9", "materialEnum": 0},
|
||||||
}, # 这个设置成垃圾桶,用储液槽表示
|
}, # 这个设置成垃圾桶,用储液槽表示
|
||||||
],
|
],
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -4019,8 +4019,7 @@ liquid_handler:
|
|||||||
mix_liquid_height: 0.0
|
mix_liquid_height: 0.0
|
||||||
mix_rate: 0
|
mix_rate: 0
|
||||||
mix_stage: ''
|
mix_stage: ''
|
||||||
mix_times:
|
mix_times: 0
|
||||||
- 0
|
|
||||||
mix_vol: 0
|
mix_vol: 0
|
||||||
none_keys:
|
none_keys:
|
||||||
- ''
|
- ''
|
||||||
@@ -4176,11 +4175,9 @@ liquid_handler:
|
|||||||
mix_stage:
|
mix_stage:
|
||||||
type: string
|
type: string
|
||||||
mix_times:
|
mix_times:
|
||||||
items:
|
maximum: 2147483647
|
||||||
maximum: 2147483647
|
minimum: -2147483648
|
||||||
minimum: -2147483648
|
type: integer
|
||||||
type: integer
|
|
||||||
type: array
|
|
||||||
mix_vol:
|
mix_vol:
|
||||||
maximum: 2147483647
|
maximum: 2147483647
|
||||||
minimum: -2147483648
|
minimum: -2147483648
|
||||||
@@ -5043,8 +5040,7 @@ liquid_handler.biomek:
|
|||||||
mix_liquid_height: 0.0
|
mix_liquid_height: 0.0
|
||||||
mix_rate: 0
|
mix_rate: 0
|
||||||
mix_stage: ''
|
mix_stage: ''
|
||||||
mix_times:
|
mix_times: 0
|
||||||
- 0
|
|
||||||
mix_vol: 0
|
mix_vol: 0
|
||||||
none_keys:
|
none_keys:
|
||||||
- ''
|
- ''
|
||||||
@@ -5187,11 +5183,9 @@ liquid_handler.biomek:
|
|||||||
mix_stage:
|
mix_stage:
|
||||||
type: string
|
type: string
|
||||||
mix_times:
|
mix_times:
|
||||||
items:
|
maximum: 2147483647
|
||||||
maximum: 2147483647
|
minimum: -2147483648
|
||||||
minimum: -2147483648
|
type: integer
|
||||||
type: integer
|
|
||||||
type: array
|
|
||||||
mix_vol:
|
mix_vol:
|
||||||
maximum: 2147483647
|
maximum: 2147483647
|
||||||
minimum: -2147483648
|
minimum: -2147483648
|
||||||
@@ -9284,13 +9278,7 @@ liquid_handler.prcxi:
|
|||||||
z: 0.0
|
z: 0.0
|
||||||
sample_id: ''
|
sample_id: ''
|
||||||
type: ''
|
type: ''
|
||||||
handles:
|
handles: {}
|
||||||
input:
|
|
||||||
- data_key: wells
|
|
||||||
data_source: handle
|
|
||||||
data_type: resource
|
|
||||||
handler_key: input_wells
|
|
||||||
label: InputWells
|
|
||||||
placeholder_keys:
|
placeholder_keys:
|
||||||
wells: unilabos_resources
|
wells: unilabos_resources
|
||||||
result: {}
|
result: {}
|
||||||
@@ -9677,8 +9665,7 @@ liquid_handler.prcxi:
|
|||||||
mix_liquid_height: 0.0
|
mix_liquid_height: 0.0
|
||||||
mix_rate: 0
|
mix_rate: 0
|
||||||
mix_stage: ''
|
mix_stage: ''
|
||||||
mix_times:
|
mix_times: 0
|
||||||
- 0
|
|
||||||
mix_vol: 0
|
mix_vol: 0
|
||||||
none_keys:
|
none_keys:
|
||||||
- ''
|
- ''
|
||||||
@@ -9750,34 +9737,7 @@ liquid_handler.prcxi:
|
|||||||
touch_tip: false
|
touch_tip: false
|
||||||
use_channels:
|
use_channels:
|
||||||
- 0
|
- 0
|
||||||
handles:
|
handles: {}
|
||||||
input:
|
|
||||||
- data_key: sources
|
|
||||||
data_source: handle
|
|
||||||
data_type: resource
|
|
||||||
handler_key: sources_identifier
|
|
||||||
label: 待移动液体
|
|
||||||
- data_key: targets
|
|
||||||
data_source: handle
|
|
||||||
data_type: resource
|
|
||||||
handler_key: targets_identifier
|
|
||||||
label: 转移目标
|
|
||||||
- data_key: tip_rack
|
|
||||||
data_source: handle
|
|
||||||
data_type: resource
|
|
||||||
handler_key: tip_rack_identifier
|
|
||||||
label: 墙头盒
|
|
||||||
output:
|
|
||||||
- data_key: liquid
|
|
||||||
data_source: handle
|
|
||||||
data_type: resource
|
|
||||||
handler_key: sources_out
|
|
||||||
label: sources
|
|
||||||
- data_key: liquid
|
|
||||||
data_source: executor
|
|
||||||
data_type: resource
|
|
||||||
handler_key: targets_out
|
|
||||||
label: targets
|
|
||||||
placeholder_keys:
|
placeholder_keys:
|
||||||
sources: unilabos_resources
|
sources: unilabos_resources
|
||||||
targets: unilabos_resources
|
targets: unilabos_resources
|
||||||
@@ -9834,11 +9794,9 @@ liquid_handler.prcxi:
|
|||||||
mix_stage:
|
mix_stage:
|
||||||
type: string
|
type: string
|
||||||
mix_times:
|
mix_times:
|
||||||
items:
|
maximum: 2147483647
|
||||||
maximum: 2147483647
|
minimum: -2147483648
|
||||||
minimum: -2147483648
|
type: integer
|
||||||
type: integer
|
|
||||||
type: array
|
|
||||||
mix_vol:
|
mix_vol:
|
||||||
maximum: 2147483647
|
maximum: 2147483647
|
||||||
minimum: -2147483648
|
minimum: -2147483648
|
||||||
|
|||||||
@@ -5792,381 +5792,3 @@ virtual_vacuum_pump:
|
|||||||
- status
|
- status
|
||||||
type: object
|
type: object
|
||||||
version: 1.0.0
|
version: 1.0.0
|
||||||
virtual_workbench:
|
|
||||||
category:
|
|
||||||
- virtual_device
|
|
||||||
class:
|
|
||||||
action_value_mappings:
|
|
||||||
auto-move_to_heating_station:
|
|
||||||
feedback: {}
|
|
||||||
goal: {}
|
|
||||||
goal_default:
|
|
||||||
material_number: null
|
|
||||||
handles:
|
|
||||||
input:
|
|
||||||
- data_key: material_number
|
|
||||||
data_source: handle
|
|
||||||
data_type: workbench_material
|
|
||||||
handler_key: material_input
|
|
||||||
label: 物料编号
|
|
||||||
output:
|
|
||||||
- data_key: station_id
|
|
||||||
data_source: executor
|
|
||||||
data_type: workbench_station
|
|
||||||
handler_key: heating_station_output
|
|
||||||
label: 加热台ID
|
|
||||||
- data_key: material_number
|
|
||||||
data_source: executor
|
|
||||||
data_type: workbench_material
|
|
||||||
handler_key: material_number_output
|
|
||||||
label: 物料编号
|
|
||||||
placeholder_keys: {}
|
|
||||||
result: {}
|
|
||||||
schema:
|
|
||||||
description: 将物料从An位置移动到空闲加热台,返回分配的加热台ID
|
|
||||||
properties:
|
|
||||||
feedback: {}
|
|
||||||
goal:
|
|
||||||
properties:
|
|
||||||
material_number:
|
|
||||||
description: 物料编号,1-5,物料ID自动生成为A{n}
|
|
||||||
type: integer
|
|
||||||
required:
|
|
||||||
- material_number
|
|
||||||
type: object
|
|
||||||
result:
|
|
||||||
description: move_to_heating_station 返回类型
|
|
||||||
properties:
|
|
||||||
material_id:
|
|
||||||
title: Material Id
|
|
||||||
type: string
|
|
||||||
material_number:
|
|
||||||
title: Material Number
|
|
||||||
type: integer
|
|
||||||
message:
|
|
||||||
title: Message
|
|
||||||
type: string
|
|
||||||
station_id:
|
|
||||||
description: 分配的加热台ID
|
|
||||||
title: Station Id
|
|
||||||
type: integer
|
|
||||||
success:
|
|
||||||
title: Success
|
|
||||||
type: boolean
|
|
||||||
required:
|
|
||||||
- success
|
|
||||||
- station_id
|
|
||||||
- material_id
|
|
||||||
- material_number
|
|
||||||
- message
|
|
||||||
title: MoveToHeatingStationResult
|
|
||||||
type: object
|
|
||||||
required:
|
|
||||||
- goal
|
|
||||||
title: move_to_heating_station参数
|
|
||||||
type: object
|
|
||||||
type: UniLabJsonCommand
|
|
||||||
auto-move_to_output:
|
|
||||||
feedback: {}
|
|
||||||
goal: {}
|
|
||||||
goal_default:
|
|
||||||
material_number: null
|
|
||||||
station_id: null
|
|
||||||
handles:
|
|
||||||
input:
|
|
||||||
- data_key: station_id
|
|
||||||
data_source: handle
|
|
||||||
data_type: workbench_station
|
|
||||||
handler_key: output_station_input
|
|
||||||
label: 加热台ID
|
|
||||||
- data_key: material_number
|
|
||||||
data_source: handle
|
|
||||||
data_type: workbench_material
|
|
||||||
handler_key: output_material_input
|
|
||||||
label: 物料编号
|
|
||||||
placeholder_keys: {}
|
|
||||||
result: {}
|
|
||||||
schema:
|
|
||||||
description: 将物料从加热台移动到输出位置Cn
|
|
||||||
properties:
|
|
||||||
feedback: {}
|
|
||||||
goal:
|
|
||||||
properties:
|
|
||||||
material_number:
|
|
||||||
description: 物料编号,用于确定输出位置Cn
|
|
||||||
type: integer
|
|
||||||
station_id:
|
|
||||||
description: 加热台ID,1-3,从上一节点传入
|
|
||||||
type: integer
|
|
||||||
required:
|
|
||||||
- station_id
|
|
||||||
- material_number
|
|
||||||
type: object
|
|
||||||
result:
|
|
||||||
description: move_to_output 返回类型
|
|
||||||
properties:
|
|
||||||
material_id:
|
|
||||||
title: Material Id
|
|
||||||
type: string
|
|
||||||
station_id:
|
|
||||||
title: Station Id
|
|
||||||
type: integer
|
|
||||||
success:
|
|
||||||
title: Success
|
|
||||||
type: boolean
|
|
||||||
required:
|
|
||||||
- success
|
|
||||||
- station_id
|
|
||||||
- material_id
|
|
||||||
title: MoveToOutputResult
|
|
||||||
type: object
|
|
||||||
required:
|
|
||||||
- goal
|
|
||||||
title: move_to_output参数
|
|
||||||
type: object
|
|
||||||
type: UniLabJsonCommand
|
|
||||||
auto-prepare_materials:
|
|
||||||
feedback: {}
|
|
||||||
goal: {}
|
|
||||||
goal_default:
|
|
||||||
count: 5
|
|
||||||
handles:
|
|
||||||
output:
|
|
||||||
- data_key: material_1
|
|
||||||
data_source: executor
|
|
||||||
data_type: workbench_material
|
|
||||||
handler_key: channel_1
|
|
||||||
label: 实验1
|
|
||||||
- data_key: material_2
|
|
||||||
data_source: executor
|
|
||||||
data_type: workbench_material
|
|
||||||
handler_key: channel_2
|
|
||||||
label: 实验2
|
|
||||||
- data_key: material_3
|
|
||||||
data_source: executor
|
|
||||||
data_type: workbench_material
|
|
||||||
handler_key: channel_3
|
|
||||||
label: 实验3
|
|
||||||
- data_key: material_4
|
|
||||||
data_source: executor
|
|
||||||
data_type: workbench_material
|
|
||||||
handler_key: channel_4
|
|
||||||
label: 实验4
|
|
||||||
- data_key: material_5
|
|
||||||
data_source: executor
|
|
||||||
data_type: workbench_material
|
|
||||||
handler_key: channel_5
|
|
||||||
label: 实验5
|
|
||||||
placeholder_keys: {}
|
|
||||||
result: {}
|
|
||||||
schema:
|
|
||||||
description: 批量准备物料 - 虚拟起始节点,生成A1-A5物料,输出5个handle供后续节点使用
|
|
||||||
properties:
|
|
||||||
feedback: {}
|
|
||||||
goal:
|
|
||||||
properties:
|
|
||||||
count:
|
|
||||||
default: 5
|
|
||||||
description: 待生成的物料数量,默认5 (生成 A1-A5)
|
|
||||||
type: integer
|
|
||||||
required: []
|
|
||||||
type: object
|
|
||||||
result:
|
|
||||||
description: prepare_materials 返回类型 - 批量准备物料
|
|
||||||
properties:
|
|
||||||
count:
|
|
||||||
title: Count
|
|
||||||
type: integer
|
|
||||||
material_1:
|
|
||||||
title: Material 1
|
|
||||||
type: integer
|
|
||||||
material_2:
|
|
||||||
title: Material 2
|
|
||||||
type: integer
|
|
||||||
material_3:
|
|
||||||
title: Material 3
|
|
||||||
type: integer
|
|
||||||
material_4:
|
|
||||||
title: Material 4
|
|
||||||
type: integer
|
|
||||||
material_5:
|
|
||||||
title: Material 5
|
|
||||||
type: integer
|
|
||||||
message:
|
|
||||||
title: Message
|
|
||||||
type: string
|
|
||||||
success:
|
|
||||||
title: Success
|
|
||||||
type: boolean
|
|
||||||
required:
|
|
||||||
- success
|
|
||||||
- count
|
|
||||||
- material_1
|
|
||||||
- material_2
|
|
||||||
- material_3
|
|
||||||
- material_4
|
|
||||||
- material_5
|
|
||||||
- message
|
|
||||||
title: PrepareMaterialsResult
|
|
||||||
type: object
|
|
||||||
required:
|
|
||||||
- goal
|
|
||||||
title: prepare_materials参数
|
|
||||||
type: object
|
|
||||||
type: UniLabJsonCommand
|
|
||||||
auto-start_heating:
|
|
||||||
feedback: {}
|
|
||||||
goal: {}
|
|
||||||
goal_default:
|
|
||||||
material_number: null
|
|
||||||
station_id: null
|
|
||||||
handles:
|
|
||||||
input:
|
|
||||||
- data_key: station_id
|
|
||||||
data_source: handle
|
|
||||||
data_type: workbench_station
|
|
||||||
handler_key: station_id_input
|
|
||||||
label: 加热台ID
|
|
||||||
- data_key: material_number
|
|
||||||
data_source: handle
|
|
||||||
data_type: workbench_material
|
|
||||||
handler_key: material_number_input
|
|
||||||
label: 物料编号
|
|
||||||
output:
|
|
||||||
- data_key: station_id
|
|
||||||
data_source: executor
|
|
||||||
data_type: workbench_station
|
|
||||||
handler_key: heating_done_station
|
|
||||||
label: 加热完成-加热台ID
|
|
||||||
- data_key: material_number
|
|
||||||
data_source: executor
|
|
||||||
data_type: workbench_material
|
|
||||||
handler_key: heating_done_material
|
|
||||||
label: 加热完成-物料编号
|
|
||||||
placeholder_keys: {}
|
|
||||||
result: {}
|
|
||||||
schema:
|
|
||||||
description: 启动指定加热台的加热程序
|
|
||||||
properties:
|
|
||||||
feedback: {}
|
|
||||||
goal:
|
|
||||||
properties:
|
|
||||||
material_number:
|
|
||||||
description: 物料编号,从上一节点传入
|
|
||||||
type: integer
|
|
||||||
station_id:
|
|
||||||
description: 加热台ID,1-3,从上一节点传入
|
|
||||||
type: integer
|
|
||||||
required:
|
|
||||||
- station_id
|
|
||||||
- material_number
|
|
||||||
type: object
|
|
||||||
result:
|
|
||||||
description: start_heating 返回类型
|
|
||||||
properties:
|
|
||||||
material_id:
|
|
||||||
title: Material Id
|
|
||||||
type: string
|
|
||||||
material_number:
|
|
||||||
title: Material Number
|
|
||||||
type: integer
|
|
||||||
message:
|
|
||||||
title: Message
|
|
||||||
type: string
|
|
||||||
station_id:
|
|
||||||
title: Station Id
|
|
||||||
type: integer
|
|
||||||
success:
|
|
||||||
title: Success
|
|
||||||
type: boolean
|
|
||||||
required:
|
|
||||||
- success
|
|
||||||
- station_id
|
|
||||||
- material_id
|
|
||||||
- material_number
|
|
||||||
- message
|
|
||||||
title: StartHeatingResult
|
|
||||||
type: object
|
|
||||||
required:
|
|
||||||
- goal
|
|
||||||
title: start_heating参数
|
|
||||||
type: object
|
|
||||||
type: UniLabJsonCommand
|
|
||||||
module: unilabos.devices.virtual.workbench:VirtualWorkbench
|
|
||||||
status_types:
|
|
||||||
active_tasks_count: int
|
|
||||||
arm_current_task: str
|
|
||||||
arm_state: str
|
|
||||||
heating_station_1_material: str
|
|
||||||
heating_station_1_progress: float
|
|
||||||
heating_station_1_state: str
|
|
||||||
heating_station_2_material: str
|
|
||||||
heating_station_2_progress: float
|
|
||||||
heating_station_2_state: str
|
|
||||||
heating_station_3_material: str
|
|
||||||
heating_station_3_progress: float
|
|
||||||
heating_station_3_state: str
|
|
||||||
message: str
|
|
||||||
status: str
|
|
||||||
type: python
|
|
||||||
config_info: []
|
|
||||||
description: Virtual Workbench with 1 robotic arm and 3 heating stations for concurrent
|
|
||||||
material processing
|
|
||||||
handles: []
|
|
||||||
icon: ''
|
|
||||||
init_param_schema:
|
|
||||||
config:
|
|
||||||
properties:
|
|
||||||
config:
|
|
||||||
type: string
|
|
||||||
device_id:
|
|
||||||
type: string
|
|
||||||
required: []
|
|
||||||
type: object
|
|
||||||
data:
|
|
||||||
properties:
|
|
||||||
active_tasks_count:
|
|
||||||
type: integer
|
|
||||||
arm_current_task:
|
|
||||||
type: string
|
|
||||||
arm_state:
|
|
||||||
type: string
|
|
||||||
heating_station_1_material:
|
|
||||||
type: string
|
|
||||||
heating_station_1_progress:
|
|
||||||
type: number
|
|
||||||
heating_station_1_state:
|
|
||||||
type: string
|
|
||||||
heating_station_2_material:
|
|
||||||
type: string
|
|
||||||
heating_station_2_progress:
|
|
||||||
type: number
|
|
||||||
heating_station_2_state:
|
|
||||||
type: string
|
|
||||||
heating_station_3_material:
|
|
||||||
type: string
|
|
||||||
heating_station_3_progress:
|
|
||||||
type: number
|
|
||||||
heating_station_3_state:
|
|
||||||
type: string
|
|
||||||
message:
|
|
||||||
type: string
|
|
||||||
status:
|
|
||||||
type: string
|
|
||||||
required:
|
|
||||||
- status
|
|
||||||
- arm_state
|
|
||||||
- arm_current_task
|
|
||||||
- heating_station_1_state
|
|
||||||
- heating_station_1_material
|
|
||||||
- heating_station_1_progress
|
|
||||||
- heating_station_2_state
|
|
||||||
- heating_station_2_material
|
|
||||||
- heating_station_2_progress
|
|
||||||
- heating_station_3_state
|
|
||||||
- heating_station_3_material
|
|
||||||
- heating_station_3_progress
|
|
||||||
- active_tasks_count
|
|
||||||
- message
|
|
||||||
type: object
|
|
||||||
version: 1.0.0
|
|
||||||
|
|||||||
@@ -124,32 +124,17 @@ class Registry:
|
|||||||
"output": [
|
"output": [
|
||||||
{
|
{
|
||||||
"handler_key": "labware",
|
"handler_key": "labware",
|
||||||
"data_type": "resource",
|
|
||||||
"label": "Labware",
|
"label": "Labware",
|
||||||
"data_source": "executor",
|
|
||||||
"data_key": "created_resource_tree.@flatten",
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"handler_key": "liquid_slots",
|
|
||||||
"data_type": "resource",
|
"data_type": "resource",
|
||||||
"label": "LiquidSlots",
|
"data_source": "handle",
|
||||||
"data_source": "executor",
|
"data_key": "liquid",
|
||||||
"data_key": "liquid_input_resource_tree.@flatten",
|
}
|
||||||
},
|
|
||||||
{
|
|
||||||
"handler_key": "materials",
|
|
||||||
"data_type": "resource",
|
|
||||||
"label": "AllMaterials",
|
|
||||||
"data_source": "executor",
|
|
||||||
"data_key": "[created_resource_tree,liquid_input_resource_tree].@flatten.@flatten",
|
|
||||||
},
|
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
"placeholder_keys": {
|
"placeholder_keys": {
|
||||||
"res_id": "unilabos_resources", # 将当前实验室的全部物料id作为下拉框可选择
|
"res_id": "unilabos_resources", # 将当前实验室的全部物料id作为下拉框可选择
|
||||||
"device_id": "unilabos_devices", # 将当前实验室的全部设备id作为下拉框可选择
|
"device_id": "unilabos_devices", # 将当前实验室的全部设备id作为下拉框可选择
|
||||||
"parent": "unilabos_nodes", # 将当前实验室的设备/物料作为下拉框可选择
|
"parent": "unilabos_nodes", # 将当前实验室的设备/物料作为下拉框可选择
|
||||||
"class_name": "unilabos_class",
|
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
"test_latency": {
|
"test_latency": {
|
||||||
@@ -201,17 +186,7 @@ class Registry:
|
|||||||
"resources": "unilabos_resources",
|
"resources": "unilabos_resources",
|
||||||
},
|
},
|
||||||
"goal_default": {},
|
"goal_default": {},
|
||||||
"handles": {
|
"handles": {},
|
||||||
"input": [
|
|
||||||
{
|
|
||||||
"handler_key": "input_resources",
|
|
||||||
"data_type": "resource",
|
|
||||||
"label": "InputResources",
|
|
||||||
"data_source": "handle",
|
|
||||||
"data_key": "resources", # 不为空
|
|
||||||
},
|
|
||||||
]
|
|
||||||
},
|
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
@@ -247,7 +222,7 @@ class Registry:
|
|||||||
abs_path = Path(path).absolute()
|
abs_path = Path(path).absolute()
|
||||||
resource_path = abs_path / "resources"
|
resource_path = abs_path / "resources"
|
||||||
files = list(resource_path.glob("*/*.yaml"))
|
files = list(resource_path.glob("*/*.yaml"))
|
||||||
logger.trace(f"[UniLab Registry] load resources? {resource_path.exists()}, total: {len(files)}")
|
logger.debug(f"[UniLab Registry] resources: {resource_path.exists()}, total: {len(files)}")
|
||||||
current_resource_number = len(self.resource_type_registry) + 1
|
current_resource_number = len(self.resource_type_registry) + 1
|
||||||
for i, file in enumerate(files):
|
for i, file in enumerate(files):
|
||||||
with open(file, encoding="utf-8", mode="r") as f:
|
with open(file, encoding="utf-8", mode="r") as f:
|
||||||
|
|||||||
@@ -27,7 +27,7 @@ class RegularContainer(Container):
|
|||||||
def get_regular_container(name="container"):
|
def get_regular_container(name="container"):
|
||||||
r = RegularContainer(name=name)
|
r = RegularContainer(name=name)
|
||||||
r.category = "container"
|
r.category = "container"
|
||||||
return r
|
return RegularContainer(name=name)
|
||||||
|
|
||||||
#
|
#
|
||||||
# class RegularContainer(object):
|
# class RegularContainer(object):
|
||||||
|
|||||||
@@ -13,7 +13,7 @@ from unilabos.config.config import BasicConfig
|
|||||||
from unilabos.resources.container import RegularContainer
|
from unilabos.resources.container import RegularContainer
|
||||||
from unilabos.resources.itemized_carrier import ItemizedCarrier, BottleCarrier
|
from unilabos.resources.itemized_carrier import ItemizedCarrier, BottleCarrier
|
||||||
from unilabos.ros.msgs.message_converter import convert_to_ros_msg
|
from unilabos.ros.msgs.message_converter import convert_to_ros_msg
|
||||||
from unilabos.resources.resource_tracker import (
|
from unilabos.ros.nodes.resource_tracker import (
|
||||||
ResourceDictInstance,
|
ResourceDictInstance,
|
||||||
ResourceTreeSet,
|
ResourceTreeSet,
|
||||||
)
|
)
|
||||||
@@ -42,7 +42,7 @@ def canonicalize_nodes_data(
|
|||||||
Returns:
|
Returns:
|
||||||
ResourceTreeSet: 标准化后的资源树集合
|
ResourceTreeSet: 标准化后的资源树集合
|
||||||
"""
|
"""
|
||||||
print_status(f"{len(nodes)} Resources loaded", "info")
|
print_status(f"{len(nodes)} Resources loaded:", "info")
|
||||||
|
|
||||||
# 第一步:基本预处理(处理graphml的label字段)
|
# 第一步:基本预处理(处理graphml的label字段)
|
||||||
outer_host_node_id = None
|
outer_host_node_id = None
|
||||||
@@ -597,8 +597,6 @@ def resource_plr_to_ulab(resource_plr: "ResourcePLR", parent_name: str = None, w
|
|||||||
"tube": "tube",
|
"tube": "tube",
|
||||||
"bottle_carrier": "bottle_carrier",
|
"bottle_carrier": "bottle_carrier",
|
||||||
"plate_adapter": "plate_adapter",
|
"plate_adapter": "plate_adapter",
|
||||||
"electrode_sheet": "electrode_sheet",
|
|
||||||
"material_hole": "material_hole",
|
|
||||||
}
|
}
|
||||||
if source in replace_info:
|
if source in replace_info:
|
||||||
return replace_info[source]
|
return replace_info[source]
|
||||||
@@ -1153,7 +1151,11 @@ def initialize_resource(resource_config: dict, resource_type: Any = None) -> Uni
|
|||||||
if resource_class_config["type"] == "pylabrobot":
|
if resource_class_config["type"] == "pylabrobot":
|
||||||
resource_plr = RESOURCE(name=resource_config["name"])
|
resource_plr = RESOURCE(name=resource_config["name"])
|
||||||
if resource_type != ResourcePLR:
|
if resource_type != ResourcePLR:
|
||||||
tree_sets = ResourceTreeSet.from_plr_resources([resource_plr], known_newly_created=True)
|
tree_sets = ResourceTreeSet.from_plr_resources([resource_plr])
|
||||||
|
# r = resource_plr_to_ulab(resource_plr=resource_plr, parent_name=resource_config.get("parent", None))
|
||||||
|
# # r = resource_plr_to_ulab(resource_plr=resource_plr)
|
||||||
|
# if resource_config.get("position") is not None:
|
||||||
|
# r["position"] = resource_config["position"]
|
||||||
r = tree_sets.dump()
|
r = tree_sets.dump()
|
||||||
else:
|
else:
|
||||||
r = resource_plr
|
r = resource_plr
|
||||||
|
|||||||
@@ -79,7 +79,6 @@ class ItemizedCarrier(ResourcePLR):
|
|||||||
category: Optional[str] = "carrier",
|
category: Optional[str] = "carrier",
|
||||||
model: Optional[str] = None,
|
model: Optional[str] = None,
|
||||||
invisible_slots: Optional[str] = None,
|
invisible_slots: Optional[str] = None,
|
||||||
content_type: Optional[List[str]] = ["bottle", "container", "tube", "bottle_carrier", "tip_rack"],
|
|
||||||
):
|
):
|
||||||
super().__init__(
|
super().__init__(
|
||||||
name=name,
|
name=name,
|
||||||
@@ -93,7 +92,6 @@ class ItemizedCarrier(ResourcePLR):
|
|||||||
self.num_items_x, self.num_items_y, self.num_items_z = num_items_x, num_items_y, num_items_z
|
self.num_items_x, self.num_items_y, self.num_items_z = num_items_x, num_items_y, num_items_z
|
||||||
self.invisible_slots = [] if invisible_slots is None else invisible_slots
|
self.invisible_slots = [] if invisible_slots is None else invisible_slots
|
||||||
self.layout = "z-y" if self.num_items_z > 1 and self.num_items_x == 1 else "x-z" if self.num_items_z > 1 and self.num_items_y == 1 else "x-y"
|
self.layout = "z-y" if self.num_items_z > 1 and self.num_items_x == 1 else "x-z" if self.num_items_z > 1 and self.num_items_y == 1 else "x-y"
|
||||||
self.content_type = content_type
|
|
||||||
|
|
||||||
if isinstance(sites, dict):
|
if isinstance(sites, dict):
|
||||||
sites = sites or {}
|
sites = sites or {}
|
||||||
@@ -151,7 +149,6 @@ class ItemizedCarrier(ResourcePLR):
|
|||||||
|
|
||||||
if not reassign and self.sites[idx] is not None:
|
if not reassign and self.sites[idx] is not None:
|
||||||
raise ValueError(f"a site with index {idx} already exists")
|
raise ValueError(f"a site with index {idx} already exists")
|
||||||
location = list(self.child_locations.values())[idx]
|
|
||||||
super().assign_child_resource(resource, location=location, reassign=reassign)
|
super().assign_child_resource(resource, location=location, reassign=reassign)
|
||||||
self.sites[idx] = resource
|
self.sites[idx] = resource
|
||||||
|
|
||||||
@@ -421,7 +418,7 @@ class ItemizedCarrier(ResourcePLR):
|
|||||||
self[identifier] if isinstance(self[identifier], str) else None,
|
self[identifier] if isinstance(self[identifier], str) else None,
|
||||||
"position": {"x": location.x, "y": location.y, "z": location.z},
|
"position": {"x": location.x, "y": location.y, "z": location.z},
|
||||||
"size": self.child_size[identifier],
|
"size": self.child_size[identifier],
|
||||||
"content_type": self.content_type
|
"content_type": ["bottle", "container", "tube", "bottle_carrier", "tip_rack"]
|
||||||
} for identifier, location in self.child_locations.items()]
|
} for identifier, location in self.child_locations.items()]
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -5,7 +5,7 @@ from unilabos.ros.msgs.message_converter import (
|
|||||||
get_action_type,
|
get_action_type,
|
||||||
)
|
)
|
||||||
from unilabos.ros.nodes.base_device_node import init_wrapper, ROS2DeviceNode
|
from unilabos.ros.nodes.base_device_node import init_wrapper, ROS2DeviceNode
|
||||||
from unilabos.resources.resource_tracker import ResourceDictInstance
|
from unilabos.ros.nodes.resource_tracker import ResourceDictInstance
|
||||||
|
|
||||||
# 定义泛型类型变量
|
# 定义泛型类型变量
|
||||||
T = TypeVar("T")
|
T = TypeVar("T")
|
||||||
|
|||||||
@@ -1,9 +1,10 @@
|
|||||||
|
import copy
|
||||||
from typing import Optional
|
from typing import Optional
|
||||||
|
|
||||||
from unilabos.registry.registry import lab_registry
|
from unilabos.registry.registry import lab_registry
|
||||||
from unilabos.ros.device_node_wrapper import ros2_device_node
|
from unilabos.ros.device_node_wrapper import ros2_device_node
|
||||||
from unilabos.ros.nodes.base_device_node import ROS2DeviceNode, DeviceInitError
|
from unilabos.ros.nodes.base_device_node import ROS2DeviceNode, DeviceInitError
|
||||||
from unilabos.resources.resource_tracker import ResourceDictInstance
|
from unilabos.ros.nodes.resource_tracker import ResourceDictInstance
|
||||||
from unilabos.utils import logger
|
from unilabos.utils import logger
|
||||||
from unilabos.utils.exception import DeviceClassInvalid
|
from unilabos.utils.exception import DeviceClassInvalid
|
||||||
from unilabos.utils.import_manager import default_manager
|
from unilabos.utils.import_manager import default_manager
|
||||||
|
|||||||
@@ -1,5 +1,4 @@
|
|||||||
import json
|
import json
|
||||||
|
|
||||||
# from nt import device_encoding
|
# from nt import device_encoding
|
||||||
import threading
|
import threading
|
||||||
import time
|
import time
|
||||||
@@ -11,7 +10,7 @@ from unilabos_msgs.srv._serial_command import SerialCommand_Response
|
|||||||
|
|
||||||
from unilabos.app.register import register_devices_and_resources
|
from unilabos.app.register import register_devices_and_resources
|
||||||
from unilabos.ros.nodes.presets.resource_mesh_manager import ResourceMeshManager
|
from unilabos.ros.nodes.presets.resource_mesh_manager import ResourceMeshManager
|
||||||
from unilabos.resources.resource_tracker import DeviceNodeResourceTracker, ResourceTreeSet
|
from unilabos.ros.nodes.resource_tracker import DeviceNodeResourceTracker, ResourceTreeSet
|
||||||
from unilabos.devices.ros_dev.liquid_handler_joint_publisher import LiquidHandlerJointPublisher
|
from unilabos.devices.ros_dev.liquid_handler_joint_publisher import LiquidHandlerJointPublisher
|
||||||
from unilabos_msgs.srv import SerialCommand # type: ignore
|
from unilabos_msgs.srv import SerialCommand # type: ignore
|
||||||
from rclpy.executors import MultiThreadedExecutor
|
from rclpy.executors import MultiThreadedExecutor
|
||||||
@@ -56,11 +55,7 @@ def main(
|
|||||||
) -> None:
|
) -> None:
|
||||||
"""主函数"""
|
"""主函数"""
|
||||||
|
|
||||||
# Support restart - check if rclpy is already initialized
|
rclpy.init(args=rclpy_init_args)
|
||||||
if not rclpy.ok():
|
|
||||||
rclpy.init(args=rclpy_init_args)
|
|
||||||
else:
|
|
||||||
logger.info("[ROS] rclpy already initialized, reusing context")
|
|
||||||
executor = rclpy.__executor = MultiThreadedExecutor()
|
executor = rclpy.__executor = MultiThreadedExecutor()
|
||||||
# 创建主机节点
|
# 创建主机节点
|
||||||
host_node = HostNode(
|
host_node = HostNode(
|
||||||
@@ -93,7 +88,7 @@ def main(
|
|||||||
joint_republisher = JointRepublisher("joint_republisher", host_node.resource_tracker)
|
joint_republisher = JointRepublisher("joint_republisher", host_node.resource_tracker)
|
||||||
# lh_joint_pub = LiquidHandlerJointPublisher(
|
# lh_joint_pub = LiquidHandlerJointPublisher(
|
||||||
# resources_config=resources_list, resource_tracker=host_node.resource_tracker
|
# resources_config=resources_list, resource_tracker=host_node.resource_tracker
|
||||||
# )
|
# )
|
||||||
executor.add_node(resource_mesh_manager)
|
executor.add_node(resource_mesh_manager)
|
||||||
executor.add_node(joint_republisher)
|
executor.add_node(joint_republisher)
|
||||||
# executor.add_node(lh_joint_pub)
|
# executor.add_node(lh_joint_pub)
|
||||||
|
|||||||
@@ -159,14 +159,10 @@ _msg_converter: Dict[Type, Any] = {
|
|||||||
else Pose()
|
else Pose()
|
||||||
),
|
),
|
||||||
config=json.dumps(x.get("config", {})),
|
config=json.dumps(x.get("config", {})),
|
||||||
data=json.dumps(obtain_data_with_uuid(x)),
|
data=json.dumps(x.get("data", {})),
|
||||||
),
|
),
|
||||||
}
|
}
|
||||||
|
|
||||||
def obtain_data_with_uuid(x: dict):
|
|
||||||
data = x.get("data", {})
|
|
||||||
data["unilabos_uuid"] = x.get("uuid", None)
|
|
||||||
return data
|
|
||||||
|
|
||||||
def json_or_yaml_loads(data: str) -> Any:
|
def json_or_yaml_loads(data: str) -> Any:
|
||||||
try:
|
try:
|
||||||
@@ -361,14 +357,7 @@ def convert_to_ros_msg(ros_msg_type: Union[Type, Any], obj: Any) -> Any:
|
|||||||
if hasattr(ros_msg, key):
|
if hasattr(ros_msg, key):
|
||||||
attr = getattr(ros_msg, key)
|
attr = getattr(ros_msg, key)
|
||||||
if isinstance(attr, (float, int, str, bool)):
|
if isinstance(attr, (float, int, str, bool)):
|
||||||
# 处理list类型的值,取第一个元素或抛出错误
|
setattr(ros_msg, key, type(attr)(value))
|
||||||
if isinstance(value, list):
|
|
||||||
if len(value) > 0:
|
|
||||||
setattr(ros_msg, key, type(attr)(value[0]))
|
|
||||||
else:
|
|
||||||
setattr(ros_msg, key, type(attr)()) # 使用默认值
|
|
||||||
else:
|
|
||||||
setattr(ros_msg, key, type(attr)(value))
|
|
||||||
elif isinstance(attr, (list, tuple)) and isinstance(value, Iterable):
|
elif isinstance(attr, (list, tuple)) and isinstance(value, Iterable):
|
||||||
td = ros_msg.SLOT_TYPES[ind].value_type
|
td = ros_msg.SLOT_TYPES[ind].value_type
|
||||||
if isinstance(td, NamespacedType):
|
if isinstance(td, NamespacedType):
|
||||||
@@ -381,35 +370,9 @@ def convert_to_ros_msg(ros_msg_type: Union[Type, Any], obj: Any) -> Any:
|
|||||||
setattr(ros_msg, key, []) # FIXME
|
setattr(ros_msg, key, []) # FIXME
|
||||||
elif "array.array" in str(type(attr)):
|
elif "array.array" in str(type(attr)):
|
||||||
if attr.typecode == "f" or attr.typecode == "d":
|
if attr.typecode == "f" or attr.typecode == "d":
|
||||||
# 如果是单个值,转换为列表
|
|
||||||
if value is None:
|
|
||||||
value = []
|
|
||||||
elif not isinstance(value, Iterable) or isinstance(value, (str, bytes)):
|
|
||||||
value = [value]
|
|
||||||
setattr(ros_msg, key, [float(i) for i in value])
|
setattr(ros_msg, key, [float(i) for i in value])
|
||||||
else:
|
else:
|
||||||
# 对于整数数组,需要确保是序列且每个值在有效范围内
|
setattr(ros_msg, key, value)
|
||||||
if value is None:
|
|
||||||
value = []
|
|
||||||
elif not isinstance(value, Iterable) or isinstance(value, (str, bytes)):
|
|
||||||
# 如果是单个值,转换为列表
|
|
||||||
value = [value]
|
|
||||||
# 确保每个整数值在有效范围内(-2147483648 到 2147483647)
|
|
||||||
converted_value = []
|
|
||||||
for i in value:
|
|
||||||
if i is None:
|
|
||||||
continue # 跳过 None 值
|
|
||||||
if isinstance(i, (int, float)):
|
|
||||||
int_val = int(i)
|
|
||||||
# 确保在 int32 范围内
|
|
||||||
if int_val < -2147483648:
|
|
||||||
int_val = -2147483648
|
|
||||||
elif int_val > 2147483647:
|
|
||||||
int_val = 2147483647
|
|
||||||
converted_value.append(int_val)
|
|
||||||
else:
|
|
||||||
converted_value.append(i)
|
|
||||||
setattr(ros_msg, key, converted_value)
|
|
||||||
else:
|
else:
|
||||||
nested_ros_msg = convert_to_ros_msg(type(attr)(), value)
|
nested_ros_msg = convert_to_ros_msg(type(attr)(), value)
|
||||||
setattr(ros_msg, key, nested_ros_msg)
|
setattr(ros_msg, key, nested_ros_msg)
|
||||||
|
|||||||
@@ -1,3 +1,4 @@
|
|||||||
|
import copy
|
||||||
import inspect
|
import inspect
|
||||||
import io
|
import io
|
||||||
import json
|
import json
|
||||||
@@ -12,6 +13,7 @@ import asyncio
|
|||||||
|
|
||||||
import rclpy
|
import rclpy
|
||||||
import yaml
|
import yaml
|
||||||
|
from msgcenterpy import ROS2MessageInstance
|
||||||
from rclpy.node import Node
|
from rclpy.node import Node
|
||||||
from rclpy.action import ActionServer, ActionClient
|
from rclpy.action import ActionServer, ActionClient
|
||||||
from rclpy.action.server import ServerGoalHandle
|
from rclpy.action.server import ServerGoalHandle
|
||||||
@@ -20,13 +22,15 @@ from rclpy.callback_groups import ReentrantCallbackGroup
|
|||||||
from rclpy.service import Service
|
from rclpy.service import Service
|
||||||
from unilabos_msgs.action import SendCmd
|
from unilabos_msgs.action import SendCmd
|
||||||
from unilabos_msgs.srv._serial_command import SerialCommand_Request, SerialCommand_Response
|
from unilabos_msgs.srv._serial_command import SerialCommand_Request, SerialCommand_Response
|
||||||
|
|
||||||
from unilabos.config.config import BasicConfig
|
|
||||||
from unilabos.utils.decorator import get_topic_config, get_all_subscriptions
|
from unilabos.utils.decorator import get_topic_config, get_all_subscriptions
|
||||||
|
|
||||||
from unilabos.resources.container import RegularContainer
|
from unilabos.resources.container import RegularContainer
|
||||||
from unilabos.resources.graphio import (
|
from unilabos.resources.graphio import (
|
||||||
|
resource_ulab_to_plr,
|
||||||
initialize_resources,
|
initialize_resources,
|
||||||
|
dict_to_tree,
|
||||||
|
resource_plr_to_ulab,
|
||||||
|
tree_to_list,
|
||||||
)
|
)
|
||||||
from unilabos.resources.plr_additional_res_reg import register
|
from unilabos.resources.plr_additional_res_reg import register
|
||||||
from unilabos.ros.msgs.message_converter import (
|
from unilabos.ros.msgs.message_converter import (
|
||||||
@@ -43,7 +47,7 @@ from unilabos_msgs.srv import (
|
|||||||
) # type: ignore
|
) # type: ignore
|
||||||
from unilabos_msgs.msg import Resource # type: ignore
|
from unilabos_msgs.msg import Resource # type: ignore
|
||||||
|
|
||||||
from unilabos.resources.resource_tracker import (
|
from unilabos.ros.nodes.resource_tracker import (
|
||||||
DeviceNodeResourceTracker,
|
DeviceNodeResourceTracker,
|
||||||
ResourceTreeSet,
|
ResourceTreeSet,
|
||||||
ResourceTreeInstance,
|
ResourceTreeInstance,
|
||||||
@@ -359,6 +363,7 @@ class BaseROS2DeviceNode(Node, Generic[T]):
|
|||||||
return res
|
return res
|
||||||
|
|
||||||
async def append_resource(req: SerialCommand_Request, res: SerialCommand_Response):
|
async def append_resource(req: SerialCommand_Request, res: SerialCommand_Response):
|
||||||
|
from pylabrobot.resources.resource import Resource as ResourcePLR
|
||||||
from pylabrobot.resources.deck import Deck
|
from pylabrobot.resources.deck import Deck
|
||||||
from pylabrobot.resources import Coordinate
|
from pylabrobot.resources import Coordinate
|
||||||
from pylabrobot.resources import Plate
|
from pylabrobot.resources import Plate
|
||||||
@@ -392,12 +397,9 @@ class BaseROS2DeviceNode(Node, Generic[T]):
|
|||||||
parent_resource = self.resource_tracker.figure_resource(
|
parent_resource = self.resource_tracker.figure_resource(
|
||||||
{"name": bind_parent_id}
|
{"name": bind_parent_id}
|
||||||
)
|
)
|
||||||
for r in rts.root_nodes:
|
for r in rts.root_nodes:
|
||||||
# noinspection PyUnresolvedReferences
|
# noinspection PyUnresolvedReferences
|
||||||
r.res_content.parent_uuid = parent_resource.unilabos_uuid
|
r.res_content.parent_uuid = parent_resource.unilabos_uuid
|
||||||
else:
|
|
||||||
for r in rts.root_nodes:
|
|
||||||
r.res_content.parent_uuid = self.uuid
|
|
||||||
|
|
||||||
if len(LIQUID_INPUT_SLOT) and LIQUID_INPUT_SLOT[0] == -1 and len(rts.root_nodes) == 1 and isinstance(rts.root_nodes[0], RegularContainer):
|
if len(LIQUID_INPUT_SLOT) and LIQUID_INPUT_SLOT[0] == -1 and len(rts.root_nodes) == 1 and isinstance(rts.root_nodes[0], RegularContainer):
|
||||||
# noinspection PyTypeChecker
|
# noinspection PyTypeChecker
|
||||||
@@ -433,14 +435,11 @@ class BaseROS2DeviceNode(Node, Generic[T]):
|
|||||||
})
|
})
|
||||||
tree_response: SerialCommand.Response = await client.call_async(request)
|
tree_response: SerialCommand.Response = await client.call_async(request)
|
||||||
uuid_maps = json.loads(tree_response.response)
|
uuid_maps = json.loads(tree_response.response)
|
||||||
plr_instances = rts.to_plr_resources()
|
self.resource_tracker.loop_update_uuid(input_resources, uuid_maps)
|
||||||
for plr_instance in plr_instances:
|
|
||||||
self.resource_tracker.loop_update_uuid(plr_instance, uuid_maps)
|
|
||||||
rts: ResourceTreeSet = ResourceTreeSet.from_plr_resources(plr_instances)
|
|
||||||
self.lab_logger().info(f"Resource tree added. UUID mapping: {len(uuid_maps)} nodes")
|
self.lab_logger().info(f"Resource tree added. UUID mapping: {len(uuid_maps)} nodes")
|
||||||
final_response = {
|
final_response = {
|
||||||
"created_resource_tree": rts.dump(),
|
"created_resources": rts.dump(),
|
||||||
"liquid_input_resource_tree": [],
|
"liquid_input_resources": [],
|
||||||
}
|
}
|
||||||
res.response = json.dumps(final_response)
|
res.response = json.dumps(final_response)
|
||||||
# 如果driver自己就有assign的方法,那就使用driver自己的assign方法
|
# 如果driver自己就有assign的方法,那就使用driver自己的assign方法
|
||||||
@@ -466,7 +465,7 @@ class BaseROS2DeviceNode(Node, Generic[T]):
|
|||||||
return res
|
return res
|
||||||
try:
|
try:
|
||||||
if len(rts.root_nodes) == 1 and parent_resource is not None:
|
if len(rts.root_nodes) == 1 and parent_resource is not None:
|
||||||
plr_instance = plr_instances[0]
|
plr_instance = rts.to_plr_resources()[0]
|
||||||
if isinstance(plr_instance, Plate):
|
if isinstance(plr_instance, Plate):
|
||||||
empty_liquid_info_in: List[Tuple[Optional[str], float]] = [(None, 0)] * plr_instance.num_items
|
empty_liquid_info_in: List[Tuple[Optional[str], float]] = [(None, 0)] * plr_instance.num_items
|
||||||
if len(ADD_LIQUID_TYPE) == 1 and len(LIQUID_VOLUME) == 1 and len(LIQUID_INPUT_SLOT) > 1:
|
if len(ADD_LIQUID_TYPE) == 1 and len(LIQUID_VOLUME) == 1 and len(LIQUID_INPUT_SLOT) > 1:
|
||||||
@@ -491,7 +490,7 @@ class BaseROS2DeviceNode(Node, Generic[T]):
|
|||||||
input_wells = []
|
input_wells = []
|
||||||
for r in LIQUID_INPUT_SLOT:
|
for r in LIQUID_INPUT_SLOT:
|
||||||
input_wells.append(plr_instance.children[r])
|
input_wells.append(plr_instance.children[r])
|
||||||
final_response["liquid_input_resource_tree"] = ResourceTreeSet.from_plr_resources(input_wells).dump()
|
final_response["liquid_input_resources"] = ResourceTreeSet.from_plr_resources(input_wells).dump()
|
||||||
res.response = json.dumps(final_response)
|
res.response = json.dumps(final_response)
|
||||||
if issubclass(parent_resource.__class__, Deck) and hasattr(parent_resource, "assign_child_at_slot") and "slot" in other_calling_param:
|
if issubclass(parent_resource.__class__, Deck) and hasattr(parent_resource, "assign_child_at_slot") and "slot" in other_calling_param:
|
||||||
other_calling_param["slot"] = int(other_calling_param["slot"])
|
other_calling_param["slot"] = int(other_calling_param["slot"])
|
||||||
@@ -659,71 +658,61 @@ class BaseROS2DeviceNode(Node, Generic[T]):
|
|||||||
|
|
||||||
def transfer_to_new_resource(
|
def transfer_to_new_resource(
|
||||||
self, plr_resource: "ResourcePLR", tree: ResourceTreeInstance, additional_add_params: Dict[str, Any]
|
self, plr_resource: "ResourcePLR", tree: ResourceTreeInstance, additional_add_params: Dict[str, Any]
|
||||||
) -> Optional["ResourcePLR"]:
|
):
|
||||||
parent_uuid = tree.root_node.res_content.parent_uuid
|
parent_uuid = tree.root_node.res_content.parent_uuid
|
||||||
if not parent_uuid:
|
if parent_uuid:
|
||||||
self.lab_logger().warning(
|
parent_resource: ResourcePLR = self.resource_tracker.uuid_to_resources.get(parent_uuid)
|
||||||
f"物料{plr_resource} parent未知,挂载到当前节点下,额外参数:{additional_add_params}"
|
if parent_resource is None:
|
||||||
)
|
|
||||||
return None
|
|
||||||
if parent_uuid == self.uuid:
|
|
||||||
self.lab_logger().warning(
|
|
||||||
f"物料{plr_resource}请求挂载到{self.identifier},额外参数:{additional_add_params}"
|
|
||||||
)
|
|
||||||
return None
|
|
||||||
parent_resource: ResourcePLR = self.resource_tracker.uuid_to_resources.get(parent_uuid)
|
|
||||||
if parent_resource is None:
|
|
||||||
self.lab_logger().warning(
|
|
||||||
f"物料{plr_resource}请求挂载{tree.root_node.res_content.name}的父节点{parent_uuid}不存在"
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
try:
|
|
||||||
# 特殊兼容所有plr的物料的assign方法,和create_resource append_resource后期同步
|
|
||||||
additional_params = {}
|
|
||||||
extra = getattr(plr_resource, "unilabos_extra", {})
|
|
||||||
if len(extra):
|
|
||||||
self.lab_logger().info(f"发现物料{plr_resource}额外参数: " + str(extra))
|
|
||||||
if "update_resource_site" in extra:
|
|
||||||
additional_add_params["site"] = extra["update_resource_site"]
|
|
||||||
site = additional_add_params.get("site", None)
|
|
||||||
spec = inspect.signature(parent_resource.assign_child_resource)
|
|
||||||
if "spot" in spec.parameters:
|
|
||||||
ordering_dict: Dict[str, Any] = getattr(parent_resource, "_ordering")
|
|
||||||
if ordering_dict:
|
|
||||||
site = list(ordering_dict.keys()).index(site)
|
|
||||||
additional_params["spot"] = site
|
|
||||||
old_parent = plr_resource.parent
|
|
||||||
if old_parent is not None:
|
|
||||||
# plr并不支持同一个deck的加载和卸载
|
|
||||||
self.lab_logger().warning(f"物料{plr_resource}请求从{old_parent}卸载")
|
|
||||||
old_parent.unassign_child_resource(plr_resource)
|
|
||||||
self.lab_logger().warning(
|
self.lab_logger().warning(
|
||||||
f"物料{plr_resource}请求挂载到{parent_resource},额外参数:{additional_params}"
|
f"物料{plr_resource}请求挂载{tree.root_node.res_content.name}的父节点{parent_uuid}不存在"
|
||||||
)
|
)
|
||||||
|
else:
|
||||||
|
try:
|
||||||
|
# 特殊兼容所有plr的物料的assign方法,和create_resource append_resource后期同步
|
||||||
|
additional_params = {}
|
||||||
|
extra = getattr(plr_resource, "unilabos_extra", {})
|
||||||
|
if len(extra):
|
||||||
|
self.lab_logger().info(f"发现物料{plr_resource}额外参数: " + str(extra))
|
||||||
|
if "update_resource_site" in extra:
|
||||||
|
additional_add_params["site"] = extra["update_resource_site"]
|
||||||
|
site = additional_add_params.get("site", None)
|
||||||
|
spec = inspect.signature(parent_resource.assign_child_resource)
|
||||||
|
if "spot" in spec.parameters:
|
||||||
|
ordering_dict: Dict[str, Any] = getattr(parent_resource, "_ordering")
|
||||||
|
if ordering_dict:
|
||||||
|
site = list(ordering_dict.keys()).index(site)
|
||||||
|
additional_params["spot"] = site
|
||||||
|
old_parent = plr_resource.parent
|
||||||
|
if old_parent is not None:
|
||||||
|
# plr并不支持同一个deck的加载和卸载
|
||||||
|
self.lab_logger().warning(f"物料{plr_resource}请求从{old_parent}卸载")
|
||||||
|
old_parent.unassign_child_resource(plr_resource)
|
||||||
|
self.lab_logger().warning(
|
||||||
|
f"物料{plr_resource}请求挂载到{parent_resource},额外参数:{additional_params}"
|
||||||
|
)
|
||||||
|
|
||||||
# ⭐ assign 之前,需要从 resources 列表中移除
|
# ⭐ assign 之前,需要从 resources 列表中移除
|
||||||
# 因为资源将不再是顶级资源,而是成为 parent_resource 的子资源
|
# 因为资源将不再是顶级资源,而是成为 parent_resource 的子资源
|
||||||
# 如果不移除,figure_resource 会找到两次:一次在 resources,一次在 parent 的 children
|
# 如果不移除,figure_resource 会找到两次:一次在 resources,一次在 parent 的 children
|
||||||
resource_id = id(plr_resource)
|
resource_id = id(plr_resource)
|
||||||
for i, r in enumerate(self.resource_tracker.resources):
|
for i, r in enumerate(self.resource_tracker.resources):
|
||||||
if id(r) == resource_id:
|
if id(r) == resource_id:
|
||||||
self.resource_tracker.resources.pop(i)
|
self.resource_tracker.resources.pop(i)
|
||||||
self.lab_logger().debug(
|
self.lab_logger().debug(
|
||||||
f"从顶级资源列表中移除 {plr_resource.name}(即将成为 {parent_resource.name} 的子资源)"
|
f"从顶级资源列表中移除 {plr_resource.name}(即将成为 {parent_resource.name} 的子资源)"
|
||||||
)
|
)
|
||||||
break
|
break
|
||||||
|
|
||||||
parent_resource.assign_child_resource(plr_resource, location=None, **additional_params)
|
parent_resource.assign_child_resource(plr_resource, location=None, **additional_params)
|
||||||
|
|
||||||
func = getattr(self.driver_instance, "resource_tree_transfer", None)
|
func = getattr(self.driver_instance, "resource_tree_transfer", None)
|
||||||
if callable(func):
|
if callable(func):
|
||||||
# 分别是 物料的原来父节点,当前物料的状态,物料的新父节点(此时物料已经重新assign了)
|
# 分别是 物料的原来父节点,当前物料的状态,物料的新父节点(此时物料已经重新assign了)
|
||||||
func(old_parent, plr_resource, parent_resource)
|
func(old_parent, plr_resource, parent_resource)
|
||||||
return parent_resource
|
except Exception as e:
|
||||||
except Exception as e:
|
self.lab_logger().warning(
|
||||||
self.lab_logger().warning(
|
f"物料{plr_resource}请求挂载{tree.root_node.res_content.name}的父节点{parent_resource}[{parent_uuid}]失败!\n{traceback.format_exc()}"
|
||||||
f"物料{plr_resource}请求挂载{tree.root_node.res_content.name}的父节点{parent_resource}[{parent_uuid}]失败!\n{traceback.format_exc()}"
|
)
|
||||||
)
|
|
||||||
|
|
||||||
async def s2c_resource_tree(self, req: SerialCommand_Request, res: SerialCommand_Response):
|
async def s2c_resource_tree(self, req: SerialCommand_Request, res: SerialCommand_Response):
|
||||||
"""
|
"""
|
||||||
@@ -738,7 +727,7 @@ class BaseROS2DeviceNode(Node, Generic[T]):
|
|||||||
|
|
||||||
def _handle_add(
|
def _handle_add(
|
||||||
plr_resources: List[ResourcePLR], tree_set: ResourceTreeSet, additional_add_params: Dict[str, Any]
|
plr_resources: List[ResourcePLR], tree_set: ResourceTreeSet, additional_add_params: Dict[str, Any]
|
||||||
) -> Tuple[Dict[str, Any], List[ResourcePLR]]:
|
) -> Dict[str, Any]:
|
||||||
"""
|
"""
|
||||||
处理资源添加操作的内部函数
|
处理资源添加操作的内部函数
|
||||||
|
|
||||||
@@ -750,20 +739,15 @@ class BaseROS2DeviceNode(Node, Generic[T]):
|
|||||||
Returns:
|
Returns:
|
||||||
操作结果字典
|
操作结果字典
|
||||||
"""
|
"""
|
||||||
parents = [] # 放的是被变更的物料 / 被变更的物料父级
|
|
||||||
for plr_resource, tree in zip(plr_resources, tree_set.trees):
|
for plr_resource, tree in zip(plr_resources, tree_set.trees):
|
||||||
self.resource_tracker.add_resource(plr_resource)
|
self.resource_tracker.add_resource(plr_resource)
|
||||||
parent = self.transfer_to_new_resource(plr_resource, tree, additional_add_params)
|
self.transfer_to_new_resource(plr_resource, tree, additional_add_params)
|
||||||
if parent is not None:
|
|
||||||
parents.append(parent)
|
|
||||||
else:
|
|
||||||
parents.append(plr_resource)
|
|
||||||
|
|
||||||
func = getattr(self.driver_instance, "resource_tree_add", None)
|
func = getattr(self.driver_instance, "resource_tree_add", None)
|
||||||
if callable(func):
|
if callable(func):
|
||||||
func(plr_resources)
|
func(plr_resources)
|
||||||
|
|
||||||
return {"success": True, "action": "add"}, parents
|
return {"success": True, "action": "add"}
|
||||||
|
|
||||||
def _handle_remove(resources_uuid: List[str]) -> Dict[str, Any]:
|
def _handle_remove(resources_uuid: List[str]) -> Dict[str, Any]:
|
||||||
"""
|
"""
|
||||||
@@ -798,11 +782,11 @@ class BaseROS2DeviceNode(Node, Generic[T]):
|
|||||||
if plr_resource.parent is not None:
|
if plr_resource.parent is not None:
|
||||||
plr_resource.parent.unassign_child_resource(plr_resource)
|
plr_resource.parent.unassign_child_resource(plr_resource)
|
||||||
self.resource_tracker.remove_resource(plr_resource)
|
self.resource_tracker.remove_resource(plr_resource)
|
||||||
self.lab_logger().info(f"[资源同步] 移除物料 {plr_resource} 及其子节点")
|
self.lab_logger().info(f"移除物料 {plr_resource} 及其子节点")
|
||||||
|
|
||||||
for other_plr_resource in other_plr_resources:
|
for other_plr_resource in other_plr_resources:
|
||||||
self.resource_tracker.remove_resource(other_plr_resource)
|
self.resource_tracker.remove_resource(other_plr_resource)
|
||||||
self.lab_logger().info(f"[资源同步] 移除物料 {other_plr_resource} 及其子节点")
|
self.lab_logger().info(f"移除物料 {other_plr_resource} 及其子节点")
|
||||||
|
|
||||||
return {
|
return {
|
||||||
"success": True,
|
"success": True,
|
||||||
@@ -813,7 +797,7 @@ class BaseROS2DeviceNode(Node, Generic[T]):
|
|||||||
|
|
||||||
def _handle_update(
|
def _handle_update(
|
||||||
plr_resources: List[Union[ResourcePLR, ResourceDictInstance]], tree_set: ResourceTreeSet, additional_add_params: Dict[str, Any]
|
plr_resources: List[Union[ResourcePLR, ResourceDictInstance]], tree_set: ResourceTreeSet, additional_add_params: Dict[str, Any]
|
||||||
) -> Tuple[Dict[str, Any], List[ResourcePLR]]:
|
) -> Dict[str, Any]:
|
||||||
"""
|
"""
|
||||||
处理资源更新操作的内部函数
|
处理资源更新操作的内部函数
|
||||||
|
|
||||||
@@ -825,7 +809,6 @@ class BaseROS2DeviceNode(Node, Generic[T]):
|
|||||||
Returns:
|
Returns:
|
||||||
操作结果字典
|
操作结果字典
|
||||||
"""
|
"""
|
||||||
original_instances = []
|
|
||||||
for plr_resource, tree in zip(plr_resources, tree_set.trees):
|
for plr_resource, tree in zip(plr_resources, tree_set.trees):
|
||||||
if isinstance(plr_resource, ResourceDictInstance):
|
if isinstance(plr_resource, ResourceDictInstance):
|
||||||
self._lab_logger.info(f"跳过 非资源{plr_resource.res_content.name} 的更新")
|
self._lab_logger.info(f"跳过 非资源{plr_resource.res_content.name} 的更新")
|
||||||
@@ -834,16 +817,11 @@ class BaseROS2DeviceNode(Node, Generic[T]):
|
|||||||
original_instance: ResourcePLR = self.resource_tracker.figure_resource(
|
original_instance: ResourcePLR = self.resource_tracker.figure_resource(
|
||||||
{"uuid": tree.root_node.res_content.uuid}, try_mode=False
|
{"uuid": tree.root_node.res_content.uuid}, try_mode=False
|
||||||
)
|
)
|
||||||
original_parent_resource = original_instance.parent
|
|
||||||
original_parent_resource_uuid = getattr(original_parent_resource, "unilabos_uuid", None)
|
|
||||||
target_parent_resource_uuid = tree.root_node.res_content.uuid_parent
|
|
||||||
not_same_parent = original_parent_resource_uuid != target_parent_resource_uuid and original_parent_resource is not None
|
|
||||||
old_name = original_instance.name
|
|
||||||
new_name = plr_resource.name
|
|
||||||
parent_appended = False
|
|
||||||
|
|
||||||
# Update操作中包含改名:需要先remove再add,这里更新父节点即可
|
# Update操作中包含改名:需要先remove再add
|
||||||
if not not_same_parent and old_name != new_name:
|
if original_instance.name != plr_resource.name:
|
||||||
|
old_name = original_instance.name
|
||||||
|
new_name = plr_resource.name
|
||||||
self.lab_logger().info(f"物料改名操作:{old_name} -> {new_name}")
|
self.lab_logger().info(f"物料改名操作:{old_name} -> {new_name}")
|
||||||
|
|
||||||
# 收集所有相关的uuid(包括子节点)
|
# 收集所有相关的uuid(包括子节点)
|
||||||
@@ -852,10 +830,12 @@ class BaseROS2DeviceNode(Node, Generic[T]):
|
|||||||
_handle_add([original_instance], tree_set, additional_add_params)
|
_handle_add([original_instance], tree_set, additional_add_params)
|
||||||
|
|
||||||
self.lab_logger().info(f"物料改名完成:{old_name} -> {new_name}")
|
self.lab_logger().info(f"物料改名完成:{old_name} -> {new_name}")
|
||||||
original_instances.append(original_parent_resource)
|
|
||||||
parent_appended = True
|
|
||||||
|
|
||||||
# 常规更新:不涉及改名
|
# 常规更新:不涉及改名
|
||||||
|
original_parent_resource = original_instance.parent
|
||||||
|
original_parent_resource_uuid = getattr(original_parent_resource, "unilabos_uuid", None)
|
||||||
|
target_parent_resource_uuid = tree.root_node.res_content.uuid_parent
|
||||||
|
|
||||||
self.lab_logger().info(
|
self.lab_logger().info(
|
||||||
f"物料{original_instance} 原始父节点{original_parent_resource_uuid} "
|
f"物料{original_instance} 原始父节点{original_parent_resource_uuid} "
|
||||||
f"目标父节点{target_parent_resource_uuid} 更新"
|
f"目标父节点{target_parent_resource_uuid} 更新"
|
||||||
@@ -866,23 +846,11 @@ class BaseROS2DeviceNode(Node, Generic[T]):
|
|||||||
original_instance.unilabos_extra = getattr(plr_resource, "unilabos_extra") # type: ignore # noqa: E501
|
original_instance.unilabos_extra = getattr(plr_resource, "unilabos_extra") # type: ignore # noqa: E501
|
||||||
|
|
||||||
# 如果父节点变化,需要重新挂载
|
# 如果父节点变化,需要重新挂载
|
||||||
if not_same_parent:
|
if (
|
||||||
parent = self.transfer_to_new_resource(original_instance, tree, additional_add_params)
|
original_parent_resource_uuid != target_parent_resource_uuid
|
||||||
original_instances.append(parent)
|
and original_parent_resource is not None
|
||||||
parent_appended = True
|
):
|
||||||
else:
|
self.transfer_to_new_resource(original_instance, tree, additional_add_params)
|
||||||
# 判断是否变更了resource_site,重新登记
|
|
||||||
target_site = original_instance.unilabos_extra.get("update_resource_site")
|
|
||||||
sites = original_instance.parent.sites if original_instance.parent is not None and hasattr(original_instance.parent, "sites") else None
|
|
||||||
site_names = list(original_instance.parent._ordering.keys()) if original_instance.parent is not None and hasattr(original_instance.parent, "sites") else []
|
|
||||||
if target_site is not None and sites is not None and site_names is not None:
|
|
||||||
site_index = sites.index(original_instance)
|
|
||||||
site_name = site_names[site_index]
|
|
||||||
if site_name != target_site:
|
|
||||||
parent = self.transfer_to_new_resource(original_instance, tree, additional_add_params)
|
|
||||||
if parent is not None:
|
|
||||||
original_instances.append(parent)
|
|
||||||
parent_appended = True
|
|
||||||
|
|
||||||
# 加载状态
|
# 加载状态
|
||||||
original_instance.load_all_state(states)
|
original_instance.load_all_state(states)
|
||||||
@@ -890,15 +858,13 @@ class BaseROS2DeviceNode(Node, Generic[T]):
|
|||||||
self.lab_logger().info(
|
self.lab_logger().info(
|
||||||
f"更新了资源属性 {plr_resource}[{tree.root_node.res_content.uuid}] " f"及其子节点 {child_count} 个"
|
f"更新了资源属性 {plr_resource}[{tree.root_node.res_content.uuid}] " f"及其子节点 {child_count} 个"
|
||||||
)
|
)
|
||||||
if not parent_appended:
|
|
||||||
original_instances.append(original_instance)
|
|
||||||
|
|
||||||
# 调用driver的update回调
|
# 调用driver的update回调
|
||||||
func = getattr(self.driver_instance, "resource_tree_update", None)
|
func = getattr(self.driver_instance, "resource_tree_update", None)
|
||||||
if callable(func):
|
if callable(func):
|
||||||
func(original_instances)
|
func(plr_resources)
|
||||||
|
|
||||||
return {"success": True, "action": "update"}, original_instances
|
return {"success": True, "action": "update"}
|
||||||
|
|
||||||
try:
|
try:
|
||||||
data = json.loads(req.command)
|
data = json.loads(req.command)
|
||||||
@@ -908,8 +874,8 @@ class BaseROS2DeviceNode(Node, Generic[T]):
|
|||||||
action = i.get("action") # remove, add, update
|
action = i.get("action") # remove, add, update
|
||||||
resources_uuid: List[str] = i.get("data") # 资源数据
|
resources_uuid: List[str] = i.get("data") # 资源数据
|
||||||
additional_add_params = i.get("additional_add_params", {}) # 额外参数
|
additional_add_params = i.get("additional_add_params", {}) # 额外参数
|
||||||
self.lab_logger().trace(
|
self.lab_logger().info(
|
||||||
f"[资源同步] 处理 {action}, " f"resources count: {len(resources_uuid)}"
|
f"[Resource Tree Update] Processing {action} operation, " f"resources count: {len(resources_uuid)}"
|
||||||
)
|
)
|
||||||
tree_set = None
|
tree_set = None
|
||||||
if action in ["add", "update"]:
|
if action in ["add", "update"]:
|
||||||
@@ -921,26 +887,7 @@ class BaseROS2DeviceNode(Node, Generic[T]):
|
|||||||
if tree_set is None:
|
if tree_set is None:
|
||||||
raise ValueError("tree_set不能为None")
|
raise ValueError("tree_set不能为None")
|
||||||
plr_resources = tree_set.to_plr_resources()
|
plr_resources = tree_set.to_plr_resources()
|
||||||
result, parents = _handle_add(plr_resources, tree_set, additional_add_params)
|
result = _handle_add(plr_resources, tree_set, additional_add_params)
|
||||||
parents: List[Optional["ResourcePLR"]] = [i for i in parents if i is not None]
|
|
||||||
# de_dupe_parents = list(set(parents))
|
|
||||||
# Fix unhashable type error for WareHouse
|
|
||||||
de_dupe_parents = []
|
|
||||||
_seen_ids = set()
|
|
||||||
for p in parents:
|
|
||||||
if id(p) not in _seen_ids:
|
|
||||||
_seen_ids.add(id(p))
|
|
||||||
de_dupe_parents.append(p)
|
|
||||||
new_tree_set = ResourceTreeSet.from_plr_resources(de_dupe_parents) # 去重
|
|
||||||
for tree in new_tree_set.trees:
|
|
||||||
if tree.root_node.res_content.uuid_parent is None and self.node_name != "host_node":
|
|
||||||
tree.root_node.res_content.parent_uuid = self.uuid
|
|
||||||
r = SerialCommand.Request()
|
|
||||||
r.command = json.dumps(
|
|
||||||
{"data": {"data": new_tree_set.dump()}, "action": "update"}) # 和Update Resource一致
|
|
||||||
response: SerialCommand_Response = await self._resource_clients[
|
|
||||||
"c2s_update_resource_tree"].call_async(r) # type: ignore
|
|
||||||
self.lab_logger().info(f"确认资源云端 Add 结果: {response.response}")
|
|
||||||
results.append(result)
|
results.append(result)
|
||||||
elif action == "update":
|
elif action == "update":
|
||||||
if tree_set is None:
|
if tree_set is None:
|
||||||
@@ -951,18 +898,7 @@ class BaseROS2DeviceNode(Node, Generic[T]):
|
|||||||
plr_resources.append(tree.root_node)
|
plr_resources.append(tree.root_node)
|
||||||
else:
|
else:
|
||||||
plr_resources.append(ResourceTreeSet([tree]).to_plr_resources()[0])
|
plr_resources.append(ResourceTreeSet([tree]).to_plr_resources()[0])
|
||||||
result, original_instances = _handle_update(plr_resources, tree_set, additional_add_params)
|
result = _handle_update(plr_resources, tree_set, additional_add_params)
|
||||||
if not BasicConfig.no_update_feedback:
|
|
||||||
new_tree_set = ResourceTreeSet.from_plr_resources(original_instances) # 去重
|
|
||||||
for tree in new_tree_set.trees:
|
|
||||||
if tree.root_node.res_content.uuid_parent is None and self.node_name != "host_node":
|
|
||||||
tree.root_node.res_content.parent_uuid = self.uuid
|
|
||||||
r = SerialCommand.Request()
|
|
||||||
r.command = json.dumps(
|
|
||||||
{"data": {"data": new_tree_set.dump()}, "action": "update"}) # 和Update Resource一致
|
|
||||||
response: SerialCommand_Response = await self._resource_clients[
|
|
||||||
"c2s_update_resource_tree"].call_async(r) # type: ignore
|
|
||||||
self.lab_logger().info(f"确认资源云端 Update 结果: {response.response}")
|
|
||||||
results.append(result)
|
results.append(result)
|
||||||
elif action == "remove":
|
elif action == "remove":
|
||||||
result = _handle_remove(resources_uuid)
|
result = _handle_remove(resources_uuid)
|
||||||
@@ -976,15 +912,15 @@ class BaseROS2DeviceNode(Node, Generic[T]):
|
|||||||
# 返回处理结果
|
# 返回处理结果
|
||||||
result_json = {"results": results, "total": len(data)}
|
result_json = {"results": results, "total": len(data)}
|
||||||
res.response = json.dumps(result_json, ensure_ascii=False, cls=TypeEncoder)
|
res.response = json.dumps(result_json, ensure_ascii=False, cls=TypeEncoder)
|
||||||
# self.lab_logger().info(f"[Resource Tree Update] Completed processing {len(data)} operations")
|
self.lab_logger().info(f"[Resource Tree Update] Completed processing {len(data)} operations")
|
||||||
|
|
||||||
except json.JSONDecodeError as e:
|
except json.JSONDecodeError as e:
|
||||||
error_msg = f"Invalid JSON format: {str(e)}"
|
error_msg = f"Invalid JSON format: {str(e)}"
|
||||||
self.lab_logger().error(f"[资源同步] {error_msg}")
|
self.lab_logger().error(f"[Resource Tree Update] {error_msg}")
|
||||||
res.response = json.dumps({"success": False, "error": error_msg}, ensure_ascii=False)
|
res.response = json.dumps({"success": False, "error": error_msg}, ensure_ascii=False)
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
error_msg = f"Unexpected error: {str(e)}"
|
error_msg = f"Unexpected error: {str(e)}"
|
||||||
self.lab_logger().error(f"[资源同步] {error_msg}")
|
self.lab_logger().error(f"[Resource Tree Update] {error_msg}")
|
||||||
self.lab_logger().error(traceback.format_exc())
|
self.lab_logger().error(traceback.format_exc())
|
||||||
res.response = json.dumps({"success": False, "error": error_msg}, ensure_ascii=False)
|
res.response = json.dumps({"success": False, "error": error_msg}, ensure_ascii=False)
|
||||||
|
|
||||||
@@ -1305,8 +1241,7 @@ class BaseROS2DeviceNode(Node, Generic[T]):
|
|||||||
ACTION, action_paramtypes = self.get_real_function(self.driver_instance, action_name)
|
ACTION, action_paramtypes = self.get_real_function(self.driver_instance, action_name)
|
||||||
|
|
||||||
action_kwargs = convert_from_ros_msg_with_mapping(goal, action_value_mapping["goal"])
|
action_kwargs = convert_from_ros_msg_with_mapping(goal, action_value_mapping["goal"])
|
||||||
self.lab_logger().debug(f"任务 {ACTION.__name__} 接收到原始目标: {str(action_kwargs)[:1000]}")
|
self.lab_logger().debug(f"任务 {ACTION.__name__} 接收到原始目标: {action_kwargs}")
|
||||||
self.lab_logger().trace(f"任务 {ACTION.__name__} 接收到原始目标: {action_kwargs}")
|
|
||||||
error_skip = False
|
error_skip = False
|
||||||
# 向Host查询物料当前状态,如果是host本身的增加物料的请求,则直接跳过
|
# 向Host查询物料当前状态,如果是host本身的增加物料的请求,则直接跳过
|
||||||
if action_name not in ["create_resource_detailed", "create_resource"]:
|
if action_name not in ["create_resource_detailed", "create_resource"]:
|
||||||
@@ -1320,32 +1255,14 @@ class BaseROS2DeviceNode(Node, Generic[T]):
|
|||||||
resource_inputs = action_kwargs[k] if is_sequence else [action_kwargs[k]]
|
resource_inputs = action_kwargs[k] if is_sequence else [action_kwargs[k]]
|
||||||
|
|
||||||
# 批量查询资源
|
# 批量查询资源
|
||||||
queried_resources: list = [None] * len(resource_inputs)
|
queried_resources = []
|
||||||
uuid_indices: list[tuple[int, str, dict]] = [] # (index, uuid, resource_data)
|
for resource_data in resource_inputs:
|
||||||
|
plr_resource = await self.get_resource_with_dir(
|
||||||
# 第一遍:处理没有uuid的资源,收集有uuid的资源信息
|
resource_id=resource_data["id"], with_children=True
|
||||||
for idx, resource_data in enumerate(resource_inputs):
|
)
|
||||||
unilabos_uuid = resource_data.get("data", {}).get("unilabos_uuid")
|
if "sample_id" in resource_data:
|
||||||
if unilabos_uuid is None:
|
plr_resource.unilabos_extra["sample_uuid"] = resource_data["sample_id"]
|
||||||
plr_resource = await self.get_resource_with_dir(
|
queried_resources.append(plr_resource)
|
||||||
resource_id=resource_data["id"], with_children=True
|
|
||||||
)
|
|
||||||
if "sample_id" in resource_data:
|
|
||||||
plr_resource.unilabos_extra["sample_uuid"] = resource_data["sample_id"]
|
|
||||||
queried_resources[idx] = plr_resource
|
|
||||||
else:
|
|
||||||
uuid_indices.append((idx, unilabos_uuid, resource_data))
|
|
||||||
|
|
||||||
# 第二遍:批量查询有uuid的资源
|
|
||||||
if uuid_indices:
|
|
||||||
uuids = [item[1] for item in uuid_indices]
|
|
||||||
resource_tree = await self.get_resource(uuids)
|
|
||||||
plr_resources = resource_tree.to_plr_resources()
|
|
||||||
for i, (idx, _, resource_data) in enumerate(uuid_indices):
|
|
||||||
plr_resource = plr_resources[i]
|
|
||||||
if "sample_id" in resource_data:
|
|
||||||
plr_resource.unilabos_extra["sample_uuid"] = resource_data["sample_id"]
|
|
||||||
queried_resources[idx] = plr_resource
|
|
||||||
|
|
||||||
self.lab_logger().debug(f"资源查询结果: 共 {len(queried_resources)} 个资源")
|
self.lab_logger().debug(f"资源查询结果: 共 {len(queried_resources)} 个资源")
|
||||||
|
|
||||||
@@ -1391,8 +1308,9 @@ class BaseROS2DeviceNode(Node, Generic[T]):
|
|||||||
execution_success = True
|
execution_success = True
|
||||||
except Exception as _:
|
except Exception as _:
|
||||||
execution_error = traceback.format_exc()
|
execution_error = traceback.format_exc()
|
||||||
error(f"异步任务 {ACTION.__name__} 报错了\n{traceback.format_exc()}\n原始输入:{str(action_kwargs)[:1000]}")
|
error(
|
||||||
trace(f"异步任务 {ACTION.__name__} 报错了\n{traceback.format_exc()}\n原始输入:{action_kwargs}")
|
f"异步任务 {ACTION.__name__} 报错了\n{traceback.format_exc()}\n原始输入:{action_kwargs}"
|
||||||
|
)
|
||||||
|
|
||||||
future = ROS2DeviceNode.run_async_func(ACTION, trace_error=False, **action_kwargs)
|
future = ROS2DeviceNode.run_async_func(ACTION, trace_error=False, **action_kwargs)
|
||||||
future.add_done_callback(_handle_future_exception)
|
future.add_done_callback(_handle_future_exception)
|
||||||
@@ -1412,9 +1330,8 @@ class BaseROS2DeviceNode(Node, Generic[T]):
|
|||||||
except Exception as _:
|
except Exception as _:
|
||||||
execution_error = traceback.format_exc()
|
execution_error = traceback.format_exc()
|
||||||
error(
|
error(
|
||||||
f"同步任务 {ACTION.__name__} 报错了\n{traceback.format_exc()}\n原始输入:{str(action_kwargs)[:1000]}")
|
f"同步任务 {ACTION.__name__} 报错了\n{traceback.format_exc()}\n原始输入:{action_kwargs}"
|
||||||
trace(
|
)
|
||||||
f"同步任务 {ACTION.__name__} 报错了\n{traceback.format_exc()}\n原始输入:{action_kwargs}")
|
|
||||||
|
|
||||||
future.add_done_callback(_handle_future_exception)
|
future.add_done_callback(_handle_future_exception)
|
||||||
|
|
||||||
@@ -1481,10 +1398,8 @@ class BaseROS2DeviceNode(Node, Generic[T]):
|
|||||||
if isinstance(rs, list):
|
if isinstance(rs, list):
|
||||||
for r in rs:
|
for r in rs:
|
||||||
res = self.resource_tracker.parent_resource(r) # 获取 resource 对象
|
res = self.resource_tracker.parent_resource(r) # 获取 resource 对象
|
||||||
elif type(rs).__name__ == "ResourceHolder":
|
|
||||||
pass
|
|
||||||
else:
|
else:
|
||||||
res = self.resource_tracker.parent_resource(rs)
|
res = self.resource_tracker.parent_resource(r)
|
||||||
if id(res) not in seen:
|
if id(res) not in seen:
|
||||||
seen.add(id(res))
|
seen.add(id(res))
|
||||||
unique_resources.append(res)
|
unique_resources.append(res)
|
||||||
@@ -1560,7 +1475,8 @@ class BaseROS2DeviceNode(Node, Generic[T]):
|
|||||||
resource_data = function_args[arg_name]
|
resource_data = function_args[arg_name]
|
||||||
if isinstance(resource_data, dict) and "id" in resource_data:
|
if isinstance(resource_data, dict) and "id" in resource_data:
|
||||||
try:
|
try:
|
||||||
function_args[arg_name] = self._convert_resources_sync(resource_data["uuid"])[0]
|
converted_resource = self._convert_resource_sync(resource_data)
|
||||||
|
function_args[arg_name] = converted_resource
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
self.lab_logger().error(
|
self.lab_logger().error(
|
||||||
f"转换ResourceSlot参数 {arg_name} 失败: {e}\n{traceback.format_exc()}"
|
f"转换ResourceSlot参数 {arg_name} 失败: {e}\n{traceback.format_exc()}"
|
||||||
@@ -1574,8 +1490,12 @@ class BaseROS2DeviceNode(Node, Generic[T]):
|
|||||||
resource_list = function_args[arg_name]
|
resource_list = function_args[arg_name]
|
||||||
if isinstance(resource_list, list):
|
if isinstance(resource_list, list):
|
||||||
try:
|
try:
|
||||||
uuids = [r["uuid"] for r in resource_list if isinstance(r, dict) and "id" in r]
|
converted_resources = []
|
||||||
function_args[arg_name] = self._convert_resources_sync(*uuids) if uuids else []
|
for resource_data in resource_list:
|
||||||
|
if isinstance(resource_data, dict) and "id" in resource_data:
|
||||||
|
converted_resource = self._convert_resource_sync(resource_data)
|
||||||
|
converted_resources.append(converted_resource)
|
||||||
|
function_args[arg_name] = converted_resources
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
self.lab_logger().error(
|
self.lab_logger().error(
|
||||||
f"转换ResourceSlot列表参数 {arg_name} 失败: {e}\n{traceback.format_exc()}"
|
f"转换ResourceSlot列表参数 {arg_name} 失败: {e}\n{traceback.format_exc()}"
|
||||||
@@ -1588,27 +1508,20 @@ class BaseROS2DeviceNode(Node, Generic[T]):
|
|||||||
f"执行动作时JSON缺少function_name或function_args: {ex}\n原JSON: {string}\n{traceback.format_exc()}"
|
f"执行动作时JSON缺少function_name或function_args: {ex}\n原JSON: {string}\n{traceback.format_exc()}"
|
||||||
)
|
)
|
||||||
|
|
||||||
def _convert_resources_sync(self, *uuids: str) -> List["ResourcePLR"]:
|
def _convert_resource_sync(self, resource_data: Dict[str, Any]):
|
||||||
"""同步转换资源 UUID 为实例
|
"""同步转换资源数据为实例"""
|
||||||
|
# 创建资源查询请求
|
||||||
|
r = SerialCommand.Request()
|
||||||
|
r.command = json.dumps(
|
||||||
|
{
|
||||||
|
"id": resource_data.get("id", None),
|
||||||
|
"uuid": resource_data.get("uuid", None),
|
||||||
|
"with_children": True,
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
Args:
|
# 同步调用资源查询服务
|
||||||
*uuids: 一个或多个资源 UUID
|
future = self._resource_clients["resource_get"].call_async(r)
|
||||||
|
|
||||||
Returns:
|
|
||||||
单个 UUID 时返回单个资源实例,多个 UUID 时返回资源实例列表
|
|
||||||
"""
|
|
||||||
if not uuids:
|
|
||||||
raise ValueError("至少需要提供一个 UUID")
|
|
||||||
|
|
||||||
uuids_list = list(uuids)
|
|
||||||
future = self._resource_clients["c2s_update_resource_tree"].call_async(SerialCommand.Request(
|
|
||||||
command=json.dumps(
|
|
||||||
{
|
|
||||||
"data": {"data": uuids_list, "with_children": True},
|
|
||||||
"action": "get",
|
|
||||||
}
|
|
||||||
)
|
|
||||||
))
|
|
||||||
|
|
||||||
# 等待结果(使用while循环,每次sleep 0.05秒,最多等待30秒)
|
# 等待结果(使用while循环,每次sleep 0.05秒,最多等待30秒)
|
||||||
timeout = 30.0
|
timeout = 30.0
|
||||||
@@ -1618,40 +1531,27 @@ class BaseROS2DeviceNode(Node, Generic[T]):
|
|||||||
elapsed += 0.05
|
elapsed += 0.05
|
||||||
|
|
||||||
if not future.done():
|
if not future.done():
|
||||||
raise Exception(f"资源查询超时: {uuids_list}")
|
raise Exception(f"资源查询超时: {resource_data}")
|
||||||
|
|
||||||
response = future.result()
|
response = future.result()
|
||||||
if response is None:
|
if response is None:
|
||||||
raise Exception(f"资源查询返回空结果: {uuids_list}")
|
raise Exception(f"资源查询返回空结果: {resource_data}")
|
||||||
|
|
||||||
raw_data = json.loads(response.response)
|
raw_data = json.loads(response.response)
|
||||||
|
|
||||||
# 转换为 PLR 资源
|
# 转换为 PLR 资源
|
||||||
tree_set = ResourceTreeSet.from_raw_dict_list(raw_data)
|
tree_set = ResourceTreeSet.from_raw_dict_list(raw_data)
|
||||||
if not len(tree_set.trees):
|
plr_resource = tree_set.to_plr_resources()[0]
|
||||||
raise Exception(f"资源查询返回空树: {raw_data}")
|
|
||||||
plr_resources = tree_set.to_plr_resources()
|
|
||||||
|
|
||||||
# 通过资源跟踪器获取本地实例
|
# 通过资源跟踪器获取本地实例
|
||||||
figured_resources: List[ResourcePLR] = []
|
res = self.resource_tracker.figure_resource(plr_resource, try_mode=True)
|
||||||
for plr_resource, tree in zip(plr_resources, tree_set.trees):
|
if len(res) == 0:
|
||||||
res = self.resource_tracker.figure_resource(plr_resource, try_mode=True)
|
self.lab_logger().warning(f"资源转换未能索引到实例: {resource_data},返回新建实例")
|
||||||
if len(res) == 0:
|
return plr_resource
|
||||||
self.lab_logger().warning(f"资源转换未能索引到实例: {tree.root_node.res_content},返回新建实例")
|
elif len(res) == 1:
|
||||||
figured_resources.append(plr_resource)
|
return res[0]
|
||||||
elif len(res) == 1:
|
else:
|
||||||
figured_resources.append(res[0])
|
raise ValueError(f"资源转换得到多个实例: {res}")
|
||||||
else:
|
|
||||||
raise ValueError(f"资源转换得到多个实例: {res}")
|
|
||||||
|
|
||||||
mapped_plr_resources = []
|
|
||||||
for uuid in uuids_list:
|
|
||||||
for plr_resource in figured_resources:
|
|
||||||
r = self.resource_tracker.loop_find_with_uuid(plr_resource, uuid)
|
|
||||||
mapped_plr_resources.append(r)
|
|
||||||
break
|
|
||||||
|
|
||||||
return mapped_plr_resources
|
|
||||||
|
|
||||||
async def _execute_driver_command_async(self, string: str):
|
async def _execute_driver_command_async(self, string: str):
|
||||||
try:
|
try:
|
||||||
@@ -1865,7 +1765,6 @@ class ROS2DeviceNode:
|
|||||||
or driver_class.__name__ == "LiquidHandlerBiomek"
|
or driver_class.__name__ == "LiquidHandlerBiomek"
|
||||||
or driver_class.__name__ == "PRCXI9300Handler"
|
or driver_class.__name__ == "PRCXI9300Handler"
|
||||||
or driver_class.__name__ == "TransformXYZHandler"
|
or driver_class.__name__ == "TransformXYZHandler"
|
||||||
or driver_class.__name__ == "OpcUaClient"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
# 创建设备类实例
|
# 创建设备类实例
|
||||||
|
|||||||
@@ -10,6 +10,7 @@ from typing import TYPE_CHECKING, Optional, Dict, Any, List, ClassVar, Set, Type
|
|||||||
from action_msgs.msg import GoalStatus
|
from action_msgs.msg import GoalStatus
|
||||||
from geometry_msgs.msg import Point
|
from geometry_msgs.msg import Point
|
||||||
from rclpy.action import ActionClient, get_action_server_names_and_types_by_node
|
from rclpy.action import ActionClient, get_action_server_names_and_types_by_node
|
||||||
|
from rclpy.callback_groups import ReentrantCallbackGroup
|
||||||
from rclpy.service import Service
|
from rclpy.service import Service
|
||||||
from unilabos_msgs.msg import Resource # type: ignore
|
from unilabos_msgs.msg import Resource # type: ignore
|
||||||
from unilabos_msgs.srv import (
|
from unilabos_msgs.srv import (
|
||||||
@@ -18,12 +19,12 @@ from unilabos_msgs.srv import (
|
|||||||
ResourceUpdate,
|
ResourceUpdate,
|
||||||
ResourceList,
|
ResourceList,
|
||||||
SerialCommand,
|
SerialCommand,
|
||||||
|
ResourceGet,
|
||||||
) # type: ignore
|
) # type: ignore
|
||||||
from unilabos_msgs.srv._serial_command import SerialCommand_Request, SerialCommand_Response
|
from unilabos_msgs.srv._serial_command import SerialCommand_Request, SerialCommand_Response
|
||||||
from unique_identifier_msgs.msg import UUID
|
from unique_identifier_msgs.msg import UUID
|
||||||
|
|
||||||
from unilabos.registry.registry import lab_registry
|
from unilabos.registry.registry import lab_registry
|
||||||
from unilabos.resources.container import RegularContainer
|
|
||||||
from unilabos.resources.graphio import initialize_resource
|
from unilabos.resources.graphio import initialize_resource
|
||||||
from unilabos.resources.registry import add_schema
|
from unilabos.resources.registry import add_schema
|
||||||
from unilabos.ros.initialize_device import initialize_device_from_dict
|
from unilabos.ros.initialize_device import initialize_device_from_dict
|
||||||
@@ -36,7 +37,7 @@ from unilabos.ros.msgs.message_converter import (
|
|||||||
)
|
)
|
||||||
from unilabos.ros.nodes.base_device_node import BaseROS2DeviceNode, ROS2DeviceNode, DeviceNodeResourceTracker
|
from unilabos.ros.nodes.base_device_node import BaseROS2DeviceNode, ROS2DeviceNode, DeviceNodeResourceTracker
|
||||||
from unilabos.ros.nodes.presets.controller_node import ControllerNode
|
from unilabos.ros.nodes.presets.controller_node import ControllerNode
|
||||||
from unilabos.resources.resource_tracker import (
|
from unilabos.ros.nodes.resource_tracker import (
|
||||||
ResourceDict,
|
ResourceDict,
|
||||||
ResourceDictInstance,
|
ResourceDictInstance,
|
||||||
ResourceTreeSet,
|
ResourceTreeSet,
|
||||||
@@ -71,8 +72,6 @@ class HostNode(BaseROS2DeviceNode):
|
|||||||
|
|
||||||
_instance: ClassVar[Optional["HostNode"]] = None
|
_instance: ClassVar[Optional["HostNode"]] = None
|
||||||
_ready_event: ClassVar[threading.Event] = threading.Event()
|
_ready_event: ClassVar[threading.Event] = threading.Event()
|
||||||
_shutting_down: ClassVar[bool] = False # Flag to signal shutdown to background threads
|
|
||||||
_background_threads: ClassVar[List[threading.Thread]] = [] # Track all background threads for cleanup
|
|
||||||
_device_action_status: ClassVar[collections.defaultdict[str, DeviceActionStatus]] = collections.defaultdict(
|
_device_action_status: ClassVar[collections.defaultdict[str, DeviceActionStatus]] = collections.defaultdict(
|
||||||
DeviceActionStatus
|
DeviceActionStatus
|
||||||
)
|
)
|
||||||
@@ -84,48 +83,6 @@ class HostNode(BaseROS2DeviceNode):
|
|||||||
return cls._instance
|
return cls._instance
|
||||||
return None
|
return None
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def shutdown_background_threads(cls, timeout: float = 5.0) -> None:
|
|
||||||
"""
|
|
||||||
Gracefully shutdown all background threads for clean exit or restart.
|
|
||||||
|
|
||||||
This method:
|
|
||||||
1. Sets shutdown flag to stop background operations
|
|
||||||
2. Waits for background threads to finish with timeout
|
|
||||||
3. Cleans up finished threads from tracking list
|
|
||||||
|
|
||||||
Args:
|
|
||||||
timeout: Maximum time to wait for each thread (seconds)
|
|
||||||
"""
|
|
||||||
cls._shutting_down = True
|
|
||||||
|
|
||||||
# Wait for background threads to finish
|
|
||||||
active_threads = []
|
|
||||||
for t in cls._background_threads:
|
|
||||||
if t.is_alive():
|
|
||||||
t.join(timeout=timeout)
|
|
||||||
if t.is_alive():
|
|
||||||
active_threads.append(t.name)
|
|
||||||
|
|
||||||
if active_threads:
|
|
||||||
logger.warning(f"[Host Node] Some background threads still running: {active_threads}")
|
|
||||||
|
|
||||||
# Clear the thread list
|
|
||||||
cls._background_threads.clear()
|
|
||||||
logger.info(f"[Host Node] Background threads shutdown complete")
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def reset_state(cls) -> None:
|
|
||||||
"""
|
|
||||||
Reset the HostNode singleton state for restart or clean exit.
|
|
||||||
Call this after destroying the instance.
|
|
||||||
"""
|
|
||||||
cls._instance = None
|
|
||||||
cls._ready_event.clear()
|
|
||||||
cls._shutting_down = False
|
|
||||||
cls._background_threads.clear()
|
|
||||||
logger.info("[Host Node] State reset complete")
|
|
||||||
|
|
||||||
def __init__(
|
def __init__(
|
||||||
self,
|
self,
|
||||||
device_id: str,
|
device_id: str,
|
||||||
@@ -339,36 +296,12 @@ class HostNode(BaseROS2DeviceNode):
|
|||||||
bridge.publish_host_ready()
|
bridge.publish_host_ready()
|
||||||
self.lab_logger().debug(f"Host ready signal sent via {bridge.__class__.__name__}")
|
self.lab_logger().debug(f"Host ready signal sent via {bridge.__class__.__name__}")
|
||||||
|
|
||||||
def _send_re_register(self, sclient, device_namespace: str):
|
def _send_re_register(self, sclient):
|
||||||
"""
|
sclient.wait_for_service()
|
||||||
Send re-register command to a device. This is a one-time operation.
|
request = SerialCommand.Request()
|
||||||
|
request.command = ""
|
||||||
Args:
|
future = sclient.call_async(request)
|
||||||
sclient: The service client
|
response = future.result()
|
||||||
device_namespace: The device namespace for logging
|
|
||||||
"""
|
|
||||||
try:
|
|
||||||
# Use timeout to prevent indefinite blocking
|
|
||||||
if not sclient.wait_for_service(timeout_sec=10.0):
|
|
||||||
self.lab_logger().debug(f"[Host Node] Re-register timeout for {device_namespace}")
|
|
||||||
return
|
|
||||||
|
|
||||||
# Check shutdown flag after wait
|
|
||||||
if self._shutting_down:
|
|
||||||
self.lab_logger().debug(f"[Host Node] Re-register aborted for {device_namespace} (shutdown)")
|
|
||||||
return
|
|
||||||
|
|
||||||
request = SerialCommand.Request()
|
|
||||||
request.command = ""
|
|
||||||
future = sclient.call_async(request)
|
|
||||||
# Use timeout for result as well
|
|
||||||
future.result()
|
|
||||||
except Exception as e:
|
|
||||||
# Gracefully handle destruction during shutdown
|
|
||||||
if "destruction was requested" in str(e) or self._shutting_down:
|
|
||||||
self.lab_logger().debug(f"[Host Node] Re-register aborted for {device_namespace} (cleanup)")
|
|
||||||
else:
|
|
||||||
self.lab_logger().warning(f"[Host Node] Re-register failed for {device_namespace}: {e}")
|
|
||||||
|
|
||||||
def _discover_devices(self) -> None:
|
def _discover_devices(self) -> None:
|
||||||
"""
|
"""
|
||||||
@@ -400,27 +333,23 @@ class HostNode(BaseROS2DeviceNode):
|
|||||||
self._create_action_clients_for_device(device_id, namespace)
|
self._create_action_clients_for_device(device_id, namespace)
|
||||||
self._online_devices.add(device_key)
|
self._online_devices.add(device_key)
|
||||||
sclient = self.create_client(SerialCommand, f"/srv{namespace}/re_register_device")
|
sclient = self.create_client(SerialCommand, f"/srv{namespace}/re_register_device")
|
||||||
t = threading.Thread(
|
threading.Thread(
|
||||||
target=self._send_re_register,
|
target=self._send_re_register,
|
||||||
args=(sclient, namespace),
|
args=(sclient,),
|
||||||
daemon=True,
|
daemon=True,
|
||||||
name=f"ROSDevice{self.device_id}_re_register_device_{namespace}",
|
name=f"ROSDevice{self.device_id}_re_register_device_{namespace}",
|
||||||
)
|
).start()
|
||||||
self._background_threads.append(t)
|
|
||||||
t.start()
|
|
||||||
elif device_key not in self._online_devices:
|
elif device_key not in self._online_devices:
|
||||||
# 设备重新上线
|
# 设备重新上线
|
||||||
self.lab_logger().info(f"[Host Node] Device reconnected: {device_key}")
|
self.lab_logger().info(f"[Host Node] Device reconnected: {device_key}")
|
||||||
self._online_devices.add(device_key)
|
self._online_devices.add(device_key)
|
||||||
sclient = self.create_client(SerialCommand, f"/srv{namespace}/re_register_device")
|
sclient = self.create_client(SerialCommand, f"/srv{namespace}/re_register_device")
|
||||||
t = threading.Thread(
|
threading.Thread(
|
||||||
target=self._send_re_register,
|
target=self._send_re_register,
|
||||||
args=(sclient, namespace),
|
args=(sclient,),
|
||||||
daemon=True,
|
daemon=True,
|
||||||
name=f"ROSDevice{self.device_id}_re_register_device_{namespace}",
|
name=f"ROSDevice{self.device_id}_re_register_device_{namespace}",
|
||||||
)
|
).start()
|
||||||
self._background_threads.append(t)
|
|
||||||
t.start()
|
|
||||||
|
|
||||||
# 检测离线设备
|
# 检测离线设备
|
||||||
offline_devices = self._online_devices - current_devices
|
offline_devices = self._online_devices - current_devices
|
||||||
@@ -586,10 +515,11 @@ class HostNode(BaseROS2DeviceNode):
|
|||||||
)
|
)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
assert len(response) == 1, "Create Resource应当只返回一个结果"
|
new_li = []
|
||||||
for i in response:
|
for i in response:
|
||||||
res = json.loads(i)
|
res = json.loads(i)
|
||||||
return res
|
new_li.append(res)
|
||||||
|
return {"resources": new_li, "liquid_input_resources": new_li}
|
||||||
except Exception as ex:
|
except Exception as ex:
|
||||||
pass
|
pass
|
||||||
_n = "\n"
|
_n = "\n"
|
||||||
@@ -777,14 +707,13 @@ class HostNode(BaseROS2DeviceNode):
|
|||||||
raise ValueError(f"ActionClient {action_id} not found.")
|
raise ValueError(f"ActionClient {action_id} not found.")
|
||||||
|
|
||||||
action_client: ActionClient = self._action_clients[action_id]
|
action_client: ActionClient = self._action_clients[action_id]
|
||||||
|
|
||||||
# 遍历action_kwargs下的所有子dict,将"sample_uuid"的值赋给"sample_id"
|
# 遍历action_kwargs下的所有子dict,将"sample_uuid"的值赋给"sample_id"
|
||||||
def assign_sample_id(obj):
|
def assign_sample_id(obj):
|
||||||
if isinstance(obj, dict):
|
if isinstance(obj, dict):
|
||||||
if "sample_uuid" in obj:
|
if "sample_uuid" in obj:
|
||||||
obj["sample_id"] = obj["sample_uuid"]
|
obj["sample_id"] = obj["sample_uuid"]
|
||||||
obj.pop("sample_uuid")
|
obj.pop("sample_uuid")
|
||||||
for k, v in obj.items():
|
for k,v in obj.items():
|
||||||
if k != "unilabos_extra":
|
if k != "unilabos_extra":
|
||||||
assign_sample_id(v)
|
assign_sample_id(v)
|
||||||
elif isinstance(obj, list):
|
elif isinstance(obj, list):
|
||||||
@@ -794,9 +723,7 @@ class HostNode(BaseROS2DeviceNode):
|
|||||||
assign_sample_id(action_kwargs)
|
assign_sample_id(action_kwargs)
|
||||||
goal_msg = convert_to_ros_msg(action_client._action_type.Goal(), action_kwargs)
|
goal_msg = convert_to_ros_msg(action_client._action_type.Goal(), action_kwargs)
|
||||||
|
|
||||||
self.lab_logger().info(f"[Host Node] Sending goal for {action_id}: {str(goal_msg)[:1000]}")
|
self.lab_logger().info(f"[Host Node] Sending goal for {action_id}: {goal_msg}")
|
||||||
self.lab_logger().trace(f"[Host Node] Sending goal for {action_id}: {action_kwargs}")
|
|
||||||
self.lab_logger().trace(f"[Host Node] Sending goal for {action_id}: {goal_msg}")
|
|
||||||
action_client.wait_for_server()
|
action_client.wait_for_server()
|
||||||
goal_uuid_obj = UUID(uuid=list(u.bytes))
|
goal_uuid_obj = UUID(uuid=list(u.bytes))
|
||||||
|
|
||||||
@@ -817,7 +744,9 @@ class HostNode(BaseROS2DeviceNode):
|
|||||||
self.lab_logger().info(f"[Host Node] Goal {action_id} ({item.job_id}) accepted")
|
self.lab_logger().info(f"[Host Node] Goal {action_id} ({item.job_id}) accepted")
|
||||||
self._goals[item.job_id] = goal_handle
|
self._goals[item.job_id] = goal_handle
|
||||||
goal_future = goal_handle.get_result_async()
|
goal_future = goal_handle.get_result_async()
|
||||||
goal_future.add_done_callback(lambda f: self.get_result_callback(item, action_id, f))
|
goal_future.add_done_callback(
|
||||||
|
lambda f: self.get_result_callback(item, action_id, f)
|
||||||
|
)
|
||||||
goal_future.result()
|
goal_future.result()
|
||||||
|
|
||||||
def feedback_callback(self, item: "QueueItem", action_id: str, feedback_msg) -> None:
|
def feedback_callback(self, item: "QueueItem", action_id: str, feedback_msg) -> None:
|
||||||
@@ -1134,11 +1063,11 @@ class HostNode(BaseROS2DeviceNode):
|
|||||||
|
|
||||||
接收序列化的 ResourceTreeSet 数据并进行处理
|
接收序列化的 ResourceTreeSet 数据并进行处理
|
||||||
"""
|
"""
|
||||||
|
self.lab_logger().info(f"[Host Node-Resource] Resource tree add request received")
|
||||||
try:
|
try:
|
||||||
# 解析请求数据
|
# 解析请求数据
|
||||||
data = json.loads(request.command)
|
data = json.loads(request.command)
|
||||||
action = data["action"]
|
action = data["action"]
|
||||||
self.lab_logger().info(f"[Host Node-Resource] Resource tree {action} request received")
|
|
||||||
data = data["data"]
|
data = data["data"]
|
||||||
if action == "add":
|
if action == "add":
|
||||||
await self._resource_tree_action_add_callback(data, response)
|
await self._resource_tree_action_add_callback(data, response)
|
||||||
@@ -1239,13 +1168,11 @@ class HostNode(BaseROS2DeviceNode):
|
|||||||
响应对象,包含查询到的资源
|
响应对象,包含查询到的资源
|
||||||
"""
|
"""
|
||||||
try:
|
try:
|
||||||
from unilabos.app.web import http_client
|
|
||||||
|
|
||||||
data = json.loads(request.command)
|
data = json.loads(request.command)
|
||||||
if "uuid" in data and data["uuid"] is not None:
|
if "uuid" in data and data["uuid"] is not None:
|
||||||
http_req = http_client.resource_tree_get([data["uuid"]], data["with_children"])
|
http_req = self.bridges[-1].resource_tree_get([data["uuid"]], data["with_children"])
|
||||||
elif "id" in data:
|
elif "id" in data and data["id"].startswith("/"):
|
||||||
http_req = http_client.resource_get(data["id"], data["with_children"])
|
http_req = self.bridges[-1].resource_get(data["id"], data["with_children"])
|
||||||
else:
|
else:
|
||||||
raise ValueError("没有使用正确的物料 id 或 uuid")
|
raise ValueError("没有使用正确的物料 id 或 uuid")
|
||||||
response.response = json.dumps(http_req["data"])
|
response.response = json.dumps(http_req["data"])
|
||||||
@@ -1454,16 +1381,10 @@ class HostNode(BaseROS2DeviceNode):
|
|||||||
}
|
}
|
||||||
|
|
||||||
def test_resource(
|
def test_resource(
|
||||||
self, resource: ResourceSlot = None, resources: List[ResourceSlot] = None, device: DeviceSlot = None, devices: List[DeviceSlot] = None
|
self, resource: ResourceSlot, resources: List[ResourceSlot], device: DeviceSlot, devices: List[DeviceSlot]
|
||||||
) -> TestResourceReturn:
|
) -> TestResourceReturn:
|
||||||
if resources is None:
|
|
||||||
resources = []
|
|
||||||
if devices is None:
|
|
||||||
devices = []
|
|
||||||
if resource is None:
|
|
||||||
resource = RegularContainer("test_resource传入None")
|
|
||||||
return {
|
return {
|
||||||
"resources": ResourceTreeSet.from_plr_resources([resource, *resources], known_newly_created=True).dump(),
|
"resources": ResourceTreeSet.from_plr_resources([resource, *resources]).dump(),
|
||||||
"devices": [device, *devices],
|
"devices": [device, *devices],
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -1515,7 +1436,7 @@ class HostNode(BaseROS2DeviceNode):
|
|||||||
|
|
||||||
# 构建服务地址
|
# 构建服务地址
|
||||||
srv_address = f"/srv{namespace}/s2c_resource_tree"
|
srv_address = f"/srv{namespace}/s2c_resource_tree"
|
||||||
self.lab_logger().trace(f"[Host Node-Resource] Host -> {device_id} ResourceTree {action} operation started -------")
|
self.lab_logger().info(f"[Host Node-Resource] Notifying {device_id} for resource tree {action} operation")
|
||||||
|
|
||||||
# 创建服务客户端
|
# 创建服务客户端
|
||||||
sclient = self.create_client(SerialCommand, srv_address)
|
sclient = self.create_client(SerialCommand, srv_address)
|
||||||
@@ -1550,7 +1471,9 @@ class HostNode(BaseROS2DeviceNode):
|
|||||||
time.sleep(0.05)
|
time.sleep(0.05)
|
||||||
|
|
||||||
response = future.result()
|
response = future.result()
|
||||||
self.lab_logger().trace(f"[Host Node-Resource] Host -> {device_id} ResourceTree {action} operation completed -------")
|
self.lab_logger().info(
|
||||||
|
f"[Host Node-Resource] Resource tree {action} notification completed for {device_id}"
|
||||||
|
)
|
||||||
return True
|
return True
|
||||||
|
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
|
|||||||
@@ -12,10 +12,11 @@ from unilabos_msgs.srv import ResourceUpdate
|
|||||||
from unilabos.messages import * # type: ignore # protocol names
|
from unilabos.messages import * # type: ignore # protocol names
|
||||||
from rclpy.action import ActionServer, ActionClient
|
from rclpy.action import ActionServer, ActionClient
|
||||||
from rclpy.action.server import ServerGoalHandle
|
from rclpy.action.server import ServerGoalHandle
|
||||||
|
from rclpy.callback_groups import ReentrantCallbackGroup
|
||||||
from unilabos_msgs.srv._serial_command import SerialCommand_Request, SerialCommand_Response
|
from unilabos_msgs.srv._serial_command import SerialCommand_Request, SerialCommand_Response
|
||||||
|
|
||||||
from unilabos.compile import action_protocol_generators
|
from unilabos.compile import action_protocol_generators
|
||||||
from unilabos.resources.graphio import nested_dict_to_list
|
from unilabos.resources.graphio import list_to_nested_dict, nested_dict_to_list
|
||||||
from unilabos.ros.initialize_device import initialize_device_from_dict
|
from unilabos.ros.initialize_device import initialize_device_from_dict
|
||||||
from unilabos.ros.msgs.message_converter import (
|
from unilabos.ros.msgs.message_converter import (
|
||||||
get_action_type,
|
get_action_type,
|
||||||
@@ -23,7 +24,7 @@ from unilabos.ros.msgs.message_converter import (
|
|||||||
convert_from_ros_msg_with_mapping,
|
convert_from_ros_msg_with_mapping,
|
||||||
)
|
)
|
||||||
from unilabos.ros.nodes.base_device_node import BaseROS2DeviceNode, DeviceNodeResourceTracker, ROS2DeviceNode
|
from unilabos.ros.nodes.base_device_node import BaseROS2DeviceNode, DeviceNodeResourceTracker, ROS2DeviceNode
|
||||||
from unilabos.resources.resource_tracker import ResourceTreeSet, ResourceDictInstance
|
from unilabos.ros.nodes.resource_tracker import ResourceTreeSet, ResourceDictInstance
|
||||||
from unilabos.utils.type_check import get_result_info_str
|
from unilabos.utils.type_check import get_result_info_str
|
||||||
|
|
||||||
if TYPE_CHECKING:
|
if TYPE_CHECKING:
|
||||||
|
|||||||
@@ -1,7 +1,7 @@
|
|||||||
import inspect
|
import inspect
|
||||||
import traceback
|
import traceback
|
||||||
import uuid
|
import uuid
|
||||||
from pydantic import BaseModel, field_serializer, field_validator, ValidationError
|
from pydantic import BaseModel, field_serializer, field_validator
|
||||||
from pydantic import Field
|
from pydantic import Field
|
||||||
from typing import List, Tuple, Any, Dict, Literal, Optional, cast, TYPE_CHECKING, Union
|
from typing import List, Tuple, Any, Dict, Literal, Optional, cast, TYPE_CHECKING, Union
|
||||||
|
|
||||||
@@ -14,9 +14,9 @@ if TYPE_CHECKING:
|
|||||||
|
|
||||||
|
|
||||||
class ResourceDictPositionSize(BaseModel):
|
class ResourceDictPositionSize(BaseModel):
|
||||||
depth: float = Field(description="Depth", default=0.0) # z
|
depth: float = Field(description="Depth", default=0.0)
|
||||||
width: float = Field(description="Width", default=0.0) # x
|
width: float = Field(description="Width", default=0.0)
|
||||||
height: float = Field(description="Height", default=0.0) # y
|
height: float = Field(description="Height", default=0.0)
|
||||||
|
|
||||||
|
|
||||||
class ResourceDictPositionScale(BaseModel):
|
class ResourceDictPositionScale(BaseModel):
|
||||||
@@ -66,8 +66,8 @@ class ResourceDict(BaseModel):
|
|||||||
klass: str = Field(alias="class", description="Resource class name")
|
klass: str = Field(alias="class", description="Resource class name")
|
||||||
pose: ResourceDictPosition = Field(description="Resource position", default_factory=ResourceDictPosition)
|
pose: ResourceDictPosition = Field(description="Resource position", default_factory=ResourceDictPosition)
|
||||||
config: Dict[str, Any] = Field(description="Resource configuration")
|
config: Dict[str, Any] = Field(description="Resource configuration")
|
||||||
data: Dict[str, Any] = Field(description="Resource data, eg: container liquid data")
|
data: Dict[str, Any] = Field(description="Resource data")
|
||||||
extra: Dict[str, Any] = Field(description="Extra data, eg: slot index")
|
extra: Dict[str, Any] = Field(description="Extra data")
|
||||||
|
|
||||||
@field_serializer("parent_uuid")
|
@field_serializer("parent_uuid")
|
||||||
def _serialize_parent(self, parent_uuid: Optional["ResourceDict"]):
|
def _serialize_parent(self, parent_uuid: Optional["ResourceDict"]):
|
||||||
@@ -147,24 +147,20 @@ class ResourceDictInstance(object):
|
|||||||
if not content.get("extra"): # MagicCode
|
if not content.get("extra"): # MagicCode
|
||||||
content["extra"] = {}
|
content["extra"] = {}
|
||||||
if "position" in content:
|
if "position" in content:
|
||||||
pose = content.get("pose", {})
|
pose = content.get("pose",{})
|
||||||
if "position" not in pose:
|
if "position" not in pose :
|
||||||
if "position" in content["position"]:
|
if "position" in content["position"]:
|
||||||
pose["position"] = content["position"]["position"]
|
pose["position"] = content["position"]["position"]
|
||||||
else:
|
else:
|
||||||
pose["position"] = {"x": 0, "y": 0, "z": 0}
|
pose["position"] = {"x": 0, "y": 0, "z": 0}
|
||||||
if "size" not in pose:
|
if "size" not in pose:
|
||||||
pose["size"] = {
|
pose["size"] = {
|
||||||
"width": content["config"].get("size_x", 0),
|
"width": content["config"].get("size_x", 0),
|
||||||
"height": content["config"].get("size_y", 0),
|
"height": content["config"].get("size_y", 0),
|
||||||
"depth": content["config"].get("size_z", 0),
|
"depth": content["config"].get("size_z", 0)
|
||||||
}
|
}
|
||||||
content["pose"] = pose
|
content["pose"] = pose
|
||||||
try:
|
return ResourceDictInstance(ResourceDict.model_validate(content))
|
||||||
res_dict = ResourceDict.model_validate(content)
|
|
||||||
return ResourceDictInstance(res_dict)
|
|
||||||
except ValidationError as err:
|
|
||||||
raise err
|
|
||||||
|
|
||||||
def get_plr_nested_dict(self) -> Dict[str, Any]:
|
def get_plr_nested_dict(self) -> Dict[str, Any]:
|
||||||
"""获取资源实例的嵌套字典表示"""
|
"""获取资源实例的嵌套字典表示"""
|
||||||
@@ -326,7 +322,7 @@ class ResourceTreeSet(object):
|
|||||||
)
|
)
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def from_plr_resources(cls, resources: List["PLRResource"], known_newly_created=False) -> "ResourceTreeSet":
|
def from_plr_resources(cls, resources: List["PLRResource"]) -> "ResourceTreeSet":
|
||||||
"""
|
"""
|
||||||
从plr资源创建ResourceTreeSet
|
从plr资源创建ResourceTreeSet
|
||||||
"""
|
"""
|
||||||
@@ -343,8 +339,6 @@ class ResourceTreeSet(object):
|
|||||||
}
|
}
|
||||||
if source in replace_info:
|
if source in replace_info:
|
||||||
return replace_info[source]
|
return replace_info[source]
|
||||||
elif source is None:
|
|
||||||
return ""
|
|
||||||
else:
|
else:
|
||||||
print("转换pylabrobot的时候,出现未知类型", source)
|
print("转换pylabrobot的时候,出现未知类型", source)
|
||||||
return source
|
return source
|
||||||
@@ -355,8 +349,7 @@ class ResourceTreeSet(object):
|
|||||||
if not uid:
|
if not uid:
|
||||||
uid = str(uuid.uuid4())
|
uid = str(uuid.uuid4())
|
||||||
res.unilabos_uuid = uid
|
res.unilabos_uuid = uid
|
||||||
if not known_newly_created:
|
logger.warning(f"{res}没有uuid,请设置后再传入,默认填充{uid}!\n{traceback.format_exc()}")
|
||||||
logger.warning(f"{res}没有uuid,请设置后再传入,默认填充{uid}!\n{traceback.format_exc()}")
|
|
||||||
|
|
||||||
# 获取unilabos_extra,默认为空字典
|
# 获取unilabos_extra,默认为空字典
|
||||||
extra = getattr(res, "unilabos_extra", {})
|
extra = getattr(res, "unilabos_extra", {})
|
||||||
@@ -455,13 +448,7 @@ class ResourceTreeSet(object):
|
|||||||
from pylabrobot.utils.object_parsing import find_subclass
|
from pylabrobot.utils.object_parsing import find_subclass
|
||||||
|
|
||||||
# 类型映射
|
# 类型映射
|
||||||
TYPE_MAP = {
|
TYPE_MAP = {"plate": "Plate", "well": "Well", "deck": "Deck", "container": "RegularContainer", "tip_spot": "TipSpot"}
|
||||||
"plate": "Plate",
|
|
||||||
"well": "Well",
|
|
||||||
"deck": "Deck",
|
|
||||||
"container": "RegularContainer",
|
|
||||||
"tip_spot": "TipSpot",
|
|
||||||
}
|
|
||||||
|
|
||||||
def collect_node_data(node: ResourceDictInstance, name_to_uuid: dict, all_states: dict, name_to_extra: dict):
|
def collect_node_data(node: ResourceDictInstance, name_to_uuid: dict, all_states: dict, name_to_extra: dict):
|
||||||
"""一次遍历收集 name_to_uuid, all_states 和 name_to_extra"""
|
"""一次遍历收集 name_to_uuid, all_states 和 name_to_extra"""
|
||||||
@@ -482,9 +469,9 @@ class ResourceTreeSet(object):
|
|||||||
**res.config,
|
**res.config,
|
||||||
"name": res.name,
|
"name": res.name,
|
||||||
"type": res.config.get("type", plr_type),
|
"type": res.config.get("type", plr_type),
|
||||||
"size_x": res.pose.size.width,
|
"size_x": res.config.get("size_x", 0),
|
||||||
"size_y": res.pose.size.height,
|
"size_y": res.config.get("size_y", 0),
|
||||||
"size_z": res.pose.size.depth,
|
"size_z": res.config.get("size_z", 0),
|
||||||
"location": {
|
"location": {
|
||||||
"x": res.pose.position.x,
|
"x": res.pose.position.x,
|
||||||
"y": res.pose.position.y,
|
"y": res.pose.position.y,
|
||||||
@@ -931,33 +918,6 @@ class DeviceNodeResourceTracker(object):
|
|||||||
|
|
||||||
return self._traverse_and_process(resource, process)
|
return self._traverse_and_process(resource, process)
|
||||||
|
|
||||||
def loop_find_with_uuid(self, resource, target_uuid: str):
|
|
||||||
"""
|
|
||||||
递归遍历资源树,根据 uuid 查找并返回对应的资源
|
|
||||||
|
|
||||||
Args:
|
|
||||||
resource: 资源对象(可以是list、dict或实例)
|
|
||||||
target_uuid: 要查找的uuid
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
找到的资源对象,未找到则返回None
|
|
||||||
"""
|
|
||||||
found_resource = None
|
|
||||||
|
|
||||||
def process(res):
|
|
||||||
nonlocal found_resource
|
|
||||||
if found_resource is not None:
|
|
||||||
return 0 # 已找到,跳过后续处理
|
|
||||||
current_uuid = self._get_resource_attr(res, "uuid", "unilabos_uuid")
|
|
||||||
if current_uuid and current_uuid == target_uuid:
|
|
||||||
found_resource = res
|
|
||||||
logger.trace(f"找到资源UUID: {target_uuid}")
|
|
||||||
return 1
|
|
||||||
return 0
|
|
||||||
|
|
||||||
self._traverse_and_process(resource, process)
|
|
||||||
return found_resource
|
|
||||||
|
|
||||||
def loop_set_extra(self, resource, name_to_extra_map: Dict[str, dict]) -> int:
|
def loop_set_extra(self, resource, name_to_extra_map: Dict[str, dict]) -> int:
|
||||||
"""
|
"""
|
||||||
递归遍历资源树,根据 name 设置所有节点的 extra
|
递归遍历资源树,根据 name 设置所有节点的 extra
|
||||||
@@ -1143,7 +1103,7 @@ class DeviceNodeResourceTracker(object):
|
|||||||
for key in keys_to_remove:
|
for key in keys_to_remove:
|
||||||
self.resource2parent_resource.pop(key, None)
|
self.resource2parent_resource.pop(key, None)
|
||||||
|
|
||||||
logger.trace(f"[ResourceTracker] 成功移除资源: {resource}")
|
logger.debug(f"成功移除资源: {resource}")
|
||||||
return True
|
return True
|
||||||
|
|
||||||
def clear_resource(self):
|
def clear_resource(self):
|
||||||
@@ -11,9 +11,10 @@ import traceback
|
|||||||
from abc import abstractmethod
|
from abc import abstractmethod
|
||||||
from typing import Type, Any, Dict, Optional, TypeVar, Generic, List
|
from typing import Type, Any, Dict, Optional, TypeVar, Generic, List
|
||||||
|
|
||||||
from unilabos.resources.resource_tracker import DeviceNodeResourceTracker, ResourceTreeSet, ResourceDictInstance, \
|
from unilabos.resources.graphio import nested_dict_to_list, resource_ulab_to_plr
|
||||||
|
from unilabos.ros.nodes.resource_tracker import DeviceNodeResourceTracker, ResourceTreeSet, ResourceDictInstance, \
|
||||||
ResourceTreeInstance
|
ResourceTreeInstance
|
||||||
from unilabos.utils import logger
|
from unilabos.utils import logger, import_manager
|
||||||
from unilabos.utils.cls_creator import create_instance_from_config
|
from unilabos.utils.cls_creator import create_instance_from_config
|
||||||
|
|
||||||
# 定义泛型类型变量
|
# 定义泛型类型变量
|
||||||
@@ -134,7 +135,7 @@ class PyLabRobotCreator(DeviceClassCreator[T]):
|
|||||||
Returns:
|
Returns:
|
||||||
处理后的数据
|
处理后的数据
|
||||||
"""
|
"""
|
||||||
from pylabrobot.resources import Resource
|
from pylabrobot.resources import Deck, Resource
|
||||||
|
|
||||||
if states is None:
|
if states is None:
|
||||||
states = {}
|
states = {}
|
||||||
|
|||||||
@@ -1,836 +0,0 @@
|
|||||||
{
|
|
||||||
"nodes": [
|
|
||||||
{
|
|
||||||
"id": "PRCXI",
|
|
||||||
"name": "PRCXI",
|
|
||||||
"type": "device",
|
|
||||||
"class": "liquid_handler.prcxi",
|
|
||||||
"parent": "",
|
|
||||||
"pose": {
|
|
||||||
"size": {
|
|
||||||
"width": 550,
|
|
||||||
"height": 400,
|
|
||||||
"depth": 0
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"config": {
|
|
||||||
"axis": "Left",
|
|
||||||
"deck": {
|
|
||||||
"_resource_type": "unilabos.devices.liquid_handling.prcxi.prcxi:PRCXI9300Deck",
|
|
||||||
"_resource_child_name": "PRCXI_Deck"
|
|
||||||
},
|
|
||||||
"host": "10.20.30.184",
|
|
||||||
"port": 9999,
|
|
||||||
"debug": false,
|
|
||||||
"setup": false,
|
|
||||||
"is_9320": true,
|
|
||||||
"timeout": 10,
|
|
||||||
"matrix_id": "5de524d0-3f95-406c-86dd-f83626ebc7cb",
|
|
||||||
"simulator": false,
|
|
||||||
"channel_num": 2
|
|
||||||
},
|
|
||||||
"data": {
|
|
||||||
"reset_ok": true
|
|
||||||
},
|
|
||||||
"schema": {},
|
|
||||||
"description": "",
|
|
||||||
"model": null,
|
|
||||||
"position": {
|
|
||||||
"x": 0,
|
|
||||||
"y": 700,
|
|
||||||
"z": 0
|
|
||||||
}
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"id": "PRCXI_Deck",
|
|
||||||
"name": "PRCXI_Deck",
|
|
||||||
|
|
||||||
"children": [],
|
|
||||||
"parent": "PRCXI",
|
|
||||||
"type": "deck",
|
|
||||||
"class": "",
|
|
||||||
"position": {
|
|
||||||
"x": 0,
|
|
||||||
"y": 0,
|
|
||||||
"z": 0
|
|
||||||
},
|
|
||||||
"config": {
|
|
||||||
"type": "PRCXI9300Deck",
|
|
||||||
"size_x": 550,
|
|
||||||
"size_y": 400,
|
|
||||||
"size_z": 17,
|
|
||||||
"rotation": {
|
|
||||||
"x": 0,
|
|
||||||
"y": 0,
|
|
||||||
"z": 0,
|
|
||||||
"type": "Rotation"
|
|
||||||
},
|
|
||||||
"category": "deck",
|
|
||||||
"barcode": null
|
|
||||||
},
|
|
||||||
"data": {}
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"id": "T1",
|
|
||||||
"name": "T1",
|
|
||||||
"children": [],
|
|
||||||
"parent": "PRCXI_Deck",
|
|
||||||
"type": "plate",
|
|
||||||
"class": "",
|
|
||||||
"position": {
|
|
||||||
"x": 5,
|
|
||||||
"y": 301,
|
|
||||||
"z": 0
|
|
||||||
},
|
|
||||||
"config": {
|
|
||||||
"type": "PRCXI9300PlateAdapterSite",
|
|
||||||
"size_x": 127.5,
|
|
||||||
"size_y": 86,
|
|
||||||
"size_z": 28,
|
|
||||||
"rotation": {
|
|
||||||
"x": 0,
|
|
||||||
"y": 0,
|
|
||||||
"z": 0,
|
|
||||||
"type": "Rotation"
|
|
||||||
},
|
|
||||||
"category": "plate",
|
|
||||||
"model": null,
|
|
||||||
"barcode": null,
|
|
||||||
|
|
||||||
"sites": [
|
|
||||||
{
|
|
||||||
"label": "T1",
|
|
||||||
"visible": true,
|
|
||||||
"position": { "x": 0, "y": 0, "z": 0 },
|
|
||||||
"size": { "width": 128.0, "height": 86, "depth": 0 },
|
|
||||||
"content_type": [
|
|
||||||
"plate",
|
|
||||||
"tip_rack",
|
|
||||||
"plates",
|
|
||||||
"tip_racks",
|
|
||||||
"tube_rack"
|
|
||||||
]
|
|
||||||
}
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"data": {}
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"id": "T2",
|
|
||||||
"name": "T2",
|
|
||||||
"children": [],
|
|
||||||
"parent": "PRCXI_Deck",
|
|
||||||
"type": "plate",
|
|
||||||
"class": "",
|
|
||||||
"position": {
|
|
||||||
"x": 142.5,
|
|
||||||
"y": 301,
|
|
||||||
"z": 0
|
|
||||||
},
|
|
||||||
"config": {
|
|
||||||
"type": "PRCXI9300PlateAdapterSite",
|
|
||||||
"size_x": 127.5,
|
|
||||||
"size_y": 86,
|
|
||||||
"size_z": 28,
|
|
||||||
"rotation": {
|
|
||||||
"x": 0,
|
|
||||||
"y": 0,
|
|
||||||
"z": 0,
|
|
||||||
"type": "Rotation"
|
|
||||||
},
|
|
||||||
"category": "plate",
|
|
||||||
"model": null,
|
|
||||||
"barcode": null,
|
|
||||||
|
|
||||||
"sites": [
|
|
||||||
{
|
|
||||||
"label": "T2",
|
|
||||||
"visible": true,
|
|
||||||
"position": { "x": 0, "y": 0, "z": 0 },
|
|
||||||
"size": { "width": 128.0, "height": 86, "depth": 0 },
|
|
||||||
"content_type": [
|
|
||||||
"plate",
|
|
||||||
"tip_rack",
|
|
||||||
"plates",
|
|
||||||
"tip_racks",
|
|
||||||
"tube_rack"
|
|
||||||
]
|
|
||||||
}
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"data": {}
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"id": "T3",
|
|
||||||
"name": "T3",
|
|
||||||
"children": [],
|
|
||||||
"parent": "PRCXI_Deck",
|
|
||||||
"type": "plate",
|
|
||||||
"class": "",
|
|
||||||
"position": {
|
|
||||||
"x": 280,
|
|
||||||
"y": 301,
|
|
||||||
"z": 0
|
|
||||||
},
|
|
||||||
"config": {
|
|
||||||
"type": "PRCXI9300PlateAdapterSite",
|
|
||||||
"size_x": 127.5,
|
|
||||||
"size_y": 86,
|
|
||||||
"size_z": 28,
|
|
||||||
"rotation": {
|
|
||||||
"x": 0,
|
|
||||||
"y": 0,
|
|
||||||
"z": 0,
|
|
||||||
"type": "Rotation"
|
|
||||||
},
|
|
||||||
"category": "plate",
|
|
||||||
"model": null,
|
|
||||||
"barcode": null,
|
|
||||||
|
|
||||||
"sites": [
|
|
||||||
{
|
|
||||||
"label": "T3",
|
|
||||||
"visible": true,
|
|
||||||
"position": { "x": 0, "y": 0, "z": 0 },
|
|
||||||
"size": { "width": 128.0, "height": 86, "depth": 0 },
|
|
||||||
"content_type": [
|
|
||||||
"plate",
|
|
||||||
"tip_rack",
|
|
||||||
"plates",
|
|
||||||
"tip_racks",
|
|
||||||
"tube_rack"
|
|
||||||
]
|
|
||||||
}
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"data": {}
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"id": "T4",
|
|
||||||
"name": "T4",
|
|
||||||
"children": [],
|
|
||||||
"parent": "PRCXI_Deck",
|
|
||||||
"type": "plate",
|
|
||||||
"class": "",
|
|
||||||
"position": {
|
|
||||||
"x": 417.5,
|
|
||||||
"y": 301,
|
|
||||||
"z": 0
|
|
||||||
},
|
|
||||||
"config": {
|
|
||||||
"type": "PRCXI9300PlateAdapterSite",
|
|
||||||
"size_x": 127.5,
|
|
||||||
"size_y": 86,
|
|
||||||
"size_z": 94,
|
|
||||||
"rotation": {
|
|
||||||
"x": 0,
|
|
||||||
"y": 0,
|
|
||||||
"z": 0,
|
|
||||||
"type": "Rotation"
|
|
||||||
},
|
|
||||||
"category": "plate",
|
|
||||||
"model": null,
|
|
||||||
"barcode": null,
|
|
||||||
|
|
||||||
"sites": [
|
|
||||||
{
|
|
||||||
"label": "T4",
|
|
||||||
"visible": true,
|
|
||||||
"position": { "x": 0, "y": 0, "z": 0 },
|
|
||||||
"size": { "width": 128.0, "height": 86, "depth": 0 },
|
|
||||||
"content_type": [
|
|
||||||
"plate",
|
|
||||||
"tip_rack",
|
|
||||||
"plates",
|
|
||||||
"tip_racks",
|
|
||||||
"tube_rack"
|
|
||||||
]
|
|
||||||
}
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"data": {}
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"id": "T5",
|
|
||||||
"name": "T5",
|
|
||||||
"children": [],
|
|
||||||
"parent": "PRCXI_Deck",
|
|
||||||
"type": "plate",
|
|
||||||
"class": "",
|
|
||||||
"position": {
|
|
||||||
"x": 5,
|
|
||||||
"y": 205,
|
|
||||||
"z": 0
|
|
||||||
},
|
|
||||||
"config": {
|
|
||||||
"type": "PRCXI9300PlateAdapterSite",
|
|
||||||
"size_x": 127.5,
|
|
||||||
"size_y": 86,
|
|
||||||
"size_z": 28,
|
|
||||||
"rotation": {
|
|
||||||
"x": 0,
|
|
||||||
"y": 0,
|
|
||||||
"z": 0,
|
|
||||||
"type": "Rotation"
|
|
||||||
},
|
|
||||||
"category": "plate",
|
|
||||||
"model": null,
|
|
||||||
"barcode": null,
|
|
||||||
|
|
||||||
"sites": [
|
|
||||||
{
|
|
||||||
"label": "T5",
|
|
||||||
"visible": true,
|
|
||||||
"position": { "x": 0, "y": 0, "z": 0 },
|
|
||||||
"size": { "width": 128.0, "height": 86, "depth": 0 },
|
|
||||||
"content_type": [
|
|
||||||
"plate",
|
|
||||||
"tip_rack",
|
|
||||||
"plates",
|
|
||||||
"tip_racks",
|
|
||||||
"tube_rack"
|
|
||||||
]
|
|
||||||
}
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"data": {}
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"id": "T6",
|
|
||||||
"name": "T6",
|
|
||||||
"children": [],
|
|
||||||
"parent": "PRCXI_Deck",
|
|
||||||
"type": "plate",
|
|
||||||
"class": "",
|
|
||||||
"position": {
|
|
||||||
"x": 142.5,
|
|
||||||
"y": 205,
|
|
||||||
"z": 0
|
|
||||||
},
|
|
||||||
"config": {
|
|
||||||
"type": "PRCXI9300PlateAdapterSite",
|
|
||||||
"size_x": 127.5,
|
|
||||||
"size_y": 86,
|
|
||||||
"size_z": 28,
|
|
||||||
"rotation": {
|
|
||||||
"x": 0,
|
|
||||||
"y": 0,
|
|
||||||
"z": 0,
|
|
||||||
"type": "Rotation"
|
|
||||||
},
|
|
||||||
"category": "plate",
|
|
||||||
"model": null,
|
|
||||||
"barcode": null,
|
|
||||||
|
|
||||||
"sites": [
|
|
||||||
{
|
|
||||||
"label": "T6",
|
|
||||||
"visible": true,
|
|
||||||
"position": { "x": 0, "y": 0, "z": 0 },
|
|
||||||
"size": { "width": 128.0, "height": 86, "depth": 0 },
|
|
||||||
"content_type": [
|
|
||||||
"plate",
|
|
||||||
"tip_rack",
|
|
||||||
"plates",
|
|
||||||
"tip_racks",
|
|
||||||
"tube_rack"
|
|
||||||
]
|
|
||||||
}
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"data": {}
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"id": "T7",
|
|
||||||
"name": "T7",
|
|
||||||
"children": [],
|
|
||||||
"parent": "PRCXI_Deck",
|
|
||||||
"type": "plate",
|
|
||||||
"class": "",
|
|
||||||
"position": {
|
|
||||||
"x": 280,
|
|
||||||
"y": 205,
|
|
||||||
"z": 0
|
|
||||||
},
|
|
||||||
"config": {
|
|
||||||
"type": "PRCXI9300PlateAdapterSite",
|
|
||||||
"size_x": 127.5,
|
|
||||||
"size_y": 86,
|
|
||||||
"size_z": 28,
|
|
||||||
"rotation": {
|
|
||||||
"x": 0,
|
|
||||||
"y": 0,
|
|
||||||
"z": 0,
|
|
||||||
"type": "Rotation"
|
|
||||||
},
|
|
||||||
"category": "plate",
|
|
||||||
"model": null,
|
|
||||||
"barcode": null,
|
|
||||||
|
|
||||||
"sites": [
|
|
||||||
{
|
|
||||||
"label": "T7",
|
|
||||||
"visible": true,
|
|
||||||
"position": { "x": 0, "y": 0, "z": 0 },
|
|
||||||
"size": { "width": 128.0, "height": 86, "depth": 0 },
|
|
||||||
"content_type": [
|
|
||||||
"plate",
|
|
||||||
"tip_rack",
|
|
||||||
"plates",
|
|
||||||
"tip_racks",
|
|
||||||
"tube_rack"
|
|
||||||
]
|
|
||||||
}
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"data": {}
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"id": "T8",
|
|
||||||
"name": "T8",
|
|
||||||
"children": [],
|
|
||||||
"parent": "PRCXI_Deck",
|
|
||||||
"type": "plate",
|
|
||||||
"class": "",
|
|
||||||
"position": {
|
|
||||||
"x": 417.5,
|
|
||||||
"y": 205,
|
|
||||||
"z": 0
|
|
||||||
},
|
|
||||||
"config": {
|
|
||||||
"type": "PRCXI9300PlateAdapterSite",
|
|
||||||
"size_x": 127.5,
|
|
||||||
"size_y": 86,
|
|
||||||
"size_z": 28,
|
|
||||||
"rotation": {
|
|
||||||
"x": 0,
|
|
||||||
"y": 0,
|
|
||||||
"z": 0,
|
|
||||||
"type": "Rotation"
|
|
||||||
},
|
|
||||||
"category": "plate",
|
|
||||||
"model": null,
|
|
||||||
"barcode": null,
|
|
||||||
|
|
||||||
"sites": [
|
|
||||||
{
|
|
||||||
"label": "T8",
|
|
||||||
"visible": true,
|
|
||||||
"position": { "x": 0, "y": 0, "z": 0 },
|
|
||||||
"size": { "width": 128.0, "height": 86, "depth": 0 },
|
|
||||||
"content_type": [
|
|
||||||
"plate",
|
|
||||||
"tip_rack",
|
|
||||||
"plates",
|
|
||||||
"tip_racks",
|
|
||||||
"tube_rack"
|
|
||||||
]
|
|
||||||
}
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"data": {}
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"id": "T9",
|
|
||||||
"name": "T9",
|
|
||||||
"children": [],
|
|
||||||
"parent": "PRCXI_Deck",
|
|
||||||
"type": "plate",
|
|
||||||
"class": "",
|
|
||||||
"position": {
|
|
||||||
"x": 5,
|
|
||||||
"y": 109,
|
|
||||||
"z": 0
|
|
||||||
},
|
|
||||||
"config": {
|
|
||||||
"type": "PRCXI9300PlateAdapterSite",
|
|
||||||
"size_x": 127.5,
|
|
||||||
"size_y": 86,
|
|
||||||
"size_z": 28,
|
|
||||||
"rotation": {
|
|
||||||
"x": 0,
|
|
||||||
"y": 0,
|
|
||||||
"z": 0,
|
|
||||||
"type": "Rotation"
|
|
||||||
},
|
|
||||||
"category": "plate",
|
|
||||||
"model": null,
|
|
||||||
"barcode": null,
|
|
||||||
|
|
||||||
"sites": [
|
|
||||||
{
|
|
||||||
"label": "T9",
|
|
||||||
"visible": true,
|
|
||||||
"position": { "x": 0, "y": 0, "z": 0 },
|
|
||||||
"size": { "width": 128.0, "height": 86, "depth": 0 },
|
|
||||||
"content_type": [
|
|
||||||
"plate",
|
|
||||||
"tip_rack",
|
|
||||||
"plates",
|
|
||||||
"tip_racks",
|
|
||||||
"tube_rack"
|
|
||||||
]
|
|
||||||
}
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"data": {}
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"id": "T10",
|
|
||||||
"name": "T10",
|
|
||||||
"children": [],
|
|
||||||
"parent": "PRCXI_Deck",
|
|
||||||
"type": "plate",
|
|
||||||
"class": "",
|
|
||||||
"position": {
|
|
||||||
"x": 142.5,
|
|
||||||
"y": 109,
|
|
||||||
"z": 0
|
|
||||||
},
|
|
||||||
"config": {
|
|
||||||
"type": "PRCXI9300PlateAdapterSite",
|
|
||||||
"size_x": 127.5,
|
|
||||||
"size_y": 86,
|
|
||||||
"size_z": 28,
|
|
||||||
"rotation": {
|
|
||||||
"x": 0,
|
|
||||||
"y": 0,
|
|
||||||
"z": 0,
|
|
||||||
"type": "Rotation"
|
|
||||||
},
|
|
||||||
"category": "plate",
|
|
||||||
"model": null,
|
|
||||||
"barcode": null,
|
|
||||||
|
|
||||||
"sites": [
|
|
||||||
{
|
|
||||||
"label": "T10",
|
|
||||||
"visible": true,
|
|
||||||
"position": { "x": 0, "y": 0, "z": 0 },
|
|
||||||
"size": { "width": 128.0, "height": 86, "depth": 0 },
|
|
||||||
"content_type": [
|
|
||||||
"plate",
|
|
||||||
"tip_rack",
|
|
||||||
"plates",
|
|
||||||
"tip_racks",
|
|
||||||
"tube_rack"
|
|
||||||
]
|
|
||||||
}
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"data": {}
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"id": "T11",
|
|
||||||
"name": "T11",
|
|
||||||
"children": [],
|
|
||||||
"parent": "PRCXI_Deck",
|
|
||||||
"type": "plate",
|
|
||||||
"class": "",
|
|
||||||
"position": {
|
|
||||||
"x": 280,
|
|
||||||
"y": 109,
|
|
||||||
"z": 0
|
|
||||||
},
|
|
||||||
"config": {
|
|
||||||
"type": "PRCXI9300PlateAdapterSite",
|
|
||||||
"size_x": 127.5,
|
|
||||||
"size_y": 86,
|
|
||||||
"size_z": 28,
|
|
||||||
"rotation": {
|
|
||||||
"x": 0,
|
|
||||||
"y": 0,
|
|
||||||
"z": 0,
|
|
||||||
"type": "Rotation"
|
|
||||||
},
|
|
||||||
"category": "plate",
|
|
||||||
"model": null,
|
|
||||||
"barcode": null,
|
|
||||||
|
|
||||||
"sites": [
|
|
||||||
{
|
|
||||||
"label": "T11",
|
|
||||||
"visible": true,
|
|
||||||
"position": { "x": 0, "y": 0, "z": 0 },
|
|
||||||
"size": { "width": 128.0, "height": 86, "depth": 0 },
|
|
||||||
"content_type": [
|
|
||||||
"plate",
|
|
||||||
"tip_rack",
|
|
||||||
"plates",
|
|
||||||
"tip_racks",
|
|
||||||
"tube_rack"
|
|
||||||
]
|
|
||||||
}
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"data": {}
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"id": "T12",
|
|
||||||
"name": "T12",
|
|
||||||
"children": [],
|
|
||||||
"parent": "PRCXI_Deck",
|
|
||||||
"type": "plate",
|
|
||||||
"class": "",
|
|
||||||
"position": {
|
|
||||||
"x": 417.5,
|
|
||||||
"y": 109,
|
|
||||||
"z": 0
|
|
||||||
},
|
|
||||||
"config": {
|
|
||||||
"type": "PRCXI9300PlateAdapterSite",
|
|
||||||
"size_x": 127.5,
|
|
||||||
"size_y": 86,
|
|
||||||
"size_z": 28,
|
|
||||||
"rotation": {
|
|
||||||
"x": 0,
|
|
||||||
"y": 0,
|
|
||||||
"z": 0,
|
|
||||||
"type": "Rotation"
|
|
||||||
},
|
|
||||||
"category": "plate",
|
|
||||||
"model": null,
|
|
||||||
"barcode": null,
|
|
||||||
|
|
||||||
"sites": [
|
|
||||||
{
|
|
||||||
"label": "T12",
|
|
||||||
"visible": true,
|
|
||||||
"position": { "x": 0, "y": 0, "z": 0 },
|
|
||||||
"size": { "width": 128.0, "height": 86, "depth": 0 },
|
|
||||||
"content_type": [
|
|
||||||
"plate",
|
|
||||||
"tip_rack",
|
|
||||||
"plates",
|
|
||||||
"tip_racks",
|
|
||||||
"tube_rack"
|
|
||||||
]
|
|
||||||
}
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"data": {}
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"id": "T13",
|
|
||||||
"name": "T13",
|
|
||||||
"children": [],
|
|
||||||
"parent": "PRCXI_Deck",
|
|
||||||
"type": "plate",
|
|
||||||
"class": "",
|
|
||||||
"position": {
|
|
||||||
"x": 5,
|
|
||||||
"y": 13,
|
|
||||||
"z": 0
|
|
||||||
},
|
|
||||||
"config": {
|
|
||||||
"type": "PRCXI9300PlateAdapterSite",
|
|
||||||
"size_x": 127.5,
|
|
||||||
"size_y": 86,
|
|
||||||
"size_z": 28,
|
|
||||||
"rotation": {
|
|
||||||
"x": 0,
|
|
||||||
"y": 0,
|
|
||||||
"z": 0,
|
|
||||||
"type": "Rotation"
|
|
||||||
},
|
|
||||||
"category": "plate",
|
|
||||||
"model": null,
|
|
||||||
"barcode": null,
|
|
||||||
|
|
||||||
"sites": [
|
|
||||||
{
|
|
||||||
"label": "T13",
|
|
||||||
"visible": true,
|
|
||||||
"position": { "x": 0, "y": 0, "z": 0 },
|
|
||||||
"size": { "width": 128.0, "height": 86, "depth": 0 },
|
|
||||||
"content_type": [
|
|
||||||
"plate",
|
|
||||||
"tip_rack",
|
|
||||||
"plates",
|
|
||||||
"tip_racks",
|
|
||||||
"tube_rack"
|
|
||||||
]
|
|
||||||
}
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"data": {}
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"id": "T14",
|
|
||||||
"name": "T14",
|
|
||||||
"children": [],
|
|
||||||
"parent": "PRCXI_Deck",
|
|
||||||
"type": "plate",
|
|
||||||
"class": "",
|
|
||||||
"position": {
|
|
||||||
"x": 142.5,
|
|
||||||
"y": 13,
|
|
||||||
"z": 0
|
|
||||||
},
|
|
||||||
"config": {
|
|
||||||
"type": "PRCXI9300PlateAdapterSite",
|
|
||||||
"size_x": 127.5,
|
|
||||||
"size_y": 86,
|
|
||||||
"size_z": 28,
|
|
||||||
"rotation": {
|
|
||||||
"x": 0,
|
|
||||||
"y": 0,
|
|
||||||
"z": 0,
|
|
||||||
"type": "Rotation"
|
|
||||||
},
|
|
||||||
"category": "plate",
|
|
||||||
"model": null,
|
|
||||||
"barcode": null,
|
|
||||||
|
|
||||||
"sites": [
|
|
||||||
{
|
|
||||||
"label": "T14",
|
|
||||||
"visible": true,
|
|
||||||
"position": { "x": 0, "y": 0, "z": 0 },
|
|
||||||
"size": { "width": 128.0, "height": 86, "depth": 0 },
|
|
||||||
"content_type": [
|
|
||||||
"plate",
|
|
||||||
"tip_rack",
|
|
||||||
"plates",
|
|
||||||
"tip_racks",
|
|
||||||
"tube_rack"
|
|
||||||
]
|
|
||||||
}
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"data": {}
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"id": "T15",
|
|
||||||
"name": "T15",
|
|
||||||
"children": [],
|
|
||||||
"parent": "PRCXI_Deck",
|
|
||||||
"type": "plate",
|
|
||||||
"class": "",
|
|
||||||
"position": {
|
|
||||||
"x": 280,
|
|
||||||
"y": 13,
|
|
||||||
"z": 0
|
|
||||||
},
|
|
||||||
"config": {
|
|
||||||
"type": "PRCXI9300PlateAdapterSite",
|
|
||||||
"size_x": 127.5,
|
|
||||||
"size_y": 86,
|
|
||||||
"size_z": 28,
|
|
||||||
"rotation": {
|
|
||||||
"x": 0,
|
|
||||||
"y": 0,
|
|
||||||
"z": 0,
|
|
||||||
"type": "Rotation"
|
|
||||||
},
|
|
||||||
"category": "plate",
|
|
||||||
"model": null,
|
|
||||||
"barcode": null,
|
|
||||||
|
|
||||||
"sites": [
|
|
||||||
{
|
|
||||||
"label": "T15",
|
|
||||||
"visible": true,
|
|
||||||
"position": { "x": 0, "y": 0, "z": 0 },
|
|
||||||
"size": { "width": 128.0, "height": 86, "depth": 0 },
|
|
||||||
"content_type": [
|
|
||||||
"plate",
|
|
||||||
"tip_rack",
|
|
||||||
"plates",
|
|
||||||
"tip_racks",
|
|
||||||
"tube_rack"
|
|
||||||
]
|
|
||||||
}
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"data": {}
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"id": "T16",
|
|
||||||
"name": "T16",
|
|
||||||
"children": [],
|
|
||||||
"parent": "PRCXI_Deck",
|
|
||||||
"type": "plate",
|
|
||||||
"class": "",
|
|
||||||
"position": {
|
|
||||||
"x": 417.5,
|
|
||||||
"y": 13,
|
|
||||||
"z": 0
|
|
||||||
},
|
|
||||||
"config": {
|
|
||||||
"type": "PRCXI9300PlateAdapterSite",
|
|
||||||
"size_x": 127.5,
|
|
||||||
"size_y": 86,
|
|
||||||
"size_z": 28,
|
|
||||||
"rotation": {
|
|
||||||
"x": 0,
|
|
||||||
"y": 0,
|
|
||||||
"z": 0,
|
|
||||||
"type": "Rotation"
|
|
||||||
},
|
|
||||||
"category": "plate",
|
|
||||||
"model": null,
|
|
||||||
"barcode": null,
|
|
||||||
|
|
||||||
"sites": [
|
|
||||||
{
|
|
||||||
"label": "T16",
|
|
||||||
"visible": true,
|
|
||||||
"position": { "x": 0, "y": 0, "z": 0 },
|
|
||||||
"size": { "width": 128.0, "height": 86, "depth": 0 },
|
|
||||||
"content_type": [
|
|
||||||
"plate",
|
|
||||||
"tip_rack",
|
|
||||||
"plates",
|
|
||||||
"tip_racks",
|
|
||||||
"tube_rack"
|
|
||||||
]
|
|
||||||
}
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"data": {}
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"id": "trash",
|
|
||||||
"name": "trash",
|
|
||||||
|
|
||||||
"children": [],
|
|
||||||
"parent": "T16",
|
|
||||||
"type": "trash",
|
|
||||||
"class": "",
|
|
||||||
"position": {
|
|
||||||
"x": 0,
|
|
||||||
"y": 0,
|
|
||||||
"z": 0
|
|
||||||
},
|
|
||||||
"config": {
|
|
||||||
"type": "PRCXI9300Trash",
|
|
||||||
"size_x": 127.5,
|
|
||||||
"size_y": 86,
|
|
||||||
"size_z": 10,
|
|
||||||
"rotation": {
|
|
||||||
"x": 0,
|
|
||||||
"y": 0,
|
|
||||||
"z": 0,
|
|
||||||
"type": "Rotation"
|
|
||||||
},
|
|
||||||
"category": "trash",
|
|
||||||
"model": null,
|
|
||||||
"barcode": null,
|
|
||||||
"max_volume": "Infinity",
|
|
||||||
"material_z_thickness": 0,
|
|
||||||
"compute_volume_from_height": null,
|
|
||||||
"compute_height_from_volume": null
|
|
||||||
},
|
|
||||||
"data": {
|
|
||||||
"liquids": [],
|
|
||||||
"pending_liquids": [],
|
|
||||||
"liquid_history": [],
|
|
||||||
"Material": {
|
|
||||||
"uuid": "730067cf07ae43849ddf4034299030e9"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
],
|
|
||||||
"edges": []
|
|
||||||
}
|
|
||||||
File diff suppressed because it is too large
Load Diff
@@ -24,7 +24,6 @@ class EnvironmentChecker:
|
|||||||
"msgcenterpy": "msgcenterpy",
|
"msgcenterpy": "msgcenterpy",
|
||||||
"opentrons_shared_data": "opentrons_shared_data",
|
"opentrons_shared_data": "opentrons_shared_data",
|
||||||
"typing_extensions": "typing_extensions",
|
"typing_extensions": "typing_extensions",
|
||||||
"crcmod": "crcmod-plus",
|
|
||||||
}
|
}
|
||||||
|
|
||||||
# 特殊安装包(需要特殊处理的包)
|
# 特殊安装包(需要特殊处理的包)
|
||||||
|
|||||||
@@ -1,18 +0,0 @@
|
|||||||
networkx
|
|
||||||
typing_extensions
|
|
||||||
websockets
|
|
||||||
msgcenterpy>=0.1.5
|
|
||||||
opentrons_shared_data
|
|
||||||
pint
|
|
||||||
fastapi
|
|
||||||
jinja2
|
|
||||||
requests
|
|
||||||
uvicorn
|
|
||||||
pyautogui
|
|
||||||
opcua
|
|
||||||
pyserial
|
|
||||||
pandas
|
|
||||||
crcmod-plus
|
|
||||||
pymodbus
|
|
||||||
matplotlib
|
|
||||||
pylibftdi
|
|
||||||
@@ -1,547 +0,0 @@
|
|||||||
import re
|
|
||||||
import uuid
|
|
||||||
|
|
||||||
import networkx as nx
|
|
||||||
from networkx.drawing.nx_agraph import to_agraph
|
|
||||||
import matplotlib.pyplot as plt
|
|
||||||
from typing import Dict, List, Any, Tuple, Optional
|
|
||||||
|
|
||||||
Json = Dict[str, Any]
|
|
||||||
|
|
||||||
# ---------------- Graph ----------------
|
|
||||||
|
|
||||||
|
|
||||||
class WorkflowGraph:
|
|
||||||
"""简单的有向图实现:使用 params 单层参数;inputs 内含连线;支持 node-link 导出"""
|
|
||||||
|
|
||||||
def __init__(self):
|
|
||||||
self.nodes: Dict[str, Dict[str, Any]] = {}
|
|
||||||
self.edges: List[Dict[str, Any]] = []
|
|
||||||
|
|
||||||
def add_node(self, node_id: str, **attrs):
|
|
||||||
self.nodes[node_id] = attrs
|
|
||||||
|
|
||||||
def add_edge(self, source: str, target: str, **attrs):
|
|
||||||
# 将 source_port/target_port 映射为服务端期望的 source_handle_key/target_handle_key
|
|
||||||
source_handle_key = attrs.pop("source_port", "") or attrs.pop("source_handle_key", "")
|
|
||||||
target_handle_key = attrs.pop("target_port", "") or attrs.pop("target_handle_key", "")
|
|
||||||
|
|
||||||
edge = {
|
|
||||||
"source": source,
|
|
||||||
"target": target,
|
|
||||||
"source_node_uuid": source,
|
|
||||||
"target_node_uuid": target,
|
|
||||||
"source_handle_key": source_handle_key,
|
|
||||||
"source_handle_io": attrs.pop("source_handle_io", "source"),
|
|
||||||
"target_handle_key": target_handle_key,
|
|
||||||
"target_handle_io": attrs.pop("target_handle_io", "target"),
|
|
||||||
**attrs,
|
|
||||||
}
|
|
||||||
self.edges.append(edge)
|
|
||||||
|
|
||||||
def _materialize_wiring_into_inputs(
|
|
||||||
self,
|
|
||||||
obj: Any,
|
|
||||||
inputs: Dict[str, Any],
|
|
||||||
variable_sources: Dict[str, Dict[str, Any]],
|
|
||||||
target_node_id: str,
|
|
||||||
base_path: List[str],
|
|
||||||
):
|
|
||||||
has_var = False
|
|
||||||
|
|
||||||
def walk(node: Any, path: List[str]):
|
|
||||||
nonlocal has_var
|
|
||||||
if isinstance(node, dict):
|
|
||||||
if "__var__" in node:
|
|
||||||
has_var = True
|
|
||||||
varname = node["__var__"]
|
|
||||||
placeholder = f"${{{varname}}}"
|
|
||||||
src = variable_sources.get(varname)
|
|
||||||
if src:
|
|
||||||
key = ".".join(path) # e.g. "params.foo.bar.0"
|
|
||||||
inputs[key] = {"node": src["node_id"], "output": src.get("output_name", "result")}
|
|
||||||
self.add_edge(
|
|
||||||
str(src["node_id"]),
|
|
||||||
target_node_id,
|
|
||||||
source_handle_io=src.get("output_name", "result"),
|
|
||||||
target_handle_io=key,
|
|
||||||
)
|
|
||||||
return placeholder
|
|
||||||
return {k: walk(v, path + [k]) for k, v in node.items()}
|
|
||||||
if isinstance(node, list):
|
|
||||||
return [walk(v, path + [str(i)]) for i, v in enumerate(node)]
|
|
||||||
return node
|
|
||||||
|
|
||||||
replaced = walk(obj, base_path[:])
|
|
||||||
return replaced, has_var
|
|
||||||
|
|
||||||
def add_workflow_node(
|
|
||||||
self,
|
|
||||||
node_id: int,
|
|
||||||
*,
|
|
||||||
device_key: Optional[str] = None, # 实例名,如 "ser"
|
|
||||||
resource_name: Optional[str] = None, # registry key(原 device_class)
|
|
||||||
module: Optional[str] = None,
|
|
||||||
template_name: Optional[str] = None, # 动作/模板名(原 action_key)
|
|
||||||
params: Dict[str, Any],
|
|
||||||
variable_sources: Dict[str, Dict[str, Any]],
|
|
||||||
add_ready_if_no_vars: bool = True,
|
|
||||||
prev_node_id: Optional[int] = None,
|
|
||||||
**extra_attrs,
|
|
||||||
) -> None:
|
|
||||||
"""添加工作流节点:params 单层;自动变量连线与 ready 串联;支持附加属性"""
|
|
||||||
node_id_str = str(node_id)
|
|
||||||
inputs: Dict[str, Any] = {}
|
|
||||||
|
|
||||||
params, has_var = self._materialize_wiring_into_inputs(
|
|
||||||
params, inputs, variable_sources, node_id_str, base_path=["params"]
|
|
||||||
)
|
|
||||||
|
|
||||||
if add_ready_if_no_vars and not has_var:
|
|
||||||
last_id = str(prev_node_id) if prev_node_id is not None else "-1"
|
|
||||||
inputs["ready"] = {"node": int(last_id), "output": "ready"}
|
|
||||||
self.add_edge(last_id, node_id_str, source_handle_io="ready", target_handle_io="ready")
|
|
||||||
|
|
||||||
node_obj = {
|
|
||||||
"device_key": device_key,
|
|
||||||
"resource_name": resource_name, # ✅ 新名字
|
|
||||||
"module": module,
|
|
||||||
"template_name": template_name, # ✅ 新名字
|
|
||||||
"params": params,
|
|
||||||
"inputs": inputs,
|
|
||||||
}
|
|
||||||
node_obj.update(extra_attrs or {})
|
|
||||||
self.add_node(node_id_str, parameters=node_obj)
|
|
||||||
|
|
||||||
# 顺序工作流导出(连线在 inputs,不返回 edges)
|
|
||||||
def to_dict(self) -> List[Dict[str, Any]]:
|
|
||||||
result = []
|
|
||||||
for node_id, attrs in self.nodes.items():
|
|
||||||
node = {"uuid": node_id}
|
|
||||||
params = dict(attrs.get("parameters", {}) or {})
|
|
||||||
flat = {k: v for k, v in attrs.items() if k != "parameters"}
|
|
||||||
flat.update(params)
|
|
||||||
node.update(flat)
|
|
||||||
result.append(node)
|
|
||||||
return sorted(result, key=lambda n: int(n["uuid"]) if str(n["uuid"]).isdigit() else n["uuid"])
|
|
||||||
|
|
||||||
# node-link 导出(含 edges)
|
|
||||||
def to_node_link_dict(self) -> Dict[str, Any]:
|
|
||||||
nodes_list = []
|
|
||||||
for node_id, attrs in self.nodes.items():
|
|
||||||
node_attrs = attrs.copy()
|
|
||||||
params = node_attrs.pop("parameters", {}) or {}
|
|
||||||
node_attrs.update(params)
|
|
||||||
nodes_list.append({"uuid": node_id, **node_attrs})
|
|
||||||
return {
|
|
||||||
"directed": True,
|
|
||||||
"multigraph": False,
|
|
||||||
"graph": {},
|
|
||||||
"nodes": nodes_list,
|
|
||||||
"edges": self.edges,
|
|
||||||
"links": self.edges,
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
def refactor_data(
|
|
||||||
data: List[Dict[str, Any]],
|
|
||||||
action_resource_mapping: Optional[Dict[str, str]] = None,
|
|
||||||
) -> List[Dict[str, Any]]:
|
|
||||||
"""统一的数据重构函数,根据操作类型自动选择模板
|
|
||||||
|
|
||||||
Args:
|
|
||||||
data: 原始步骤数据列表
|
|
||||||
action_resource_mapping: action 到 resource_name 的映射字典,可选
|
|
||||||
"""
|
|
||||||
refactored_data = []
|
|
||||||
|
|
||||||
# 定义操作映射,包含生物实验和有机化学的所有操作
|
|
||||||
OPERATION_MAPPING = {
|
|
||||||
# 生物实验操作
|
|
||||||
"transfer_liquid": "transfer_liquid",
|
|
||||||
"transfer": "transfer",
|
|
||||||
"incubation": "incubation",
|
|
||||||
"move_labware": "move_labware",
|
|
||||||
"oscillation": "oscillation",
|
|
||||||
# 有机化学操作
|
|
||||||
"HeatChillToTemp": "HeatChillProtocol",
|
|
||||||
"StopHeatChill": "HeatChillStopProtocol",
|
|
||||||
"StartHeatChill": "HeatChillStartProtocol",
|
|
||||||
"HeatChill": "HeatChillProtocol",
|
|
||||||
"Dissolve": "DissolveProtocol",
|
|
||||||
"Transfer": "TransferProtocol",
|
|
||||||
"Evaporate": "EvaporateProtocol",
|
|
||||||
"Recrystallize": "RecrystallizeProtocol",
|
|
||||||
"Filter": "FilterProtocol",
|
|
||||||
"Dry": "DryProtocol",
|
|
||||||
"Add": "AddProtocol",
|
|
||||||
}
|
|
||||||
|
|
||||||
UNSUPPORTED_OPERATIONS = ["Purge", "Wait", "Stir", "ResetHandling"]
|
|
||||||
|
|
||||||
for step in data:
|
|
||||||
operation = step.get("action")
|
|
||||||
if not operation or operation in UNSUPPORTED_OPERATIONS:
|
|
||||||
continue
|
|
||||||
|
|
||||||
# 处理重复操作
|
|
||||||
if operation == "Repeat":
|
|
||||||
times = step.get("times", step.get("parameters", {}).get("times", 1))
|
|
||||||
sub_steps = step.get("steps", step.get("parameters", {}).get("steps", []))
|
|
||||||
for i in range(int(times)):
|
|
||||||
sub_data = refactor_data(sub_steps, action_resource_mapping)
|
|
||||||
refactored_data.extend(sub_data)
|
|
||||||
continue
|
|
||||||
|
|
||||||
# 获取模板名称
|
|
||||||
template_name = OPERATION_MAPPING.get(operation)
|
|
||||||
if not template_name:
|
|
||||||
# 自动推断模板类型
|
|
||||||
if operation.lower() in ["transfer", "incubation", "move_labware", "oscillation"]:
|
|
||||||
template_name = f"biomek-{operation}"
|
|
||||||
else:
|
|
||||||
template_name = f"{operation}Protocol"
|
|
||||||
|
|
||||||
# 获取 resource_name
|
|
||||||
resource_name = f"device.{operation.lower()}"
|
|
||||||
if action_resource_mapping:
|
|
||||||
resource_name = action_resource_mapping.get(operation, resource_name)
|
|
||||||
|
|
||||||
# 获取步骤编号,生成 name 字段
|
|
||||||
step_number = step.get("step_number")
|
|
||||||
name = f"Step {step_number}" if step_number is not None else None
|
|
||||||
|
|
||||||
# 创建步骤数据
|
|
||||||
step_data = {
|
|
||||||
"template_name": template_name,
|
|
||||||
"resource_name": resource_name,
|
|
||||||
"description": step.get("description", step.get("purpose", f"{operation} operation")),
|
|
||||||
"lab_node_type": "Device",
|
|
||||||
"param": step.get("parameters", step.get("action_args", {})),
|
|
||||||
"footer": f"{template_name}-{resource_name}",
|
|
||||||
}
|
|
||||||
if name:
|
|
||||||
step_data["name"] = name
|
|
||||||
refactored_data.append(step_data)
|
|
||||||
|
|
||||||
return refactored_data
|
|
||||||
|
|
||||||
|
|
||||||
def build_protocol_graph(
    labware_info: Dict[str, Any],
    protocol_steps: List[Dict[str, Any]],
    workstation_name: str,
    action_resource_mapping: Optional[Dict[str, str]] = None,
) -> WorkflowGraph:
    """Unified protocol-graph builder; selects build logic by device type.

    Args:
        labware_info: labware information dict, keyed by labware id.
            (Fixed annotation: the body iterates ``labware_info.items()``,
            so this is a dict, not a list as previously annotated.)
        protocol_steps: list of protocol step dicts.
        workstation_name: workstation name used as device/parent id.
        action_resource_mapping: optional mapping from action name to
            resource_name, forwarded to ``refactor_data``.

    Returns:
        WorkflowGraph with one "create_resource" node per labware plus one
        node per protocol step, wired by control-flow ("ready") edges and
        material-flow edges tracked via the last writer of each resource.
    """
    G = WorkflowGraph()
    # resource id -> "node_id:source_port" of the node that last produced it
    resource_last_writer = {}

    protocol_steps = refactor_data(protocol_steps, action_resource_mapping)
    # Organic-chemistry & liquid-handling workstation graph construction
    WORKSTATION_ID = workstation_name

    # Create a resource node for every labware entry
    res_index = 0
    for labware_id, item in labware_info.items():
        node_id = str(uuid.uuid4())

        # Classify the node by labware id / item type
        if "Rack" in str(labware_id) or "Tip" in str(labware_id):
            lab_node_type = "Labware"
            description = f"Prepare Labware: {labware_id}"
            liquid_type = []
            liquid_volume = []
        elif item.get("type") == "hardware" or "reactor" in str(labware_id).lower():
            # Non-reactor hardware does not get a resource node
            if "reactor" not in str(labware_id).lower():
                continue
            lab_node_type = "Sample"
            description = f"Prepare Reactor: {labware_id}"
            liquid_type = []
            liquid_volume = []
        else:
            lab_node_type = "Reagent"
            description = f"Add Reagent to Flask: {labware_id}"
            liquid_type = [labware_id]
            liquid_volume = [1e5]

        res_index += 1
        G.add_node(
            node_id,
            template_name="create_resource",
            resource_name="host_node",
            name=f"Res {res_index}",
            description=description,
            lab_node_type=lab_node_type,
            footer="create_resource-host_node",
            param={
                "res_id": labware_id,
                "device_id": WORKSTATION_ID,
                "class_name": "container",
                "parent": WORKSTATION_ID,
                "bind_locations": {"x": 0.0, "y": 0.0, "z": 0.0},
                "liquid_input_slot": [-1],
                "liquid_type": liquid_type,
                "liquid_volume": liquid_volume,
                "slot_on_deck": "",
            },
        )
        resource_last_writer[labware_id] = f"{node_id}:labware"

    last_control_node_id = None

    # Process protocol steps
    for step in protocol_steps:
        node_id = str(uuid.uuid4())
        G.add_node(node_id, **step)

        # Control flow: chain steps sequentially via "ready" ports
        if last_control_node_id is not None:
            G.add_edge(last_control_node_id, node_id, source_port="ready", target_port="ready")
        last_control_node_id = node_id

        # Material flow: connect each referenced resource from its last writer
        params = step.get("param", {})
        input_resources_possible_names = [
            "vessel",
            "to_vessel",
            "from_vessel",
            "reagent",
            "solvent",
            "compound",
            "sources",
            "targets",
        ]

        for target_port in input_resources_possible_names:
            resource_name = params.get(target_port)
            if resource_name and resource_name in resource_last_writer:
                # node ids are uuid4 strings, so splitting on ":" is safe
                source_node, source_port = resource_last_writer[resource_name].split(":")
                G.add_edge(source_node, node_id, source_port=source_port, target_port=target_port)

        # This step becomes the new last writer for every resource it outputs
        output_resources = {
            "vessel_out": params.get("vessel"),
            "from_vessel_out": params.get("from_vessel"),
            "to_vessel_out": params.get("to_vessel"),
            "filtrate_out": params.get("filtrate_vessel"),
            "reagent": params.get("reagent"),
            "solvent": params.get("solvent"),
            "compound": params.get("compound"),
            "sources_out": params.get("sources"),
            "targets_out": params.get("targets"),
        }

        for source_port, resource_name in output_resources.items():
            if resource_name:
                resource_last_writer[resource_name] = f"{node_id}:{source_port}"

    return G
|
|
||||||
|
|
||||||
|
|
||||||
def draw_protocol_graph(protocol_graph: WorkflowGraph, output_path: str):
    """
    (Helper) Render the protocol workflow graph with networkx + matplotlib
    for visual inspection.
    """
    if not protocol_graph:
        print("Cannot draw graph: Graph object is empty.")
        return

    digraph = nx.DiGraph()

    for nid, data in protocol_graph.nodes.items():
        caption = data.get("description", data.get("template_name", nid[:8]))
        digraph.add_node(nid, label=caption, **data)

    for link in protocol_graph.edges:
        digraph.add_edge(link["source"], link["target"])

    plt.figure(figsize=(20, 15))
    try:
        layout = nx.nx_agraph.graphviz_layout(digraph, prog="dot")
    except Exception:
        # Graphviz not available: fall back to a pure-python layout
        layout = nx.shell_layout(digraph)

    captions = {nid: data["label"] for nid, data in digraph.nodes(data=True)}
    nx.draw(
        digraph,
        layout,
        with_labels=False,
        node_size=2500,
        node_color="skyblue",
        node_shape="o",
        edge_color="gray",
        width=1.5,
        arrowsize=15,
    )
    nx.draw_networkx_labels(digraph, layout, labels=captions, font_size=8, font_weight="bold")

    plt.title("Chemical Protocol Workflow Graph", size=15)
    plt.savefig(output_path, dpi=300, bbox_inches="tight")
    plt.close()
    print(f" - Visualization saved to '{output_path}'")
|
|
||||||
|
|
||||||
|
|
||||||
COMPASS = {"n", "e", "s", "w", "ne", "nw", "se", "sw", "c"}
|
|
||||||
|
|
||||||
|
|
||||||
def _is_compass(port: str) -> bool:
|
|
||||||
return isinstance(port, str) and port.lower() in COMPASS
|
|
||||||
|
|
||||||
|
|
||||||
def draw_protocol_graph_with_ports(protocol_graph, output_path: str, rankdir: str = "LR"):
    """
    Render the protocol workflow graph using Graphviz port syntax.

    - If an edge's source_port/target_port is a compass point (n/e/s/w/...),
      the compass value is used directly.
    - Otherwise the node is given a Graphviz "record" shape with named
      <port> slots.
    Rendered via PyGraphviz; the output format follows the file suffix of
    *output_path* (.png/.svg/.pdf).
    """
    if not protocol_graph:
        print("Cannot draw graph: Graph object is empty.")
        return

    # 1) Build a networkx digraph first, preserving port attributes
    G = nx.DiGraph()
    for node_id, attrs in protocol_graph.nodes.items():
        label = attrs.get("description", attrs.get("template_name", node_id[:8]))
        # Keep a clean "core label" for the middle slot of the record shape
        G.add_node(node_id, _core_label=str(label), **{k: v for k, v in attrs.items() if k not in ("label",)})

    edges_data = []
    in_ports_by_node = {}  # node -> set of named input ports
    out_ports_by_node = {}  # node -> set of named output ports

    for edge in protocol_graph.edges:
        u = edge["source"]
        v = edge["target"]
        # Prefer the *_handle_key fields; fall back to *_port
        sp = edge.get("source_handle_key") or edge.get("source_port")
        tp = edge.get("target_handle_key") or edge.get("target_port")

        # Record on the graph (keeps original port info)
        G.add_edge(u, v, source_handle_key=sp, target_handle_key=tp)
        edges_data.append((u, v, sp, tp))

        # Non-compass ports are collected as named ports; the record shapes
        # are built from them below
        if sp and not _is_compass(sp):
            out_ports_by_node.setdefault(u, set()).add(str(sp))
        if tp and not _is_compass(tp):
            in_ports_by_node.setdefault(v, set()).add(str(tp))

    # 2) Convert to AGraph and render with Graphviz
    A = to_agraph(G)
    A.graph_attr.update(rankdir=rankdir, splines="true", concentrate="false", fontsize="10")
    A.node_attr.update(
        shape="box", style="rounded,filled", fillcolor="lightyellow", color="#999999", fontname="Helvetica"
    )
    A.edge_attr.update(arrowsize="0.8", color="#666666")

    # 3) Give record shapes + labels to nodes that need named ports:
    #    left column = input ports; middle = core label; right column = outputs
    for n in A.nodes():
        node = A.get_node(n)
        core = G.nodes[n].get("_core_label", n)

        in_ports = sorted(in_ports_by_node.get(n, []))
        out_ports = sorted(out_ports_by_node.get(n, []))

        # Only nodes with named ports become records; others stay plain boxes
        if in_ports or out_ports:

            def port_fields(ports):
                if not ports:
                    return " "  # a record column must contain at least one slot
                # One small cell per port: "<port_id> port_name"
                return "|".join(f"<{re.sub(r'[^A-Za-z0-9_:.|-]', '_', p)}> {p}" for p in ports)

            left = port_fields(in_ports)
            right = port_fields(out_ports)

            # Three columns: inputs | node name | outputs
            record_label = f"{{ {left} | {core} | {right} }}"
            node.attr.update(shape="record", label=record_label)
        else:
            # No named ports: plain box showing the core label
            node.attr.update(label=str(core))

    # 4) Set headport / tailport on edges
    #    - compass port: use the compass value directly (e.g. headport="e")
    #    - named port: use the <port> id defined in the record label
    for u, v, sp, tp in edges_data:
        e = A.get_edge(u, v)

        # Graphviz convention: tail is the source, head is the target
        if sp:
            if _is_compass(sp):
                e.attr["tailport"] = sp.lower()
            else:
                # Must match the sanitized <port> id used in the record label
                e.attr["tailport"] = re.sub(r"[^A-Za-z0-9_:.|-]", "_", str(sp))

        if tp:
            if _is_compass(tp):
                e.attr["headport"] = tp.lower()
            else:
                e.attr["headport"] = re.sub(r"[^A-Za-z0-9_:.|-]", "_", str(tp))

        # Optional: tweak constraint/splines etc. for tighter edge routing
        # e.attr["arrowhead"] = "vee"

    # 5) Emit the rendered file
    A.draw(output_path, prog="dot")
    print(f" - Port-aware workflow rendered to '{output_path}'")
|
|
||||||
|
|
||||||
|
|
||||||
# ---------------- Registry Adapter ----------------
|
|
||||||
|
|
||||||
|
|
||||||
class RegistryAdapter:
    """Resolve registry resource_name (formerly device_class) from the class
    name on the right-hand side of a module path's colon, and expose helpers
    for an action's schema, defaults and parameter order."""

    def __init__(self, device_registry: Dict[str, Any]):
        self.device_registry = device_registry or {}
        self.module_class_to_resource = self._build_module_class_index()

    def _build_module_class_index(self) -> Dict[str, str]:
        # Index both the exact class name and its lowercase form.
        index: Dict[str, str] = {}
        for res_name, entry in self.device_registry.items():
            module_path = entry.get("module")
            if not isinstance(module_path, str) or ":" not in module_path:
                continue
            class_name = module_path.rsplit(":", 1)[-1]
            index[class_name] = res_name
            index[class_name.lower()] = res_name
        return index

    def resolve_resource_by_classname(self, class_name: str) -> Optional[str]:
        """Exact-name lookup first, then a case-insensitive fallback."""
        if not class_name:
            return None
        exact = self.module_class_to_resource.get(class_name)
        return exact or self.module_class_to_resource.get(class_name.lower())

    def get_device_module(self, resource_name: Optional[str]) -> Optional[str]:
        if not resource_name:
            return None
        return self.device_registry.get(resource_name, {}).get("module")

    def get_actions(self, resource_name: Optional[str]) -> Dict[str, Any]:
        if not resource_name:
            return {}
        entry = self.device_registry.get(resource_name, {})
        return entry.get("class", {}).get("action_value_mappings", {}) or {}

    def get_action_schema(self, resource_name: Optional[str], template_name: str) -> Optional[Json]:
        action = self.get_actions(resource_name).get(template_name) or {}
        return action.get("schema")

    def get_action_goal_default(self, resource_name: Optional[str], template_name: str) -> Json:
        action = self.get_actions(resource_name).get(template_name) or {}
        return action.get("goal_default", {}) or {}

    def get_action_input_keys(self, resource_name: Optional[str], template_name: str) -> List[str]:
        """Ordered goal keys: required keys first, then remaining properties,
        de-duplicated while preserving order."""
        schema = self.get_action_schema(resource_name, template_name) or {}
        goal = (schema.get("properties") or {}).get("goal") or {}
        props = goal.get("properties") or {}
        required = goal.get("required") or []
        return list(dict.fromkeys(required + list(props.keys())))
|
|
||||||
@@ -1,356 +0,0 @@
|
|||||||
"""
|
|
||||||
JSON 工作流转换模块
|
|
||||||
|
|
||||||
提供从多种 JSON 格式转换为统一工作流格式的功能。
|
|
||||||
支持的格式:
|
|
||||||
1. workflow/reagent 格式
|
|
||||||
2. steps_info/labware_info 格式
|
|
||||||
"""
|
|
||||||
|
|
||||||
import json
|
|
||||||
from os import PathLike
|
|
||||||
from pathlib import Path
|
|
||||||
from typing import Any, Dict, List, Optional, Set, Tuple, Union
|
|
||||||
|
|
||||||
from unilabos.workflow.common import WorkflowGraph, build_protocol_graph
|
|
||||||
from unilabos.registry.registry import lab_registry
|
|
||||||
|
|
||||||
|
|
||||||
def get_action_handles(resource_name: str, template_name: str) -> Dict[str, List[str]]:
    """
    Fetch the handles configuration for a device action from the registry.

    Args:
        resource_name: device resource name, e.g. "liquid_handler.prcxi"
        template_name: action template name, e.g. "transfer_liquid"

    Returns:
        Two buckets of handler keys::

            {"source": [<keys of the action's *input* handles>],
             "target": [<keys of the action's *output* handles>]}

        NOTE(review): the bucket names look inverted, but this is exactly how
        validate_workflow_handles consumes them — an edge's target_handle_key
        is checked against the target node's "source" bucket (its inputs),
        and an edge's source_handle_key against the source node's "target"
        bucket (its outputs). Do not swap them without updating the caller.
    """
    result = {"source": [], "target": []}

    device_info = lab_registry.device_type_registry.get(resource_name, {})
    if not device_info:
        # Unknown device: return empty buckets
        return result

    action_mappings = device_info.get("class", {}).get("action_value_mappings", {})
    action_config = action_mappings.get(template_name, {})
    handles = action_config.get("handles", {})

    if isinstance(handles, dict):
        # Input handles accumulate under "source" (see NOTE above)
        for handle in handles.get("input", []):
            handler_key = handle.get("handler_key", "")
            if handler_key:
                result["source"].append(handler_key)
        # Output handles accumulate under "target" (see NOTE above)
        for handle in handles.get("output", []):
            handler_key = handle.get("handler_key", "")
            if handler_key:
                result["target"].append(handler_key)

    return result
|
|
||||||
|
|
||||||
|
|
||||||
def validate_workflow_handles(graph: WorkflowGraph) -> Tuple[bool, List[str]]:
    """
    Validate that every edge in the workflow graph uses handle keys that the
    connected nodes actually declare.

    Args:
        graph: workflow graph object

    Returns:
        (is_valid, errors): validity flag and a list of error messages.
    """
    errors = []
    nodes = graph.nodes

    for edge in graph.edges:
        left_uuid = edge.get("source")
        right_uuid = edge.get("target")
        # target_handle_key belongs to the edge's target (right) node: its input port.
        # source_handle_key belongs to the edge's source (left) node: its output port.
        # NOTE(review): the local names below are crossed — right_source_conn_key
        # actually holds target_handle_key and left_target_conn_key holds
        # source_handle_key. The validation lookups are consistent with
        # get_action_handles' bucket naming, but the error messages may report
        # the wrong side — confirm against the intended UX before changing.
        right_source_conn_key = edge.get("target_handle_key", "")
        left_target_conn_key = edge.get("source_handle_key", "")

        # Fetch source and target node records
        left_node = nodes.get(left_uuid, {})
        right_node = nodes.get(right_uuid, {})

        left_res_name = left_node.get("resource_name", "")
        left_template_name = left_node.get("template_name", "")
        right_res_name = right_node.get("resource_name", "")
        right_template_name = right_node.get("template_name", "")

        # Output handles of the source node ("target" bucket); "ready" is
        # always allowed as the control-flow port. get_action_handles returns
        # fresh lists, so appending here does not mutate shared state.
        left_node_handles = get_action_handles(left_res_name, left_template_name)
        target_valid_keys = left_node_handles.get("target", [])
        target_valid_keys.append("ready")

        # Input handles of the target node ("source" bucket)
        right_node_handles = get_action_handles(right_res_name, right_template_name)
        source_valid_keys = right_node_handles.get("source", [])
        source_valid_keys.append("ready")

        # The edge's target_handle_key must name one of the target node's inputs
        if not right_source_conn_key:
            node_name = left_node.get("name", left_uuid[:8])
            errors.append(f"源节点 '{node_name}' 的 source_handle_key 为空," f"应设置为: {source_valid_keys}")
        elif right_source_conn_key not in source_valid_keys:
            node_name = left_node.get("name", left_uuid[:8])
            errors.append(
                f"源节点 '{node_name}' 的 source 端点 '{right_source_conn_key}' 不存在," f"支持的端点: {source_valid_keys}"
            )

        # The edge's source_handle_key must name one of the source node's outputs
        if not left_target_conn_key:
            node_name = right_node.get("name", right_uuid[:8])
            errors.append(f"目标节点 '{node_name}' 的 target_handle_key 为空," f"应设置为: {target_valid_keys}")
        elif left_target_conn_key not in target_valid_keys:
            node_name = right_node.get("name", right_uuid[:8])
            errors.append(
                f"目标节点 '{node_name}' 的 target 端点 '{left_target_conn_key}' 不存在,"
                f"支持的端点: {target_valid_keys}"
            )

    return len(errors) == 0, errors
|
|
||||||
|
|
||||||
|
|
||||||
# Mapping from action name to the registry resource_name that executes it.
ACTION_RESOURCE_MAPPING: Dict[str, str] = {
    # Biology / liquid-handling operations
    "transfer_liquid": "liquid_handler.prcxi",
    "transfer": "liquid_handler.prcxi",
    "incubation": "incubator.prcxi",
    "move_labware": "labware_mover.prcxi",
    "oscillation": "shaker.prcxi",
    # Organic-chemistry operations
    "HeatChillToTemp": "heatchill.chemputer",
    "StopHeatChill": "heatchill.chemputer",
    "StartHeatChill": "heatchill.chemputer",
    "HeatChill": "heatchill.chemputer",
    "Dissolve": "stirrer.chemputer",
    "Transfer": "liquid_handler.chemputer",
    "Evaporate": "rotavap.chemputer",
    "Recrystallize": "reactor.chemputer",
    "Filter": "filter.chemputer",
    "Dry": "dryer.chemputer",
    "Add": "liquid_handler.chemputer",
}
|
|
||||||
|
|
||||||
|
|
||||||
def normalize_steps(data: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
    """
    Normalize step data from several input shapes into one uniform format.

    Accepted input shapes:
    - action + parameters
    - action + action_args
    - operation + parameters

    Args:
        data: raw step dicts.

    Returns:
        Normalized steps:
        [{"action": str, "parameters": dict, "description": str?, "step_number": int?}, ...]
    """
    normalized_steps: List[Dict[str, Any]] = []
    for position, raw_step in enumerate(data, start=1):
        # Action name may live under "action" or "operation"; skip steps without one
        action_name = raw_step.get("action") or raw_step.get("operation")
        if not action_name:
            continue

        # Parameters may live under "parameters" or "action_args"
        source_params = raw_step.get("parameters") or raw_step.get("action_args") or {}
        merged_params = dict(source_params)

        # Normalize singular source/target into plural sources/targets
        for singular, plural in (("source", "sources"), ("target", "targets")):
            if singular in source_params and plural not in source_params:
                merged_params[plural] = source_params[singular]

        entry: Dict[str, Any] = {
            "action": action_name,
            "parameters": merged_params,
            # Prefer an explicit step_number; otherwise use the 1-based position
            "step_number": raw_step.get("step_number", position),
        }

        # Description may live under "description" or "purpose"
        note = raw_step.get("description") or raw_step.get("purpose")
        if note:
            entry["description"] = note

        normalized_steps.append(entry)

    return normalized_steps
|
|
||||||
|
|
||||||
|
|
||||||
def normalize_labware(data: List[Dict[str, Any]]) -> Dict[str, Dict[str, Any]]:
    """
    Normalize labware data from several input shapes into one uniform dict.

    Accepted input shapes:
    - reagent_name + material_name + positions
    - name + labware + slot

    Args:
        data: raw labware dicts.

    Returns:
        {name: {"slot": int, "labware": str, "well": list, "type": str,
                "role": str, "name": str}, ...}
    """
    normalized: Dict[str, Dict[str, Any]] = {}
    for entry in data:
        # Key priority: reagent_name, then material_name, then name
        base_key = entry.get("reagent_name") or entry.get("material_name") or entry.get("name")
        if not base_key:
            continue

        base_key = str(base_key)

        # Resolve duplicate keys by appending a numeric suffix (_2, _3, ...)
        unique_key = base_key
        suffix = 1
        while unique_key in normalized:
            suffix += 1
            unique_key = f"{base_key}_{suffix}"

        normalized[unique_key] = {
            "slot": entry.get("positions") or entry.get("slot"),
            "labware": entry.get("material_name") or entry.get("labware"),
            "well": entry.get("well", []),
            "type": entry.get("type", "reagent"),
            "role": entry.get("role", ""),
            "name": unique_key,
        }

    return normalized
|
|
||||||
|
|
||||||
|
|
||||||
def convert_from_json(
    data: Union[str, PathLike, Dict[str, Any]],
    workstation_name: str = "PRCXi",
    validate: bool = True,
) -> WorkflowGraph:
    """
    Convert JSON data or a JSON file into a WorkflowGraph.

    Supported JSON shapes:
    1. {"workflow": [...], "reagent": {...}} — already normalized
    2. {"steps_info": [...], "labware_info": [...]} — needs normalization

    Args:
        data: JSON file path, dict, or JSON string.
        workstation_name: workstation name, defaults to "PRCXi".
        validate: whether to validate handle configuration, defaults to True.

    Returns:
        WorkflowGraph: the constructed workflow graph.

    Raises:
        ValueError: unsupported JSON shape or handle validation failure.
        FileNotFoundError: file does not exist.
        json.JSONDecodeError: JSON parsing failed.
    """
    # Resolve the input into a parsed JSON object
    if isinstance(data, (str, PathLike)):
        path = Path(data)
        if path.exists():
            with path.open("r", encoding="utf-8") as fp:
                json_data = json.load(fp)
        elif isinstance(data, str):
            # Not an existing file: try to parse it as a JSON string
            json_data = json.loads(data)
        else:
            # A PathLike that points nowhere
            raise FileNotFoundError(f"文件不存在: {data}")
    elif isinstance(data, dict):
        json_data = data
    else:
        raise TypeError(f"不支持的数据类型: {type(data)}")

    # Dispatch on the JSON shape
    if "workflow" in json_data and "reagent" in json_data:
        # Shape 1: workflow/reagent (already in the normalized format)
        protocol_steps = json_data["workflow"]
        labware_info = json_data["reagent"]
    elif "steps_info" in json_data and "labware_info" in json_data:
        # Shape 2: steps_info/labware_info (needs normalization)
        protocol_steps = normalize_steps(json_data["steps_info"])
        labware_info = normalize_labware(json_data["labware_info"])
    elif "steps" in json_data and "labware" in json_data:
        # Shape 3: steps/labware (another common format)
        protocol_steps = normalize_steps(json_data["steps"])
        if isinstance(json_data["labware"], list):
            labware_info = normalize_labware(json_data["labware"])
        else:
            labware_info = json_data["labware"]
    else:
        raise ValueError(
            "不支持的 JSON 格式。支持的格式:\n"
            "1. {'workflow': [...], 'reagent': {...}}\n"
            "2. {'steps_info': [...], 'labware_info': [...]}\n"
            "3. {'steps': [...], 'labware': [...]}"
        )

    # Build the workflow graph
    graph = build_protocol_graph(
        labware_info=labware_info,
        protocol_steps=protocol_steps,
        workstation_name=workstation_name,
        action_resource_mapping=ACTION_RESOURCE_MAPPING,
    )

    # Validate handle configuration (warnings only — does not raise)
    if validate:
        is_valid, errors = validate_workflow_handles(graph)
        if not is_valid:
            import warnings

            for error in errors:
                warnings.warn(f"句柄校验警告: {error}")

    return graph
|
|
||||||
|
|
||||||
|
|
||||||
def convert_json_to_node_link(
    data: Union[str, PathLike, Dict[str, Any]],
    workstation_name: str = "PRCXi",
    validate: bool = True,
) -> Dict[str, Any]:
    """
    Convert JSON data into a node-link formatted dict.

    Args:
        data: JSON file path, dict, or JSON string.
        workstation_name: workstation name, defaults to "PRCXi".
        validate: whether to validate handle configuration; forwarded to
            convert_from_json (previously not exposed here, so validation
            always ran — the default preserves that behavior).

    Returns:
        Dict: workflow data in node-link format.
    """
    graph = convert_from_json(data, workstation_name, validate=validate)
    return graph.to_node_link_dict()
|
|
||||||
|
|
||||||
|
|
||||||
def convert_json_to_workflow_list(
    data: Union[str, PathLike, Dict[str, Any]],
    workstation_name: str = "PRCXi",
    validate: bool = True,
) -> List[Dict[str, Any]]:
    """
    Convert JSON data into a workflow-list format.

    Args:
        data: JSON file path, dict, or JSON string.
        workstation_name: workstation name, defaults to "PRCXi".
        validate: whether to validate handle configuration; forwarded to
            convert_from_json (previously not exposed here, so validation
            always ran — the default preserves that behavior).

    Returns:
        List: list of workflow nodes.
    """
    graph = convert_from_json(data, workstation_name, validate=validate)
    return graph.to_dict()
|
|
||||||
|
|
||||||
|
|
||||||
# Backward-compatibility aliases: earlier releases exposed these helpers with
# a leading underscore.
_normalize_steps = normalize_steps
_normalize_labware = normalize_labware
|
|
||||||
@@ -1,241 +0,0 @@
|
|||||||
import ast
|
|
||||||
import json
|
|
||||||
from typing import Dict, List, Any, Tuple, Optional
|
|
||||||
|
|
||||||
from .common import WorkflowGraph, RegistryAdapter
|
|
||||||
|
|
||||||
Json = Dict[str, Any]
|
|
||||||
|
|
||||||
# ---------------- Converter ----------------
|
|
||||||
|
|
||||||
class DeviceMethodConverter:
    """Convert device-method Python source code into a workflow graph.

    Conventions:
    - Unified field names: ``resource_name`` (was ``device_class``) and
      ``template_name`` (was ``action_key``).
    - ``params`` is a single-level dict; inputs use the ``params.`` prefix.
    - ``SimpleGraph.add_workflow_node`` is responsible for variable wiring
      and edge creation.
    """
    def __init__(self, device_registry: Optional[Dict[str, Any]] = None):
        self.graph = WorkflowGraph()
        self.variable_sources: Dict[str, Dict[str, Any]] = {}  # var -> {node_id, output_name}
        self.instance_to_resource: Dict[str, Optional[str]] = {}  # instance name -> resource_name
        self.node_id_counter: int = 0
        self.registry = RegistryAdapter(device_registry or {})

    # ---- helpers ----
    def _new_node_id(self) -> int:
        # Monotonically increasing id; also used to chain prev_node_id below.
        nid = self.node_id_counter
        self.node_id_counter += 1
        return nid

    def _assign_targets(self, targets) -> List[str]:
        """Collect plain variable names from an assignment target (Name or Tuple)."""
        names: List[str] = []
        import ast
        if isinstance(targets, ast.Tuple):
            for elt in targets.elts:
                if isinstance(elt, ast.Name):
                    names.append(elt.id)
        elif isinstance(targets, ast.Name):
            names.append(targets.id)
        return names

    def _extract_device_instantiation(self, node) -> Optional[Tuple[str, str]]:
        """Return (instance_name, class_name) if the Assign node instantiates a device, else None."""
        import ast
        if not isinstance(node.value, ast.Call):
            return None
        callee = node.value.func
        if isinstance(callee, ast.Name):
            class_name = callee.id
        elif isinstance(callee, ast.Attribute) and isinstance(callee.value, ast.Name):
            class_name = callee.attr
        else:
            return None
        if isinstance(node.targets[0], ast.Name):
            instance = node.targets[0].id
            return instance, class_name
        return None

    def _extract_call(self, call) -> Tuple[str, str, Dict[str, Any], str]:
        """Decompose a Call node into (owner, method, packed args, call kind).

        Call kind is "instance" for a method on a known device instance,
        "class_or_module" for other attribute access, or "func" for a bare call.
        """
        import ast
        owner_name, method_name, call_kind = "", "", "func"
        if isinstance(call.func, ast.Attribute):
            method_name = call.func.attr
            if isinstance(call.func.value, ast.Name):
                owner_name = call.func.value.id
                call_kind = "instance" if owner_name in self.instance_to_resource else "class_or_module"
            elif isinstance(call.func.value, ast.Attribute) and isinstance(call.func.value.value, ast.Name):
                owner_name = call.func.value.attr
                call_kind = "class_or_module"
        elif isinstance(call.func, ast.Name):
            method_name = call.func.id
            call_kind = "func"

        def pack(node):
            # Tag each argument with how it should later be unpacked.
            if isinstance(node, ast.Name):
                return {"type": "variable", "value": node.id}
            if isinstance(node, ast.Constant):
                return {"type": "constant", "value": node.value}
            if isinstance(node, ast.Dict):
                return {"type": "dict", "value": self._parse_dict(node)}
            if isinstance(node, ast.List):
                return {"type": "list", "value": self._parse_list(node)}
            # Fallback: keep the source text (ast.unparse requires Python 3.9+)
            return {"type": "raw", "value": ast.unparse(node) if hasattr(ast, "unparse") else str(node)}

        args: Dict[str, Any] = {}
        pos: List[Any] = []
        for a in call.args:
            pos.append(pack(a))
        for kw in call.keywords:
            args[kw.arg] = pack(kw.value)
        if pos:
            args["_positional"] = pos
        return owner_name, method_name, args, call_kind

    def _parse_dict(self, node) -> Dict[str, Any]:
        """Recursively convert an ast.Dict into plain values; Name refs become "var:<name>" tokens."""
        import ast
        out: Dict[str, Any] = {}
        for k, v in zip(node.keys, node.values):
            if isinstance(k, ast.Constant):
                key = str(k.value)
                if isinstance(v, ast.Name):
                    out[key] = f"var:{v.id}"
                elif isinstance(v, ast.Constant):
                    out[key] = v.value
                elif isinstance(v, ast.Dict):
                    out[key] = self._parse_dict(v)
                elif isinstance(v, ast.List):
                    out[key] = self._parse_list(v)
        return out

    def _parse_list(self, node) -> List[Any]:
        """Recursively convert an ast.List into plain values; Name refs become "var:<name>" tokens."""
        import ast
        out: List[Any] = []
        for elt in node.elts:
            if isinstance(elt, ast.Name):
                out.append(f"var:{elt.id}")
            elif isinstance(elt, ast.Constant):
                out.append(elt.value)
            elif isinstance(elt, ast.Dict):
                out.append(self._parse_dict(elt))
            elif isinstance(elt, ast.List):
                out.append(self._parse_list(elt))
        return out

    def _normalize_var_tokens(self, x: Any) -> Any:
        """Replace "var:<name>" string tokens with {"__var__": <name>} markers, recursively."""
        if isinstance(x, str) and x.startswith("var:"):
            return {"__var__": x[4:]}
        if isinstance(x, list):
            return [self._normalize_var_tokens(i) for i in x]
        if isinstance(x, dict):
            return {k: self._normalize_var_tokens(v) for k, v in x.items()}
        return x

    def _make_params_payload(self, resource_name: Optional[str], template_name: str, call_args: Dict[str, Any]) -> Dict[str, Any]:
        """Build the node's params dict: registry defaults, then keyword args,
        then positional args mapped onto the schema's input-key order."""
        input_keys = self.registry.get_action_input_keys(resource_name, template_name) if resource_name else []
        defaults = self.registry.get_action_goal_default(resource_name, template_name) if resource_name else {}
        params: Dict[str, Any] = dict(defaults)

        def unpack(p):
            # Reverse of pack() in _extract_call
            t, v = p.get("type"), p.get("value")
            if t == "variable":
                return {"__var__": v}
            if t == "dict":
                return self._normalize_var_tokens(v)
            if t == "list":
                return self._normalize_var_tokens(v)
            return v

        for k, p in call_args.items():
            if k == "_positional":
                continue
            params[k] = unpack(p)

        pos = call_args.get("_positional", [])
        if pos:
            if input_keys:
                # Map positional args onto schema keys; keywords win on conflict
                for i, p in enumerate(pos):
                    if i >= len(input_keys):
                        break
                    name = input_keys[i]
                    if name in params:
                        continue
                    params[name] = unpack(p)
            else:
                # No schema: fall back to generic arg_<i> names
                for i, p in enumerate(pos):
                    params[f"arg_{i}"] = unpack(p)
        return params

    # ---- handlers ----
    def _on_assign(self, stmt):
        """Handle a top-level assignment: device instantiation or a call whose result is bound."""
        import ast
        inst = self._extract_device_instantiation(stmt)
        if inst:
            # Device instantiation: remember instance -> resource mapping, no node created
            instance, code_class = inst
            resource_name = self.registry.resolve_resource_by_classname(code_class)
            self.instance_to_resource[instance] = resource_name
            return

        if isinstance(stmt.value, ast.Call):
            owner, method, call_args, kind = self._extract_call(stmt.value)
            if kind == "instance":
                device_key = owner
                resource_name = self.instance_to_resource.get(owner)
            else:
                device_key = owner
                resource_name = self.registry.resolve_resource_by_classname(owner)

            module = self.registry.get_device_module(resource_name)
            params = self._make_params_payload(resource_name, method, call_args)

            nid = self._new_node_id()
            self.graph.add_workflow_node(
                nid,
                device_key=device_key,
                resource_name=resource_name,  # unified field name (was device_class)
                module=module,
                template_name=method,  # unified field name (was action_key)
                params=params,
                variable_sources=self.variable_sources,
                add_ready_if_no_vars=True,
                prev_node_id=(nid - 1) if nid > 0 else None,
            )

            # Bound variables now resolve to this node's "result" output
            out_vars = self._assign_targets(stmt.targets[0])
            for var in out_vars:
                self.variable_sources[var] = {"node_id": nid, "output_name": "result"}

    def _on_expr(self, stmt):
        """Handle a top-level bare-expression call (no assignment target)."""
        import ast
        if not isinstance(stmt.value, ast.Call):
            return
        owner, method, call_args, kind = self._extract_call(stmt.value)
        if kind == "instance":
            device_key = owner
            resource_name = self.instance_to_resource.get(owner)
        else:
            device_key = owner
            resource_name = self.registry.resolve_resource_by_classname(owner)

        module = self.registry.get_device_module(resource_name)
        params = self._make_params_payload(resource_name, method, call_args)

        nid = self._new_node_id()
        self.graph.add_workflow_node(
            nid,
            device_key=device_key,
            resource_name=resource_name,  # unified field name (was device_class)
            module=module,
            template_name=method,  # unified field name (was action_key)
            params=params,
            variable_sources=self.variable_sources,
            add_ready_if_no_vars=True,
            prev_node_id=(nid - 1) if nid > 0 else None,
        )

    def convert(self, python_code: str):
        """Parse *python_code* and feed its top-level Assign/Expr statements into the graph."""
        tree = ast.parse(python_code)
        for stmt in tree.body:
            if isinstance(stmt, ast.Assign):
                self._on_assign(stmt)
            elif isinstance(stmt, ast.Expr):
                self._on_expr(stmt)
        return self
|
|
||||||
@@ -1,131 +0,0 @@
|
|||||||
from typing import List, Any, Dict
|
|
||||||
import xml.etree.ElementTree as ET
|
|
||||||
|
|
||||||
|
|
||||||
def convert_to_type(val: str) -> Any:
|
|
||||||
"""将字符串值转换为适当的数据类型"""
|
|
||||||
if val == "True":
|
|
||||||
return True
|
|
||||||
if val == "False":
|
|
||||||
return False
|
|
||||||
if val == "?":
|
|
||||||
return None
|
|
||||||
if val.endswith(" g"):
|
|
||||||
return float(val.split(" ")[0])
|
|
||||||
if val.endswith("mg"):
|
|
||||||
return float(val.split("mg")[0])
|
|
||||||
elif val.endswith("mmol"):
|
|
||||||
return float(val.split("mmol")[0]) / 1000
|
|
||||||
elif val.endswith("mol"):
|
|
||||||
return float(val.split("mol")[0])
|
|
||||||
elif val.endswith("ml"):
|
|
||||||
return float(val.split("ml")[0])
|
|
||||||
elif val.endswith("RPM"):
|
|
||||||
return float(val.split("RPM")[0])
|
|
||||||
elif val.endswith(" °C"):
|
|
||||||
return float(val.split(" ")[0])
|
|
||||||
elif val.endswith(" %"):
|
|
||||||
return float(val.split(" ")[0])
|
|
||||||
return val
|
|
||||||
|
|
||||||
|
|
||||||
def flatten_xdl_procedure(procedure_elem: ET.Element) -> List[ET.Element]:
|
|
||||||
"""展平嵌套的XDL程序结构"""
|
|
||||||
flattened_operations = []
|
|
||||||
TEMP_UNSUPPORTED_PROTOCOL = ["Purge", "Wait", "Stir", "ResetHandling"]
|
|
||||||
|
|
||||||
def extract_operations(element: ET.Element):
|
|
||||||
if element.tag not in ["Prep", "Reaction", "Workup", "Purification", "Procedure"]:
|
|
||||||
if element.tag not in TEMP_UNSUPPORTED_PROTOCOL:
|
|
||||||
flattened_operations.append(element)
|
|
||||||
|
|
||||||
for child in element:
|
|
||||||
extract_operations(child)
|
|
||||||
|
|
||||||
for child in procedure_elem:
|
|
||||||
extract_operations(child)
|
|
||||||
|
|
||||||
return flattened_operations
|
|
||||||
|
|
||||||
|
|
||||||
def parse_xdl_content(xdl_content: str) -> tuple:
|
|
||||||
"""解析XDL内容"""
|
|
||||||
try:
|
|
||||||
xdl_content_cleaned = "".join(c for c in xdl_content if c.isprintable())
|
|
||||||
root = ET.fromstring(xdl_content_cleaned)
|
|
||||||
|
|
||||||
synthesis_elem = root.find("Synthesis")
|
|
||||||
if synthesis_elem is None:
|
|
||||||
return None, None, None
|
|
||||||
|
|
||||||
# 解析硬件组件
|
|
||||||
hardware_elem = synthesis_elem.find("Hardware")
|
|
||||||
hardware = []
|
|
||||||
if hardware_elem is not None:
|
|
||||||
hardware = [{"id": c.get("id"), "type": c.get("type")} for c in hardware_elem.findall("Component")]
|
|
||||||
|
|
||||||
# 解析试剂
|
|
||||||
reagents_elem = synthesis_elem.find("Reagents")
|
|
||||||
reagents = []
|
|
||||||
if reagents_elem is not None:
|
|
||||||
reagents = [{"name": r.get("name"), "role": r.get("role", "")} for r in reagents_elem.findall("Reagent")]
|
|
||||||
|
|
||||||
# 解析程序
|
|
||||||
procedure_elem = synthesis_elem.find("Procedure")
|
|
||||||
if procedure_elem is None:
|
|
||||||
return None, None, None
|
|
||||||
|
|
||||||
flattened_operations = flatten_xdl_procedure(procedure_elem)
|
|
||||||
return hardware, reagents, flattened_operations
|
|
||||||
|
|
||||||
except ET.ParseError as e:
|
|
||||||
raise ValueError(f"Invalid XDL format: {e}")
|
|
||||||
|
|
||||||
|
|
||||||
def convert_xdl_to_dict(xdl_content: str) -> Dict[str, Any]:
|
|
||||||
"""
|
|
||||||
将XDL XML格式转换为标准的字典格式
|
|
||||||
|
|
||||||
Args:
|
|
||||||
xdl_content: XDL XML内容
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
转换结果,包含步骤和器材信息
|
|
||||||
"""
|
|
||||||
try:
|
|
||||||
hardware, reagents, flattened_operations = parse_xdl_content(xdl_content)
|
|
||||||
if hardware is None:
|
|
||||||
return {"error": "Failed to parse XDL content", "success": False}
|
|
||||||
|
|
||||||
# 将XDL元素转换为字典格式
|
|
||||||
steps_data = []
|
|
||||||
for elem in flattened_operations:
|
|
||||||
# 转换参数类型
|
|
||||||
parameters = {}
|
|
||||||
for key, val in elem.attrib.items():
|
|
||||||
converted_val = convert_to_type(val)
|
|
||||||
if converted_val is not None:
|
|
||||||
parameters[key] = converted_val
|
|
||||||
|
|
||||||
step_dict = {
|
|
||||||
"operation": elem.tag,
|
|
||||||
"parameters": parameters,
|
|
||||||
"description": elem.get("purpose", f"Operation: {elem.tag}"),
|
|
||||||
}
|
|
||||||
steps_data.append(step_dict)
|
|
||||||
|
|
||||||
# 合并硬件和试剂为统一的labware_info格式
|
|
||||||
labware_data = []
|
|
||||||
labware_data.extend({"id": hw["id"], "type": "hardware", **hw} for hw in hardware)
|
|
||||||
labware_data.extend({"name": reagent["name"], "type": "reagent", **reagent} for reagent in reagents)
|
|
||||||
|
|
||||||
return {
|
|
||||||
"success": True,
|
|
||||||
"steps": steps_data,
|
|
||||||
"labware": labware_data,
|
|
||||||
"message": f"Successfully converted XDL to dict format. Found {len(steps_data)} steps and {len(labware_data)} labware items.",
|
|
||||||
}
|
|
||||||
|
|
||||||
except Exception as e:
|
|
||||||
error_msg = f"XDL conversion failed: {str(e)}"
|
|
||||||
return {"error": error_msg, "success": False}
|
|
||||||
@@ -2,7 +2,7 @@
|
|||||||
<?xml-model href="http://download.ros.org/schema/package_format3.xsd" schematypens="http://www.w3.org/2001/XMLSchema"?>
|
<?xml-model href="http://download.ros.org/schema/package_format3.xsd" schematypens="http://www.w3.org/2001/XMLSchema"?>
|
||||||
<package format="3">
|
<package format="3">
|
||||||
<name>unilabos_msgs</name>
|
<name>unilabos_msgs</name>
|
||||||
<version>0.10.17</version>
|
<version>0.10.14</version>
|
||||||
<description>ROS2 Messages package for unilabos devices</description>
|
<description>ROS2 Messages package for unilabos devices</description>
|
||||||
<maintainer email="changjh@pku.edu.cn">Junhan Chang</maintainer>
|
<maintainer email="changjh@pku.edu.cn">Junhan Chang</maintainer>
|
||||||
<maintainer email="18435084+Xuwznln@users.noreply.github.com">Xuwznln</maintainer>
|
<maintainer email="18435084+Xuwznln@users.noreply.github.com">Xuwznln</maintainer>
|
||||||
|
|||||||
Reference in New Issue
Block a user