Compare commits

209 Commits
workstatio...64f02ff129  (2d560a8182 … 64f02ff129)
@@ -1,6 +1,6 @@
package:
  name: unilabos
  version: 0.10.6
  version: 0.10.10

source:
  path: ../unilabos

@@ -31,11 +31,14 @@ requirements:
    - python ==3.11.11
    - pip
    - setuptools
    - zstd
    - zstandard
  run:
    - conda-forge::python ==3.11.11
    - compilers
    - cmake
    - zstd
    - zstandard
    - ninja
    - if: unix
      then:
340  .github/workflows/conda-pack-build.yml  (vendored, new file)
@@ -0,0 +1,340 @@
name: Build Conda-Pack Environment

on:
  workflow_dispatch:
    inputs:
      branch:
        description: '选择要构建的分支'
        required: true
        default: 'dev'
        type: string
      platforms:
        description: '选择构建平台 (逗号分隔): linux-64, osx-64, osx-arm64, win-64'
        required: false
        default: 'win-64'
        type: string

jobs:
  build-conda-pack:
    strategy:
      fail-fast: false
      matrix:
        include:
          - os: ubuntu-latest
            platform: linux-64
            env_file: unilabos-linux-64.yaml
            script_ext: sh
          - os: macos-13 # Intel
            platform: osx-64
            env_file: unilabos-osx-64.yaml
            script_ext: sh
          - os: macos-latest # ARM64
            platform: osx-arm64
            env_file: unilabos-osx-arm64.yaml
            script_ext: sh
          - os: windows-latest
            platform: win-64
            env_file: unilabos-win64.yaml
            script_ext: bat

    runs-on: ${{ matrix.os }}

    defaults:
      run:
        # Windows uses cmd for better conda/mamba compatibility, Unix uses bash
        shell: ${{ matrix.platform == 'win-64' && 'cmd' || 'bash' }}

    steps:
      - name: Check if platform should be built
        id: should_build
        shell: bash
        run: |
          if [[ -z "${{ github.event.inputs.platforms }}" ]]; then
            echo "should_build=true" >> $GITHUB_OUTPUT
          elif [[ "${{ github.event.inputs.platforms }}" == *"${{ matrix.platform }}"* ]]; then
            echo "should_build=true" >> $GITHUB_OUTPUT
          else
            echo "should_build=false" >> $GITHUB_OUTPUT
          fi

      - uses: actions/checkout@v4
        if: steps.should_build.outputs.should_build == 'true'
        with:
          ref: ${{ github.event.inputs.branch }}
          fetch-depth: 0

      - name: Setup Miniforge (with mamba)
        if: steps.should_build.outputs.should_build == 'true'
        uses: conda-incubator/setup-miniconda@v3
        with:
          miniforge-version: latest
          use-mamba: true
          python-version: '3.11.11'
          channels: conda-forge,robostack-staging,uni-lab,defaults
          channel-priority: flexible
          activate-environment: unilab
          auto-update-conda: false
          show-channel-urls: true

      - name: Install conda-pack, unilabos and dependencies (Windows)
        if: steps.should_build.outputs.should_build == 'true' && matrix.platform == 'win-64'
        run: |
          echo Installing unilabos and dependencies to unilab environment...
          echo Using mamba for faster and more reliable dependency resolution...
          mamba install -n unilab uni-lab::unilabos conda-pack -c uni-lab -c robostack-staging -c conda-forge -y

      - name: Install conda-pack, unilabos and dependencies (Unix)
        if: steps.should_build.outputs.should_build == 'true' && matrix.platform != 'win-64'
        shell: bash
        run: |
          echo "Installing unilabos and dependencies to unilab environment..."
          echo "Using mamba for faster and more reliable dependency resolution..."
          mamba install -n unilab uni-lab::unilabos conda-pack -c uni-lab -c robostack-staging -c conda-forge -y

      - name: Get latest ros-humble-unilabos-msgs version (Windows)
        if: steps.should_build.outputs.should_build == 'true' && matrix.platform == 'win-64'
        id: msgs_version_win
        run: |
          echo Checking installed ros-humble-unilabos-msgs version...
          conda list -n unilab ros-humble-unilabos-msgs
          for /f "tokens=2" %%i in ('conda list -n unilab ros-humble-unilabos-msgs --json ^| python -c "import sys, json; pkgs=json.load(sys.stdin); print(pkgs[0]['version'] if pkgs else 'not-found')"') do set VERSION=%%i
          echo installed_version=%VERSION% >> %GITHUB_OUTPUT%
          echo Installed ros-humble-unilabos-msgs version: %VERSION%

      - name: Get latest ros-humble-unilabos-msgs version (Unix)
        if: steps.should_build.outputs.should_build == 'true' && matrix.platform != 'win-64'
        id: msgs_version_unix
        shell: bash
        run: |
          echo "Checking installed ros-humble-unilabos-msgs version..."
          VERSION=$(conda list -n unilab ros-humble-unilabos-msgs --json | python -c "import sys, json; pkgs=json.load(sys.stdin); print(pkgs[0]['version'] if pkgs else 'not-found')")
          echo "installed_version=$VERSION" >> $GITHUB_OUTPUT
          echo "Installed ros-humble-unilabos-msgs version: $VERSION"

      - name: Check for newer ros-humble-unilabos-msgs (Windows)
        if: steps.should_build.outputs.should_build == 'true' && matrix.platform == 'win-64'
        run: |
          echo Checking for available ros-humble-unilabos-msgs versions...
          mamba search ros-humble-unilabos-msgs -c uni-lab -c robostack-staging -c conda-forge || echo Search completed
          echo.
          echo Updating ros-humble-unilabos-msgs to latest version...
          mamba update -n unilab ros-humble-unilabos-msgs -c uni-lab -c robostack-staging -c conda-forge -y || echo Already at latest version

      - name: Check for newer ros-humble-unilabos-msgs (Unix)
        if: steps.should_build.outputs.should_build == 'true' && matrix.platform != 'win-64'
        shell: bash
        run: |
          echo "Checking for available ros-humble-unilabos-msgs versions..."
          mamba search ros-humble-unilabos-msgs -c uni-lab -c robostack-staging -c conda-forge || echo "Search completed"
          echo ""
          echo "Updating ros-humble-unilabos-msgs to latest version..."
          mamba update -n unilab ros-humble-unilabos-msgs -c uni-lab -c robostack-staging -c conda-forge -y || echo "Already at latest version"

      - name: Install latest unilabos from source (Windows)
        if: steps.should_build.outputs.should_build == 'true' && matrix.platform == 'win-64'
        run: |
          echo Uninstalling existing unilabos...
          mamba run -n unilab pip uninstall unilabos -y || echo unilabos not installed via pip
          echo Installing unilabos from source (branch: ${{ github.event.inputs.branch }})...
          mamba run -n unilab pip install .
          echo Verifying installation...
          mamba run -n unilab pip show unilabos

      - name: Install latest unilabos from source (Unix)
        if: steps.should_build.outputs.should_build == 'true' && matrix.platform != 'win-64'
        shell: bash
        run: |
          echo "Uninstalling existing unilabos..."
          mamba run -n unilab pip uninstall unilabos -y || echo "unilabos not installed via pip"
          echo "Installing unilabos from source (branch: ${{ github.event.inputs.branch }})..."
          mamba run -n unilab pip install .
          echo "Verifying installation..."
          mamba run -n unilab pip show unilabos

      - name: Display environment info (Windows)
        if: steps.should_build.outputs.should_build == 'true' && matrix.platform == 'win-64'
        run: |
          echo === Environment Information ===
          mamba env list
          echo.
          echo === Installed Packages ===
          mamba list -n unilab | findstr /C:"unilabos" /C:"ros-humble-unilabos-msgs" || mamba list -n unilab
          echo.
          echo === Python Packages ===
          mamba run -n unilab pip list | findstr unilabos || mamba run -n unilab pip list

      - name: Display environment info (Unix)
        if: steps.should_build.outputs.should_build == 'true' && matrix.platform != 'win-64'
        shell: bash
        run: |
          echo "=== Environment Information ==="
          mamba env list
          echo ""
          echo "=== Installed Packages ==="
          mamba list -n unilab | grep -E "(unilabos|ros-humble-unilabos-msgs)" || mamba list -n unilab
          echo ""
          echo "=== Python Packages ==="
          mamba run -n unilab pip list | grep unilabos || mamba run -n unilab pip list

      - name: Verify environment integrity (Windows)
        if: steps.should_build.outputs.should_build == 'true' && matrix.platform == 'win-64'
        run: |
          echo Verifying Python version...
          mamba run -n unilab python -c "import sys; print(f'Python version: {sys.version}')"
          echo Verifying unilabos import...
          mamba run -n unilab python -c "import unilabos; print(f'UniLabOS version: {unilabos.__version__}')" || echo Warning: Could not import unilabos
          echo Checking critical packages...
          mamba run -n unilab python -c "import rclpy; print('ROS2 rclpy: OK')"
          echo Running comprehensive verification script...
          mamba run -n unilab python scripts\verify_installation.py --auto-install || echo Warning: Verification script reported issues
          echo Environment verification complete!

      - name: Verify environment integrity (Unix)
        if: steps.should_build.outputs.should_build == 'true' && matrix.platform != 'win-64'
        shell: bash
        run: |
          echo "Verifying Python version..."
          mamba run -n unilab python -c "import sys; print(f'Python version: {sys.version}')"
          echo "Verifying unilabos import..."
          mamba run -n unilab python -c "import unilabos; print(f'UniLabOS version: {unilabos.__version__}')" || echo "Warning: Could not import unilabos"
          echo "Checking critical packages..."
          mamba run -n unilab python -c "import rclpy; print('ROS2 rclpy: OK')"
          echo "Running comprehensive verification script..."
          mamba run -n unilab python scripts/verify_installation.py --auto-install || echo "Warning: Verification script reported issues"
          echo "Environment verification complete!"

      - name: Pack conda environment (Windows)
        if: steps.should_build.outputs.should_build == 'true' && matrix.platform == 'win-64'
        run: |
          echo Packing unilab environment with conda-pack...
          mamba activate unilab && conda pack -n unilab -o unilab-env-${{ matrix.platform }}.tar.gz --ignore-missing-files
          echo Pack file created:
          dir unilab-env-${{ matrix.platform }}.tar.gz

      - name: Pack conda environment (Unix)
        if: steps.should_build.outputs.should_build == 'true' && matrix.platform != 'win-64'
        shell: bash
        run: |
          echo "Packing unilab environment with conda-pack..."
          mamba install conda-pack -c conda-forge -y
          conda pack -n unilab -o unilab-env-${{ matrix.platform }}.tar.gz --ignore-missing-files
          echo "Pack file created:"
          ls -lh unilab-env-${{ matrix.platform }}.tar.gz

      - name: Prepare Windows distribution package
        if: steps.should_build.outputs.should_build == 'true' && matrix.platform == 'win-64'
        run: |
          echo ==========================================
          echo Creating distribution package...
          echo Platform: ${{ matrix.platform }}
          echo ==========================================
          mkdir dist-package 2>nul || cd .

          rem Copy packed environment
          echo Adding: unilab-env-${{ matrix.platform }}.tar.gz
          copy unilab-env-${{ matrix.platform }}.tar.gz dist-package\

          rem Copy installation script
          echo Adding: install_unilab.bat
          copy scripts\install_unilab.bat dist-package\

          rem Copy verification script
          echo Adding: verify_installation.py
          copy scripts\verify_installation.py dist-package\

          rem Copy source code repository (including .git)
          echo Adding: Uni-Lab-OS source repository
          robocopy . dist-package\Uni-Lab-OS /E /XD dist-package /NFL /NDL /NJH /NJS /NC /NS || if %ERRORLEVEL% LSS 8 exit /b 0

          rem Create README using Python script
          echo Creating: README.txt
          python scripts\create_readme.py ${{ matrix.platform }} ${{ github.event.inputs.branch }} dist-package\README.txt

          echo.
          echo Distribution package contents:
          dir /b dist-package
          echo.

      - name: Prepare Unix/Linux distribution package
        if: steps.should_build.outputs.should_build == 'true' && matrix.platform != 'win-64'
        shell: bash
        run: |
          echo "=========================================="
          echo "Creating distribution package..."
          echo "Platform: ${{ matrix.platform }}"
          echo "=========================================="
          mkdir -p dist-package

          # Copy packed environment
          echo "Adding: unilab-env-${{ matrix.platform }}.tar.gz"
          cp unilab-env-${{ matrix.platform }}.tar.gz dist-package/

          # Copy installation script
          echo "Adding: install_unilab.sh"
          cp scripts/install_unilab.sh dist-package/
          chmod +x dist-package/install_unilab.sh

          # Copy verification script
          echo "Adding: verify_installation.py"
          cp scripts/verify_installation.py dist-package/

          # Copy source code repository (including .git)
          echo "Adding: Uni-Lab-OS source repository"
          rsync -a --exclude='dist-package' . dist-package/Uni-Lab-OS

          # Create README using Python script
          echo "Creating: README.txt"
          python scripts/create_readme.py ${{ matrix.platform }} ${{ github.event.inputs.branch }} dist-package/README.txt

          echo ""
          echo "Distribution package contents:"
          ls -lh dist-package/
          echo ""

      - name: Upload distribution package
        if: steps.should_build.outputs.should_build == 'true'
        uses: actions/upload-artifact@v4
        with:
          name: unilab-pack-${{ matrix.platform }}-${{ github.event.inputs.branch }}
          path: dist-package/
          retention-days: 90
          if-no-files-found: error

      - name: Display package info (Windows)
        if: steps.should_build.outputs.should_build == 'true' && matrix.platform == 'win-64'
        run: |
          echo ==========================================
          echo Build Summary
          echo ==========================================
          echo Platform: ${{ matrix.platform }}
          echo Branch: ${{ github.event.inputs.branch }}
          echo Python version: 3.11.11
          echo.
          echo Distribution package contents:
          dir dist-package
          echo.
          echo Artifact name: unilab-pack-${{ matrix.platform }}-${{ github.event.inputs.branch }}
          echo.
          echo After download, extract the ZIP and run:
          echo install_unilab.bat
          echo ==========================================

      - name: Display package info (Unix)
        if: steps.should_build.outputs.should_build == 'true' && matrix.platform != 'win-64'
        shell: bash
        run: |
          echo "=========================================="
          echo "Build Summary"
          echo "=========================================="
          echo "Platform: ${{ matrix.platform }}"
          echo "Branch: ${{ github.event.inputs.branch }}"
          echo "Python version: 3.11.11"
          echo ""
          echo "Distribution package contents:"
          ls -lh dist-package/
          echo ""
          echo "Artifact name: unilab-pack-${{ matrix.platform }}-${{ github.event.inputs.branch }}"
          echo ""
          echo "After download:"
          echo "  install_unilab.sh"
          echo "=========================================="
113  .github/workflows/deploy-docs.yml  (vendored, new file)
@@ -0,0 +1,113 @@
name: Deploy Docs

on:
  push:
    branches: [main]
  pull_request:
    branches: [main]
  workflow_dispatch:
    inputs:
      branch:
        description: '要部署文档的分支'
        required: false
        default: 'main'
        type: string
      deploy_to_pages:
        description: '是否部署到 GitHub Pages'
        required: false
        default: true
        type: boolean

# Set GITHUB_TOKEN permissions for deploying to GitHub Pages
permissions:
  contents: read
  pages: write
  id-token: write

# Allow only one concurrent deployment, skipping runs queued between the in-progress run and the latest queued one,
# but do not cancel in-progress runs, because we want these production deployments to finish
concurrency:
  group: 'pages'
  cancel-in-progress: false

jobs:
  # Build documentation
  build:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v4
        with:
          ref: ${{ github.event.inputs.branch || github.ref }}
          fetch-depth: 0

      - name: Setup Miniforge (with mamba)
        uses: conda-incubator/setup-miniconda@v3
        with:
          miniforge-version: latest
          use-mamba: true
          python-version: '3.11.11'
          channels: conda-forge,robostack-staging,uni-lab,defaults
          channel-priority: flexible
          activate-environment: unilab
          auto-update-conda: false
          show-channel-urls: true

      - name: Install unilabos and dependencies
        run: |
          echo "Installing unilabos and dependencies to unilab environment..."
          echo "Using mamba for faster and more reliable dependency resolution..."
          mamba install -n unilab uni-lab::unilabos -c uni-lab -c robostack-staging -c conda-forge -y

      - name: Install latest unilabos from source
        run: |
          echo "Uninstalling existing unilabos..."
          mamba run -n unilab pip uninstall unilabos -y || echo "unilabos not installed via pip"
          echo "Installing unilabos from source..."
          mamba run -n unilab pip install .
          echo "Verifying installation..."
          mamba run -n unilab pip show unilabos

      - name: Install documentation dependencies
        run: |
          echo "Installing documentation build dependencies..."
          mamba run -n unilab pip install -r docs/requirements.txt

      - name: Setup Pages
        id: pages
        uses: actions/configure-pages@v4
        if: github.ref == 'refs/heads/main' || (github.event_name == 'workflow_dispatch' && github.event.inputs.deploy_to_pages == 'true')

      - name: Build Sphinx documentation
        run: |
          cd docs
          # Clean previous builds
          rm -rf _build
          # Build HTML documentation in conda environment
          mamba run -n unilab python -m sphinx -b html . _build/html -v

      - name: Check build results
        run: |
          echo "Documentation build completed, checking output directory:"
          ls -la docs/_build/html/
          echo "Checking for index.html:"
          test -f docs/_build/html/index.html && echo "✓ index.html exists" || echo "✗ index.html missing"

      - name: Upload build artifacts
        uses: actions/upload-pages-artifact@v3
        if: github.ref == 'refs/heads/main' || (github.event_name == 'workflow_dispatch' && github.event.inputs.deploy_to_pages == 'true')
        with:
          path: docs/_build/html

  # Deploy to GitHub Pages
  deploy:
    if: github.ref == 'refs/heads/main' || (github.event_name == 'workflow_dispatch' && github.event.inputs.deploy_to_pages == 'true')
    environment:
      name: github-pages
      url: ${{ steps.deployment.outputs.page_url }}
    runs-on: ubuntu-latest
    needs: build
    steps:
      - name: Deploy to GitHub Pages
        id: deployment
        uses: actions/deploy-pages@v4
1  .gitignore  (vendored)
@@ -2,6 +2,7 @@ configs/
temp/
output/
unilabos_data/
pyrightconfig.json

## Python

# Byte-compiled / optimized / DLL files
18  CONTRIBUTORS  (new file)
@@ -0,0 +1,18 @@
56  Xuwznln <18435084+Xuwznln@users.noreply.github.com>
10  wznln <18435084+Xuwznln@users.noreply.github.com>
 6  Junhan Chang <changjh@dp.tech>
 5  ZiWei <131428629+ZiWei09@users.noreply.github.com>
 2  Guangxin Zhang <guangxin.zhang.bio@gmail.com>
 2  Junhan Chang <changjh@pku.edu.cn>
 2  WenzheG <wenzheguo32@gmail.com>
 1  Harry Liu <113173203+ALITTLELZ@users.noreply.github.com>
 1  Harvey Que <103566763+Mile-Away@users.noreply.github.com>
 1  Junhan Chang <1700011741@pku.edu.cn>
 1  Xianwei Qi <qxw@stu.pku.edu.cn>
 1  hh.(SII) <103566763+Mile-Away@users.noreply.github.com>
 1  lixinyu1011 <61094742+lixinyu1011@users.noreply.github.com>
 1  q434343 <73513873+q434343@users.noreply.github.com>
 1  tt <166512503+tt11142023@users.noreply.github.com>
 1  xyc <49015816+xiaoyu10031@users.noreply.github.com>
 1  王俊杰 <1800011822@pku.edu.cn>
 1  王俊杰 <43375851+wjjxxx@users.noreply.github.com>
@@ -31,7 +31,7 @@ Join the [Intelligent Organic Chemistry Synthesis Competition](https://bohrium.d

Detailed documentation can be found at:

- [Online Documentation](https://readthedocs.dp.tech/Uni-Lab/v0.8.0/)
- [Online Documentation](https://xuwznln.github.io/Uni-Lab-OS-Doc/)

## Quick Start

@@ -55,7 +55,7 @@ pip install .

3. Start Uni-Lab System:

Please refer to [Documentation - Boot Examples](https://readthedocs.dp.tech/Uni-Lab/v0.8.0/boot_examples/index.html)
Please refer to [Documentation - Boot Examples](https://xuwznln.github.io/Uni-Lab-OS-Doc/boot_examples/index.html)

## Message Format

@@ -31,7 +31,7 @@ Uni-Lab-OS is a comprehensive laboratory-automation platform designed to connect and control…

Detailed documentation can be found at:

- [Online documentation](https://readthedocs.dp.tech/Uni-Lab/v0.8.0/)
- [Online documentation](https://xuwznln.github.io/Uni-Lab-OS-Doc/)

## Quick Start

@@ -57,7 +57,7 @@ pip install .

3. Start the Uni-Lab system:

See [Documentation - Boot Examples](https://readthedocs.dp.tech/Uni-Lab/v0.8.0/boot_examples/index.html)
See [Documentation - Boot Examples](https://xuwznln.github.io/Uni-Lab-OS-Doc/boot_examples/index.html)

## Message Format

@@ -91,7 +91,7 @@

Start the mock reactor with the following command:

```bash
unilab -g test/experiments/mock_reactor.json --app_bridges ""
unilab -g test/experiments/mock_reactor.json
```

### 2. Perform evacuate-and-refill operations

@@ -23,7 +23,8 @@ extensions = [
    "myst_parser",
    "sphinx.ext.autodoc",
    "sphinx.ext.napoleon",  # if you use Google- or NumPy-style docstrings
    "sphinx_rtd_theme"
    "sphinx_rtd_theme",
    "sphinxcontrib.mermaid"
]

source_suffix = {

@@ -42,6 +43,8 @@ myst_enable_extensions = [
    "substitution",
]

myst_fence_as_directive = ["mermaid"]

templates_path = ["_templates"]
exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"]

@@ -203,3 +206,5 @@ def generate_action_includes(app):

def setup(app):
    app.connect("builder-inited", generate_action_includes)
    app.add_js_file("https://cdn.jsdelivr.net/npm/mermaid/dist/mermaid.min.js")
    app.add_js_file(None, body="mermaid.initialize({startOnLoad:true});")

@@ -1,88 +1,26 @@
|
||||
## 简单单变量动作函数
|
||||
|
||||
|
||||
### `SendCmd`
|
||||
|
||||
```{literalinclude} ../../unilabos_msgs/action/SendCmd.action
|
||||
:language: yaml
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### `StrSingleInput`
|
||||
|
||||
```{literalinclude} ../../unilabos_msgs/action/StrSingleInput.action
|
||||
:language: yaml
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### `IntSingleInput`
|
||||
|
||||
```{literalinclude} ../../unilabos_msgs/action/IntSingleInput.action
|
||||
:language: yaml
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### `FloatSingleInput`
|
||||
|
||||
```{literalinclude} ../../unilabos_msgs/action/FloatSingleInput.action
|
||||
:language: yaml
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### `Point3DSeparateInput`
|
||||
|
||||
```{literalinclude} ../../unilabos_msgs/action/Point3DSeparateInput.action
|
||||
:language: yaml
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### `Wait`
|
||||
|
||||
```{literalinclude} ../../unilabos_msgs/action/Wait.action
|
||||
:language: yaml
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
----
|
||||
## 常量有机化学操作
|
||||
|
||||
Uni-Lab 常量有机化学指令集多数来自 [XDL](https://croningroup.gitlab.io/chemputer/xdl/standard/full_steps_specification.html#),包含有机合成实验中常见的操作,如加热、搅拌、冷却等。
|
||||
|
||||
|
||||
|
||||
### `Clean`
|
||||
|
||||
```{literalinclude} ../../unilabos_msgs/action/Clean.action
|
||||
:language: yaml
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### `EvacuateAndRefill`
|
||||
|
||||
```{literalinclude} ../../unilabos_msgs/action/EvacuateAndRefill.action
|
||||
:language: yaml
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### `Evaporate`
|
||||
|
||||
```{literalinclude} ../../unilabos_msgs/action/Evaporate.action
|
||||
:language: yaml
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### `HeatChill`
|
||||
|
||||
```{literalinclude} ../../unilabos_msgs/action/HeatChill.action
|
||||
:language: yaml
|
||||
```
|
||||
|
||||
---
|
||||
----
|
||||
|
||||
### `HeatChillStart`
|
||||
|
||||
@@ -90,7 +28,7 @@ Uni-Lab 常量有机化学指令集多数来自 [XDL](https://croningroup.gitlab
|
||||
:language: yaml
|
||||
```
|
||||
|
||||
---
|
||||
----
|
||||
|
||||
### `HeatChillStop`
|
||||
|
||||
@@ -98,7 +36,7 @@ Uni-Lab 常量有机化学指令集多数来自 [XDL](https://croningroup.gitlab
|
||||
:language: yaml
|
||||
```
|
||||
|
||||
---
|
||||
----
|
||||
|
||||
### `PumpTransfer`
|
||||
|
||||
@@ -106,195 +44,12 @@ Uni-Lab 常量有机化学指令集多数来自 [XDL](https://croningroup.gitlab
|
||||
:language: yaml
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### `Separate`
|
||||
|
||||
```{literalinclude} ../../unilabos_msgs/action/Separate.action
|
||||
:language: yaml
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### `Stir`
|
||||
|
||||
```{literalinclude} ../../unilabos_msgs/action/Stir.action
|
||||
:language: yaml
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### `Add`
|
||||
|
||||
```{literalinclude} ../../unilabos_msgs/action/Add.action
|
||||
:language: yaml
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### `AddSolid`
|
||||
|
||||
```{literalinclude} ../../unilabos_msgs/action/AddSolid.action
|
||||
:language: yaml
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### `AdjustPH`
|
||||
|
||||
```{literalinclude} ../../unilabos_msgs/action/AdjustPH.action
|
||||
:language: yaml
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### `Centrifuge`
|
||||
|
||||
```{literalinclude} ../../unilabos_msgs/action/Centrifuge.action
|
||||
:language: yaml
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### `CleanVessel`
|
||||
|
||||
```{literalinclude} ../../unilabos_msgs/action/CleanVessel.action
|
||||
:language: yaml
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### `Crystallize`
|
||||
|
||||
```{literalinclude} ../../unilabos_msgs/action/Crystallize.action
|
||||
:language: yaml
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### `Dissolve`
|
||||
|
||||
```{literalinclude} ../../unilabos_msgs/action/Dissolve.action
|
||||
:language: yaml
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### `Dry`
|
||||
|
||||
```{literalinclude} ../../unilabos_msgs/action/Dry.action
|
||||
:language: yaml
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### `Filter`
|
||||
|
||||
```{literalinclude} ../../unilabos_msgs/action/Filter.action
|
||||
:language: yaml
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### `FilterThrough`
|
||||
|
||||
```{literalinclude} ../../unilabos_msgs/action/FilterThrough.action
|
||||
:language: yaml
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### `Hydrogenate`
|
||||
|
||||
```{literalinclude} ../../unilabos_msgs/action/Hydrogenate.action
|
||||
:language: yaml
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### `Purge`
|
||||
|
||||
```{literalinclude} ../../unilabos_msgs/action/Purge.action
|
||||
:language: yaml
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### `Recrystallize`
|
||||
|
||||
```{literalinclude} ../../unilabos_msgs/action/Recrystallize.action
|
||||
:language: yaml
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### `RunColumn`
|
||||
|
||||
```{literalinclude} ../../unilabos_msgs/action/RunColumn.action
|
||||
:language: yaml
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### `StartPurge`
|
||||
|
||||
```{literalinclude} ../../unilabos_msgs/action/StartPurge.action
|
||||
:language: yaml
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### `StartStir`
|
||||
|
||||
```{literalinclude} ../../unilabos_msgs/action/StartStir.action
|
||||
:language: yaml
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### `StopPurge`
|
||||
|
||||
```{literalinclude} ../../unilabos_msgs/action/StopPurge.action
|
||||
:language: yaml
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### `StopStir`
|
||||
|
||||
```{literalinclude} ../../unilabos_msgs/action/StopStir.action
|
||||
:language: yaml
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### `Transfer`
|
||||
|
||||
```{literalinclude} ../../unilabos_msgs/action/Transfer.action
|
||||
:language: yaml
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### `WashSolid`
|
||||
|
||||
```{literalinclude} ../../unilabos_msgs/action/WashSolid.action
|
||||
:language: yaml
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
----
|
||||
## 移液工作站及相关生物自动化设备操作
|
||||
|
||||
Uni-Lab 生物操作指令集多数来自 [PyLabRobot](https://docs.pylabrobot.org/user_guide/index.html),包含生物实验中常见的操作,如移液、混匀、离心等。
|
||||
|
||||
### `LiquidHandlerAspirate`
|
||||
|
||||
```{literalinclude} ../../unilabos_msgs/action/LiquidHandlerAspirate.action
|
||||
:language: yaml
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### `LiquidHandlerDiscardTips`
|
||||
|
||||
@@ -302,15 +57,7 @@ Uni-Lab 生物操作指令集多数来自 [PyLabRobot](https://docs.pylabrobot.o
|
||||
:language: yaml
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### `LiquidHandlerDispense`
|
||||
|
||||
```{literalinclude} ../../unilabos_msgs/action/LiquidHandlerDispense.action
|
||||
:language: yaml
|
||||
```
|
||||
|
||||
---
|
||||
----
|
||||
|
||||
### `LiquidHandlerDropTips`
|
||||
|
||||
@@ -318,7 +65,7 @@ Uni-Lab 生物操作指令集多数来自 [PyLabRobot](https://docs.pylabrobot.o
|
||||
:language: yaml
|
||||
```
|
||||
|
||||
---
|
||||
----
|
||||
|
||||
### `LiquidHandlerDropTips96`
|
||||
|
||||
@@ -326,7 +73,7 @@ Uni-Lab 生物操作指令集多数来自 [PyLabRobot](https://docs.pylabrobot.o
|
||||
:language: yaml
|
||||
```
|
||||
|
||||
---
|
||||
----
|
||||
|
||||
### `LiquidHandlerMoveLid`
|
||||
|
||||
@@ -334,7 +81,7 @@ Uni-Lab 生物操作指令集多数来自 [PyLabRobot](https://docs.pylabrobot.o
|
||||
:language: yaml
|
||||
```
|
||||
|
||||
---
|
||||
----
|
||||
|
||||
### `LiquidHandlerMovePlate`
|
||||
|
||||
@@ -342,7 +89,7 @@ Uni-Lab 生物操作指令集多数来自 [PyLabRobot](https://docs.pylabrobot.o
|
||||
:language: yaml
|
||||
```
|
||||
|
||||
---
|
||||
----
|
||||
|
||||
### `LiquidHandlerMoveResource`
|
||||
|
||||
@@ -350,7 +97,7 @@ Uni-Lab 生物操作指令集多数来自 [PyLabRobot](https://docs.pylabrobot.o
|
||||
:language: yaml
|
||||
```
|
||||
|
||||
---
|
||||
----
|
||||
|
||||
### `LiquidHandlerPickUpTips`
|
||||
|
||||
@@ -358,7 +105,7 @@ Uni-Lab 生物操作指令集多数来自 [PyLabRobot](https://docs.pylabrobot.o
|
||||
:language: yaml
|
||||
```
|
||||
|
||||
---
|
||||
----
|
||||
|
||||
### `LiquidHandlerPickUpTips96`
|
||||
|
||||
@@ -366,7 +113,7 @@ Uni-Lab 生物操作指令集多数来自 [PyLabRobot](https://docs.pylabrobot.o
|
||||
:language: yaml
|
||||
```
|
||||
|
||||
---
|
||||
----
|
||||
|
||||
### `LiquidHandlerReturnTips`
|
||||
|
||||
@@ -374,7 +121,7 @@ Uni-Lab 生物操作指令集多数来自 [PyLabRobot](https://docs.pylabrobot.o
|
||||
:language: yaml
|
||||
```
|
||||
|
||||
---
|
||||
----
|
||||
|
||||
### `LiquidHandlerReturnTips96`
|
||||
|
||||
@@ -382,7 +129,7 @@ Uni-Lab 生物操作指令集多数来自 [PyLabRobot](https://docs.pylabrobot.o
|
||||
:language: yaml
|
||||
```
|
||||
|
||||
---
|
||||
----
|
||||
|
||||
### `LiquidHandlerStamp`
|
||||
|
||||
@@ -390,129 +137,17 @@ Uni-Lab 生物操作指令集多数来自 [PyLabRobot](https://docs.pylabrobot.o
|
||||
:language: yaml
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### `LiquidHandlerTransfer`
|
||||
|
||||
```{literalinclude} ../../unilabos_msgs/action/LiquidHandlerTransfer.action
|
||||
:language: yaml
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### `LiquidHandlerAdd`
|
||||
|
||||
```{literalinclude} ../../unilabos_msgs/action/LiquidHandlerAdd.action
|
||||
:language: yaml
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### `LiquidHandlerIncubateBiomek`
|
||||
|
||||
```{literalinclude} ../../unilabos_msgs/action/LiquidHandlerIncubateBiomek.action
|
||||
:language: yaml
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### `LiquidHandlerMix`
|
||||
|
||||
```{literalinclude} ../../unilabos_msgs/action/LiquidHandlerMix.action
|
||||
:language: yaml
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### `LiquidHandlerMoveBiomek`
|
||||
|
||||
```{literalinclude} ../../unilabos_msgs/action/LiquidHandlerMoveBiomek.action
|
||||
:language: yaml
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### `LiquidHandlerMoveTo`
|
||||
|
||||
```{literalinclude} ../../unilabos_msgs/action/LiquidHandlerMoveTo.action
|
||||
:language: yaml
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### `LiquidHandlerOscillateBiomek`
|
||||
|
||||
```{literalinclude} ../../unilabos_msgs/action/LiquidHandlerOscillateBiomek.action
|
||||
:language: yaml
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### `LiquidHandlerProtocolCreation`
|
||||
|
||||
```{literalinclude} ../../unilabos_msgs/action/LiquidHandlerProtocolCreation.action
|
||||
:language: yaml
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### `LiquidHandlerRemove`
|
||||
|
||||
```{literalinclude} ../../unilabos_msgs/action/LiquidHandlerRemove.action
|
||||
:language: yaml
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### `LiquidHandlerSetGroup`
|
||||
|
||||
```{literalinclude} ../../unilabos_msgs/action/LiquidHandlerSetGroup.action
|
||||
:language: yaml
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### `LiquidHandlerSetLiquid`
|
||||
|
||||
```{literalinclude} ../../unilabos_msgs/action/LiquidHandlerSetLiquid.action
|
||||
:language: yaml
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### `LiquidHandlerSetTipRack`
|
||||
|
||||
```{literalinclude} ../../unilabos_msgs/action/LiquidHandlerSetTipRack.action
|
||||
:language: yaml
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### `LiquidHandlerTransferBiomek`
|
||||
|
||||
```{literalinclude} ../../unilabos_msgs/action/LiquidHandlerTransferBiomek.action
|
||||
:language: yaml
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### `LiquidHandlerTransferGroup`
|
||||
|
||||
```{literalinclude} ../../unilabos_msgs/action/LiquidHandlerTransferGroup.action
|
||||
:language: yaml
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
----
|
||||
## 多工作站及小车运行、物料转移
|
||||
|
||||
|
||||
### `AGVTransfer`
|
||||
|
||||
```{literalinclude} ../../unilabos_msgs/action/AGVTransfer.action
|
||||
:language: yaml
|
||||
```
|
||||
|
||||
---
|
||||
----
|
||||
|
||||
### `WorkStationRun`
|
||||
|
||||
@@ -520,64 +155,12 @@ Uni-Lab 生物操作指令集多数来自 [PyLabRobot](https://docs.pylabrobot.o
|
||||
:language: yaml
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### `ResetHandling`
|
||||
|
||||
```{literalinclude} ../../unilabos_msgs/action/ResetHandling.action
|
||||
:language: yaml
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### `ResourceCreateFromOuter`
|
||||
|
||||
```{literalinclude} ../../unilabos_msgs/action/ResourceCreateFromOuter.action
|
||||
:language: yaml
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### `ResourceCreateFromOuterEasy`
|
||||
|
||||
```{literalinclude} ../../unilabos_msgs/action/ResourceCreateFromOuterEasy.action
|
||||
:language: yaml
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### `SetPumpPosition`
|
||||
|
||||
```{literalinclude} ../../unilabos_msgs/action/SetPumpPosition.action
|
||||
:language: yaml
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 固体分配与处理设备操作
|
||||
|
||||
### `SolidDispenseAddPowderTube`
|
||||
|
||||
```{literalinclude} ../../unilabos_msgs/action/SolidDispenseAddPowderTube.action
|
||||
:language: yaml
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 其他设备操作
|
||||
|
||||
### `EmptyIn`
|
||||
|
||||
```{literalinclude} ../../unilabos_msgs/action/EmptyIn.action
|
||||
:language: yaml
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
----
|
||||
## 机械臂、夹爪等机器人设备
|
||||
|
||||
Uni-Lab 机械臂、机器人、夹爪和导航指令集沿用 ROS2 的 `control_msgs` 和 `nav2_msgs`:
|
||||
|
||||
|
||||
### `FollowJointTrajectory`
|
||||
|
||||
```yaml
|
||||
@@ -645,8 +228,7 @@ trajectory_msgs/MultiDOFJointTrajectoryPoint multi_dof_error
|
||||
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
----
|
||||
### `GripperCommand`
|
||||
|
||||
```yaml
|
||||
@@ -664,19 +246,42 @@ bool reached_goal # True iff the gripper position has reached the commanded setp
|
||||
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
----
|
||||
### `JointTrajectory`
|
||||
|
||||
```yaml
|
||||
trajectory_msgs/JointTrajectory trajectory
|
||||
---
|
||||
|
||||
---
|
||||
|
||||
```
|
||||
|
||||
---
|
||||
----
|
||||
### `ParallelGripperCommand`
|
||||
|
||||
```yaml
|
||||
# Parallel grippers refer to an end effector where two opposing fingers grasp an object from opposite sides.
|
||||
sensor_msgs/JointState command
|
||||
# name: the name(s) of the joint this command is requesting
|
||||
# position: desired position of each gripper joint (radians or meters)
|
||||
# velocity: (optional, not used if empty) max velocity of the joint allowed while moving (radians or meters / second)
|
||||
# effort: (optional, not used if empty) max effort of the joint allowed while moving (Newtons or Newton-meters)
|
||||
---
|
||||
sensor_msgs/JointState state # The current gripper state.
|
||||
# position of each joint (radians or meters)
|
||||
# optional: velocity of each joint (radians or meters / second)
|
||||
# optional: effort of each joint (Newtons or Newton-meters)
|
||||
bool stalled # True if the gripper is exerting max effort and not moving
|
||||
bool reached_goal # True if the gripper position has reached the commanded setpoint
|
||||
---
|
||||
sensor_msgs/JointState state # The current gripper state.
|
||||
# position of each joint (radians or meters)
|
||||
# optional: velocity of each joint (radians or meters / second)
|
||||
# optional: effort of each joint (Newtons or Newton-meters)
|
||||
|
||||
```
|
||||
|
||||
----
|
||||
### `PointHead`
|
||||
|
||||
```yaml
|
||||
@@ -686,13 +291,12 @@ string pointing_frame
|
||||
builtin_interfaces/Duration min_duration
|
||||
float64 max_velocity
|
||||
---
|
||||
|
||||
---
|
||||
float64 pointing_angle_error
|
||||
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
----
|
||||
### `SingleJointPosition`
|
||||
|
||||
```yaml
|
||||
@@ -700,16 +304,15 @@ float64 position
|
||||
builtin_interfaces/Duration min_duration
|
||||
float64 max_velocity
|
||||
---
|
||||
|
||||
---
|
||||
std_msgs/Header header
|
||||
float64 position
|
||||
float64 velocity
|
||||
float64 error
|
||||
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
----
|
||||
### `AssistedTeleop`
|
||||
|
||||
```yaml
|
||||
@@ -721,10 +324,10 @@ builtin_interfaces/Duration total_elapsed_time
|
||||
---
|
||||
#feedback
|
||||
builtin_interfaces/Duration current_teleop_duration
|
||||
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
----
|
||||
### `BackUp`
|
||||
|
||||
```yaml
|
||||
@@ -738,10 +341,10 @@ builtin_interfaces/Duration total_elapsed_time
|
||||
---
|
||||
#feedback definition
|
||||
float32 distance_traveled
|
||||
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
----
|
||||
### `ComputePathThroughPoses`
|
||||
|
||||
```yaml
|
||||
@@ -756,10 +359,10 @@ nav_msgs/Path path
|
||||
builtin_interfaces/Duration planning_time
|
||||
---
|
||||
#feedback definition
|
||||
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
----
|
||||
### `ComputePathToPose`
|
||||
|
||||
```yaml
|
||||
@@ -774,10 +377,10 @@ nav_msgs/Path path
|
||||
builtin_interfaces/Duration planning_time
|
||||
---
|
||||
#feedback definition
|
||||
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
----
|
||||
### `DriveOnHeading`
|
||||
|
||||
```yaml
|
||||
@@ -791,10 +394,10 @@ builtin_interfaces/Duration total_elapsed_time
|
||||
---
|
||||
#feedback definition
|
||||
float32 distance_traveled
|
||||
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
----
|
||||
### `DummyBehavior`
|
||||
|
||||
```yaml
|
||||
@@ -805,10 +408,10 @@ std_msgs/String command
|
||||
builtin_interfaces/Duration total_elapsed_time
|
||||
---
|
||||
#feedback definition
|
||||
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
----
|
||||
### `FollowPath`
|
||||
|
||||
```yaml
|
||||
@@ -823,10 +426,10 @@ std_msgs/Empty result
|
||||
#feedback definition
|
||||
float32 distance_to_goal
|
||||
float32 speed
|
||||
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
----
|
||||
### `FollowWaypoints`
|
||||
|
||||
```yaml
|
||||
@@ -838,10 +441,10 @@ int32[] missed_waypoints
|
||||
---
|
||||
#feedback definition
|
||||
uint32 current_waypoint
|
||||
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
----
|
||||
### `NavigateThroughPoses`
|
||||
|
||||
```yaml
|
||||
@@ -859,10 +462,10 @@ builtin_interfaces/Duration estimated_time_remaining
|
||||
int16 number_of_recoveries
|
||||
float32 distance_remaining
|
||||
int16 number_of_poses_remaining
|
||||
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
----
|
||||
### `NavigateToPose`
|
||||
|
||||
```yaml
|
||||
@@ -879,10 +482,10 @@ builtin_interfaces/Duration navigation_time
|
||||
builtin_interfaces/Duration estimated_time_remaining
|
||||
int16 number_of_recoveries
|
||||
float32 distance_remaining
|
||||
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
----
|
||||
### `SmoothPath`
|
||||
|
||||
```yaml
|
||||
@@ -898,10 +501,10 @@ builtin_interfaces/Duration smoothing_duration
|
||||
bool was_completed
|
||||
---
|
||||
#feedback definition
|
||||
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
----
|
||||
### `Spin`
|
||||
|
||||
```yaml
|
||||
@@ -914,10 +517,10 @@ builtin_interfaces/Duration total_elapsed_time
|
||||
---
|
||||
#feedback definition
|
||||
float32 angular_distance_traveled
|
||||
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
----
|
||||
### `Wait`
|
||||
|
||||
```yaml
|
||||
@@ -929,6 +532,7 @@ builtin_interfaces/Duration total_elapsed_time
|
||||
---
|
||||
#feedback definition
|
||||
builtin_interfaces/Duration time_left
|
||||
|
||||
```
|
||||
|
||||
---
|
||||
----
|
||||
|
||||
147  docs/developer_guide/add_batteryPLC.md  (new file)
@@ -0,0 +1,147 @@
# Battery assembly workstation integration (PLC)

This guide walks you through integrating a battery assembly workstation (using PLC control as the example), covering creating the workstation file, writing the driver and register reads/writes, generating the registry, uploading it, and points to watch out for.

## 1. Create the workstation file

### 1.1 Create the workstation file

Create a workstation file such as `coin_cell_assembly.py` under `unilabos/devices/workstation/coin_cell_assembly`. The workstation class must inherit from `WorkstationBase` and initialize the communication client and register map in its constructor.

```python
from typing import Optional
# Workstation base class
from unilabos.devices.workstation.workstation_base import WorkstationBase
# Modbus communication and register CSV support
from unilabos.device_comms.modbus_plc.client import TCPClient, BaseClient


class CoinCellAssemblyWorkstation(WorkstationBase):
    def __init__(
        self,
        station_resource,
        address: str = "192.168.1.20",
        port: str = "502",
        *args,
        **kwargs,
    ):
        super().__init__(station_resource=station_resource, *args, **kwargs)
        self.station_resource = station_resource  # Material deck
        self.success: bool = False
        self.allow_data_read: bool = False
        self.csv_export_thread = None
        self.csv_export_running = False
        self.csv_export_file: Optional[str] = None

        # Connect to the PLC and register the register nodes
        tcp = TCPClient(addr=address, port=port)
        tcp.client.connect()
        self.nodes = BaseClient.load_csv(".../PLC_register.csv")
        self.client = tcp.register_node_list(self.nodes)
```

## 2. Write the driver and register reads/writes

### 2.1 Register examples

- `COIL_SYS_START_CMD` (BOOL, address 8010): start command (pulse)
- `COIL_SYS_START_STATUS` (BOOL, address 8210): start status
- `REG_DATA_OPEN_CIRCUIT_VOLTAGE` (FLOAT32, address 10002): open-circuit voltage
- `REG_DATA_ASSEMBLY_PRESSURE` (INT16, address 10014): coin-cell pressing force

### 2.2 Minimal driver example

```python
from unilabos.device_comms.modbus_plc.modbus import WorderOrder


def start_and_read_metrics(self):
    # 1) Issue the start command (set True, then reset to False)
    self.client.use_node('COIL_SYS_START_CMD').write(True)
    self.client.use_node('COIL_SYS_START_CMD').write(False)

    # 2) Wait until the start status is reached
    while True:
        status, _ = self.client.use_node('COIL_SYS_START_STATUS').read(1)
        if bool(status[0]):
            break

    # 3) Read the key data (FLOAT32 needs 2 registers and an explicit word order)
    voltage, _ = self.client.use_node('REG_DATA_OPEN_CIRCUIT_VOLTAGE').read(
        2, word_order=WorderOrder.LITTLE
    )
    pressure, _ = self.client.use_node('REG_DATA_ASSEMBLY_PRESSURE').read(1)

    return {
        'open_circuit_voltage': voltage,
        'assembly_pressure': pressure,
    }
```

> Tip: if you need to send parameters down to the PLC, set a flag register on the PLC side and complete a handshake reset, to avoid stale values and race conditions.

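For reference, a minimal sketch of that flag-register handshake, using the same `use_node().read()/write()` calls as the driver example above. The register names `REG_PARAM_TARGET_PRESSURE`, `COIL_PARAM_WRITE_REQ` and `COIL_PARAM_ACK` are hypothetical placeholders, not entries from the actual PLC map; replace them with nodes from your own `PLC_register.csv`.

```python
import time


def write_parameter_with_handshake(self, value: int, timeout: float = 5.0):
    # Hypothetical register names -- replace with entries from your PLC_register.csv.
    # 1) Write the parameter value, then raise the "write request" flag
    self.client.use_node('REG_PARAM_TARGET_PRESSURE').write(value)
    self.client.use_node('COIL_PARAM_WRITE_REQ').write(True)

    # 2) Wait for the PLC to acknowledge that it has latched the value
    deadline = time.time() + timeout
    while time.time() < deadline:
        ack, _ = self.client.use_node('COIL_PARAM_ACK').read(1)
        if bool(ack[0]):
            break
        time.sleep(0.05)
    else:
        raise TimeoutError("PLC did not acknowledge the parameter write")

    # 3) Reset the request flag so the next write starts from a clean state
    self.client.use_node('COIL_PARAM_WRITE_REQ').write(False)
```

The explicit request/acknowledge/reset sequence is what prevents the PLC from re-reading a stale value or two writes from racing each other.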
## 3. Generate and validate the registry locally

Once the workstation class and driver are done, you need to generate (or update) the workstation registry so the system can recognize it.

### 3.1 Generating the registry for a newly added workstation device (or resource) for the first time

First start unilab with the following command and open the unilab system status page:

```bash
python unilabos\app\main.py -g celljson.json --ak <user's AK> --sk <user's SK>
```

Click "Registry Edit" to open the registry editing page:


Fill in the information for automatic registry generation following the steps in the figure:


Step description:
1. Select the newly added workstation file `coin_cell_assembly.py`
2. Click the "Analyze" button to analyze the `coin_cell_assembly.py` file
3. Select the class in `coin_cell_assembly.py` that inherits from `WorkstationBase`
4. Fill in the module path of the new workstation .py file relative to the `unilabos` directory. For example, if the new workstation file `coin_cell_assembly.py` is located at `unilabos\devices\workstation\coin_cell_assembly\coin_cell_assembly.py`, enter `unilabos.devices.workstation.coin_cell_assembly` here.
5. Fill in the name of the newly defined workstation class (the name can be chosen freely)
6. Fill in the remark information for the new workstation registry
7. Generate the registry

When the steps above are complete, a new registry YAML file is generated, as shown below:


### 3.2 Add the newly generated registry

Create a new YAML file under `unilabos\registry\devices`; here it is named `coincellassemblyworkstation_device.yaml`. Paste the registry information generated above into `coincellassemblyworkstation_device.yaml`.

Run the following command in a terminal to complete the registry:

```bash
python unilabos\app\register.py --complete_registry
```

### 3.3 Start and upload the registry

After adding a new device, start unilab with the additional `--upload_registry` flag to upload the registry information:

```bash
python unilabos\app\main.py -g celljson.json --ak <user's AK> --sk <user's SK> --upload_registry
```

## 4. Notes

- In the newly generated YAML, confirm that `module` points to the new workstation class. In this example, check that `coincellassemblyworkstation_device.yaml` points to the `CoinCellAssemblyWorkstation` class defined in `coin_cell_assembly.py`:

```
module: unilabos.devices.workstation.coin_cell_assembly.coin_cell_assembly:CoinCellAssemblyWorkstation
```

- When adding a device (or resource) for the first time, you need to add the registry information on the web page, complete it with `--complete_registry`, and upload it with `--upload_registry`.

- If you are not adding a new device (or resource) and only modified the workstation driver .py file, you do not need to add registry information on the web page. Simply run the registry completion and then upload the registry.

@@ -111,8 +111,8 @@ new_device:  # device name, must be unique

1. Actions starting with `auto-`: generated automatically from the methods of your Python class
2. Generic driver actions:
   - `_execute_driver_command`: execute a driver command synchronously
   - `_execute_driver_command_async`: execute a driver command asynchronously
   - `_execute_driver_command`: execute a driver command synchronously (local only)
   - `_execute_driver_command_async`: execute a driver command asynchronously (local only)

### If you want to define actions manually

BIN  (image)  After | Width: | Height: | Size: 1.2 MiB
BIN  (image)  After | Width: | Height: | Size: 629 KiB
BIN  (image)  After | Width: | Height: | Size: 1.1 MiB
BIN  (image)  After | Width: | Height: | Size: 269 KiB
BIN  docs/developer_guide/image_add_batteryPLC/unilab_new_yaml.png  (new file)  After | Width: | Height: | Size: 428 KiB
BIN  (image)  After | Width: | Height: | Size: 310 KiB
BIN  docs/developer_guide/image_add_batteryPLC/unilab_sys_status.png  (new file)  After | Width: | Height: | Size: 66 KiB
405  docs/developer_guide/materials_construction_guide.md  (new file)
@@ -0,0 +1,405 @@
# Materials construction guide

## Overview

In the UniLab-OS system, the materials used in any workstation consist of four core components:

1. **Deck** - the work surface, which defines the layout of the whole workspace
2. **Warehouse** - storage areas used to hold carriers and materials
3. **Carriers** - racks that hold bottles and other materials
4. **Bottles** - the actual material containers

This document uses the BioYond workstation as an example to explain in detail how to build these material components.

## File structure

Material definition files live in the `unilabos/resources/` folder:

```
unilabos/resources/bioyond/
├── decks.py                 # Deck definitions
├── YB_warehouses.py         # Warehouse definitions
├── YB_bottle_carriers.py    # Carrier definitions
└── YB_bottles.py            # Bottle definitions
```

The corresponding registry files live in the `unilabos/registry/resources/bioyond/` folder:

```
unilabos/registry/resources/bioyond/
├── deck.yaml                # Deck registry
├── YB_bottle_carriers.yaml  # Carrier registry
└── YB_bottle.yaml           # Bottle registry
```

## 1. Building the deck (Deck)

The deck is the foundation of the whole workstation; it defines the dimensions of the workspace and the positions of the individual components.

### Code example (decks.py)

```python
from pylabrobot.resources import Coordinate, Deck
from unilabos.resources.bioyond.YB_warehouses import (
    bioyond_warehouse_2x2x1,
    bioyond_warehouse_3x5x1,
    bioyond_warehouse_20x1x1,
    bioyond_warehouse_3x3x1,
    bioyond_warehouse_10x1x1
)

class BIOYOND_YB_Deck(Deck):
    def __init__(
        self,
        name: str = "YB_Deck",
        size_x: float = 4150,    # Deck size in X (mm)
        size_y: float = 1400.0,  # Deck size in Y (mm)
        size_z: float = 2670.0,  # Deck size in Z (mm)
        category: str = "deck",
        setup: bool = False
    ) -> None:
        super().__init__(name=name, size_x=4150.0, size_y=1400.0, size_z=2670.0)
        if setup:
            self.setup()  # When setup is True in the workstation config, automatically create and place all predefined warehouses

    def setup(self) -> None:
        # Define the warehouse areas on the deck
        self.warehouses = {
            "自动堆栈-左": bioyond_warehouse_2x2x1("自动堆栈-左"),
            "自动堆栈-右": bioyond_warehouse_2x2x1("自动堆栈-右"),
            "手动堆栈-左": bioyond_warehouse_3x5x1("手动堆栈-左"),
            "手动堆栈-右": bioyond_warehouse_3x5x1("手动堆栈-右"),
            "粉末加样头堆栈": bioyond_warehouse_20x1x1("粉末加样头堆栈"),
            "配液站内试剂仓库": bioyond_warehouse_3x3x1("配液站内试剂仓库"),
            "试剂替换仓库": bioyond_warehouse_10x1x1("试剂替换仓库"),
        }

        # Define the coordinates of each warehouse on the deck
        self.warehouse_locations = {
            "自动堆栈-左": Coordinate(-100.3, 171.5, 0.0),
            "自动堆栈-右": Coordinate(3960.1, 155.9, 0.0),
            "手动堆栈-左": Coordinate(-213.3, 804.4, 0.0),
            "手动堆栈-右": Coordinate(3960.1, 807.6, 0.0),
            "粉末加样头堆栈": Coordinate(415.0, 1301.0, 0.0),
            "配液站内试剂仓库": Coordinate(2162.0, 437.0, 0.0),
            "试剂替换仓库": Coordinate(1173.0, 802.0, 0.0),
        }

        # Assign each warehouse to its position on the deck
        for warehouse_name, warehouse in self.warehouses.items():
            self.assign_child_resource(warehouse, location=self.warehouse_locations[warehouse_name])
```

### Usage in the workstation configuration

When the deck is defined in the workstation configuration file, the `setup` parameter controls whether all warehouses are built automatically:

```json
{
  "id": "YB_Bioyond_Deck",
  "name": "YB_Bioyond_Deck",
  "children": [],
  "parent": "bioyond_cell_workstation",
  "type": "deck",
  "class": "BIOYOND_YB_Deck",
  "config": {
    "type": "BIOYOND_YB_Deck",
    "setup": true
  },
  "data": {}
}
```

**Important notes**:
- When `"setup": true`, the system automatically calls the deck's `setup()` method
- This creates all predefined warehouses and places them at their designated positions on the deck
- If `"setup": false` or the parameter is omitted, only an empty deck is created and warehouses must be added manually (see the sketch below)
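A minimal sketch of the manual path when `"setup": false`. The module path `unilabos.resources.bioyond.decks` is assumed from the file layout above, and the extra warehouse name and coordinate are illustrative values, not part of the BioYond configuration:

```python
from pylabrobot.resources import Coordinate
from unilabos.resources.bioyond.decks import BIOYOND_YB_Deck
from unilabos.resources.bioyond.YB_warehouses import bioyond_warehouse_2x2x1

# Create an empty deck (setup=False), then place warehouses by hand
deck = BIOYOND_YB_Deck(name="YB_Deck", setup=False)

# Either run the predefined layout later...
deck.setup()

# ...or assign individual warehouses at custom coordinates (illustrative values)
extra = bioyond_warehouse_2x2x1("自动堆栈-备用")
deck.assign_child_resource(extra, location=Coordinate(500.0, 200.0, 0.0))
```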
### 关键要点注释
|
||||
|
||||
- `size_x`, `size_y`, `size_z`: 定义桌子的物理尺寸
|
||||
- `warehouses`: 字典类型,包含桌子上所有的仓库区域
|
||||
- `warehouse_locations`: 定义每个仓库在桌子坐标系中的位置
|
||||
- `assign_child_resource()`: 将仓库资源分配到桌子的指定位置
|
||||
- `setup()`: 可选的自动设置方法,初始化时可调用
|
||||
|
||||
## 2. 堆栈(Warehouse)构建
|
||||
|
||||
堆栈定义了存储区域的规格和布局,用于放置载具。
|
||||
|
||||
### 代码示例 (YB_warehouses.py)
|
||||
|
||||
```python
|
||||
from unilabos.resources.warehouse import WareHouse, YB_warehouse_factory
|
||||
|
||||
def bioyond_warehouse_1x4x4(name: str) -> WareHouse:
|
||||
"""创建BioYond 1x4x4仓库
|
||||
|
||||
Args:
|
||||
name: 仓库名称
|
||||
|
||||
Returns:
|
||||
WareHouse: 仓库对象
|
||||
"""
|
||||
return YB_warehouse_factory(
|
||||
name=name,
|
||||
num_items_x=1, # X方向位置数量
|
||||
num_items_y=4, # Y方向位置数量
|
||||
num_items_z=4, # Z方向位置数量(层数)
|
||||
dx=10.0, # X方向起始偏移
|
||||
dy=10.0, # Y方向起始偏移
|
||||
dz=10.0, # Z方向起始偏移
|
||||
item_dx=137.0, # X方向间距
|
||||
item_dy=96.0, # Y方向间距
|
||||
item_dz=120.0, # Z方向间距(层高)
|
||||
category="warehouse",
|
||||
)
|
||||
|
||||
def bioyond_warehouse_2x2x1(name: str) -> WareHouse:
|
||||
"""创建BioYond 2x2x1仓库(自动堆栈)"""
|
||||
return YB_warehouse_factory(
|
||||
name=name,
|
||||
num_items_x=2,
|
||||
num_items_y=2,
|
||||
num_items_z=1, # 单层
|
||||
dx=10.0,
|
||||
dy=10.0,
|
||||
dz=10.0,
|
||||
item_dx=137.0,
|
||||
item_dy=96.0,
|
||||
item_dz=120.0,
|
||||
category="YB_warehouse",
|
||||
)
|
||||
```
|
||||
|
||||
### 关键要点注释
|
||||
|
||||
- `num_items_x/y/z`: 定义仓库在各个方向的位置数量
|
||||
- `dx/dy/dz`: 第一个位置的起始偏移坐标
|
||||
- `item_dx/dy/dz`: 相邻位置之间的间距
|
||||
- `category`: 仓库类别,用于分类管理
|
||||
- `YB_warehouse_factory`: 统一的仓库创建工厂函数
|
||||
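为帮助理解 `dx/dy/dz` 与 `item_dx/dy/dz` 的几何含义,下面给出一个最小示意(纯计算,不依赖任何库;实际落位由 `YB_warehouse_factory` 内部完成,这里仅演示参数含义):

```python
# 以上文 bioyond_warehouse_1x4x4 的参数为例,计算仓位 (i, j, k) 相对仓库原点的偏移 (mm)
dx, dy, dz = 10.0, 10.0, 10.0                    # 第一个仓位的起始偏移
item_dx, item_dy, item_dz = 137.0, 96.0, 120.0   # 相邻仓位之间的间距


def slot_offset(i: int, j: int, k: int) -> tuple[float, float, float]:
    """返回第 i 列、第 j 行、第 k 层仓位的参考偏移坐标。"""
    return (dx + i * item_dx, dy + j * item_dy, dz + k * item_dz)


print(slot_offset(0, 0, 0))  # (10.0, 10.0, 10.0)
print(slot_offset(0, 3, 3))  # (10.0, 298.0, 370.0)
```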
|
||||
## 3. 载具(Carriers)构建
|
||||
|
||||
载具是承载瓶子的容器架,定义了瓶子的排列方式和位置。
|
||||
|
||||
### 代码示例 (YB_bottle_carriers.py)
|
||||
|
||||
```python
|
||||
from pylabrobot.resources import create_homogeneous_resources, Coordinate, ResourceHolder, create_ordered_items_2d
|
||||
from unilabos.resources.itemized_carrier import Bottle, BottleCarrier
|
||||
from unilabos.resources.bioyond.YB_bottles import YB_pei_ye_xiao_Bottle
|
||||
|
||||
def YB_peiyepingxiaoban(name: str) -> BottleCarrier:
|
||||
"""配液瓶(小)板 - 4x2布局,8个位置
|
||||
|
||||
Args:
|
||||
name: 载具名称
|
||||
|
||||
Returns:
|
||||
BottleCarrier: 载具对象,包含8个配液瓶位置
|
||||
"""
|
||||
|
||||
# 载具物理尺寸 (mm)
|
||||
carrier_size_x = 127.8
|
||||
carrier_size_y = 85.5
|
||||
carrier_size_z = 65.0
|
||||
|
||||
# 瓶位参数
|
||||
bottle_diameter = 35.0 # 瓶子直径
|
||||
bottle_spacing_x = 42.0 # X方向瓶子间距
|
||||
bottle_spacing_y = 35.0 # Y方向瓶子间距
|
||||
|
||||
# 计算起始位置 (居中排列)
|
||||
start_x = (carrier_size_x - (4 - 1) * bottle_spacing_x - bottle_diameter) / 2
|
||||
start_y = (carrier_size_y - (2 - 1) * bottle_spacing_y - bottle_diameter) / 2
|
||||
|
||||
# 创建瓶位布局:4列x2行
|
||||
sites = create_ordered_items_2d(
|
||||
klass=ResourceHolder,
|
||||
num_items_x=4, # 4列
|
||||
num_items_y=2, # 2行
|
||||
dx=start_x,
|
||||
dy=start_y,
|
||||
dz=5.0, # 瓶子底部高度
|
||||
item_dx=bottle_spacing_x,
|
||||
item_dy=bottle_spacing_y,
|
||||
size_x=bottle_diameter,
|
||||
size_y=bottle_diameter,
|
||||
size_z=carrier_size_z,
|
||||
)
|
||||
|
||||
# 为每个瓶位设置名称
|
||||
for k, v in sites.items():
|
||||
v.name = f"{name}_{v.name}"
|
||||
|
||||
# 创建载具对象
|
||||
carrier = BottleCarrier(
|
||||
name=name,
|
||||
size_x=carrier_size_x,
|
||||
size_y=carrier_size_y,
|
||||
size_z=carrier_size_z,
|
||||
sites=sites,
|
||||
model="YB_peiyepingxiaoban",
|
||||
)
|
||||
|
||||
# 设置载具布局参数
|
||||
carrier.num_items_x = 4
|
||||
carrier.num_items_y = 2
|
||||
carrier.num_items_z = 1
|
||||
|
||||
# 定义瓶子排列顺序
|
||||
ordering = ["A1", "A2", "A3", "A4", "B1", "B2", "B3", "B4"]
|
||||
|
||||
# 为每个位置创建瓶子实例
|
||||
for i in range(8):
|
||||
carrier[i] = YB_pei_ye_xiao_Bottle(f"{name}_bottle_{ordering[i]}")
|
||||
|
||||
return carrier
|
||||
```
|
||||
|
||||
### 关键要点注释
|
||||
|
||||
- `carrier_size_x/y/z`: 载具的物理尺寸
|
||||
- `bottle_diameter`: 瓶子的直径,用于计算瓶位大小
|
||||
- `bottle_spacing_x/y`: 瓶子之间的间距
|
||||
- `create_ordered_items_2d`: 创建二维排列的瓶位
|
||||
- `sites`: 瓶位字典,存储所有瓶子位置信息
|
||||
- `ordering`: 定义瓶位的命名规则(如A1, A2, B1等)
|
||||
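下面是一个使用示意(假设 `BottleCarrier` 支持与上文赋值时相同的整数索引读取方式,输出内容仅为预期示例):

```python
from unilabos.resources.bioyond.YB_bottle_carriers import YB_peiyepingxiaoban

# 创建一个配液瓶(小)板,内部已按 A1~B4 的顺序放好 8 个瓶子
carrier = YB_peiyepingxiaoban("peiyeban_1")

print(carrier.name)                               # peiyeban_1
print(carrier.num_items_x, carrier.num_items_y)   # 4 2

# 通过整数索引访问瓶位上的瓶子(与上文 carrier[i] = ... 的赋值方式对应)
first_bottle = carrier[0]
print(first_bottle.name)                          # 预期为 peiyeban_1_bottle_A1,具体以实际实现为准
```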
|
||||
## 4. 瓶子(Bottles)构建
|
||||
|
||||
瓶子是最终的物料容器,定义了容器的物理属性。
|
||||
|
||||
### 代码示例 (YB_bottles.py)
|
||||
|
||||
```python
|
||||
from typing import Optional

from unilabos.resources.itemized_carrier import Bottle
|
||||
|
||||
def YB_pei_ye_xiao_Bottle(
|
||||
name: str,
|
||||
diameter: float = 35.0, # 瓶子直径 (mm)
|
||||
height: float = 60.0, # 瓶子高度 (mm)
|
||||
max_volume: float = 30000.0, # 最大容量 (μL) - 30mL
|
||||
barcode: Optional[str] = None,  # 条码
|
||||
) -> Bottle:
|
||||
"""创建配液瓶(小)
|
||||
|
||||
Args:
|
||||
name: 瓶子名称
|
||||
diameter: 瓶子直径
|
||||
height: 瓶子高度
|
||||
max_volume: 最大容量(微升)
|
||||
barcode: 条码标识
|
||||
|
||||
Returns:
|
||||
Bottle: 瓶子对象
|
||||
"""
|
||||
return Bottle(
|
||||
name=name,
|
||||
diameter=diameter,
|
||||
height=height,
|
||||
max_volume=max_volume,
|
||||
barcode=barcode,
|
||||
model="YB_pei_ye_xiao_Bottle",
|
||||
)
|
||||
|
||||
def YB_ye_Bottle(
|
||||
name: str,
|
||||
diameter: float = 40.0,
|
||||
height: float = 70.0,
|
||||
max_volume: float = 50000.0, # 最大容量
|
||||
barcode: Optional[str] = None,
|
||||
) -> Bottle:
|
||||
"""创建液体瓶"""
|
||||
return Bottle(
|
||||
name=name,
|
||||
diameter=diameter,
|
||||
height=height,
|
||||
max_volume=max_volume,
|
||||
barcode=barcode,
|
||||
model="YB_ye_Bottle",
|
||||
)
|
||||
```
|
||||
|
||||
### 关键要点注释
|
||||
|
||||
- `diameter`: 瓶子直径,影响瓶位大小计算
|
||||
- `height`: 瓶子高度,用于碰撞检测和移液计算
|
||||
- `max_volume`: 最大容量,单位为微升(μL)
|
||||
- `barcode`: 条码标识,用于瓶子追踪
|
||||
- `model`: 型号标识,用于区分不同类型的瓶子
|
||||
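按同样的模式可以派生新的瓶型。下面是一个假想的 100 mL 试剂瓶示例(函数名、尺寸与 `model` 均为演示用假设,并非仓库中已有定义):

```python
from typing import Optional

from unilabos.resources.itemized_carrier import Bottle


def YB_shi_ji_da_Bottle(
    name: str,
    diameter: float = 60.0,        # 假设的瓶子直径 (mm)
    height: float = 95.0,          # 假设的瓶子高度 (mm)
    max_volume: float = 100000.0,  # 最大容量 (μL) - 100mL
    barcode: Optional[str] = None,
) -> Bottle:
    """创建 100 mL 试剂瓶(演示用,非现有定义)"""
    return Bottle(
        name=name,
        diameter=diameter,
        height=height,
        max_volume=max_volume,
        barcode=barcode,
        model="YB_shi_ji_da_Bottle",
    )
```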
|
||||
## 5. 注册表配置
|
||||
|
||||
创建完物料定义后,需要在注册表中注册这些物料,使系统能够识别和使用它们。
|
||||
|
||||
在 `unilabos/registry/resources/bioyond/` 目录下创建:
|
||||
|
||||
- `deck.yaml` - 桌子注册表
|
||||
- `YB_bottle_carriers.yaml` - 载具注册表
|
||||
- `YB_bottle.yaml` - 瓶子注册表
|
||||
|
||||
### 5.1 桌子注册表 (deck.yaml)
|
||||
|
||||
```yaml
|
||||
BIOYOND_YB_Deck:
|
||||
category:
|
||||
- deck # 前端显示的分类存放
|
||||
class:
|
||||
module: unilabos.resources.bioyond.decks:BIOYOND_YB_Deck # 定义桌子的类的路径
|
||||
type: pylabrobot
|
||||
description: BIOYOND_YB_Deck # 描述信息
|
||||
handles: []
|
||||
icon: 配液站.webp # 图标文件
|
||||
init_param_schema: {}
|
||||
registry_type: resource # 注册类型
|
||||
version: 1.0.0 # 版本号
|
||||
```
|
||||
|
||||
### 5.2 载具注册表 (YB_bottle_carriers.yaml)
|
||||
|
||||
```yaml
|
||||
YB_peiyepingxiaoban:
|
||||
category:
|
||||
- yb3
|
||||
- YB_bottle_carriers
|
||||
class:
|
||||
module: unilabos.resources.bioyond.YB_bottle_carriers:YB_peiyepingxiaoban
|
||||
type: pylabrobot
|
||||
description: YB_peiyepingxiaoban
|
||||
handles: []
|
||||
icon: ''
|
||||
init_param_schema: {}
|
||||
registry_type: resource
|
||||
version: 1.0.0
|
||||
```
|
||||
|
||||
### 5.3 瓶子注册表 (YB_bottle.yaml)
|
||||
|
||||
```yaml
|
||||
YB_pei_ye_xiao_Bottle:
|
||||
category:
|
||||
- yb3
|
||||
- YB_bottle
|
||||
class:
|
||||
module: unilabos.resources.bioyond.YB_bottles:YB_pei_ye_xiao_Bottle
|
||||
type: pylabrobot
|
||||
description: YB_pei_ye_xiao_Bottle
|
||||
handles: []
|
||||
icon: ''
|
||||
init_param_schema: {}
|
||||
registry_type: resource
|
||||
version: 1.0.0
|
||||
```
|
||||
|
||||
### 注册表关键要点注释
|
||||
|
||||
- `category`: 物料分类,用于在云端(网页界面)中的分类中显示
|
||||
- `module`: Python模块路径,格式为 `模块路径:类名`
|
||||
- `type`: 框架类型,通常为 `pylabrobot`(默认即可)
|
||||
- `description`: 描述信息,显示在用户界面中
|
||||
- `icon`: 图标文件名,按名称与后端上传的图标唯一匹配,并显示在云端
|
||||
- `registry_type`: 固定为 `resource`
|
||||
- `version`: 版本号,用于版本管理
|
||||
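上文定义的堆栈工厂函数同样需要注册后才能被系统识别。下面是一个假想的堆栈注册表条目示例(文件名 `YB_warehouses.yaml` 与 `module` 路径为按既有命名规则的推测,请以实际仓库为准):

```yaml
bioyond_warehouse_2x2x1:
  category:
  - yb3
  - YB_warehouse
  class:
    module: unilabos.resources.bioyond.YB_warehouses:bioyond_warehouse_2x2x1
    type: pylabrobot
  description: bioyond_warehouse_2x2x1
  handles: []
  icon: ''
  init_param_schema: {}
  registry_type: resource
  version: 1.0.0
```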
409
docs/developer_guide/materials_tutorial.md
Normal file
@@ -0,0 +1,409 @@
|
||||
# 物料教程(Resource)
|
||||
|
||||
本教程面向 Uni-Lab-OS 的开发者,讲解“物料”的核心概念、3种物料格式(UniLab、PyLabRobot、奔耀Bioyond)及其相互转换方法,并说明4种 children 结构表现形式及使用场景。
|
||||
|
||||
---
|
||||
|
||||
## 1. 物料是什么
|
||||
|
||||
- **物料(Resource)**:指实验工作站中的实体对象,包括设备(device)、操作甲板 (deck)、试剂、实验耗材,也包括设备上承载的具体物料或者包含的容器(如container/plate/well/瓶/孔/片等)。
|
||||
- **物料基本信息**(以 UniLab list格式为例):
|
||||
|
||||
```jsonc
|
||||
{
|
||||
"id": "plate", // 某一类物料的唯一名称
|
||||
"name": "50ml瓶装试剂托盘", // 在云端显示的名称
|
||||
"sample_id": null, // 同类物料的不同样品
|
||||
"children": [
|
||||
"50ml试剂瓶" // 表示托盘上有一个 50ml 试剂瓶
|
||||
],
|
||||
"parent": "deck", // 此物料放置在 deck 上
|
||||
"type": "plate", // 物料类型
|
||||
"class": "plate", // 物料对应的注册/类名
|
||||
"position": {
|
||||
"x": 0, // 初始放置位置
|
||||
"y": 0,
|
||||
"z": 0
|
||||
},
|
||||
"config": { // 固有配置(尺寸、旋转等)
|
||||
"size_x": 400.0,
|
||||
"size_y": 400.0,
|
||||
"size_z": 400.0,
|
||||
"rotation": {
|
||||
"x": 0,
|
||||
"y": 0,
|
||||
"z": 0,
|
||||
"type": "Rotation"
|
||||
}
|
||||
},
|
||||
"data": {
|
||||
"bottle_number": 1 // 动态数据(可变化)
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## 2. 3种物料格式概览(UniLab、PyLabRobot、奔耀Bioyond)
|
||||
|
||||
### 2.1 UniLab 物料格式(云端/项目内通用)
|
||||
|
||||
- 结构特征:顶层通常是 `nodes` 列表;每个节点是扁平字典,`children` 是子节点 `id` 列表;`parent` 为父节点 `id` 或 `null`。
|
||||
- 用途:
|
||||
- 云端数据存储、前端可视化、与图结构算法互操作
|
||||
- 在上传/下载/部署配置时作为标准交换格式
|
||||
|
||||
示例片段(UniLab 物料格式):
|
||||
|
||||
```jsonc
|
||||
{
|
||||
"nodes": [
|
||||
|
||||
{
|
||||
"id": "a",
|
||||
"name": "name_a",
|
||||
"sample_id": 1,
|
||||
"type": "deck",
|
||||
"class": "deck",
|
||||
"parent": null,
|
||||
"children": ["b1"],
|
||||
"position": {"x": 0, "y": 0, "z": 0},
|
||||
"config": {},
|
||||
"data": {}
|
||||
},
|
||||
{
|
||||
|
||||
"id": "b1",
|
||||
"name": "name_b1",
|
||||
"sample_id": 1,
|
||||
"type": "plate",
|
||||
"class": "plate",
|
||||
"parent": "a1",
|
||||
"children": [],
|
||||
"position": {"x": 0, "y": 0, "z": 0},
|
||||
"config": {},
|
||||
"data": {}
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
### 2.2 PyLabRobot(PLR)物料格式(实验流程运行时)
|
||||
|
||||
- 结构特征:严格的层级树,`children` 为“子资源字典列表”(每个子节点本身是完整对象)。
|
||||
- 用途:
|
||||
- 实验流程执行与调度,PLR 运行时期望的资源对象格式
|
||||
- 通过 `Resource.deserialize/serialize`、`load_all_state/serialize_all_state` 与对象交互
|
||||
|
||||
示例片段(PLR 物料格式):
|
||||
|
||||
```json
|
||||
{
|
||||
"name": "deck",
|
||||
"type": "Deck",
|
||||
"category": "deck",
|
||||
"location": {"x": 0, "y": 0, "z": 0, "type": "Coordinate"},
|
||||
"rotation": {"x": 0, "y": 0, "z": 0, "type": "Rotation"},
|
||||
"parent_name": null,
|
||||
"children": [
|
||||
{
|
||||
"name": "plate_1",
|
||||
"type": "Plate",
|
||||
"category": "plate_96",
|
||||
"location": {"x": 100, "y": 0, "z": 0, "type": "Coordinate"},
|
||||
"rotation": {"x": 0, "y": 0, "z": 0, "type": "Rotation"},
|
||||
"parent_name": "deck",
|
||||
"children": [
|
||||
{
|
||||
"name": "A1",
|
||||
"type": "Well",
|
||||
"category": "well",
|
||||
"location": {"x": 0, "y": 0, "z": 0, "type": "Coordinate"},
|
||||
"rotation": {"x": 0, "y": 0, "z": 0, "type": "Rotation"},
|
||||
"parent_name": "plate_1",
|
||||
"children": []
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
|
||||
### 2.3 奔耀 Bioyond 物料格式(第三方来源)
|
||||
一般是厂商自己定义的json格式和字段,信息需要提取和对应。以下为示例说明。
|
||||
|
||||
- 结构特征:顶层 `data` 列表,每项包含 `typeName`、`code`、`barCode`、`name`、`quantity`、`unit`、`locations`(仓位 `whName`、`x/y/z`)、`detail`(细粒度内容,如瓶内液体或孔位物料)。
|
||||
- 用途:
|
||||
- 第三方 WMS/设备的物料清单输入
|
||||
- 需要自定义映射表将 `typeName` → PLR 类名,对 `locations`/`detail` 进行落位/赋值
|
||||
|
||||
示例片段(奔耀Bioyond 物料格式):
|
||||
|
||||
|
||||
```json
|
||||
{
|
||||
"data": [
|
||||
{
|
||||
"id": "3a1b5c10-d4f3-01ac-1e64-5b4be2add4b1",
|
||||
"typeName": "液",
|
||||
"code": "0006-00014",
|
||||
"barCode": "",
|
||||
"name": "EMC",
|
||||
"quantity": 50,
|
||||
"lockQuantity": 2.057,
|
||||
"unit": "瓶",
|
||||
"status": 1,
|
||||
"isUse": false,
|
||||
"locations": [
|
||||
{
|
||||
"id": "3a19da43-57b5-5e75-552f-8dbd0ad1075f",
|
||||
"whid": "3a19da43-57b4-a2a8-3f52-91dbbeb836db",
|
||||
"whName": "配液站内试剂仓库",
|
||||
"code": "0003-0003",
|
||||
"x": 1,
|
||||
"y": 3,
|
||||
"z": 1,
|
||||
"quantity": 0
|
||||
}
|
||||
],
|
||||
"detail": [
|
||||
{
|
||||
"code": "0006-00014-01",
|
||||
"name": "EMC-瓶-1",
|
||||
"x": 1,
|
||||
"y": 3,
|
||||
"z": 1,
|
||||
"quantity": 500.0
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"code": 1,
|
||||
"message": "",
|
||||
"timestamp": 0
|
||||
}
|
||||
```
|
||||
### 2.4 3种物料格式关键字段对应(UniLab、PyLabRobot、奔耀Bioyond)
|
||||
|
||||
| 含义 | UniLab | PyLabRobot (PLR) | 奔耀 Bioyond |
|
||||
| - | - | - | - |
|
||||
| 节点唯一名 | `id` | `name` | `name` |
|
||||
| 父节点引用 | `parent` | `parent_name` | `locations` 坐标(无直接父名,需映射坐标下的物料) |
|
||||
| 子节点集合 | `children`(id 列表或对象列表,视结构而定) | `children`(对象列表) | `detail`(明细,非严格树结构,需要自定义映射) |
|
||||
| 类型(抽象类别) | `type`(device/container/plate/deck/…) | `category`(plate/well/…),以及类名 `type` | `typeName`(厂商自定义,如“液”、“加样头(大)”) |
|
||||
| 运行/业务数据 | `data` | 通过 `serialize_all_state()`/`load_all_state()` 管理的状态 | `quantity`、`lockQuantity` 等业务数值 |
|
||||
| 固有配置 | `config`(size_x/size_y/size_z/model/ordering…) | 资源字典中的同名键(反序列化时按构造签名取用) | 厂商自定义字段(需映射入 PLR/UniLab 的 `config` 或 `data`) |
|
||||
| 空间位置 | `position`(x/y/z) | `location`(Coordinate) + `rotation`(Rotation) | `locations`(whName、x/y/z),不含旋转 |
|
||||
| 条码/标识 | `config.barcode`(可选) | 常放在配置键中(如 `barcode`) | `barCode` |
|
||||
| 数量单位 | 无固定键,通常在 `data` | 无固定键,通常在配置或状态中 | `unit` |
|
||||
| 物料编码 | 通常在 `config` 或 `data` 自定义 | 通常在配置中自定义 | `code` |
|
||||
|
||||
说明:
|
||||
- Bioyond 不提供显式的树形父子关系,通常通过 `locations` 将物料落位到某仓位/坐标。用 `detail` 表示子级明细。
|
||||
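下面用一小段 Python 演示上表的字段对应关系:把一条 Bioyond 物料记录手工映射为 UniLab 风格的节点字典(仅作示意,未处理 `detail` 与多仓位等情况;正式转换请使用第 4.3 节的 `resource_bioyond_to_plr`):

```python
def bioyond_item_to_unilab_node(item: dict, type_mapping: dict) -> dict:
    """将单条 Bioyond 物料记录映射为 UniLab 风格的扁平节点(示意版)。"""
    loc = item["locations"][0] if item.get("locations") else {}
    return {
        "id": item["name"],                      # 节点唯一名 <- name
        "name": item["name"],
        "sample_id": None,
        "type": "container",
        "class": type_mapping.get(item["typeName"], "RegularContainer"),
        "parent": loc.get("whName"),             # 以仓库名近似表示父节点
        "children": [],
        "position": {"x": loc.get("x", 0), "y": loc.get("y", 0), "z": loc.get("z", 0)},
        "config": {"barcode": item.get("barCode"), "code": item.get("code")},
        "data": {"quantity": item.get("quantity"), "unit": item.get("unit")},
    }
```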
|
||||
---
|
||||
|
||||
## 3. children 的四种结构表示
|
||||
|
||||
- **list(扁平列表)**:每个节点是扁平字典,`children` 为子节点 `id` 数组。示例:UniLab `nodes` 中的单个节点。
|
||||
|
||||
```json
|
||||
{
|
||||
"nodes": [
|
||||
{ "id": "root", "parent": null, "children": ["child1"] },
|
||||
{ "id": "child1", "parent": "root", "children": [] }
|
||||
]
|
||||
}
|
||||
```
|
||||
- **dict(嵌套字典)**:节点的 `children` 是 `{ child_id: child_node_dict }` 字典。
|
||||
|
||||
```json
|
||||
{
|
||||
"id": "root",
|
||||
"parent": null,
|
||||
"children": {
|
||||
"child1": { "id": "child1", "parent": "root", "children": {} }
|
||||
}
|
||||
}
|
||||
```
|
||||
- **tree(树形列表)**:顶层是 `[root_node, ...]`,每个 `node.children` 是“子节点对象列表”(而非 id 列表)。
|
||||
|
||||
```json
|
||||
[
|
||||
{
|
||||
"id": "root",
|
||||
"parent": null,
|
||||
"children": [
|
||||
{ "id": "child1", "parent": "root", "children": [] }
|
||||
]
|
||||
}
|
||||
]
|
||||
```
|
||||
- **nestdict(顶层嵌套字典)**:顶层是 `{root_id: root_node, ...}`,或者根节点自身带 `children: {id: node}` 形态。
|
||||
|
||||
```json
|
||||
{
|
||||
"root": {
|
||||
"id": "root",
|
||||
"parent": null,
|
||||
"children": {
|
||||
"child1": { "id": "child1", "parent": "root", "children": {} }
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
这些结构之间可使用 `graphio.py` 中的工具函数互转(见下一节)。
|
||||
|
||||
---
|
||||
|
||||
## 4. 转换函数及调用
|
||||
|
||||
核心代码文件:`unilabos/resources/graphio.py`
|
||||
|
||||
### 4.1 结构互转(list/dict/tree/nestdict)
|
||||
|
||||
代码引用:
|
||||
|
||||
```217:239:unilabos/resources/graphio.py
|
||||
def dict_to_tree(nodes: dict, devices_only: bool = False) -> list[dict]:
|
||||
# ... 由扁平 dict(id->node)生成树(children 为对象列表)
|
||||
```
|
||||
|
||||
```241:267:unilabos/resources/graphio.py
|
||||
def dict_to_nested_dict(nodes: dict, devices_only: bool = False) -> dict:
|
||||
# ... 由扁平 dict 生成嵌套字典(children 为 {id:node})
|
||||
```
|
||||
|
||||
```270:273:unilabos/resources/graphio.py
|
||||
def list_to_nested_dict(nodes: list[dict]) -> dict:
|
||||
# ... 由扁平列表(children 为 id 列表)转嵌套字典
|
||||
```
|
||||
|
||||
```275:286:unilabos/resources/graphio.py
|
||||
def tree_to_list(tree: list[dict]) -> list[dict]:
|
||||
# ... 由树形列表转回扁平列表(children 还原为 id 列表)
|
||||
```
|
||||
|
||||
```289:337:unilabos/resources/graphio.py
|
||||
def nested_dict_to_list(nested_dict: dict) -> list[dict]:
|
||||
# ... 由嵌套字典转回扁平列表
|
||||
```
|
||||
|
||||
常见路径:
|
||||
|
||||
- UniLab 扁平列表 → 树:`dict_to_tree({r["id"]: r for r in resources})`
|
||||
- 树 → UniLab 扁平列表:`tree_to_list(resources_tree)`
|
||||
- 扁平列表 ↔ 嵌套字典:`list_to_nested_dict` / `nested_dict_to_list`
|
||||
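一个最小的往返示例(函数签名以上文代码引用为准;节点字段可按第 1 节的完整格式补全,这里只保留 id/parent/children 以示意):

```python
from unilabos.resources.graphio import dict_to_tree, tree_to_list

# UniLab 扁平列表(children 为 id 列表)
resources = [
    {"id": "deck", "parent": None, "children": ["plate_1"], "type": "deck"},
    {"id": "plate_1", "parent": "deck", "children": [], "type": "plate"},
]

# 扁平列表 -> 树(children 变为子节点对象列表)
tree = dict_to_tree({r["id"]: r for r in resources})

# 树 -> 扁平列表(children 还原为 id 列表)
flat = tree_to_list(tree)
```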
|
||||
### 4.2 UniLab ↔ PyLabRobot(PLR)
|
||||
|
||||
高层封装:
|
||||
|
||||
```339:368:unilabos/resources/graphio.py
|
||||
def convert_resources_to_type(resources_list: list[dict], resource_type: Union[type, list[type]], *, plr_model: bool = False):
|
||||
# UniLab -> (NestedDict or PLR)
|
||||
```
|
||||
|
||||
```371:395:unilabos/resources/graphio.py
|
||||
def convert_resources_from_type(resources_list, resource_type: Union[type, list[type]], *, is_plr: bool = False):
|
||||
# (NestedDict or PLR) -> UniLab 扁平列表
|
||||
```
|
||||
|
||||
底层转换:
|
||||
|
||||
```398:441:unilabos/resources/graphio.py
|
||||
def resource_ulab_to_plr(resource: dict, plr_model=False) -> "ResourcePLR":
|
||||
# UniLab 单节点(树根) -> PLR Resource 对象
|
||||
```
|
||||
|
||||
```443:481:unilabos/resources/graphio.py
|
||||
def resource_plr_to_ulab(resource_plr: "ResourcePLR", parent_name: str = None, with_children=True):
|
||||
# PLR Resource -> UniLab 单节点(dict)
|
||||
```
|
||||
|
||||
示例:
|
||||
|
||||
```python
|
||||
from unilabos.resources.graphio import convert_resources_to_type, convert_resources_from_type
|
||||
from pylabrobot.resources.resource import Resource as ResourcePLR
|
||||
|
||||
# UniLab 扁平列表 -> PLR 根资源对象
|
||||
plr_root = convert_resources_to_type(resources_list=ulab_list, resource_type=ResourcePLR)
|
||||
|
||||
# PLR 资源对象 -> UniLab 扁平列表(用于保存/上传)
|
||||
ulab_flat = convert_resources_from_type(resources_list=plr_root, resource_type=ResourcePLR)
|
||||
```
|
||||
|
||||
可选项:
|
||||
|
||||
- `plr_model=True`:保留 `model` 字段(默认会移除)。
|
||||
- `with_children=False`:`resource_plr_to_ulab` 仅转换当前节点。
|
||||
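两个可选项的用法示意(接上面的示例,`ulab_list` 为 UniLab 扁平列表,`plr_root` 为已转换得到的 PLR 根资源):

```python
from unilabos.resources.graphio import convert_resources_to_type, resource_plr_to_ulab
from pylabrobot.resources.resource import Resource as ResourcePLR

# 转换时保留 model 字段
plr_root = convert_resources_to_type(resources_list=ulab_list, resource_type=ResourcePLR, plr_model=True)

# 仅转换当前节点,不递归处理 children
node_only = resource_plr_to_ulab(plr_root, with_children=False)
```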
|
||||
### 4.3 奔耀(Bioyond)→ PLR(及进一步到 UniLab)
|
||||
|
||||
转换入口:
|
||||
|
||||
```483:527:unilabos/resources/graphio.py
|
||||
def resource_bioyond_to_plr(bioyond_materials: list[dict], type_mapping: dict = {}, deck: Any = None) -> list[dict]:
|
||||
# Bioyond 列表 -> PLR 资源列表,并可根据 deck.warehouses 将资源落位
|
||||
```
|
||||
|
||||
使用示例:
|
||||
|
||||
```python
|
||||
import json
|
||||
from unilabos.resources.graphio import resource_bioyond_to_plr, convert_resources_from_type
|
||||
from pylabrobot.resources.resource import Resource as ResourcePLR
|
||||
|
||||
with open("unilabos/devices/workstation/bioyond_cell/bioyond_test_yibin.json", encoding="utf-8") as f:
    resp = json.load(f)
|
||||
materials = resp["data"]
|
||||
|
||||
# 将第三方类型name映射到 PLR 资源类名(需根据现场定义)
|
||||
type_mapping = {
|
||||
"液": "RegularContainer",
|
||||
"加样头(大)": "RegularContainer"
|
||||
}
|
||||
|
||||
plr_list = resource_bioyond_to_plr(materials, type_mapping=type_mapping, deck=None)
|
||||
|
||||
# 如需上传云端(UniLab 扁平格式):
|
||||
ulab_flat = convert_resources_from_type(plr_list, [ResourcePLR])
|
||||
```
|
||||
|
||||
说明:
|
||||
|
||||
- `type_mapping` 必须由开发者根据设备/物料种类人工维护。
|
||||
- 如传入 `deck`,且 `deck.warehouses` 命名与 `whName` 对应,可将物料安放到仓库坐标(x/y/z)。
|
||||
|
||||
---
|
||||
|
||||
## 5. 何时使用哪种格式
|
||||
|
||||
- **云端/持久化**:使用 UniLab 物料格式(扁平 `nodes` 列表,children 为 id 列表)。便于版本化、可视化与网络传输。
|
||||
- **实验工作流执行**:使用 PyLabRobot(PLR)格式。PLR 运行时依赖严格的树形资源结构与对象 API。
|
||||
- **第三方设备/系统(Bioyond)输入**:保持来源格式不变,使用 `resource_bioyond_to_plr` + 人工 `type_mapping` 将其转换为 PLR(必要时再转 UniLab)。
|
||||
|
||||
---
|
||||
|
||||
## 6. 常见问题与注意事项
|
||||
|
||||
- **children 形态不一致**:不同函数期望不同 children 形态,注意在进入转换前先用“结构互转”工具函数标准化形态。
|
||||
- **devices_only**:`dict_to_tree/dict_to_nested_dict` 支持仅保留 `type == device` 的节点。
|
||||
- **模型/类型字段**:PLR 对象序列化参数有所差异,`resource_ulab_to_plr` 内部会根据构造签名移除不兼容字段(如 `category`)。
|
||||
- **驱动初始化**:`initialize_resource(s)` 支持从注册表/类路径创建 PLR/UniLab 资源或列表。
|
||||
|
||||
参考代码:
|
||||
|
||||
```530:577:unilabos/resources/graphio.py
|
||||
def initialize_resource(resource_config: dict, resource_type: Any = None) -> Union[list[dict], ResourcePLR]:
|
||||
# 从注册类/模块反射创建资源,或将 UniLab 字典包装为列表
|
||||
```
|
||||
|
||||
```580:597:unilabos/resources/graphio.py
|
||||
def initialize_resources(resources_config) -> list[dict]:
|
||||
# 批量初始化
|
||||
```
|
||||
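一个示意性的调用方式(`resource_config` 的具体字段取决于注册表与实际实现,这里仅沿用前文节点结构作演示,并非权威用法):

```python
from unilabos.resources.graphio import initialize_resource, initialize_resources

# 单个资源:从 UniLab 节点字典(含注册的 class)创建
res = initialize_resource({
    "id": "plate_1",
    "class": "plate",
    "parent": "deck",
    "children": [],
    "config": {},
    "data": {},
})

# 批量初始化:传入资源配置列表
# res_list = initialize_resources(resources_config)
```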
|
||||
|
||||
|
||||
|
||||
@@ -26,13 +26,19 @@ boot_examples/index.md
|
||||
## 开发者指南
|
||||
|
||||
```{toctree}
|
||||
|
||||
:maxdepth: 2
|
||||
|
||||
developer_guide/device_driver
|
||||
developer_guide/add_device
|
||||
developer_guide/add_action
|
||||
developer_guide/actions
|
||||
developer_guide/workstation_architecture
|
||||
developer_guide/add_protocol
|
||||
developer_guide/add_batteryPLC
|
||||
developer_guide/materials_tutorial
|
||||
developer_guide/materials_construction_guide
|
||||
|
||||
```
|
||||
|
||||
## 接口文档
|
||||
|
||||
14
docs/requirements.txt
Normal file
@@ -0,0 +1,14 @@
|
||||
# Sphinx文档构建依赖
|
||||
sphinx>=7.0.0
|
||||
sphinx-rtd-theme>=2.0.0
|
||||
myst-parser>=2.0.0
|
||||
sphinxcontrib-mermaid
|
||||
|
||||
# 用于支持Jupyter notebook文档
|
||||
myst-nb>=1.0.0
|
||||
|
||||
# 用于代码复制按钮
|
||||
sphinx-copybutton>=0.5.0
|
||||
|
||||
# 用于文档自动构建与实时预览
|
||||
sphinx-autobuild>=2024.2.4
|
||||
@@ -24,6 +24,8 @@ class WSConfig:
|
||||
max_reconnect_attempts = 999 # 最大重连次数
|
||||
ping_interval = 30 # ping间隔(秒)
|
||||
```
|
||||
您可以进入实验室,点击左下角的头像,在实验室详情中获取所在实验室的 AK/SK
|
||||

|
||||
|
||||
### 完整配置示例
|
||||
|
||||
|
||||
BIN
docs/user_guide/image/copy_aksk.gif
Normal file
|
After Width: | Height: | Size: 526 KiB |
BIN
docs/user_guide/image/creatworkfollow.gif
Normal file
|
After Width: | Height: | Size: 327 KiB |
BIN
docs/user_guide/image/links.png
Normal file
|
After Width: | Height: | Size: 275 KiB |
BIN
docs/user_guide/image/linksandrun.png
Normal file
|
After Width: | Height: | Size: 186 KiB |
BIN
docs/user_guide/image/material.png
Normal file
|
After Width: | Height: | Size: 581 KiB |
BIN
docs/user_guide/image/new.png
Normal file
|
After Width: | Height: | Size: 120 KiB |
@@ -245,3 +245,78 @@ unilab --ak your_ak --sk your_sk --port 8080 --disable_browser
|
||||
- 检查图谱文件格式是否正确
|
||||
- 验证设备连接和端点配置
|
||||
- 确保注册表路径正确
|
||||
|
||||
## 页面操作
|
||||
|
||||
### 1. 启动成功
|
||||
当您启动成功后,可以看到物料列表、节点模板和组态图,如图所示
|
||||

|
||||
|
||||
### 2. 根据需求创建设备和物料
|
||||
我们可以做一个简单的案例
|
||||
* 在容器1中加入水
|
||||
* 通过传输泵将容器1中的水转移到容器2中
|
||||
#### 2.1 添加所需的设备和物料
|
||||
* 仪器设备 work_station 中的 workstation,数量 x1
* 仪器设备 virtual_device 中的 virtual_transfer_pump,数量 x1
* 物料耗材 container 中的 container,数量 x2
|
||||
|
||||
#### 2.2 将设备和物料根据父子关系进行关联
|
||||
当我们添加设备时,仪器耗材模块的物料列表也会实时更新
|
||||
我们需要将设备和物料拖拽到workstation中并在画布上将它们连接起来,就像真实的设备操作一样
|
||||

|
||||
|
||||
### 3. 创建工作流
|
||||
进入工作流模块 → 点击"我创建的" → 新建工作流
|
||||

|
||||
|
||||
#### 3.1 新增工作流节点
|
||||
我们可以进入指定工作流,在空白处右键
|
||||
* 选择Laboratory→host_node中的creat_resource
|
||||
* 选择Laboratory→workstation中的PumpTransferProtocol
|
||||
|
||||

|
||||
|
||||
#### 3.2 配置节点参数
|
||||
根据案例,工作流包含两个步骤:
|
||||
1. 使用creat_resource在容器中创建水
|
||||
2. 通过泵传输协议将水传输到另一个容器
|
||||
|
||||
我们点击 creat_resource 卡片上的编辑按钮来配置参数⭐️
* class_name:container
* device_id:workstation
* liquid_input_slot:0 或 -1 均可
* liquid_type:water
* liquid_volume:根据需求填写即可,默认单位 ml,这里举例 50
* parent:workstation
* res_id:containe
* 关联设备名称(原 unilabos_device_id):这里填写 host_node
|
||||
**配置完成后点击底部保存按钮**
|
||||
|
||||
我们点击 PumpTransferProtocol 卡片上的编辑按钮来配置参数⭐️
* event:transfer_liquid
* from_vessel:water
* to_vessel:container1
* volume:根据需求填写即可,默认单位 ml,这里举例 50
* 关联设备名称(原 unilabos_device_id):这里填写 workstation
|
||||
**配置完成后点击底部保存按钮**
|
||||
|
||||
#### 3.3 运行工作流
|
||||
1. 连接两个节点卡片
|
||||
2. 点击底部保存按钮
|
||||
3. 点击运行按钮执行工作流
|
||||
|
||||

|
||||
|
||||
### 运行监控
|
||||
* 运行状态和消息实时显示在底部控制台
|
||||
* 如有报错,可点击查看详细信息
|
||||
|
||||
### 结果验证
|
||||
工作流完成后,返回仪器耗材模块:
|
||||
* 点击 container1卡片查看详情
|
||||
* 确认其中包含参数指定的水和容量
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
197
docs/user_guide/quick_install_guide.md
Normal file
@@ -0,0 +1,197 @@
|
||||
# Uni-Lab-OS 一键安装快速指南
|
||||
|
||||
## 概述
|
||||
|
||||
本指南提供最快速的 Uni-Lab-OS 安装方法,使用预打包的 conda 环境,无需手动配置依赖。
|
||||
|
||||
## 前置要求
|
||||
|
||||
- 已安装 Conda/Miniconda/Miniforge/Mamba
|
||||
- 至少 10GB 可用磁盘空间
|
||||
- Windows 10+, macOS 10.14+, 或 Linux (Ubuntu 20.04+)
|
||||
|
||||
## 安装步骤
|
||||
|
||||
### 第一步:下载预打包环境
|
||||
|
||||
1. 访问 [GitHub Actions - Conda Pack Build](https://github.com/dptech-corp/Uni-Lab-OS/actions/workflows/conda-pack-build.yml)
|
||||
|
||||
2. 选择最新的成功构建记录(绿色勾号 ✓)
|
||||
|
||||
3. 在页面底部的 "Artifacts" 部分,下载对应你操作系统的压缩包:
|
||||
- Windows: `unilab-pack-win-64-{branch}.zip`
|
||||
- macOS (Intel): `unilab-pack-osx-64-{branch}.tar.gz`
|
||||
- macOS (Apple Silicon): `unilab-pack-osx-arm64-{branch}.tar.gz`
|
||||
- Linux: `unilab-pack-linux-64-{branch}.tar.gz`
|
||||
|
||||
### 第二步:解压并运行安装脚本
|
||||
|
||||
#### Windows
|
||||
|
||||
```batch
|
||||
REM 使用 Windows 资源管理器解压下载的 zip 文件
|
||||
REM 或使用命令行:
|
||||
tar -xf unilab-pack-win-64-dev.zip
|
||||
|
||||
REM 进入解压后的目录
|
||||
cd unilab-pack-win-64-dev
|
||||
|
||||
REM 双击运行 install_unilab.bat
|
||||
REM 或在命令行中执行:
|
||||
install_unilab.bat
|
||||
```
|
||||
|
||||
#### macOS
|
||||
|
||||
```bash
|
||||
# 解压下载的压缩包
|
||||
tar -xzf unilab-pack-osx-arm64-dev.tar.gz
|
||||
|
||||
# 进入解压后的目录
|
||||
cd unilab-pack-osx-arm64-dev
|
||||
|
||||
# 运行安装脚本
|
||||
bash install_unilab.sh
|
||||
```
|
||||
|
||||
#### Linux
|
||||
|
||||
```bash
|
||||
# 解压下载的压缩包
|
||||
tar -xzf unilab-pack-linux-64-dev.tar.gz
|
||||
|
||||
# 进入解压后的目录
|
||||
cd unilab-pack-linux-64-dev
|
||||
|
||||
# 添加执行权限(如果需要)
|
||||
chmod +x install_unilab.sh
|
||||
|
||||
# 运行安装脚本
|
||||
./install_unilab.sh
|
||||
```
|
||||
|
||||
### 第三步:激活环境
|
||||
|
||||
```bash
|
||||
conda activate unilab
|
||||
```
|
||||
|
||||
### 第四步:验证安装(推荐)
|
||||
|
||||
```bash
|
||||
# 确保已激活环境
|
||||
conda activate unilab
|
||||
|
||||
# 运行验证脚本
|
||||
python verify_installation.py
|
||||
```
|
||||
|
||||
如果看到 "✓ All checks passed!",说明安装成功!
|
||||
|
||||
## 常见问题
|
||||
|
||||
### Q: 安装脚本找不到 conda?
|
||||
|
||||
**A:** 确保你已经安装了 conda/miniconda/miniforge,并且安装在标准位置:
|
||||
|
||||
- **Windows**:
|
||||
|
||||
- `%USERPROFILE%\miniforge3`
|
||||
- `%USERPROFILE%\miniconda3`
|
||||
- `%USERPROFILE%\anaconda3`
|
||||
- `C:\ProgramData\miniforge3`
|
||||
|
||||
- **macOS/Linux**:
|
||||
- `~/miniforge3`
|
||||
- `~/miniconda3`
|
||||
- `~/anaconda3`
|
||||
- `/opt/conda`
|
||||
|
||||
如果安装在其他位置,可以先激活 conda base 环境,然后手动运行安装脚本。
|
||||
|
||||
### Q: 安装后激活环境提示找不到?
|
||||
|
||||
**A:** 尝试以下方法:
|
||||
|
||||
```bash
|
||||
# 方法 1: 使用 conda activate
|
||||
conda activate unilab
|
||||
|
||||
# 方法 2: 使用完整路径激活(Windows)
|
||||
call C:\Users\{YourUsername}\miniforge3\envs\unilab\Scripts\activate.bat
|
||||
|
||||
# 方法 2: 使用完整路径激活(Unix)
|
||||
source ~/miniforge3/envs/unilab/bin/activate
|
||||
```
|
||||
|
||||
### Q: conda-unpack 失败怎么办?
|
||||
|
||||
**A:** 尝试手动运行:
|
||||
|
||||
```bash
|
||||
# Windows
|
||||
cd %CONDA_PREFIX%\envs\unilab
|
||||
.\Scripts\conda-unpack.exe
|
||||
|
||||
# macOS/Linux
|
||||
cd $CONDA_PREFIX/envs/unilab
|
||||
./bin/conda-unpack
|
||||
```
|
||||
|
||||
### Q: 验证脚本报错?
|
||||
|
||||
**A:** 首先确认环境已激活:
|
||||
|
||||
```bash
|
||||
# 检查当前环境
|
||||
conda env list
|
||||
|
||||
# 应该看到 unilab 前面有 * 标记
|
||||
```
|
||||
|
||||
如果仍有问题,查看具体报错信息,可能需要:
|
||||
|
||||
- 重新运行安装脚本
|
||||
- 检查磁盘空间
|
||||
- 查看详细文档
|
||||
|
||||
### Q: 环境很大,有办法减小吗?
|
||||
|
||||
**A:** 预打包的环境包含所有依赖,通常较大(压缩后 2-5GB)。这是为了确保离线安装和完整功能。如果空间有限,考虑使用手动安装方式,只安装需要的组件。
|
||||
|
||||
### Q: 如何更新到最新版本?
|
||||
|
||||
**A:** 重新下载最新的预打包环境,运行安装脚本时选择覆盖现有环境。
|
||||
|
||||
或者在现有环境中更新:
|
||||
|
||||
```bash
|
||||
conda activate unilab
|
||||
|
||||
# 更新 unilabos
|
||||
cd /path/to/Uni-Lab-OS
|
||||
git pull
|
||||
pip install -e . --upgrade
|
||||
|
||||
# 更新 ros-humble-unilabos-msgs
|
||||
mamba update ros-humble-unilabos-msgs -c uni-lab -c robostack-staging -c conda-forge
|
||||
```
|
||||
|
||||
## 下一步
|
||||
|
||||
安装完成后,你可以:
|
||||
|
||||
1. **查看启动指南**: {doc}`launch`
|
||||
2. **运行示例**: {doc}`../boot_examples/index`
|
||||
3. **配置设备**: 编辑 `unilabos_data/startup_config.json`
|
||||
4. **阅读开发文档**: {doc}`../developer_guide/workstation_architecture`
|
||||
|
||||
## 需要帮助?
|
||||
|
||||
- **文档**: [docs/user_guide/installation.md](installation.md)
|
||||
- **问题反馈**: [GitHub Issues](https://github.com/dptech-corp/Uni-Lab-OS/issues)
|
||||
- **开发版安装**: 参考 {doc}`installation` 的方式二
|
||||
|
||||
---
|
||||
|
||||
**提示**: 这个预打包环境包含了从指定分支(通常是 `dev`)构建的最新代码。如果需要稳定版本,请使用方式二手动安装 release 版本。
|
||||
22
package.xml
@@ -1,22 +0,0 @@
|
||||
<?xml version="1.0"?>
|
||||
<?xml-model href="http://download.ros.org/schema/package_format3.xsd" schematypens="http://www.w3.org/2001/XMLSchema"?>
|
||||
<package format="3">
|
||||
<name>unilabos</name>
|
||||
<version>0.0.0</version>
|
||||
<description>ROS2 package for unilabos server</description>
|
||||
<maintainer email="changjh@pku.edu.cn">changjh</maintainer>
|
||||
<license>TODO: License declaration</license>
|
||||
|
||||
<build_depend>action_msgs</build_depend>
|
||||
<exec_depend>action_msgs</exec_depend>
|
||||
<member_of_group>rosidl_interface_packages</member_of_group>
|
||||
|
||||
<test_depend>ament_copyright</test_depend>
|
||||
<test_depend>ament_flake8</test_depend>
|
||||
<test_depend>ament_pep257</test_depend>
|
||||
<test_depend>python3-pytest</test_depend>
|
||||
|
||||
<export>
|
||||
<build_type>ament_python</build_type>
|
||||
</export>
|
||||
</package>
|
||||
@@ -1,6 +1,6 @@
|
||||
package:
|
||||
name: ros-humble-unilabos-msgs
|
||||
version: 0.10.6
|
||||
version: 0.10.10
|
||||
source:
|
||||
path: ../../unilabos_msgs
|
||||
target_directory: src
|
||||
|
||||
41
recipes/ros-humble-unilabos-msgs/bld_ament_cmake.bat
Normal file
@@ -0,0 +1,41 @@
|
||||
:: Generated by vinca http://github.com/RoboStack/vinca.
|
||||
:: DO NOT EDIT!
|
||||
setlocal EnableDelayedExpansion
|
||||
|
||||
set "PYTHONPATH=%LIBRARY_PREFIX%\lib\site-packages;%SP_DIR%"
|
||||
|
||||
:: MSVC is preferred.
|
||||
set CC=cl.exe
|
||||
set CXX=cl.exe
|
||||
|
||||
rd /s /q build
|
||||
mkdir build
|
||||
pushd build
|
||||
|
||||
:: set "CMAKE_GENERATOR=Ninja"
|
||||
|
||||
:: try to fix long paths issues by using default generator
|
||||
set "CMAKE_GENERATOR=Visual Studio %VS_MAJOR% %VS_YEAR%"
|
||||
set "SP_DIR_FORWARDSLASHES=%SP_DIR:\=/%"
|
||||
|
||||
set PYTHON="%PREFIX%\python.exe"
|
||||
|
||||
cmake ^
|
||||
-G "%CMAKE_GENERATOR%" ^
|
||||
-DCMAKE_INSTALL_PREFIX=%LIBRARY_PREFIX% ^
|
||||
-DCMAKE_BUILD_TYPE=Release ^
|
||||
-DCMAKE_INSTALL_SYSTEM_RUNTIME_LIBS_SKIP=True ^
|
||||
-DPYTHON_EXECUTABLE=%PYTHON% ^
|
||||
-DPython_EXECUTABLE=%PYTHON% ^
|
||||
-DPython3_EXECUTABLE=%PYTHON% ^
|
||||
-DSETUPTOOLS_DEB_LAYOUT=OFF ^
|
||||
-DBUILD_SHARED_LIBS=ON ^
|
||||
-DBUILD_TESTING=OFF ^
|
||||
-DCMAKE_OBJECT_PATH_MAX=255 ^
|
||||
-DPYTHON_INSTALL_DIR=%SP_DIR_FORWARDSLASHES% ^
|
||||
--compile-no-warning-as-error ^
|
||||
%SRC_DIR%\%PKG_NAME%\src\work
|
||||
if errorlevel 1 exit 1
|
||||
|
||||
cmake --build . --config Release --target install
|
||||
if errorlevel 1 exit 1
|
||||
71
recipes/ros-humble-unilabos-msgs/build_ament_cmake.sh
Normal file
@@ -0,0 +1,71 @@
|
||||
# Generated by vinca http://github.com/RoboStack/vinca.
|
||||
# DO NOT EDIT!
|
||||
|
||||
rm -rf build
|
||||
mkdir build
|
||||
cd build
|
||||
|
||||
# necessary for correctly linking SIP files (from python_qt_bindings)
|
||||
export LINK=$CXX
|
||||
|
||||
if [[ "$CONDA_BUILD_CROSS_COMPILATION" != "1" ]]; then
|
||||
PYTHON_EXECUTABLE=$PREFIX/bin/python
|
||||
PKG_CONFIG_EXECUTABLE=$PREFIX/bin/pkg-config
|
||||
OSX_DEPLOYMENT_TARGET="10.15"
|
||||
else
|
||||
PYTHON_EXECUTABLE=$BUILD_PREFIX/bin/python
|
||||
PKG_CONFIG_EXECUTABLE=$BUILD_PREFIX/bin/pkg-config
|
||||
OSX_DEPLOYMENT_TARGET="11.0"
|
||||
fi
|
||||
|
||||
echo "USING PYTHON_EXECUTABLE=${PYTHON_EXECUTABLE}"
|
||||
echo "USING PKG_CONFIG_EXECUTABLE=${PKG_CONFIG_EXECUTABLE}"
|
||||
|
||||
export ROS_PYTHON_VERSION=`$PYTHON_EXECUTABLE -c "import sys; print('%i.%i' % (sys.version_info[0:2]))"`
|
||||
echo "Using Python ${ROS_PYTHON_VERSION}"
|
||||
# Fix up SP_DIR which for some reason might contain a path to a wrong Python version
|
||||
FIXED_SP_DIR=$(echo $SP_DIR | sed -E "s/python[0-9]+\.[0-9]+/python$ROS_PYTHON_VERSION/")
|
||||
echo "Using site-package dir ${FIXED_SP_DIR}"
|
||||
|
||||
# see https://github.com/conda-forge/cross-python-feedstock/issues/24
|
||||
if [[ "$CONDA_BUILD_CROSS_COMPILATION" == "1" ]]; then
|
||||
find $PREFIX/lib/cmake -type f -exec sed -i "s~\${_IMPORT_PREFIX}/lib/python${ROS_PYTHON_VERSION}/site-packages~${BUILD_PREFIX}/lib/python${ROS_PYTHON_VERSION}/site-packages~g" {} + || true
|
||||
find $PREFIX/share/rosidl* -type f -exec sed -i "s~$PREFIX/lib/python${ROS_PYTHON_VERSION}/site-packages~${BUILD_PREFIX}/lib/python${ROS_PYTHON_VERSION}/site-packages~g" {} + || true
|
||||
find $PREFIX/share/rosidl* -type f -exec sed -i "s~\${_IMPORT_PREFIX}/lib/python${ROS_PYTHON_VERSION}/site-packages~${BUILD_PREFIX}/lib/python${ROS_PYTHON_VERSION}/site-packages~g" {} + || true
|
||||
find $PREFIX/lib/cmake -type f -exec sed -i "s~message(FATAL_ERROR \"The imported target~message(WARNING \"The imported target~g" {} + || true
|
||||
fi
|
||||
|
||||
if [[ $target_platform =~ linux.* ]]; then
|
||||
export CFLAGS="${CFLAGS} -D__STDC_FORMAT_MACROS=1"
|
||||
export CXXFLAGS="${CXXFLAGS} -D__STDC_FORMAT_MACROS=1"
|
||||
fi;
|
||||
|
||||
# Needed for qt-gui-cpp ..
|
||||
if [[ $target_platform =~ linux.* ]]; then
|
||||
ln -s $GCC ${BUILD_PREFIX}/bin/gcc
|
||||
ln -s $GXX ${BUILD_PREFIX}/bin/g++
|
||||
fi;
|
||||
|
||||
cmake \
|
||||
-G "Ninja" \
|
||||
-DCMAKE_INSTALL_PREFIX=$PREFIX \
|
||||
-DCMAKE_PREFIX_PATH=$PREFIX \
|
||||
-DAMENT_PREFIX_PATH=$PREFIX \
|
||||
-DCMAKE_INSTALL_LIBDIR=lib \
|
||||
-DCMAKE_BUILD_TYPE=Release \
|
||||
-DPYTHON_EXECUTABLE=$PYTHON_EXECUTABLE \
|
||||
-DPython_EXECUTABLE=$PYTHON_EXECUTABLE \
|
||||
-DPython3_EXECUTABLE=$PYTHON_EXECUTABLE \
|
||||
-DPython3_FIND_STRATEGY=LOCATION \
|
||||
-DPKG_CONFIG_EXECUTABLE=$PKG_CONFIG_EXECUTABLE \
|
||||
-DPYTHON_INSTALL_DIR=$FIXED_SP_DIR \
|
||||
-DSETUPTOOLS_DEB_LAYOUT=OFF \
|
||||
-DCATKIN_SKIP_TESTING=$SKIP_TESTING \
|
||||
-DCMAKE_INSTALL_SYSTEM_RUNTIME_LIBS_SKIP=True \
|
||||
-DBUILD_SHARED_LIBS=ON \
|
||||
-DBUILD_TESTING=OFF \
|
||||
-DCMAKE_OSX_DEPLOYMENT_TARGET=$OSX_DEPLOYMENT_TARGET \
|
||||
--compile-no-warning-as-error \
|
||||
$SRC_DIR/$PKG_NAME/src/work
|
||||
|
||||
cmake --build . --config Release --target install
|
||||
61
recipes/ros-humble-unilabos-msgs/recipe.yaml
Normal file
@@ -0,0 +1,61 @@
|
||||
package:
|
||||
name: ros-humble-unilabos-msgs
|
||||
version: 0.9.7
|
||||
source:
|
||||
path: ../../unilabos_msgs
|
||||
folder: ros-humble-unilabos-msgs/src/work
|
||||
|
||||
build:
|
||||
script:
|
||||
sel(win): bld_ament_cmake.bat
|
||||
sel(unix): build_ament_cmake.sh
|
||||
number: 5
|
||||
about:
|
||||
home: https://www.ros.org/
|
||||
license: BSD-3-Clause
|
||||
summary: |
|
||||
Robot Operating System
|
||||
|
||||
extra:
|
||||
recipe-maintainers:
|
||||
- ros-forge
|
||||
|
||||
requirements:
|
||||
build:
|
||||
- "{{ compiler('cxx') }}"
|
||||
- "{{ compiler('c') }}"
|
||||
- sel(linux64): sysroot_linux-64 2.17
|
||||
- ninja
|
||||
- setuptools
|
||||
- sel(unix): make
|
||||
- sel(unix): coreutils
|
||||
- sel(osx): tapi
|
||||
- sel(build_platform != target_platform): pkg-config
|
||||
- cmake
|
||||
- cython
|
||||
- sel(win): vs2022_win-64
|
||||
- sel(build_platform != target_platform): python
|
||||
- sel(build_platform != target_platform): cross-python_{{ target_platform }}
|
||||
- sel(build_platform != target_platform): numpy
|
||||
host:
|
||||
- numpy
|
||||
- pip
|
||||
- sel(build_platform == target_platform): pkg-config
|
||||
- robostack-staging::ros-humble-action-msgs
|
||||
- robostack-staging::ros-humble-ament-cmake
|
||||
- robostack-staging::ros-humble-ament-lint-auto
|
||||
- robostack-staging::ros-humble-ament-lint-common
|
||||
- robostack-staging::ros-humble-ros-environment
|
||||
- robostack-staging::ros-humble-ros-workspace
|
||||
- robostack-staging::ros-humble-rosidl-default-generators
|
||||
- robostack-staging::ros-humble-std-msgs
|
||||
- robostack-staging::ros-humble-geometry-msgs
|
||||
- robostack-staging::ros2-distro-mutex=0.5.*
|
||||
run:
|
||||
- robostack-staging::ros-humble-action-msgs
|
||||
- robostack-staging::ros-humble-ros-workspace
|
||||
- robostack-staging::ros-humble-rosidl-default-runtime
|
||||
- robostack-staging::ros-humble-std-msgs
|
||||
- robostack-staging::ros-humble-geometry-msgs
|
||||
# - robostack-staging::ros2-distro-mutex=0.6.*
|
||||
- sel(osx and x86_64): __osx >={{ MACOSX_DEPLOYMENT_TARGET|default('10.14') }}
|
||||
@@ -1,6 +1,6 @@
|
||||
package:
|
||||
name: unilabos
|
||||
version: "0.10.6"
|
||||
version: "0.10.10"
|
||||
|
||||
source:
|
||||
path: ../..
|
||||
|
||||
190
scripts/create_readme.py
Normal file
@@ -0,0 +1,190 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Create Distribution Package README
|
||||
===================================
|
||||
|
||||
Generate README.txt for conda-pack distribution packages.
|
||||
|
||||
Usage:
|
||||
python create_readme.py <platform> <branch> <output_file>
|
||||
|
||||
Arguments:
|
||||
platform: Platform identifier (win-64, linux-64, osx-64, osx-arm64)
|
||||
branch: Git branch name
|
||||
output_file: Output file path (e.g., dist-package/README.txt)
|
||||
|
||||
Example:
|
||||
python create_readme.py win-64 dev dist-package/README.txt
|
||||
"""
|
||||
|
||||
import argparse
|
||||
import sys
|
||||
from datetime import datetime, timezone
|
||||
from pathlib import Path
|
||||
|
||||
|
||||
def get_readme_content(platform: str, branch: str) -> str:
|
||||
"""
|
||||
Generate README content for the specified platform.
|
||||
|
||||
Args:
|
||||
platform: Platform identifier
|
||||
branch: Git branch name
|
||||
|
||||
Returns:
|
||||
str: README content
|
||||
"""
|
||||
# Get current UTC time
|
||||
build_date = datetime.now(timezone.utc).strftime("%Y-%m-%d %H:%M:%S UTC")
|
||||
|
||||
# Determine platform-specific content
|
||||
is_windows = platform == "win-64"
|
||||
|
||||
if is_windows:
|
||||
archive_ext = "zip"
|
||||
install_script = "install_unilab.bat"
|
||||
platform_instructions = """Windows:
|
||||
1. Extract the downloaded ZIP file
|
||||
2. Double-click install_unilab.bat (or run in cmd)
|
||||
3. Follow the prompts"""
|
||||
else:
|
||||
archive_ext = "tar.gz"
|
||||
install_script = "install_unilab.sh"
|
||||
platform_name = {"linux-64": "linux-64", "osx-64": "osx-64", "osx-arm64": "osx-arm64"}.get(platform, platform)
|
||||
platform_instructions = f"""macOS/Linux:
|
||||
1. Download and extract unilab-pack-{platform_name}.tar.gz
|
||||
2. Run: bash install_unilab.sh
|
||||
3. Follow the prompts
|
||||
|
||||
Alternative (if downloaded from GitHub Actions):
|
||||
1. Extract the artifact ZIP file
|
||||
2. Extract unilab-pack-{platform_name}.tar.gz inside
|
||||
3. Run: bash install_unilab.sh"""
|
||||
|
||||
# Generate README content
|
||||
readme = f"""UniLabOS Conda-Pack Environment
|
||||
================================
|
||||
|
||||
This package contains a pre-built UniLabOS environment.
|
||||
|
||||
Installation Instructions:
|
||||
--------------------------
|
||||
|
||||
{platform_instructions}
|
||||
|
||||
The installation script will:
|
||||
- Automatically find your conda installation
|
||||
- Extract the environment to conda's envs/unilab directory
|
||||
- Run conda-unpack to finalize setup
|
||||
|
||||
After installation:
|
||||
conda activate unilab
|
||||
python verify_installation.py
|
||||
|
||||
Verification:
|
||||
-------------
|
||||
|
||||
The verify_installation.py script will check:
|
||||
- Python version (3.11.11)
|
||||
- ROS2 rclpy installation
|
||||
- UniLabOS installation and dependencies
|
||||
|
||||
If all checks pass, you're ready to use UniLabOS!
|
||||
|
||||
Package Contents:
|
||||
-----------------
|
||||
|
||||
- {install_script} (automatic installation script)
|
||||
- unilab-env-{platform}.tar.gz (packed conda environment)
|
||||
- verify_installation.py (environment verification tool)
|
||||
- README.txt (this file)
|
||||
|
||||
Build Information:
|
||||
------------------
|
||||
|
||||
Branch: {branch}
|
||||
Platform: {platform}
|
||||
Python: 3.11.11
|
||||
Date: {build_date}
|
||||
|
||||
Troubleshooting:
|
||||
----------------
|
||||
|
||||
If installation fails:
|
||||
|
||||
1. Ensure conda or mamba is installed
|
||||
Check: conda --version
|
||||
|
||||
2. Verify you have sufficient disk space
|
||||
Required: ~5-10 GB after extraction
|
||||
|
||||
3. Check installation permissions
|
||||
You need write access to conda's envs directory
|
||||
|
||||
4. For detailed logs, run the install script from terminal
|
||||
|
||||
For more help:
|
||||
- Documentation: docs/user_guide/installation.md
|
||||
- Quick Start: QUICK_START_CONDA_PACK.md
|
||||
- Issues: https://github.com/dptech-corp/Uni-Lab-OS/issues
|
||||
|
||||
License:
|
||||
--------
|
||||
|
||||
UniLabOS is licensed under GPL-3.0-only.
|
||||
See LICENSE file for details.
|
||||
|
||||
Repository: https://github.com/dptech-corp/Uni-Lab-OS
|
||||
"""
|
||||
|
||||
return readme
|
||||
|
||||
|
||||
def main():
|
||||
"""Main entry point."""
|
||||
parser = argparse.ArgumentParser(
|
||||
description="Generate README.txt for conda-pack distribution",
|
||||
formatter_class=argparse.RawDescriptionHelpFormatter,
|
||||
epilog="""
|
||||
Examples:
|
||||
python create_readme.py win-64 dev dist-package/README.txt
|
||||
python create_readme.py linux-64 main dist-package/README.txt
|
||||
""",
|
||||
)
|
||||
|
||||
parser.add_argument("platform", choices=["win-64", "linux-64", "osx-64", "osx-arm64"], help="Platform identifier")
|
||||
|
||||
parser.add_argument("branch", help="Git branch name")
|
||||
|
||||
parser.add_argument("output_file", help="Output file path")
|
||||
|
||||
args = parser.parse_args()
|
||||
|
||||
try:
|
||||
# Generate README content
|
||||
readme_content = get_readme_content(args.platform, args.branch)
|
||||
|
||||
# Create output directory if needed
|
||||
output_path = Path(args.output_file)
|
||||
output_path.parent.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
# Write README file
|
||||
with open(output_path, "w", encoding="utf-8") as f:
|
||||
f.write(readme_content)
|
||||
|
||||
print(f" README.txt created: {output_path}")
|
||||
print(f" Platform: {args.platform}")
|
||||
print(f" Branch: {args.branch}")
|
||||
|
||||
return 0
|
||||
|
||||
except Exception as e:
|
||||
print(f"Error creating README: {e}", file=sys.stderr)
|
||||
import traceback
|
||||
|
||||
traceback.print_exc()
|
||||
return 1
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
sys.exit(main())
|
||||
148
scripts/create_zip_archive.py
Normal file
@@ -0,0 +1,148 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Create ZIP Archive with ZIP64 Support
|
||||
======================================
|
||||
|
||||
This script creates a ZIP archive with ZIP64 support for large files (>2GB).
|
||||
It's used in the conda-pack build workflow to package the distribution.
|
||||
|
||||
PowerShell's Compress-Archive has a 2GB limitation, so we use Python's zipfile
|
||||
module with allowZip64=True to handle large conda-packed environments.
|
||||
|
||||
Usage:
|
||||
python create_zip_archive.py <source_dir> <output_zip> [--compression-level LEVEL]
|
||||
|
||||
Arguments:
|
||||
source_dir: Directory to compress
|
||||
output_zip: Output ZIP file path
|
||||
--compression-level: Compression level (0-9, default: 6)
|
||||
|
||||
Example:
|
||||
python create_zip_archive.py dist-package unilab-pack-win-64.zip
|
||||
"""
|
||||
|
||||
import argparse
|
||||
import os
|
||||
import sys
|
||||
import zipfile
|
||||
from pathlib import Path
|
||||
|
||||
|
||||
def create_zip_archive(source_dir: str, output_zip: str, compression_level: int = 6) -> bool:
|
||||
"""
|
||||
Create a ZIP archive with ZIP64 support.
|
||||
|
||||
Args:
|
||||
source_dir: Directory to compress
|
||||
output_zip: Output ZIP file path
|
||||
compression_level: Compression level (0-9)
|
||||
|
||||
Returns:
|
||||
bool: True if successful
|
||||
"""
|
||||
try:
|
||||
source_path = Path(source_dir)
|
||||
output_path = Path(output_zip)
|
||||
|
||||
# Validate source directory
|
||||
if not source_path.exists():
|
||||
print(f"Error: Source directory does not exist: {source_dir}", file=sys.stderr)
|
||||
return False
|
||||
|
||||
if not source_path.is_dir():
|
||||
print(f"Error: Source path is not a directory: {source_dir}", file=sys.stderr)
|
||||
return False
|
||||
|
||||
# Remove existing output file if present
|
||||
if output_path.exists():
|
||||
print(f"Removing existing archive: {output_path}")
|
||||
output_path.unlink()
|
||||
|
||||
# Create ZIP archive
|
||||
print("=" * 70)
|
||||
print(f"Creating ZIP archive with ZIP64 support")
|
||||
print(f" Source: {source_path.absolute()}")
|
||||
print(f" Output: {output_path.absolute()}")
|
||||
print(f" Compression: Level {compression_level}")
|
||||
print("=" * 70)
|
||||
|
||||
total_size = 0
|
||||
file_count = 0
|
||||
|
||||
with zipfile.ZipFile(
|
||||
output_path, "w", zipfile.ZIP_DEFLATED, allowZip64=True, compresslevel=compression_level
|
||||
) as zipf:
|
||||
# Walk through source directory
|
||||
for root, dirs, files in os.walk(source_dir):
|
||||
for file in files:
|
||||
file_path = os.path.join(root, file)
|
||||
arcname = os.path.relpath(file_path, source_dir)
|
||||
file_size = os.path.getsize(file_path)
|
||||
|
||||
# Add file to archive
|
||||
zipf.write(file_path, arcname)
|
||||
|
||||
# Display progress
|
||||
total_size += file_size
|
||||
file_count += 1
|
||||
print(f" [{file_count:3d}] Adding: {arcname:50s} {file_size:>15,} bytes")
|
||||
|
||||
# Get final archive size
|
||||
archive_size = output_path.stat().st_size
|
||||
compression_ratio = (1 - archive_size / total_size) * 100 if total_size > 0 else 0
|
||||
|
||||
# Display summary
|
||||
print("=" * 70)
|
||||
print("Archive created successfully!")
|
||||
print(f" Files added: {file_count}")
|
||||
print(f" Total size (uncompressed): {total_size:>15,} bytes ({total_size / (1024**3):.2f} GB)")
|
||||
print(f" Archive size (compressed): {archive_size:>15,} bytes ({archive_size / (1024**3):.2f} GB)")
|
||||
print(f" Compression ratio: {compression_ratio:.1f}%")
|
||||
print("=" * 70)
|
||||
|
||||
return True
|
||||
|
||||
except Exception as e:
|
||||
print(f"Error creating ZIP archive: {e}", file=sys.stderr)
|
||||
import traceback
|
||||
|
||||
traceback.print_exc()
|
||||
return False
|
||||
|
||||
|
||||
def main():
|
||||
"""Main entry point."""
|
||||
parser = argparse.ArgumentParser(
|
||||
description="Create ZIP archive with ZIP64 support for large files",
|
||||
formatter_class=argparse.RawDescriptionHelpFormatter,
|
||||
epilog="""
|
||||
Examples:
|
||||
python create_zip_archive.py dist-package unilab-pack-win-64.zip
|
||||
python create_zip_archive.py dist-package unilab-pack-win-64.zip --compression-level 9
|
||||
""",
|
||||
)
|
||||
|
||||
parser.add_argument("source_dir", help="Directory to compress")
|
||||
|
||||
parser.add_argument("output_zip", help="Output ZIP file path")
|
||||
|
||||
parser.add_argument(
|
||||
"--compression-level",
|
||||
type=int,
|
||||
default=6,
|
||||
choices=range(0, 10),
|
||||
metavar="LEVEL",
|
||||
help="Compression level (0=no compression, 9=maximum compression, default=6)",
|
||||
)
|
||||
|
||||
args = parser.parse_args()
|
||||
|
||||
# Create archive
|
||||
success = create_zip_archive(args.source_dir, args.output_zip, args.compression_level)
|
||||
|
||||
# Exit with appropriate code
|
||||
sys.exit(0 if success else 1)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
203
scripts/install_unilab.bat
Normal file
@@ -0,0 +1,203 @@
|
||||
@echo off
|
||||
setlocal enabledelayedexpansion
|
||||
|
||||
echo ================================================
|
||||
echo UniLabOS Environment Installation Script
|
||||
echo ================================================
|
||||
echo.
|
||||
|
||||
REM Get the directory where this script is located
|
||||
set "SCRIPT_DIR=%~dp0"
|
||||
cd /d "%SCRIPT_DIR%"
|
||||
|
||||
REM Find conda installation
|
||||
echo Searching for conda installation...
|
||||
|
||||
REM Method 1: Try to get conda base using 'conda info --base'
|
||||
set "CONDA_BASE="
|
||||
for /f "tokens=*" %%i in ('conda info --base 2^>nul') do (
|
||||
set "CONDA_BASE=%%i"
|
||||
)
|
||||
|
||||
if not "%CONDA_BASE%"=="" (
|
||||
echo Found conda at: %CONDA_BASE% (via conda info)
|
||||
goto :conda_found
|
||||
)
|
||||
|
||||
REM Method 2: Use 'where conda' and parse the path
|
||||
echo Trying alternative method...
|
||||
for /f "tokens=*" %%i in ('where conda 2^>nul') do (
|
||||
set "CONDA_PATH=%%i"
|
||||
goto :parse_conda_path
|
||||
)
|
||||
|
||||
echo ERROR: Could not find conda installation!
|
||||
echo Please make sure conda/mamba is installed and in your PATH.
|
||||
echo.
|
||||
pause
|
||||
exit /b 1
|
||||
|
||||
:parse_conda_path
|
||||
REM Parse conda path to find base directory
|
||||
REM Common paths:
|
||||
REM C:\Users\hp\miniforge3\Library\bin\conda.bat
|
||||
REM C:\Users\hp\miniforge3\Scripts\conda.exe
|
||||
REM C:\Users\hp\miniforge3\condabin\conda.bat
|
||||
|
||||
echo Found conda executable at: %CONDA_PATH%
|
||||
|
||||
REM Check if path contains \Library\bin\ (typical for conda.bat)
|
||||
echo %CONDA_PATH% | findstr /C:"\Library\bin\" >nul
|
||||
if not errorlevel 1 (
|
||||
REM Path like: C:\Users\hp\miniforge3\Library\bin\conda.bat
|
||||
REM Need to go up 3 levels: bin -> Library -> miniforge3
|
||||
for %%i in ("%CONDA_PATH%") do set "CONDA_BASE=%%~dpi"
|
||||
for %%i in ("%CONDA_BASE%..\..\..") do set "CONDA_BASE=%%~fi"
|
||||
goto :conda_found
|
||||
)
|
||||
|
||||
REM Check if path contains \Scripts\ (typical for conda.exe)
|
||||
echo %CONDA_PATH% | findstr /C:"\Scripts\" >nul
|
||||
if not errorlevel 1 (
|
||||
REM Path like: C:\Users\hp\miniforge3\Scripts\conda.exe
|
||||
REM Need to go up 2 levels: Scripts -> miniforge3
|
||||
for %%i in ("%CONDA_PATH%") do set "CONDA_BASE=%%~dpi"
|
||||
for %%i in ("%CONDA_BASE%..\.") do set "CONDA_BASE=%%~fi"
|
||||
goto :conda_found
|
||||
)
|
||||
|
||||
REM Check if path contains \condabin\ (typical for conda.bat)
|
||||
echo %CONDA_PATH% | findstr /C:"\condabin\" >nul
|
||||
if not errorlevel 1 (
|
||||
REM Path like: C:\Users\hp\miniforge3\condabin\conda.bat
|
||||
REM Need to go up 2 levels: condabin -> miniforge3
|
||||
for %%i in ("%CONDA_PATH%") do set "CONDA_BASE=%%~dpi"
|
||||
for %%i in ("%CONDA_BASE%..\.") do set "CONDA_BASE=%%~fi"
|
||||
goto :conda_found
|
||||
)
|
||||
|
||||
REM Default: assume it's 2 levels up
|
||||
for %%i in ("%CONDA_PATH%") do set "CONDA_BASE=%%~dpi"
|
||||
for %%i in ("%CONDA_BASE%..\.") do set "CONDA_BASE=%%~fi"
|
||||
|
||||
:conda_found
|
||||
echo Found conda base directory: %CONDA_BASE%
|
||||
echo.
|
||||
|
||||
REM Set target environment path
|
||||
set "ENV_NAME=unilab"
|
||||
set "ENV_PATH=%CONDA_BASE%\envs\%ENV_NAME%"
|
||||
|
||||
REM Check if environment already exists
|
||||
if exist "%ENV_PATH%" (
|
||||
echo WARNING: Environment '%ENV_NAME%' already exists at %ENV_PATH%
|
||||
echo.
|
||||
set /p "OVERWRITE=Do you want to overwrite it? (y/n): "
|
||||
if /i not "!OVERWRITE!"=="y" (
|
||||
echo Installation cancelled.
|
||||
pause
|
||||
exit /b 0
|
||||
)
|
||||
echo Removing existing environment...
|
||||
rmdir /s /q "%ENV_PATH%"
|
||||
)
|
||||
|
||||
REM Find the packed environment file
|
||||
set "PACK_FILE="
|
||||
for %%f in (unilab-env*.tar.gz) do (
|
||||
set "PACK_FILE=%%f"
|
||||
goto :found_pack
|
||||
)
|
||||
|
||||
:found_pack
|
||||
if "%PACK_FILE%"=="" (
|
||||
echo ERROR: Could not find unilab-env*.tar.gz file!
|
||||
echo Please make sure the packed environment file is in the same directory as this script.
|
||||
echo.
|
||||
pause
|
||||
exit /b 1
|
||||
)
|
||||
|
||||
echo Found packed environment: %PACK_FILE%
|
||||
echo.
|
||||
|
||||
REM Extract the packed environment
|
||||
echo Extracting environment to %ENV_PATH%...
|
||||
mkdir "%ENV_PATH%"
|
||||
|
||||
REM Extract using tar (available in Windows 10+)
|
||||
tar -xzf "%PACK_FILE%" -C "%ENV_PATH%"
|
||||
if errorlevel 1 (
|
||||
echo ERROR: Failed to extract environment!
|
||||
echo Make sure you have Windows 10 or later with tar support.
|
||||
pause
|
||||
exit /b 1
|
||||
)
|
||||
|
||||
echo.
|
||||
echo Unpacking conda environment...
|
||||
echo Changing to environment directory: %ENV_PATH%
|
||||
cd /d "%ENV_PATH%"
|
||||
|
||||
REM Run conda-unpack from the environment directory
|
||||
if exist "Scripts\conda-unpack.exe" (
|
||||
echo Running: .\Scripts\conda-unpack.exe
|
||||
.\Scripts\conda-unpack.exe
|
||||
) else if exist "Scripts\activate.bat" (
|
||||
echo Running: .\Scripts\activate.bat followed by conda-unpack
|
||||
call .\Scripts\activate.bat
|
||||
conda-unpack
|
||||
) else (
|
||||
echo ERROR: Could not find Scripts\conda-unpack.exe or Scripts\activate.bat!
|
||||
echo Current directory: %CD%
|
||||
echo Expected location: %ENV_PATH%\Scripts\
|
||||
pause
|
||||
exit /b 1
|
||||
)
|
||||
|
||||
if errorlevel 1 (
|
||||
echo ERROR: conda-unpack failed!
|
||||
pause
|
||||
exit /b 1
|
||||
)
|
||||
|
||||
echo.
|
||||
echo Checking UniLabOS entry point...
|
||||
REM Check if unilab-script.py exists
|
||||
set "UNILAB_SCRIPT=%ENV_PATH%\Scripts\unilab-script.py"
|
||||
if not exist "%UNILAB_SCRIPT%" (
|
||||
echo WARNING: unilab-script.py not found, creating it...
|
||||
(
|
||||
echo # -*- coding: utf-8 -*-
|
||||
echo import re
|
||||
echo import sys
|
||||
echo.
|
||||
echo from unilabos.app.main import main
|
||||
echo.
|
||||
echo if __name__ == '__main__':
|
||||
echo sys.argv[0] = re.sub^(r'(-script\.pyw?^|\.exe^)?$', '', sys.argv[0]^)
|
||||
echo sys.exit^(main^(^)^)
|
||||
) > "%UNILAB_SCRIPT%"
|
||||
echo Created: %UNILAB_SCRIPT%
|
||||
) else (
|
||||
echo Found: %UNILAB_SCRIPT%
|
||||
)
|
||||
|
||||
echo.
|
||||
echo ================================================
|
||||
echo Installation completed successfully!
|
||||
echo ================================================
|
||||
echo.
|
||||
echo To activate the environment, run:
|
||||
echo conda activate %ENV_NAME%
|
||||
echo.
|
||||
echo or
|
||||
echo.
|
||||
echo call %ENV_PATH%\Scripts\activate.bat
|
||||
echo.
|
||||
echo You can verify the installation by running:
|
||||
echo cd /d "%SCRIPT_DIR%"
|
||||
echo python verify_installation.py
|
||||
echo.
|
||||
pause
|
||||
|
||||
139
scripts/install_unilab.sh
Executable file
@@ -0,0 +1,139 @@
|
||||
#!/bin/bash
|
||||
set -e
|
||||
|
||||
echo "================================================"
|
||||
echo "UniLabOS Environment Installation Script"
|
||||
echo "================================================"
|
||||
echo ""
|
||||
|
||||
# Get the directory where this script is located
|
||||
SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
|
||||
cd "$SCRIPT_DIR"
|
||||
|
||||
# Find conda installation
|
||||
echo "Searching for conda installation..."
|
||||
CONDA_BASE=""
|
||||
|
||||
# Try to find conda in PATH
|
||||
if command -v conda &> /dev/null; then
|
||||
CONDA_BASE=$(conda info --base)
|
||||
echo "Found conda at: $CONDA_BASE"
|
||||
elif [ -d "$HOME/miniforge3" ]; then
|
||||
CONDA_BASE="$HOME/miniforge3"
|
||||
echo "Found conda at: $CONDA_BASE"
|
||||
elif [ -d "$HOME/miniconda3" ]; then
|
||||
CONDA_BASE="$HOME/miniconda3"
|
||||
echo "Found conda at: $CONDA_BASE"
|
||||
elif [ -d "$HOME/anaconda3" ]; then
|
||||
CONDA_BASE="$HOME/anaconda3"
|
||||
echo "Found conda at: $CONDA_BASE"
|
||||
elif [ -d "/opt/conda" ]; then
|
||||
CONDA_BASE="/opt/conda"
|
||||
echo "Found conda at: $CONDA_BASE"
|
||||
else
|
||||
echo "ERROR: Could not find conda installation!"
|
||||
echo "Please make sure conda/mamba is installed."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo ""
|
||||
|
||||
# Initialize conda for this shell
|
||||
if [ -f "$CONDA_BASE/etc/profile.d/conda.sh" ]; then
|
||||
source "$CONDA_BASE/etc/profile.d/conda.sh"
|
||||
fi
|
||||
|
||||
# Set target environment path
|
||||
ENV_NAME="unilab"
|
||||
ENV_PATH="$CONDA_BASE/envs/$ENV_NAME"
|
||||
|
||||
# Check if environment already exists
|
||||
if [ -d "$ENV_PATH" ]; then
|
||||
echo "WARNING: Environment '$ENV_NAME' already exists at $ENV_PATH"
|
||||
read -p "Do you want to overwrite it? (y/n): " OVERWRITE
|
||||
if [ "$OVERWRITE" != "y" ] && [ "$OVERWRITE" != "Y" ]; then
|
||||
echo "Installation cancelled."
|
||||
exit 0
|
||||
fi
|
||||
echo "Removing existing environment..."
|
||||
rm -rf "$ENV_PATH"
|
||||
fi
|
||||
|
||||
# Find the packed environment file
|
||||
PACK_FILE=$(ls unilab-env*.tar.gz 2>/dev/null | head -n 1)
|
||||
|
||||
if [ -z "$PACK_FILE" ]; then
|
||||
echo "ERROR: Could not find unilab-env*.tar.gz file!"
|
||||
echo "Please make sure the packed environment file is in the same directory as this script."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "Found packed environment: $PACK_FILE"
|
||||
echo ""
|
||||
|
||||
# Extract the packed environment
|
||||
echo "Extracting environment to $ENV_PATH..."
|
||||
mkdir -p "$ENV_PATH"
|
||||
tar -xzf "$PACK_FILE" -C "$ENV_PATH"
|
||||
|
||||
echo ""
|
||||
echo "Unpacking conda environment..."
|
||||
echo "Changing to environment directory: $ENV_PATH"
|
||||
cd "$ENV_PATH"
|
||||
|
||||
# Run conda-unpack from the environment directory
|
||||
if [ -f "bin/conda-unpack" ]; then
|
||||
echo "Running: ./bin/conda-unpack"
|
||||
./bin/conda-unpack
|
||||
elif [ -f "bin/activate" ]; then
|
||||
echo "Running: source bin/activate followed by conda-unpack"
|
||||
source bin/activate
|
||||
conda-unpack
|
||||
else
|
||||
echo "ERROR: Could not find bin/conda-unpack or bin/activate!"
|
||||
echo "Current directory: $(pwd)"
|
||||
echo "Expected location: $ENV_PATH/bin/"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo ""
|
||||
echo "Checking UniLabOS entry point..."
|
||||
# Check if unilab script exists in bin directory
|
||||
UNILAB_SCRIPT="$ENV_PATH/bin/unilab"
|
||||
if [ ! -f "$UNILAB_SCRIPT" ]; then
|
||||
echo "WARNING: unilab script not found, creating it..."
|
||||
cat > "$UNILAB_SCRIPT" << 'EOF'
|
||||
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
|
||||
from unilabos.app.main import main
|
||||
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(main())
|
||||
EOF
|
||||
chmod +x "$UNILAB_SCRIPT"
|
||||
echo "Created: $UNILAB_SCRIPT"
|
||||
else
|
||||
echo "Found: $UNILAB_SCRIPT"
|
||||
fi
|
||||
|
||||
echo ""
|
||||
echo "================================================"
|
||||
echo "Installation completed successfully!"
|
||||
echo "================================================"
|
||||
echo ""
|
||||
echo "To activate the environment, run:"
|
||||
echo " conda activate $ENV_NAME"
|
||||
echo ""
|
||||
echo "or"
|
||||
echo ""
|
||||
echo " source $ENV_PATH/bin/activate"
|
||||
echo ""
|
||||
echo "You can verify the installation by running:"
|
||||
echo " cd $SCRIPT_DIR"
|
||||
echo " python verify_installation.py"
|
||||
echo ""
|
||||
|
||||
175
scripts/verify_installation.py
Normal file
@@ -0,0 +1,175 @@
|
||||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
UniLabOS Installation Verification Script
|
||||
=========================================
|
||||
|
||||
This script verifies that UniLabOS and its dependencies are correctly installed.
|
||||
Run this script after installing the conda-pack environment to ensure everything works.
|
||||
|
||||
Usage:
|
||||
python verify_installation.py [--auto-install]
|
||||
|
||||
Options:
|
||||
--auto-install Automatically install missing packages
|
||||
|
||||
Or in the conda environment:
|
||||
conda activate unilab
|
||||
python verify_installation.py
|
||||
"""
|
||||
|
||||
import sys
|
||||
import os
|
||||
import argparse
|
||||
|
||||
# IMPORTANT: Set UTF-8 encoding BEFORE any other imports
|
||||
# This ensures all subsequent imports (including unilabos) can output UTF-8 characters
|
||||
if sys.platform == "win32":
|
||||
# Method 1: Reconfigure stdout/stderr to use UTF-8 with error handling
|
||||
try:
|
||||
sys.stdout.reconfigure(encoding="utf-8", errors="replace") # type: ignore
|
||||
sys.stderr.reconfigure(encoding="utf-8", errors="replace") # type: ignore
|
||||
except (AttributeError, OSError):
|
||||
pass
|
||||
|
||||
# Method 2: Set environment variable for subprocess and console
|
||||
os.environ["PYTHONIOENCODING"] = "utf-8"
|
||||
|
||||
# Method 3: Try to change Windows console code page to UTF-8
|
||||
try:
|
||||
import ctypes
|
||||
|
||||
# Set console code page to UTF-8 (CP 65001)
|
||||
ctypes.windll.kernel32.SetConsoleCP(65001)
|
||||
ctypes.windll.kernel32.SetConsoleOutputCP(65001)
|
||||
except (ImportError, AttributeError, OSError):
|
||||
pass
|
||||
|
||||
# Now import other modules
|
||||
import importlib
|
||||
|
||||
# Use ASCII-safe symbols that work across all platforms
|
||||
CHECK_MARK = "[OK]"
|
||||
CROSS_MARK = "[FAIL]"
|
||||
|
||||
|
||||
def check_package(package_name: str, display_name: str | None = None) -> bool:
|
||||
"""
|
||||
Check if a package can be imported.
|
||||
|
||||
Args:
|
||||
package_name: Name of the package to import
|
||||
display_name: Display name (defaults to package_name)
|
||||
|
||||
Returns:
|
||||
bool: True if package is available
|
||||
"""
|
||||
if display_name is None:
|
||||
display_name = package_name
|
||||
|
||||
try:
|
||||
importlib.import_module(package_name)
|
||||
print(f" {CHECK_MARK} {display_name}")
|
||||
return True
|
||||
except ImportError:
|
||||
print(f" {CROSS_MARK} {display_name}")
|
||||
return False
|
||||
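# Illustrative usage sketch (doctest-style, assuming rclpy is importable in the
# active environment; output spacing matches the print format above):
#   >>> check_package("rclpy", "ROS2 rclpy")
#    [OK] ROS2 rclpy
#   True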
|
||||
|
||||
def check_python_version() -> bool:
|
||||
"""Check Python version."""
|
||||
version = sys.version_info
|
||||
version_str = f"{version.major}.{version.minor}.{version.micro}"
|
||||
|
||||
if version.major == 3 and version.minor >= 11:
|
||||
print(f" {CHECK_MARK} Python {version_str}")
|
||||
return True
|
||||
else:
|
||||
print(f" {CROSS_MARK} Python {version_str} (requires Python 3.11+)")
|
||||
return False
|
||||
|
||||
|
||||
def main():
|
||||
"""Run all verification checks."""
|
||||
# Parse command line arguments
|
||||
parser = argparse.ArgumentParser(
|
||||
description="Verify UniLabOS installation",
|
||||
formatter_class=argparse.RawDescriptionHelpFormatter,
|
||||
)
|
||||
parser.add_argument(
|
||||
"--auto-install",
|
||||
action="store_true",
|
||||
help="Automatically install missing packages",
|
||||
)
|
||||
args = parser.parse_args()
|
||||
|
||||
print("=" * 60)
|
||||
print("UniLabOS Installation Verification")
|
||||
print("=" * 60)
|
||||
if args.auto_install:
|
||||
print("Mode: Auto-install missing packages")
|
||||
else:
|
||||
print("Mode: Verification only")
|
||||
print()
|
||||
|
||||
all_passed = True
|
||||
|
||||
# Check Python version
|
||||
print("Checking Python version...")
|
||||
if not check_python_version():
|
||||
all_passed = False
|
||||
print()
|
||||
|
||||
# Check ROS2 rclpy
|
||||
print("Checking ROS2 rclpy...")
|
||||
if not check_package("rclpy", "ROS2 rclpy"):
|
||||
all_passed = False
|
||||
print()
|
||||
|
||||
# Run environment checker from unilabos
|
||||
print("Checking UniLabOS and dependencies...")
|
||||
try:
|
||||
from unilabos.utils.environment_check import check_environment
|
||||
|
||||
print(f" {CHECK_MARK} UniLabOS installed")
|
||||
|
||||
# Check environment with optional auto-install
|
||||
# Set show_details=False to suppress detailed Chinese output that may cause encoding issues
|
||||
env_check_passed = check_environment(auto_install=args.auto_install, show_details=False)
|
||||
|
||||
if env_check_passed:
|
||||
print(f" {CHECK_MARK} All required packages available")
|
||||
else:
|
||||
print(f" {CROSS_MARK} Some optional packages are missing")
|
||||
if not args.auto_install:
|
||||
print(" Hint: Run with --auto-install to automatically install missing packages")
|
||||
except ImportError:
|
||||
print(f" {CROSS_MARK} UniLabOS not installed")
|
||||
all_passed = False
|
||||
except Exception as e:
|
||||
print(f" {CROSS_MARK} Environment check failed: {str(e)}")
|
||||
print()
|
||||
|
||||
# Summary
|
||||
print("=" * 60)
|
||||
print("Verification Summary")
|
||||
print("=" * 60)
|
||||
|
||||
if all_passed:
|
||||
print(f"\n{CHECK_MARK} All checks passed! Your UniLabOS installation is ready.")
|
||||
print("\nNext steps:")
|
||||
print(" 1. Review the documentation: docs/user_guide/launch.md")
|
||||
print(" 2. Try the examples: docs/boot_examples/")
|
||||
print(" 3. Configure your devices: unilabos_data/startup_config.json")
|
||||
return 0
|
||||
else:
|
||||
print(f"\n{CROSS_MARK} Some checks failed. Please review the errors above.")
|
||||
print("\nTroubleshooting:")
|
||||
print(" 1. Ensure you're in the correct conda environment: conda activate unilab")
|
||||
print(" 2. Check the installation documentation: docs/user_guide/installation.md")
|
||||
print(" 3. Try reinstalling: pip install .")
|
||||
return 1
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
sys.exit(main())
|
||||
695
scripts/workflow.py
Normal file
@@ -0,0 +1,695 @@
|
||||
import json
|
||||
import logging
|
||||
import traceback
|
||||
import uuid
|
||||
import xml.etree.ElementTree as ET
|
||||
from typing import Any, Dict, List
|
||||
|
||||
import networkx as nx
|
||||
import matplotlib.pyplot as plt
|
||||
import requests
|
||||
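# NOTE: create_workflow() further below references a module-level `configs` object
# (configs.Lab.Key, configs.Lab.Api, configs.Lab.Timeout). Its import is not shown
# in this file; the exact import path is project-specific and assumed to be provided
# elsewhere in UniLabOS.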
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class SimpleGraph:
|
||||
"""简单的有向图实现,用于构建工作流图"""
|
||||
|
||||
def __init__(self):
|
||||
self.nodes = {}
|
||||
self.edges = []
|
||||
|
||||
def add_node(self, node_id, **attrs):
|
||||
"""添加节点"""
|
||||
self.nodes[node_id] = attrs
|
||||
|
||||
def add_edge(self, source, target, **attrs):
|
||||
"""添加边"""
|
||||
edge = {"source": source, "target": target, **attrs}
|
||||
self.edges.append(edge)
|
||||
|
||||
def to_dict(self):
|
||||
"""转换为工作流图格式"""
|
||||
nodes_list = []
|
||||
for node_id, attrs in self.nodes.items():
|
||||
node_attrs = attrs.copy()
|
||||
params = node_attrs.pop("parameters", {}) or {}
|
||||
node_attrs.update(params)
|
||||
nodes_list.append({"id": node_id, **node_attrs})
|
||||
|
||||
return {
|
||||
"directed": True,
|
||||
"multigraph": False,
|
||||
"graph": {},
|
||||
"nodes": nodes_list,
|
||||
"links": self.edges,
|
||||
}
|
||||
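# Minimal sketch of the node-link dict produced by SimpleGraph.to_dict()
# (example node ids and attributes are made up):
#   >>> g = SimpleGraph()
#   >>> g.add_node("a", template="T", parameters={"volume": 1.0})
#   >>> g.add_node("b", template="T2")
#   >>> g.add_edge("a", "b", source_port="ready", target_port="ready")
#   >>> g.to_dict()["nodes"][0]
#   {'id': 'a', 'template': 'T', 'volume': 1.0}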
|
||||
|
||||
def extract_json_from_markdown(text: str) -> str:
|
||||
"""从markdown代码块中提取JSON"""
|
||||
text = text.strip()
|
||||
if text.startswith("```json\n"):
|
||||
text = text[8:]
|
||||
if text.startswith("```\n"):
|
||||
text = text[4:]
|
||||
if text.endswith("\n```"):
|
||||
text = text[:-4]
|
||||
return text
|
||||
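# Illustrative example of the markdown fences being stripped:
#   >>> extract_json_from_markdown('```json\n{"a": 1}\n```')
#   '{"a": 1}'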
|
||||
|
||||
def convert_to_type(val: str) -> Any:
|
||||
"""将字符串值转换为适当的数据类型"""
|
||||
if val == "True":
|
||||
return True
|
||||
if val == "False":
|
||||
return False
|
||||
if val == "?":
|
||||
return None
|
||||
if val.endswith(" g"):
|
||||
return float(val.split(" ")[0])
|
||||
if val.endswith("mg"):
|
||||
return float(val.split("mg")[0])
|
||||
elif val.endswith("mmol"):
|
||||
return float(val.split("mmol")[0]) / 1000
|
||||
elif val.endswith("mol"):
|
||||
return float(val.split("mol")[0])
|
||||
elif val.endswith("ml"):
|
||||
return float(val.split("ml")[0])
|
||||
elif val.endswith("RPM"):
|
||||
return float(val.split("RPM")[0])
|
||||
elif val.endswith(" °C"):
|
||||
return float(val.split(" ")[0])
|
||||
elif val.endswith(" %"):
|
||||
return float(val.split(" ")[0])
|
||||
return val
|
||||
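# Illustrative conversions (units are stripped; mmol is rescaled to mol):
#   >>> convert_to_type("500 mg")
#   500.0
#   >>> convert_to_type("2 mmol")
#   0.002
#   >>> convert_to_type("25 °C")
#   25.0
#   >>> convert_to_type("?") is None
#   True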
|
||||
|
||||
def refactor_data(data: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
|
||||
"""统一的数据重构函数,根据操作类型自动选择模板"""
|
||||
refactored_data = []
|
||||
|
||||
# Operation mapping covering all biology and organic-chemistry operations
|
||||
OPERATION_MAPPING = {
|
||||
# Biology operations
|
||||
"transfer_liquid": "SynBioFactory-liquid_handler.prcxi-transfer_liquid",
|
||||
"transfer": "SynBioFactory-liquid_handler.biomek-transfer",
|
||||
"incubation": "SynBioFactory-liquid_handler.biomek-incubation",
|
||||
"move_labware": "SynBioFactory-liquid_handler.biomek-move_labware",
|
||||
"oscillation": "SynBioFactory-liquid_handler.biomek-oscillation",
|
||||
# Organic chemistry operations
|
||||
"HeatChillToTemp": "SynBioFactory-workstation-HeatChillProtocol",
|
||||
"StopHeatChill": "SynBioFactory-workstation-HeatChillStopProtocol",
|
||||
"StartHeatChill": "SynBioFactory-workstation-HeatChillStartProtocol",
|
||||
"HeatChill": "SynBioFactory-workstation-HeatChillProtocol",
|
||||
"Dissolve": "SynBioFactory-workstation-DissolveProtocol",
|
||||
"Transfer": "SynBioFactory-workstation-TransferProtocol",
|
||||
"Evaporate": "SynBioFactory-workstation-EvaporateProtocol",
|
||||
"Recrystallize": "SynBioFactory-workstation-RecrystallizeProtocol",
|
||||
"Filter": "SynBioFactory-workstation-FilterProtocol",
|
||||
"Dry": "SynBioFactory-workstation-DryProtocol",
|
||||
"Add": "SynBioFactory-workstation-AddProtocol",
|
||||
}
|
||||
|
||||
UNSUPPORTED_OPERATIONS = ["Purge", "Wait", "Stir", "ResetHandling"]
|
||||
|
||||
for step in data:
|
||||
operation = step.get("action")
|
||||
if not operation or operation in UNSUPPORTED_OPERATIONS:
|
||||
continue
|
||||
|
||||
# Handle Repeat operations
|
||||
if operation == "Repeat":
|
||||
times = step.get("times", step.get("parameters", {}).get("times", 1))
|
||||
sub_steps = step.get("steps", step.get("parameters", {}).get("steps", []))
|
||||
for i in range(int(times)):
|
||||
sub_data = refactor_data(sub_steps)
|
||||
refactored_data.extend(sub_data)
|
||||
continue
|
||||
|
||||
# Look up the template name
|
||||
template = OPERATION_MAPPING.get(operation)
|
||||
if not template:
|
||||
# Infer the template type automatically
|
||||
if operation.lower() in ["transfer", "incubation", "move_labware", "oscillation"]:
|
||||
template = f"SynBioFactory-liquid_handler.biomek-{operation}"
|
||||
else:
|
||||
template = f"SynBioFactory-workstation-{operation}Protocol"
|
||||
|
||||
# Build the step data
|
||||
step_data = {
|
||||
"template": template,
|
||||
"description": step.get("description", step.get("purpose", f"{operation} operation")),
|
||||
"lab_node_type": "Device",
|
||||
"parameters": step.get("parameters", step.get("action_args", {})),
|
||||
}
|
||||
refactored_data.append(step_data)
|
||||
|
||||
return refactored_data
|
||||
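# Illustrative example (hypothetical step): a raw step such as
#   {"action": "Add", "action_args": {"vessel": "reactor", "reagent": "DMF"}}
# is rewritten by refactor_data to
#   {"template": "SynBioFactory-workstation-AddProtocol",
#    "description": "Add operation",
#    "lab_node_type": "Device",
#    "parameters": {"vessel": "reactor", "reagent": "DMF"}}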
|
||||
|
||||
def build_protocol_graph(
|
||||
labware_info: Dict[str, Dict[str, Any]], protocol_steps: List[Dict[str, Any]], workstation_name: str
|
||||
) -> SimpleGraph:
|
||||
"""统一的协议图构建函数,根据设备类型自动选择构建逻辑"""
|
||||
G = SimpleGraph()
|
||||
resource_last_writer = {}
|
||||
LAB_NAME = "SynBioFactory"
|
||||
|
||||
protocol_steps = refactor_data(protocol_steps)
|
||||
|
||||
# Inspect the step templates to determine the protocol type
|
||||
has_biomek_template = any(
|
||||
("biomek" in step.get("template", "")) or ("prcxi" in step.get("template", ""))
|
||||
for step in protocol_steps
|
||||
)
|
||||
|
||||
if has_biomek_template:
|
||||
# Build the graph for a biology protocol
|
||||
for labware_id, labware in labware_info.items():
|
||||
node_id = str(uuid.uuid4())
|
||||
|
||||
labware_attrs = labware.copy()
|
||||
labware_id = labware_attrs.pop("id", labware_attrs.get("name", f"labware_{uuid.uuid4()}"))
|
||||
labware_attrs["description"] = labware_id
|
||||
labware_attrs["lab_node_type"] = (
|
||||
"Reagent" if "Plate" in str(labware_id) else "Labware" if "Rack" in str(labware_id) else "Sample"
|
||||
)
|
||||
labware_attrs["device_id"] = workstation_name
|
||||
|
||||
G.add_node(node_id, template=f"{LAB_NAME}-host_node-create_resource", **labware_attrs)
|
||||
resource_last_writer[labware_id] = f"{node_id}:labware"
|
||||
|
||||
# Process the protocol steps
|
||||
prev_node = None
|
||||
for i, step in enumerate(protocol_steps):
|
||||
node_id = str(uuid.uuid4())
|
||||
G.add_node(node_id, **step)
|
||||
|
||||
# Add a control-flow edge
|
||||
if prev_node is not None:
|
||||
G.add_edge(prev_node, node_id, source_port="ready", target_port="ready")
|
||||
prev_node = node_id
|
||||
|
||||
# Handle material flow
|
||||
params = step.get("parameters", {})
|
||||
if "sources" in params and params["sources"] in resource_last_writer:
|
||||
source_node, source_port = resource_last_writer[params["sources"]].split(":")
|
||||
G.add_edge(source_node, node_id, source_port=source_port, target_port="labware")
|
||||
|
||||
if "targets" in params:
|
||||
resource_last_writer[params["targets"]] = f"{node_id}:labware"
|
||||
|
||||
# Add the protocol end node
|
||||
end_id = str(uuid.uuid4())
|
||||
G.add_node(end_id, template=f"{LAB_NAME}-liquid_handler.biomek-run_protocol")
|
||||
if prev_node is not None:
|
||||
G.add_edge(prev_node, end_id, source_port="ready", target_port="ready")
|
||||
|
||||
else:
|
||||
# Build the graph for an organic-chemistry protocol
|
||||
WORKSTATION_ID = workstation_name
|
||||
|
||||
# Create resource nodes for all labware
|
||||
for item_id, item in labware_info.items():
|
||||
# item_id = item.get("id") or item.get("name", f"item_{uuid.uuid4()}")
|
||||
node_id = str(uuid.uuid4())
|
||||
|
||||
# Determine the node type
|
||||
if item.get("type") == "hardware" or "reactor" in str(item_id).lower():
|
||||
if "reactor" not in str(item_id).lower():
|
||||
continue
|
||||
lab_node_type = "Sample"
|
||||
description = f"Prepare Reactor: {item_id}"
|
||||
liquid_type = []
|
||||
liquid_volume = []
|
||||
else:
|
||||
lab_node_type = "Reagent"
|
||||
description = f"Add Reagent to Flask: {item_id}"
|
||||
liquid_type = [item_id]
|
||||
liquid_volume = [1e5]
|
||||
|
||||
G.add_node(
|
||||
node_id,
|
||||
template=f"{LAB_NAME}-host_node-create_resource",
|
||||
description=description,
|
||||
lab_node_type=lab_node_type,
|
||||
res_id=item_id,
|
||||
device_id=WORKSTATION_ID,
|
||||
class_name="container",
|
||||
parent=WORKSTATION_ID,
|
||||
bind_locations={"x": 0.0, "y": 0.0, "z": 0.0},
|
||||
liquid_input_slot=[-1],
|
||||
liquid_type=liquid_type,
|
||||
liquid_volume=liquid_volume,
|
||||
slot_on_deck="",
|
||||
role=item.get("role", ""),
|
||||
)
|
||||
resource_last_writer[item_id] = f"{node_id}:labware"
|
||||
|
||||
last_control_node_id = None
|
||||
|
||||
# Process the protocol steps
|
||||
for step in protocol_steps:
|
||||
node_id = str(uuid.uuid4())
|
||||
G.add_node(node_id, **step)
|
||||
|
||||
# Control flow
|
||||
if last_control_node_id is not None:
|
||||
G.add_edge(last_control_node_id, node_id, source_port="ready", target_port="ready")
|
||||
last_control_node_id = node_id
|
||||
|
||||
# Material flow
|
||||
params = step.get("parameters", {})
|
||||
input_resources = {
|
||||
"Vessel": params.get("vessel"),
|
||||
"ToVessel": params.get("to_vessel"),
|
||||
"FromVessel": params.get("from_vessel"),
|
||||
"reagent": params.get("reagent"),
|
||||
"solvent": params.get("solvent"),
|
||||
"compound": params.get("compound"),
|
||||
"sources": params.get("sources"),
|
||||
"targets": params.get("targets"),
|
||||
}
|
||||
|
||||
for target_port, resource_name in input_resources.items():
|
||||
if resource_name and resource_name in resource_last_writer:
|
||||
source_node, source_port = resource_last_writer[resource_name].split(":")
|
||||
G.add_edge(source_node, node_id, source_port=source_port, target_port=target_port)
|
||||
|
||||
output_resources = {
|
||||
"VesselOut": params.get("vessel"),
|
||||
"FromVesselOut": params.get("from_vessel"),
|
||||
"ToVesselOut": params.get("to_vessel"),
|
||||
"FiltrateOut": params.get("filtrate_vessel"),
|
||||
"reagent": params.get("reagent"),
|
||||
"solvent": params.get("solvent"),
|
||||
"compound": params.get("compound"),
|
||||
"sources_out": params.get("sources"),
|
||||
"targets_out": params.get("targets"),
|
||||
}
|
||||
|
||||
for source_port, resource_name in output_resources.items():
|
||||
if resource_name:
|
||||
resource_last_writer[resource_name] = f"{node_id}:{source_port}"
|
||||
|
||||
return G
|
||||
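# Illustrative call (hypothetical data; labware_info is keyed by labware id):
#   graph = build_protocol_graph(
#       {"reactor": {"type": "hardware"}, "DMF": {"type": "reagent"}},
#       [{"action": "Add", "action_args": {"vessel": "reactor", "reagent": "DMF"}}],
#       workstation_name="workstation",
#   )
#   workflow_dict = graph.to_dict()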
|
||||
|
||||
def draw_protocol_graph(protocol_graph: SimpleGraph, output_path: str):
|
||||
"""
|
||||
(Helper) Draw the protocol workflow graph with networkx and matplotlib for visualization.
|
||||
"""
|
||||
if not protocol_graph:
|
||||
print("Cannot draw graph: Graph object is empty.")
|
||||
return
|
||||
|
||||
G = nx.DiGraph()
|
||||
|
||||
for node_id, attrs in protocol_graph.nodes.items():
|
||||
label = attrs.get("description", attrs.get("template", node_id[:8]))
|
||||
G.add_node(node_id, label=label, **attrs)
|
||||
|
||||
for edge in protocol_graph.edges:
|
||||
G.add_edge(edge["source"], edge["target"])
|
||||
|
||||
plt.figure(figsize=(20, 15))
|
||||
try:
|
||||
pos = nx.nx_agraph.graphviz_layout(G, prog="dot")
|
||||
except Exception:
|
||||
pos = nx.shell_layout(G) # Fallback layout
|
||||
|
||||
node_labels = {node: data["label"] for node, data in G.nodes(data=True)}
|
||||
nx.draw(
|
||||
G,
|
||||
pos,
|
||||
with_labels=False,
|
||||
node_size=2500,
|
||||
node_color="skyblue",
|
||||
node_shape="o",
|
||||
edge_color="gray",
|
||||
width=1.5,
|
||||
arrowsize=15,
|
||||
)
|
||||
nx.draw_networkx_labels(G, pos, labels=node_labels, font_size=8, font_weight="bold")
|
||||
|
||||
plt.title("Chemical Protocol Workflow Graph", size=15)
|
||||
plt.savefig(output_path, dpi=300, bbox_inches="tight")
|
||||
plt.close()
|
||||
print(f" - Visualization saved to '{output_path}'")
|
||||
|
||||
|
||||
from networkx.drawing.nx_agraph import to_agraph
|
||||
import re
|
||||
|
||||
COMPASS = {"n","e","s","w","ne","nw","se","sw","c"}
|
||||
|
||||
def _is_compass(port: str) -> bool:
|
||||
return isinstance(port, str) and port.lower() in COMPASS
|
||||
|
||||
def draw_protocol_graph_with_ports(protocol_graph, output_path: str, rankdir: str = "LR"):
|
||||
"""
|
||||
Draw the protocol workflow graph using Graphviz port syntax.
|
||||
- If an edge's source_port/target_port is a compass direction (n/e/s/w/...), the compass is used directly.
|
||||
- Otherwise the node automatically gets a record shape with named ports <portname>.
|
||||
The result is rendered by PyGraphviz and written to output_path (the file extension selects the format, e.g. .png/.svg/.pdf).
|
||||
"""
|
||||
if not protocol_graph:
|
||||
print("Cannot draw graph: Graph object is empty.")
|
||||
return
|
||||
|
||||
# 1) Build a directed graph with networkx first, preserving port attributes
|
||||
G = nx.DiGraph()
|
||||
for node_id, attrs in protocol_graph.nodes.items():
|
||||
label = attrs.get("description", attrs.get("template", node_id[:8]))
|
||||
# Keep a clean "core label" to place in the middle slot of the record
|
||||
G.add_node(node_id, _core_label=str(label), **{k:v for k,v in attrs.items() if k not in ("label",)})
|
||||
|
||||
edges_data = []
|
||||
in_ports_by_node = {} # collect named input ports
|
||||
out_ports_by_node = {} # collect named output ports
|
||||
|
||||
for edge in protocol_graph.edges:
|
||||
u = edge["source"]
|
||||
v = edge["target"]
|
||||
sp = edge.get("source_port")
|
||||
tp = edge.get("target_port")
|
||||
|
||||
# Record the edge in the graph (keeping the original port info)
|
||||
G.add_edge(u, v, source_port=sp, target_port=tp)
|
||||
edges_data.append((u, v, sp, tp))
|
||||
|
||||
# If it is not a compass direction, classify it as a named port; record shapes are built for these nodes below
|
||||
if sp and not _is_compass(sp):
|
||||
out_ports_by_node.setdefault(u, set()).add(str(sp))
|
||||
if tp and not _is_compass(tp):
|
||||
in_ports_by_node.setdefault(v, set()).add(str(tp))
|
||||
|
||||
# 2) Convert to an AGraph and render with Graphviz
|
||||
A = to_agraph(G)
|
||||
A.graph_attr.update(rankdir=rankdir, splines="true", concentrate="false", fontsize="10")
|
||||
A.node_attr.update(shape="box", style="rounded,filled", fillcolor="lightyellow", color="#999999", fontname="Helvetica")
|
||||
A.edge_attr.update(arrowsize="0.8", color="#666666")
|
||||
|
||||
# 3) Give nodes that need named ports a record shape and label
|
||||
# Left column = input ports; middle = core label; right column = output ports
|
||||
for n in A.nodes():
|
||||
node = A.get_node(n)
|
||||
core = G.nodes[n].get("_core_label", n)
|
||||
|
||||
in_ports = sorted(in_ports_by_node.get(n, []))
|
||||
out_ports = sorted(out_ports_by_node.get(n, []))
|
||||
|
||||
# Use a record shape if the node has named ports; otherwise keep the plain box
|
||||
if in_ports or out_ports:
|
||||
def port_fields(ports):
|
||||
if not ports:
|
||||
return " " # 必须留一个空槽占位
|
||||
# One small cell per port: <p> name
|
||||
return "|".join(f"<{re.sub(r'[^A-Za-z0-9_:.|-]', '_', p)}> {p}" for p in ports)
|
||||
|
||||
left = port_fields(in_ports)
|
||||
right = port_fields(out_ports)
|
||||
|
||||
# Three columns: left (inputs) | middle (node name) | right (outputs)
|
||||
record_label = f"{{ {left} | {core} | {right} }}"
|
||||
node.attr.update(shape="record", label=record_label)
|
||||
else:
|
||||
# No named ports: plain box showing the core label
|
||||
node.attr.update(label=str(core))
|
||||
|
||||
# 4) Set headport / tailport on the edges
|
||||
# - Compass port: use the compass directly (e.g., headport="e")
|
||||
# - Named port: use the <port> name defined in the record label (same name)
|
||||
for (u, v, sp, tp) in edges_data:
|
||||
e = A.get_edge(u, v)
|
||||
|
||||
# Graphviz attributes: tail is the source, head is the target
|
||||
if sp:
|
||||
if _is_compass(sp):
|
||||
e.attr["tailport"] = sp.lower()
|
||||
else:
|
||||
# Must match the <port> name in the record label; special characters were already sanitized there
|
||||
e.attr["tailport"] = re.sub(r'[^A-Za-z0-9_:.|-]', '_', str(sp))
|
||||
|
||||
if tp:
|
||||
if _is_compass(tp):
|
||||
e.attr["headport"] = tp.lower()
|
||||
else:
|
||||
e.attr["headport"] = re.sub(r'[^A-Za-z0-9_:.|-]', '_', str(tp))
|
||||
|
||||
# Optional: tweak constraint/splines etc. if edges should hug the node borders more closely
|
||||
# e.attr["arrowhead"] = "vee"
|
||||
|
||||
# 5) Output
|
||||
A.draw(output_path, prog="dot")
|
||||
print(f" - Port-aware workflow rendered to '{output_path}'")
|
||||
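# Illustrative record label produced for a node with named ports, assuming one
# input port "labware" and one output port "VesselOut":
#   { <labware> labware | Add Reagent to Flask: DMF | <VesselOut> VesselOut }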
|
||||
|
||||
def flatten_xdl_procedure(procedure_elem: ET.Element) -> List[ET.Element]:
|
||||
"""展平嵌套的XDL程序结构"""
|
||||
flattened_operations = []
|
||||
TEMP_UNSUPPORTED_PROTOCOL = ["Purge", "Wait", "Stir", "ResetHandling"]
|
||||
|
||||
def extract_operations(element: ET.Element):
|
||||
if element.tag not in ["Prep", "Reaction", "Workup", "Purification", "Procedure"]:
|
||||
if element.tag not in TEMP_UNSUPPORTED_PROTOCOL:
|
||||
flattened_operations.append(element)
|
||||
|
||||
for child in element:
|
||||
extract_operations(child)
|
||||
|
||||
for child in procedure_elem:
|
||||
extract_operations(child)
|
||||
|
||||
return flattened_operations
|
||||
|
||||
|
||||
def parse_xdl_content(xdl_content: str) -> tuple:
|
||||
"""解析XDL内容"""
|
||||
try:
|
||||
xdl_content_cleaned = "".join(c for c in xdl_content if c.isprintable())
|
||||
root = ET.fromstring(xdl_content_cleaned)
|
||||
|
||||
synthesis_elem = root.find("Synthesis")
|
||||
if synthesis_elem is None:
|
||||
return None, None, None
|
||||
|
||||
# Parse hardware components
|
||||
hardware_elem = synthesis_elem.find("Hardware")
|
||||
hardware = []
|
||||
if hardware_elem is not None:
|
||||
hardware = [{"id": c.get("id"), "type": c.get("type")} for c in hardware_elem.findall("Component")]
|
||||
|
||||
# Parse reagents
|
||||
reagents_elem = synthesis_elem.find("Reagents")
|
||||
reagents = []
|
||||
if reagents_elem is not None:
|
||||
reagents = [{"name": r.get("name"), "role": r.get("role", "")} for r in reagents_elem.findall("Reagent")]
|
||||
|
||||
# Parse the procedure
|
||||
procedure_elem = synthesis_elem.find("Procedure")
|
||||
if procedure_elem is None:
|
||||
return None, None, None
|
||||
|
||||
flattened_operations = flatten_xdl_procedure(procedure_elem)
|
||||
return hardware, reagents, flattened_operations
|
||||
|
||||
except ET.ParseError as e:
|
||||
raise ValueError(f"Invalid XDL format: {e}")
|
||||
|
||||
|
||||
def convert_xdl_to_dict(xdl_content: str) -> Dict[str, Any]:
|
||||
"""
|
||||
Convert XDL XML into the standard dict format
|
||||
|
||||
Args:
|
||||
xdl_content: XDL XML content
|
||||
|
||||
Returns:
|
||||
Conversion result, containing the steps and labware information
|
||||
"""
|
||||
try:
|
||||
hardware, reagents, flattened_operations = parse_xdl_content(xdl_content)
|
||||
if hardware is None:
|
||||
return {"error": "Failed to parse XDL content", "success": False}
|
||||
|
||||
# Convert XDL elements to dict format
|
||||
steps_data = []
|
||||
for elem in flattened_operations:
|
||||
# Convert parameter types
|
||||
parameters = {}
|
||||
for key, val in elem.attrib.items():
|
||||
converted_val = convert_to_type(val)
|
||||
if converted_val is not None:
|
||||
parameters[key] = converted_val
|
||||
|
||||
step_dict = {
|
||||
"operation": elem.tag,
|
||||
"parameters": parameters,
|
||||
"description": elem.get("purpose", f"Operation: {elem.tag}"),
|
||||
}
|
||||
steps_data.append(step_dict)
|
||||
|
||||
# Merge hardware and reagents into a unified labware_info format
|
||||
labware_data = []
|
||||
labware_data.extend({"id": hw["id"], "type": "hardware", **hw} for hw in hardware)
|
||||
labware_data.extend({"name": reagent["name"], "type": "reagent", **reagent} for reagent in reagents)
|
||||
|
||||
return {
|
||||
"success": True,
|
||||
"steps": steps_data,
|
||||
"labware": labware_data,
|
||||
"message": f"Successfully converted XDL to dict format. Found {len(steps_data)} steps and {len(labware_data)} labware items.",
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
error_msg = f"XDL conversion failed: {str(e)}"
|
||||
logger.error(error_msg)
|
||||
return {"error": error_msg, "success": False}
|
||||
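# Illustrative example (minimal, hypothetical XDL):
#   xdl = """<XDL><Synthesis>
#     <Hardware><Component id="reactor" type="reactor"/></Hardware>
#     <Reagents><Reagent name="DMF"/></Reagents>
#     <Procedure><Add vessel="reactor" reagent="DMF" volume="10 ml"/></Procedure>
#   </Synthesis></XDL>"""
#   result = convert_xdl_to_dict(xdl)
#   # result["steps"][0] -> {"operation": "Add",
#   #                        "parameters": {"vessel": "reactor", "reagent": "DMF", "volume": 10.0},
#   #                        "description": "Operation: Add"}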
|
||||
|
||||
def create_workflow(
|
||||
steps_info: str,
|
||||
labware_info: str,
|
||||
workflow_name: str = "Generated Workflow",
|
||||
workstation_name: str = "workstation",
|
||||
workflow_description: str = "Auto-generated workflow from protocol",
|
||||
) -> Dict[str, Any]:
|
||||
"""
|
||||
Create a workflow; the input data is already in the unified dict format
|
||||
|
||||
Args:
|
||||
steps_info: step information (JSON string, already in list-of-dict format)
|
||||
labware_info: labware and reagent information (JSON string, already in list-of-dict format)
|
||||
workflow_name: workflow name
workstation_name: name of the workstation that executes the protocol
|
||||
workflow_description: workflow description
|
||||
|
||||
Returns:
|
||||
Creation result, containing the workflow UUID and details
|
||||
"""
|
||||
try:
|
||||
# Parse the JSON data directly
|
||||
steps_info_clean = extract_json_from_markdown(steps_info)
|
||||
labware_info_clean = extract_json_from_markdown(labware_info)
|
||||
|
||||
steps_data = json.loads(steps_info_clean)
|
||||
labware_data = json.loads(labware_info_clean)
|
||||
|
||||
# Process all data through the unified builder
|
||||
protocol_graph = build_protocol_graph(labware_data, steps_data, workstation_name=workstation_name)
|
||||
|
||||
# Detect the protocol type (used for labels)
|
||||
protocol_type = "bio" if any("biomek" in step.get("template", "") for step in refactored_steps) else "organic"
|
||||
|
||||
# Convert to the workflow format
|
||||
data = protocol_graph.to_dict()
|
||||
|
||||
# Convert the node format
|
||||
for i, node in enumerate(data["nodes"]):
|
||||
description = node.get("description", "")
|
||||
onode = {
|
||||
"template": node.pop("template"),
|
||||
"id": node["id"],
|
||||
"lab_node_type": node.get("lab_node_type", "Device"),
|
||||
"name": description or f"Node {i + 1}",
|
||||
"params": {"default": node},
|
||||
"handles": {},
|
||||
}
|
||||
|
||||
# Process edge connections
|
||||
for edge in data["links"]:
|
||||
if edge["source"] == node["id"]:
|
||||
source_port = edge.get("source_port", "output")
|
||||
if source_port not in onode["handles"]:
|
||||
onode["handles"][source_port] = {"type": "source"}
|
||||
|
||||
if edge["target"] == node["id"]:
|
||||
target_port = edge.get("target_port", "input")
|
||||
if target_port not in onode["handles"]:
|
||||
onode["handles"][target_port] = {"type": "target"}
|
||||
|
||||
data["nodes"][i] = onode
|
||||
|
||||
# Send to the API to create the workflow
|
||||
api_secret = configs.Lab.Key
|
||||
if not api_secret:
|
||||
return {"error": "API SecretKey is not configured", "success": False}
|
||||
|
||||
# Step 1: Create the workflow
|
||||
workflow_url = f"{configs.Lab.Api}/api/v1/workflow/"
|
||||
headers = {
|
||||
"Content-Type": "application/json",
|
||||
}
|
||||
params = {"secret_key": api_secret}
|
||||
|
||||
graph_data = {"name": workflow_name, **data}
|
||||
|
||||
logger.info(f"Creating workflow: {workflow_name}")
|
||||
response = requests.post(
|
||||
workflow_url, params=params, json=graph_data, headers=headers, timeout=configs.Lab.Timeout
|
||||
)
|
||||
response.raise_for_status()
|
||||
|
||||
workflow_info = response.json()
|
||||
|
||||
if workflow_info.get("code") != 0:
|
||||
error_msg = f"API returned an error: {workflow_info.get('msg', 'Unknown Error')}"
|
||||
logger.error(error_msg)
|
||||
return {"error": error_msg, "success": False}
|
||||
|
||||
workflow_uuid = workflow_info.get("data", {}).get("uuid")
|
||||
if not workflow_uuid:
|
||||
return {"error": "Failed to get workflow UUID from response", "success": False}
|
||||
|
||||
# Step 2: Add to the template library (optional)
|
||||
try:
|
||||
library_url = f"{configs.Lab.Api}/api/flociety/vs/workflows/library/"
|
||||
lib_payload = {
|
||||
"workflow_uuid": workflow_uuid,
|
||||
"title": workflow_name,
|
||||
"description": workflow_description,
|
||||
"labels": [protocol_type.title(), "Auto-generated"],
|
||||
}
|
||||
|
||||
library_response = requests.post(
|
||||
library_url, params=params, json=lib_payload, headers=headers, timeout=configs.Lab.Timeout
|
||||
)
|
||||
library_response.raise_for_status()
|
||||
|
||||
library_info = library_response.json()
|
||||
logger.info(f"Workflow added to library: {library_info}")
|
||||
|
||||
return {
|
||||
"success": True,
|
||||
"workflow_uuid": workflow_uuid,
|
||||
"workflow_info": workflow_info.get("data"),
|
||||
"library_info": library_info.get("data"),
|
||||
"protocol_type": protocol_type,
|
||||
"message": f"Workflow '{workflow_name}' created successfully",
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
# The workflow was still created successfully even if adding it to the library fails
|
||||
logger.warning(f"Failed to add workflow to library: {str(e)}")
|
||||
return {
|
||||
"success": True,
|
||||
"workflow_uuid": workflow_uuid,
|
||||
"workflow_info": workflow_info.get("data"),
|
||||
"protocol_type": protocol_type,
|
||||
"message": f"Workflow '{workflow_name}' created successfully (library addition failed)",
|
||||
}
|
||||
|
||||
except requests.exceptions.RequestException as e:
|
||||
error_msg = f"Network error when calling API: {str(e)}"
|
||||
logger.error(error_msg)
|
||||
return {"error": error_msg, "success": False}
|
||||
except json.JSONDecodeError as e:
|
||||
error_msg = f"JSON parsing error: {str(e)}"
|
||||
logger.error(error_msg)
|
||||
return {"error": error_msg, "success": False}
|
||||
except Exception as e:
|
||||
error_msg = f"An unexpected error occurred: {str(e)}"
|
||||
logger.error(error_msg)
|
||||
logger.error(traceback.format_exc())
|
||||
return {"error": error_msg, "success": False}
|
||||
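# Illustrative call (hypothetical JSON payloads; requires the `configs` object to be set up):
#   result = create_workflow(
#       steps_info='[{"action": "Add", "action_args": {"vessel": "reactor", "reagent": "DMF"}}]',
#       labware_info='{"reactor": {"type": "hardware"}, "DMF": {"type": "reagent"}}',
#       workflow_name="Demo Workflow",
#       workstation_name="workstation",
#   )
#   # On success: result["success"] is True and result["workflow_uuid"] holds the new UUID.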
7
setup.py
@@ -4,13 +4,14 @@ package_name = 'unilabos'
|
||||
|
||||
setup(
|
||||
name=package_name,
|
||||
version='0.10.6',
|
||||
version='0.10.10',
|
||||
packages=find_packages(),
|
||||
include_package_data=True,
|
||||
install_requires=['setuptools'],
|
||||
zip_safe=True,
|
||||
maintainer='Junhan Chang',
|
||||
maintainer_email='changjh@pku.edu.cn',
|
||||
author="The unilabos developers",
|
||||
maintainer='Junhan Chang, Xuwznln',
|
||||
maintainer_email='Junhan Chang <changjh@pku.edu.cn>, Xuwznln <18435084+Xuwznln@users.noreply.github.com>',
|
||||
description='',
|
||||
license='GPL v3',
|
||||
tests_require=['pytest'],
|
||||
|
||||
171
test/experiments/ICCAS506.json
Normal file
@@ -0,0 +1,171 @@
|
||||
{
|
||||
"nodes": [
|
||||
{
|
||||
"id": "dispensing_station_bioyond",
|
||||
"name": "dispensing_station_bioyond",
|
||||
"children": [
|
||||
"Bioyond_Dispensing_Deck"
|
||||
],
|
||||
"parent": null,
|
||||
"type": "device",
|
||||
"class": "bioyond_dispensing_station",
|
||||
"config": {
|
||||
"config": {
|
||||
"api_key": "DE9BDDA0",
|
||||
"api_host": "http://192.168.1.200:44400",
|
||||
"material_type_mappings": {
|
||||
"BIOYOND_PolymerStation_1FlaskCarrier": [
|
||||
"烧杯",
|
||||
"3a14196b-24f2-ca49-9081-0cab8021bf1a"
|
||||
],
|
||||
"BIOYOND_PolymerStation_1BottleCarrier": [
|
||||
"试剂瓶",
|
||||
"3a14196b-8bcf-a460-4f74-23f21ca79e72"
|
||||
],
|
||||
"BIOYOND_PolymerStation_6StockCarrier": [
|
||||
"分装板",
|
||||
"3a14196e-5dfe-6e21-0c79-fe2036d052c4"
|
||||
],
|
||||
"BIOYOND_PolymerStation_Liquid_Vial": [
|
||||
"10%分装小瓶",
|
||||
"3a14196c-76be-2279-4e22-7310d69aed68"
|
||||
],
|
||||
"BIOYOND_PolymerStation_Solid_Vial": [
|
||||
"90%分装小瓶",
|
||||
"3a14196c-cdcf-088d-dc7d-5cf38f0ad9ea"
|
||||
],
|
||||
"BIOYOND_PolymerStation_8StockCarrier": [
|
||||
"样品板",
|
||||
"3a14196e-b7a0-a5da-1931-35f3000281e9"
|
||||
],
|
||||
"BIOYOND_PolymerStation_Solid_Stock": [
|
||||
"样品瓶",
|
||||
"3a14196a-cf7d-8aea-48d8-b9662c7dba94"
|
||||
]
|
||||
}
|
||||
},
|
||||
"deck": {
|
||||
"data": {
|
||||
"_resource_child_name": "Bioyond_Dispensing_Deck",
|
||||
"_resource_type": "unilabos.resources.bioyond.decks:BIOYOND_PolymerPreparationStation_Deck"
|
||||
}
|
||||
},
|
||||
"protocol_type": []
|
||||
},
|
||||
"data": {}
|
||||
},
|
||||
{
|
||||
"id": "Bioyond_Dispensing_Deck",
|
||||
"name": "Bioyond_Dispensing_Deck",
|
||||
"sample_id": null,
|
||||
"children": [],
|
||||
"parent": "dispensing_station_bioyond",
|
||||
"type": "deck",
|
||||
"class": "BIOYOND_PolymerPreparationStation_Deck",
|
||||
"position": {
|
||||
"x": 0,
|
||||
"y": 0,
|
||||
"z": 0
|
||||
},
|
||||
"config": {
|
||||
"type": "BIOYOND_PolymerPreparationStation_Deck",
|
||||
"setup": true,
|
||||
"rotation": {
|
||||
"x": 0,
|
||||
"y": 0,
|
||||
"z": 0,
|
||||
"type": "Rotation"
|
||||
}
|
||||
},
|
||||
"data": {}
|
||||
},
|
||||
{
|
||||
"id": "reaction_station_bioyond",
|
||||
"name": "reaction_station_bioyond",
|
||||
"parent": null,
|
||||
"children": [
|
||||
"Bioyond_Deck"
|
||||
],
|
||||
"type": "device",
|
||||
"class": "reaction_station.bioyond",
|
||||
"config": {
|
||||
"config": {
|
||||
"api_key": "DE9BDDA0",
|
||||
"api_host": "http://192.168.1.200:44402",
|
||||
"workflow_mappings": {
|
||||
"reactor_taken_out": "3a16081e-4788-ca37-eff4-ceed8d7019d1",
|
||||
"reactor_taken_in": "3a160df6-76b3-0957-9eb0-cb496d5721c6",
|
||||
"Solid_feeding_vials": "3a160877-87e7-7699-7bc6-ec72b05eb5e6",
|
||||
"Liquid_feeding_vials(non-titration)": "3a167d99-6158-c6f0-15b5-eb030f7d8e47",
|
||||
"Liquid_feeding_solvents": "3a160824-0665-01ed-285a-51ef817a9046",
|
||||
"Liquid_feeding(titration)": "3a16082a-96ac-0449-446a-4ed39f3365b6",
|
||||
"liquid_feeding_beaker": "3a16087e-124f-8ddb-8ec1-c2dff09ca784",
|
||||
"Drip_back": "3a162cf9-6aac-565a-ddd7-682ba1796a4a"
|
||||
},
|
||||
"material_type_mappings": {
|
||||
"BIOYOND_PolymerStation_Reactor": [
|
||||
"反应器",
|
||||
"3a14233b-902d-0d7b-4533-3f60f1c41c1b"
|
||||
],
|
||||
"BIOYOND_PolymerStation_1BottleCarrier": [
|
||||
"试剂瓶",
|
||||
"3a14233b-56e3-6c53-a8ab-fcaac163a9ba"
|
||||
],
|
||||
"BIOYOND_PolymerStation_1FlaskCarrier": [
|
||||
"烧杯",
|
||||
"3a14233b-f0a9-ba84-eaa9-0d4718b361b6"
|
||||
],
|
||||
"BIOYOND_PolymerStation_6StockCarrier": [
|
||||
"样品板",
|
||||
"3a142339-80de-8f25-6093-1b1b1b6c322e"
|
||||
],
|
||||
"BIOYOND_PolymerStation_Solid_Vial": [
|
||||
"90%分装小瓶",
|
||||
"3a14233a-26e1-28f8-af6a-60ca06ba0165"
|
||||
],
|
||||
"BIOYOND_PolymerStation_Liquid_Vial": [
|
||||
"10%分装小瓶",
|
||||
"3a14233a-84a3-088d-6676-7cb4acd57c64"
|
||||
],
|
||||
"BIOYOND_PolymerStation_TipBox": [
|
||||
"枪头盒",
|
||||
"3a143890-9d51-60ac-6d6f-6edb43c12041"
|
||||
]
|
||||
}
|
||||
},
|
||||
"deck": {
|
||||
"data": {
|
||||
"_resource_child_name": "Bioyond_Deck",
|
||||
"_resource_type": "unilabos.resources.bioyond.decks:BIOYOND_PolymerReactionStation_Deck"
|
||||
}
|
||||
},
|
||||
"protocol_type": []
|
||||
},
|
||||
"data": {}
|
||||
},
|
||||
{
|
||||
"id": "Bioyond_Deck",
|
||||
"name": "Bioyond_Deck",
|
||||
"children": [],
|
||||
"parent": "reaction_station_bioyond",
|
||||
"type": "deck",
|
||||
"class": "BIOYOND_PolymerReactionStation_Deck",
|
||||
"position": {
|
||||
"x": 0,
|
||||
"y": 0,
|
||||
"z": 0
|
||||
},
|
||||
"config": {
|
||||
"type": "BIOYOND_PolymerReactionStation_Deck",
|
||||
"setup": true,
|
||||
"rotation": {
|
||||
"x": 0,
|
||||
"y": 0,
|
||||
"z": 0,
|
||||
"type": "Rotation"
|
||||
}
|
||||
},
|
||||
"data": {}
|
||||
}
|
||||
]
|
||||
}
|
||||
@@ -170,15 +170,16 @@
|
||||
"z": 0
|
||||
},
|
||||
"config": {
|
||||
"max_volume": 1000.0
|
||||
"max_volume": 1000.0,
|
||||
"type": "RegularContainer",
|
||||
"category": "container",
|
||||
"size_x": 200,
|
||||
"size_y": 150,
|
||||
"size_z": 0
|
||||
},
|
||||
"data": {
|
||||
"liquids": [
|
||||
{
|
||||
"liquid_type": "DMF",
|
||||
"liquid_volume": 1000.0
|
||||
}
|
||||
]
|
||||
"liquids": [["DMF", 500.0]],
|
||||
"pending_liquids": [["DMF", 500.0]]
|
||||
}
|
||||
},
|
||||
{
|
||||
@@ -194,15 +195,16 @@
|
||||
"z": 0
|
||||
},
|
||||
"config": {
|
||||
"max_volume": 1000.0
|
||||
"max_volume": 1000.0,
|
||||
"type": "RegularContainer",
|
||||
"category": "container",
|
||||
"size_x": 200,
|
||||
"size_y": 150,
|
||||
"size_z": 0
|
||||
},
|
||||
"data": {
|
||||
"liquids": [
|
||||
{
|
||||
"liquid_type": "ethyl_acetate",
|
||||
"liquid_volume": 1000.0
|
||||
}
|
||||
]
|
||||
"liquids": [["ethyl_acetate", 1000.0]],
|
||||
"pending_liquids": [["ethyl_acetate", 1000.0]]
|
||||
}
|
||||
},
|
||||
{
|
||||
@@ -218,15 +220,16 @@
|
||||
"z": 0
|
||||
},
|
||||
"config": {
|
||||
"max_volume": 1000.0
|
||||
"max_volume": 1000.0,
|
||||
"type": "RegularContainer",
|
||||
"category": "container",
|
||||
"size_x": 300,
|
||||
"size_y": 150,
|
||||
"size_z": 0
|
||||
},
|
||||
"data": {
|
||||
"liquids": [
|
||||
{
|
||||
"liquid_type": "hexane",
|
||||
"liquid_volume": 1000.0
|
||||
}
|
||||
]
|
||||
"liquids": [["hexane", 1000.0]],
|
||||
"pending_liquids": [["hexane", 1000.0]]
|
||||
}
|
||||
},
|
||||
{
|
||||
@@ -242,15 +245,16 @@
|
||||
"z": 0
|
||||
},
|
||||
"config": {
|
||||
"max_volume": 1000.0
|
||||
"max_volume": 1000.0,
|
||||
"type": "RegularContainer",
|
||||
"category": "container",
|
||||
"size_x": 900,
|
||||
"size_y": 150,
|
||||
"size_z": 0
|
||||
},
|
||||
"data": {
|
||||
"liquids": [
|
||||
{
|
||||
"liquid_type": "methanol",
|
||||
"liquid_volume": 1000.0
|
||||
}
|
||||
]
|
||||
"liquids": [["methanol", 1000.0]],
|
||||
"pending_liquids": [["methanol", 1000.0]]
|
||||
}
|
||||
},
|
||||
{
|
||||
@@ -266,15 +270,16 @@
|
||||
"z": 0
|
||||
},
|
||||
"config": {
|
||||
"max_volume": 1000.0
|
||||
"max_volume": 1000.0,
|
||||
"type": "RegularContainer",
|
||||
"category": "container",
|
||||
"size_x": 950,
|
||||
"size_y": 150,
|
||||
"size_z": 0
|
||||
},
|
||||
"data": {
|
||||
"liquids": [
|
||||
{
|
||||
"liquid_type": "water",
|
||||
"liquid_volume": 1000.0
|
||||
}
|
||||
]
|
||||
"liquids": [["water", 1000.0]],
|
||||
"pending_liquids": [["water", 1000.0]]
|
||||
}
|
||||
},
|
||||
{
|
||||
@@ -335,14 +340,16 @@
|
||||
},
|
||||
"config": {
|
||||
"max_volume": 500.0,
|
||||
"type": "RegularContainer",
|
||||
"category": "container",
|
||||
"max_temp": 200.0,
|
||||
"min_temp": -20.0,
|
||||
"has_stirrer": true,
|
||||
"has_heater": true
|
||||
},
|
||||
"data": {
|
||||
"liquids": [
|
||||
]
|
||||
"liquids": [],
|
||||
"pending_liquids": []
|
||||
}
|
||||
},
|
||||
{
|
||||
@@ -419,11 +426,16 @@
|
||||
"z": 0
|
||||
},
|
||||
"config": {
|
||||
"max_volume": 2000.0
|
||||
"max_volume": 2000.0,
|
||||
"type": "RegularContainer",
|
||||
"category": "container",
|
||||
"size_x": 500,
|
||||
"size_y": 400,
|
||||
"size_z": 0
|
||||
},
|
||||
"data": {
|
||||
"liquids": [
|
||||
]
|
||||
"liquids": [],
|
||||
"pending_liquids": []
|
||||
}
|
||||
},
|
||||
{
|
||||
@@ -439,11 +451,16 @@
|
||||
"z": 0
|
||||
},
|
||||
"config": {
|
||||
"max_volume": 2000.0
|
||||
"max_volume": 2000.0,
|
||||
"type": "RegularContainer",
|
||||
"category": "container",
|
||||
"size_x": 1100,
|
||||
"size_y": 500,
|
||||
"size_z": 0
|
||||
},
|
||||
"data": {
|
||||
"liquids": [
|
||||
]
|
||||
"liquids": [],
|
||||
"pending_liquids": []
|
||||
}
|
||||
},
|
||||
{
|
||||
@@ -649,11 +666,16 @@
|
||||
"z": 0
|
||||
},
|
||||
"config": {
|
||||
"max_volume": 250.0
|
||||
"max_volume": 250.0,
|
||||
"type": "RegularContainer",
|
||||
"category": "container",
|
||||
"size_x": 900,
|
||||
"size_y": 500,
|
||||
"size_z": 0
|
||||
},
|
||||
"data": {
|
||||
"liquids": [
|
||||
]
|
||||
"liquids": [],
|
||||
"pending_liquids": []
|
||||
}
|
||||
},
|
||||
{
|
||||
@@ -669,11 +691,16 @@
|
||||
"z": 0
|
||||
},
|
||||
"config": {
|
||||
"max_volume": 250.0
|
||||
"max_volume": 250.0,
|
||||
"type": "RegularContainer",
|
||||
"category": "container",
|
||||
"size_x": 950,
|
||||
"size_y": 500,
|
||||
"size_z": 0
|
||||
},
|
||||
"data": {
|
||||
"liquids": [
|
||||
]
|
||||
"liquids": [],
|
||||
"pending_liquids": []
|
||||
}
|
||||
},
|
||||
{
|
||||
@@ -689,11 +716,16 @@
|
||||
"z": 0
|
||||
},
|
||||
"config": {
|
||||
"max_volume": 250.0
|
||||
"max_volume": 250.0,
|
||||
"type": "RegularContainer",
|
||||
"category": "container",
|
||||
"size_x": 1050,
|
||||
"size_y": 500,
|
||||
"size_z": 0
|
||||
},
|
||||
"data": {
|
||||
"liquids": [
|
||||
]
|
||||
"liquids": [],
|
||||
"pending_liquids": []
|
||||
}
|
||||
},
|
||||
{
|
||||
@@ -733,6 +765,11 @@
|
||||
},
|
||||
"config": {
|
||||
"max_volume": 500.0,
|
||||
"size_x": 550,
|
||||
"size_y": 250,
|
||||
"size_z": 0,
|
||||
"type": "RegularContainer",
|
||||
"category": "container",
|
||||
"reagent": "sodium_chloride",
|
||||
"physical_state": "solid"
|
||||
},
|
||||
@@ -756,6 +793,11 @@
|
||||
},
|
||||
"config": {
|
||||
"volume": 500.0,
|
||||
"size_x": 600,
|
||||
"size_y": 250,
|
||||
"size_z": 0,
|
||||
"type": "RegularContainer",
|
||||
"category": "container",
|
||||
"reagent": "sodium_carbonate",
|
||||
"physical_state": "solid"
|
||||
},
|
||||
@@ -779,6 +821,11 @@
|
||||
},
|
||||
"config": {
|
||||
"volume": 500.0,
|
||||
"size_x": 650,
|
||||
"size_y": 250,
|
||||
"size_z": 0,
|
||||
"type": "RegularContainer",
|
||||
"category": "container",
|
||||
"reagent": "magnesium_chloride",
|
||||
"physical_state": "solid"
|
||||
},
|
||||
|
||||
@@ -8,11 +8,41 @@
|
||||
],
|
||||
"parent": null,
|
||||
"type": "device",
|
||||
"class": "dispensing_station.bioyond",
|
||||
"class": "bioyond_dispensing_station",
|
||||
"config": {
|
||||
"config": {
|
||||
"api_key": "DE9BDDA0",
|
||||
"api_host": "http://192.168.1.200:44388"
|
||||
"api_host": "http://192.168.1.200:44388",
|
||||
"material_type_mappings": {
|
||||
"BIOYOND_PolymerStation_1FlaskCarrier": [
|
||||
"烧杯",
|
||||
"3a14196b-24f2-ca49-9081-0cab8021bf1a"
|
||||
],
|
||||
"BIOYOND_PolymerStation_1BottleCarrier": [
|
||||
"试剂瓶",
|
||||
"3a14196b-8bcf-a460-4f74-23f21ca79e72"
|
||||
],
|
||||
"BIOYOND_PolymerStation_6StockCarrier": [
|
||||
"分装板",
|
||||
"3a14196e-5dfe-6e21-0c79-fe2036d052c4"
|
||||
],
|
||||
"BIOYOND_PolymerStation_Liquid_Vial": [
|
||||
"10%分装小瓶",
|
||||
"3a14196c-76be-2279-4e22-7310d69aed68"
|
||||
],
|
||||
"BIOYOND_PolymerStation_Solid_Vial": [
|
||||
"90%分装小瓶",
|
||||
"3a14196c-cdcf-088d-dc7d-5cf38f0ad9ea"
|
||||
],
|
||||
"BIOYOND_PolymerStation_8StockCarrier": [
|
||||
"样品板",
|
||||
"3a14196e-b7a0-a5da-1931-35f3000281e9"
|
||||
],
|
||||
"BIOYOND_PolymerStation_Solid_Stock": [
|
||||
"样品瓶",
|
||||
"3a14196a-cf7d-8aea-48d8-b9662c7dba94"
|
||||
]
|
||||
}
|
||||
},
|
||||
"deck": {
|
||||
"data": {
|
||||
@@ -20,13 +50,6 @@
|
||||
"_resource_type": "unilabos.resources.bioyond.decks:BIOYOND_PolymerPreparationStation_Deck"
|
||||
}
|
||||
},
|
||||
"station_config": {
|
||||
"station_type": "dispensing_station",
|
||||
"enable_dispensing_station": true,
|
||||
"enable_reaction_station": false,
|
||||
"station_name": "DispensingStation_001",
|
||||
"description": "Bioyond配液工作站"
|
||||
},
|
||||
"protocol_type": []
|
||||
},
|
||||
"data": {}
|
||||
|
||||
148
test/experiments/laiyu_liquid.json
Normal file
@@ -0,0 +1,148 @@
|
||||
{
|
||||
"nodes": [
|
||||
{
|
||||
"id": "laiyu_liquid_station",
|
||||
"name": "LaiYu液体处理工作站",
|
||||
"children": [
|
||||
"module_1_8tubes",
|
||||
"module_2_96well_deep",
|
||||
"module_3_beaker",
|
||||
"module_4_96well_tips"
|
||||
],
|
||||
"parent": null,
|
||||
"type": "device",
|
||||
"class": "laiyu_liquid",
|
||||
"position": {
|
||||
"x": 500,
|
||||
"y": 200,
|
||||
"z": 0
|
||||
},
|
||||
"config": {
|
||||
"total_modules": 4,
|
||||
"total_wells": 201,
|
||||
"safety_margin": {
|
||||
"x": 5.0,
|
||||
"y": 5.0,
|
||||
"z": 5.0
|
||||
},
|
||||
"protocol_type": ["LiquidHandlingProtocol", "PipettingProtocol", "TransferProtocol"]
|
||||
},
|
||||
"data": {
|
||||
"status": "Ready",
|
||||
"version": "1.0"
|
||||
}
|
||||
},
|
||||
{
|
||||
"id": "module_1_8tubes",
|
||||
"name": "8管位置模块",
|
||||
"children": [],
|
||||
"parent": "laiyu_liquid_station",
|
||||
"type": "container",
|
||||
"class": "opentrons_24_tuberack_nest_1point5ml_snapcap",
|
||||
"position": {
|
||||
"x": 100,
|
||||
"y": 100,
|
||||
"z": 0
|
||||
},
|
||||
"config": {
|
||||
"module_type": "tube_rack",
|
||||
"wells_count": 8,
|
||||
"well_diameter": 29.0,
|
||||
"well_depth": 117.0,
|
||||
"well_volume": 77000.0,
|
||||
"well_shape": "circular",
|
||||
"layout": "2x4"
|
||||
},
|
||||
"data": {
|
||||
"max_volume": 77000.0,
|
||||
"current_volume": 0.0,
|
||||
"wells": ["A1", "A2", "A3", "A4", "B1", "B2", "B3", "B4"]
|
||||
}
|
||||
},
|
||||
{
|
||||
"id": "module_2_96well_deep",
|
||||
"name": "96深孔板",
|
||||
"children": [],
|
||||
"parent": "laiyu_liquid_station",
|
||||
"type": "plate",
|
||||
"class": "nest_96_wellplate_2ml_deep",
|
||||
"position": {
|
||||
"x": 300,
|
||||
"y": 100,
|
||||
"z": 0
|
||||
},
|
||||
"config": {
|
||||
"module_type": "96_well_deep_plate",
|
||||
"wells_count": 96,
|
||||
"well_diameter": 8.2,
|
||||
"well_depth": 39.4,
|
||||
"well_volume": 2080.0,
|
||||
"well_shape": "circular",
|
||||
"layout": "8x12"
|
||||
},
|
||||
"data": {
|
||||
"max_volume": 2080.0,
|
||||
"current_volume": 0.0,
|
||||
"plate_type": "deep_well"
|
||||
}
|
||||
},
|
||||
{
|
||||
"id": "module_3_beaker",
|
||||
"name": "敞口玻璃瓶",
|
||||
"children": [],
|
||||
"parent": "laiyu_liquid_station",
|
||||
"type": "container",
|
||||
"class": "container",
|
||||
"position": {
|
||||
"x": 500,
|
||||
"y": 100,
|
||||
"z": 0
|
||||
},
|
||||
"config": {
|
||||
"module_type": "beaker_holder",
|
||||
"wells_count": 1,
|
||||
"well_diameter": 85.0,
|
||||
"well_depth": 120.0,
|
||||
"well_volume": 500000.0,
|
||||
"well_shape": "circular",
|
||||
"supported_containers": ["250ml", "500ml", "1000ml"]
|
||||
},
|
||||
"data": {
|
||||
"max_volume": 500000.0,
|
||||
"current_volume": 0.0,
|
||||
"container_type": "beaker",
|
||||
"wells": ["A1"]
|
||||
}
|
||||
},
|
||||
{
|
||||
"id": "module_4_96well_tips",
|
||||
"name": "96吸头架",
|
||||
"children": [],
|
||||
"parent": "laiyu_liquid_station",
|
||||
"type": "container",
|
||||
"class": "tip_rack",
|
||||
"position": {
|
||||
"x": 700,
|
||||
"y": 100,
|
||||
"z": 0
|
||||
},
|
||||
"config": {
|
||||
"module_type": "tip_rack",
|
||||
"wells_count": 96,
|
||||
"well_diameter": 8.2,
|
||||
"well_depth": 60.0,
|
||||
"well_volume": 6000.0,
|
||||
"well_shape": "circular",
|
||||
"layout": "8x12",
|
||||
"tip_type": "standard"
|
||||
},
|
||||
"data": {
|
||||
"max_volume": 6000.0,
|
||||
"current_volume": 0.0,
|
||||
"tip_capacity": "1000μL",
|
||||
"tips_available": 96
|
||||
}
|
||||
}
|
||||
],
|
||||
"links": []
|
||||
}
|
||||
19
test/experiments/opcua_example.json
Normal file
@@ -0,0 +1,19 @@
|
||||
{
|
||||
"nodes": [
|
||||
{
|
||||
"id": "id",
|
||||
"name": "name",
|
||||
"children": [
|
||||
],
|
||||
"parent": null,
|
||||
"type": "device",
|
||||
"class": "opcua_example",
|
||||
"config": {
|
||||
"url": "url",
|
||||
"config_path": "unilabos/device_comms/opcua_client/opcua_workflow_example.json"
|
||||
},
|
||||
"data": {
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
@@ -10,24 +10,22 @@
|
||||
"x": 620.6111111111111,
|
||||
"y": 171,
|
||||
"z": 0
|
||||
},
|
||||
"config": {
|
||||
"data": {
|
||||
"children": [
|
||||
{
|
||||
"_resource_child_name": "deck",
|
||||
"_resource_type": "pylabrobot.resources.opentrons.deck:OTDeck"
|
||||
}
|
||||
],
|
||||
"backend": {
|
||||
"type": "LiquidHandlerRvizBackend"
|
||||
}
|
||||
}
|
||||
},
|
||||
},
|
||||
"data": {},
|
||||
"children": [
|
||||
"deck"
|
||||
]
|
||||
],
|
||||
"config": {
|
||||
"deck": {
|
||||
"_resource_child_name": "deck",
|
||||
"_resource_type": "pylabrobot.resources.opentrons.deck:OTDeck"
|
||||
},
|
||||
"backend": {
|
||||
"type": "UniLiquidHandlerRvizBackend"
|
||||
},
|
||||
"simulator": true
|
||||
}
|
||||
|
||||
},
|
||||
{
|
||||
"id": "deck",
|
||||
@@ -9650,7 +9648,7 @@
|
||||
"children": [],
|
||||
"parent": null,
|
||||
"type": "device",
|
||||
"class": "robotic_arm.SCARA_with_slider.virtual",
|
||||
"class": "robotic_arm.SCARA_with_slider.moveit.virtual",
|
||||
"position": {
|
||||
"x": -500,
|
||||
"y": 1000,
|
||||
|
||||
@@ -21,7 +21,7 @@
|
||||
"timeout": 10.0,
|
||||
"axis": "Left",
|
||||
"channel_num": 8,
|
||||
"setup": true,
|
||||
"setup": false,
|
||||
"debug": true,
|
||||
"simulator": true,
|
||||
"matrix_id": "71593"
|
||||
|
||||
@@ -10,7 +10,7 @@
|
||||
"type": "device",
|
||||
"class": "reaction_station.bioyond",
|
||||
"config": {
|
||||
"bioyond_config": {
|
||||
"config": {
|
||||
"api_key": "DE9BDDA0",
|
||||
"api_host": "http://192.168.1.200:44402",
|
||||
"workflow_mappings": {
|
||||
@@ -19,14 +19,39 @@
|
||||
"Solid_feeding_vials": "3a160877-87e7-7699-7bc6-ec72b05eb5e6",
|
||||
"Liquid_feeding_vials(non-titration)": "3a167d99-6158-c6f0-15b5-eb030f7d8e47",
|
||||
"Liquid_feeding_solvents": "3a160824-0665-01ed-285a-51ef817a9046",
|
||||
"Liquid_feeding(titration)": "3a160824-0665-01ed-285a-51ef817a9046",
|
||||
"Liquid_feeding_beaker": "3a16087e-124f-8ddb-8ec1-c2dff09ca784",
|
||||
"Liquid_feeding(titration)": "3a16082a-96ac-0449-446a-4ed39f3365b6",
|
||||
"liquid_feeding_beaker": "3a16087e-124f-8ddb-8ec1-c2dff09ca784",
|
||||
"Drip_back": "3a162cf9-6aac-565a-ddd7-682ba1796a4a"
|
||||
},
|
||||
"material_type_mappings": {
|
||||
"烧杯": "BIOYOND_PolymerStation_1FlaskCarrier",
|
||||
"试剂瓶": "BIOYOND_PolymerStation_1BottleCarrier",
|
||||
"样品板": "BIOYOND_PolymerStation_6VialCarrier"
|
||||
"BIOYOND_PolymerStation_Reactor": [
|
||||
"反应器",
|
||||
"3a14233b-902d-0d7b-4533-3f60f1c41c1b"
|
||||
],
|
||||
"BIOYOND_PolymerStation_1BottleCarrier": [
|
||||
"试剂瓶",
|
||||
"3a14233b-56e3-6c53-a8ab-fcaac163a9ba"
|
||||
],
|
||||
"BIOYOND_PolymerStation_1FlaskCarrier": [
|
||||
"烧杯",
|
||||
"3a14233b-f0a9-ba84-eaa9-0d4718b361b6"
|
||||
],
|
||||
"BIOYOND_PolymerStation_6StockCarrier": [
|
||||
"样品板",
|
||||
"3a142339-80de-8f25-6093-1b1b1b6c322e"
|
||||
],
|
||||
"BIOYOND_PolymerStation_Solid_Vial": [
|
||||
"90%分装小瓶",
|
||||
"3a14233a-26e1-28f8-af6a-60ca06ba0165"
|
||||
],
|
||||
"BIOYOND_PolymerStation_Liquid_Vial": [
|
||||
"10%分装小瓶",
|
||||
"3a14233a-84a3-088d-6676-7cb4acd57c64"
|
||||
],
|
||||
"BIOYOND_PolymerStation_TipBox": [
|
||||
"枪头盒",
|
||||
"3a143890-9d51-60ac-6d6f-6edb43c12041"
|
||||
]
|
||||
}
|
||||
},
|
||||
"deck": {
|
||||
@@ -42,9 +67,7 @@
|
||||
{
|
||||
"id": "Bioyond_Deck",
|
||||
"name": "Bioyond_Deck",
|
||||
"sample_id": null,
|
||||
"children": [
|
||||
],
|
||||
"children": [],
|
||||
"parent": "reaction_station_bioyond",
|
||||
"type": "deck",
|
||||
"class": "BIOYOND_PolymerReactionStation_Deck",
|
||||
|
||||
382
test/experiments/test_laiyu.json
Normal file
@@ -0,0 +1,382 @@
|
||||
{
|
||||
"nodes": [
|
||||
{
|
||||
"id": "liquid_handler",
|
||||
"name": "liquid_handler",
|
||||
"parent": null,
|
||||
"type": "device",
|
||||
"class": "liquid_handler",
|
||||
"position": {
|
||||
"x": 0,
|
||||
"y": 0,
|
||||
"z": 0
|
||||
},
|
||||
"data": {},
|
||||
"children": [
|
||||
"deck"
|
||||
],
|
||||
"config": {
|
||||
"deck": {
|
||||
"_resource_child_name": "deck",
|
||||
"_resource_type": "unilabos.devices.liquid_handling.laiyu.laiyu:TransformXYZDeck",
|
||||
"name": "deck"
|
||||
},
|
||||
"backend": {
|
||||
"type": "UniLiquidHandlerLaiyuBackend",
|
||||
"port": "/dev/ttyUSB_CH340"
|
||||
},
|
||||
"simulator": false,
|
||||
"total_height": 232.5
|
||||
}
|
||||
},
|
||||
{
|
||||
"id": "deck",
|
||||
"name": "deck",
|
||||
|
||||
"children": [
|
||||
"tip_rack",
|
||||
"plate_well",
|
||||
"tube_rack",
|
||||
"bottle_rack"
|
||||
],
|
||||
"parent": "liquid_handler",
|
||||
"type": "deck",
|
||||
"class": "TransformXYZDeck",
|
||||
"position": {
|
||||
"x": 0,
|
||||
"y": 0,
|
||||
"z": 18
|
||||
},
|
||||
"config": {
|
||||
"type": "TransformXYZDeck",
|
||||
"size_x": 624.3,
|
||||
"size_y": 565.2,
|
||||
"size_z": 900,
|
||||
"rotation": {
|
||||
"x": 0,
|
||||
"y": 0,
|
||||
"z": 0,
|
||||
"type": "Rotation"
|
||||
}
|
||||
},
|
||||
"data": {}
|
||||
},
|
||||
{
|
||||
"id": "tip_rack",
|
||||
"name": "tip_rack",
|
||||
|
||||
"children": [
|
||||
"tip_rack_A1"
|
||||
],
|
||||
"parent": "deck",
|
||||
"type": "tip_rack",
|
||||
"class": "tiprack_box",
|
||||
"position": {
|
||||
"x": 150,
|
||||
"y": 7,
|
||||
"z": 103
|
||||
},
|
||||
"config": {
|
||||
"type": "TipRack",
|
||||
"size_x": 134,
|
||||
"size_y": 96,
|
||||
"size_z": 7.0,
|
||||
"rotation": {
|
||||
"x": 0,
|
||||
"y": 0,
|
||||
"z": 0,
|
||||
"type": "Rotation"
|
||||
},
|
||||
"category": "tip_rack",
|
||||
"model": "tiprack_box",
|
||||
"ordering": [
|
||||
"A1"
|
||||
]
|
||||
},
|
||||
"data": {}
|
||||
},
|
||||
|
||||
|
||||
|
||||
|
||||
{
|
||||
"id": "tip_rack_A1",
|
||||
"name": "tip_rack_A1",
|
||||
|
||||
"children": [],
|
||||
"parent": "tip_rack",
|
||||
"type": "container",
|
||||
"class": "",
|
||||
"position": {
|
||||
"x": 11.12,
|
||||
"y": 75,
|
||||
"z": -91.54
|
||||
},
|
||||
"config": {
|
||||
"type": "TipSpot",
|
||||
"size_x": 9,
|
||||
"size_y": 9,
|
||||
"size_z": 95,
|
||||
"rotation": {
|
||||
"x": 0,
|
||||
"y": 0,
|
||||
"z": 0,
|
||||
"type": "Rotation"
|
||||
},
|
||||
"category": "tip_spot",
|
||||
"model": null,
|
||||
"prototype_tip": {
|
||||
"type": "Tip",
|
||||
"total_tip_length": 95,
|
||||
"has_filter": false,
|
||||
"maximal_volume": 1000.0,
|
||||
"fitting_depth": 3.29
|
||||
}
|
||||
},
|
||||
"data": {
|
||||
"tip": null,
|
||||
"tip_state": null,
|
||||
"pending_tip": null
|
||||
}
|
||||
},
|
||||
|
||||
|
||||
{
|
||||
"id": "plate_well",
|
||||
"name": "plate_well",
|
||||
|
||||
"children": [
|
||||
"plate_well_A1"
|
||||
],
|
||||
"parent": "deck",
|
||||
"type": "plate",
|
||||
"class": "plate_96",
|
||||
"position": {
|
||||
"x": 161,
|
||||
"y": 116,
|
||||
"z": 48.5
|
||||
},
|
||||
"config": {
|
||||
"type": "Plate",
|
||||
"size_x": 127.76,
|
||||
"size_y": 85.48,
|
||||
"size_z": 45.5,
|
||||
"rotation": {
|
||||
"x": 0,
|
||||
"y": 0,
|
||||
"z": 0,
|
||||
"type": "Rotation"
|
||||
},
|
||||
"category": "plate",
|
||||
"model": "plate_96",
|
||||
"ordering": [
|
||||
"A1"
|
||||
]
|
||||
},
|
||||
"data": {}
|
||||
},
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
{
|
||||
"id": "plate_well_A1",
|
||||
"name": "plate_well_A1",
|
||||
|
||||
"children": [],
|
||||
"parent": "plate_well",
|
||||
"type": "device",
|
||||
"class": "",
|
||||
"position": {
|
||||
"x": 10.1,
|
||||
"y": 70,
|
||||
"z": 6.1
|
||||
},
|
||||
"config": {
|
||||
"type": "Well",
|
||||
"size_x": 8.2,
|
||||
"size_y": 8.2,
|
||||
"size_z": 38,
|
||||
"rotation": {
|
||||
"x": 0,
|
||||
"y": 0,
|
||||
"z": 0,
|
||||
"type": "Rotation"
|
||||
},
|
||||
"category": "well",
|
||||
"model": null,
|
||||
"max_volume": 2000,
|
||||
"material_z_thickness": null,
|
||||
"compute_volume_from_height": null,
|
||||
"compute_height_from_volume": null,
|
||||
"bottom_type": "unknown",
|
||||
"cross_section_type": "rectangle"
|
||||
},
|
||||
"data": {
|
||||
"liquids": [["water", 50.0]],
|
||||
"pending_liquids": [["water", 50.0]],
|
||||
"liquid_history": []
|
||||
}
|
||||
},
|
||||
|
||||
|
||||
{
|
||||
"id": "tube_rack",
|
||||
"name": "tube_rack",
|
||||
|
||||
"children": [
|
||||
"tube_rack_A1"
|
||||
],
|
||||
"parent": "deck",
|
||||
"type": "container",
|
||||
"class": "tube_container",
|
||||
"position": {
|
||||
"x": 0,
|
||||
"y": 127,
|
||||
"z": 0
|
||||
},
|
||||
"config": {
|
||||
"type": "Plate",
|
||||
"size_x": 151,
|
||||
"size_y": 75,
|
||||
"size_z": 75,
|
||||
"rotation": {
|
||||
"x": 0,
|
||||
"y": 0,
|
||||
"z": 0,
|
||||
"type": "Rotation"
|
||||
},
|
||||
"model": "tube_container",
|
||||
"ordering": [
|
||||
"A1"
|
||||
]
|
||||
},
|
||||
"data": {}
|
||||
},
|
||||
|
||||
|
||||
{
|
||||
"id": "tube_rack_A1",
|
||||
"name": "tube_rack_A1",
|
||||
|
||||
"children": [],
|
||||
"parent": "tube_rack",
|
||||
"type": "device",
|
||||
"class": "",
|
||||
"position": {
|
||||
"x": 6,
|
||||
"y": 38,
|
||||
"z": 10
|
||||
},
|
||||
"config": {
|
||||
"type": "Well",
|
||||
"size_x": 34,
|
||||
"size_y": 34,
|
||||
"size_z": 117,
|
||||
"rotation": {
|
||||
"x": 0,
|
||||
"y": 0,
|
||||
"z": 0,
|
||||
"type": "Rotation"
|
||||
},
|
||||
"category": "tube",
|
||||
"model": null,
|
||||
"max_volume": 2000,
|
||||
"material_z_thickness": null,
|
||||
"compute_volume_from_height": null,
|
||||
"compute_height_from_volume": null,
|
||||
"bottom_type": "unknown",
|
||||
"cross_section_type": "rectangle"
|
||||
},
|
||||
"data": {
|
||||
"liquids": [["water", 50.0]],
|
||||
"pending_liquids": [["water", 50.0]],
|
||||
"liquid_history": []
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
,
|
||||
|
||||
|
||||
{
|
||||
"id": "bottle_rack",
|
||||
"name": "bottle_rack",
|
||||
|
||||
"children": [
|
||||
"bottle_rack_A1"
|
||||
],
|
||||
"parent": "deck",
|
||||
"type": "container",
|
||||
"class": "bottle_container",
|
||||
"position": {
|
||||
"x": 0,
|
||||
"y": 0,
|
||||
"z": 0
|
||||
},
|
||||
"config": {
|
||||
"type": "Plate",
|
||||
"size_x": 130,
|
||||
"size_y": 117,
|
||||
"size_z": 8,
|
||||
"rotation": {
|
||||
"x": 0,
|
||||
"y": 0,
|
||||
"z": 0,
|
||||
"type": "Rotation"
|
||||
},
|
||||
"category": "tube_rack",
|
||||
"model": "bottle_container",
|
||||
"ordering": [
|
||||
"A1"
|
||||
]
|
||||
},
|
||||
"data": {}
|
||||
},
|
||||
|
||||
|
||||
{
|
||||
"id": "bottle_rack_A1",
|
||||
"name": "bottle_rack_A1",
|
||||
|
||||
"children": [],
|
||||
"parent": "bottle_rack",
|
||||
"type": "device",
|
||||
"class": "",
|
||||
"position": {
|
||||
"x": 25,
|
||||
"y": 18.5,
|
||||
"z": 8
|
||||
},
|
||||
"config": {
|
||||
"type": "Well",
|
||||
"size_x": 80,
|
||||
"size_y": 80,
|
||||
"size_z": 117,
|
||||
"rotation": {
|
||||
"x": 0,
|
||||
"y": 0,
|
||||
"z": 0,
|
||||
"type": "Rotation"
|
||||
},
|
||||
"category": "tube",
|
||||
"model": null,
|
||||
"max_volume": 2000,
|
||||
"material_z_thickness": null,
|
||||
"compute_volume_from_height": null,
|
||||
"compute_height_from_volume": null,
|
||||
"bottom_type": "unknown",
|
||||
"cross_section_type": "rectangle"
|
||||
},
|
||||
"data": {
|
||||
"liquids": [["water", 50.0]],
|
||||
"pending_liquids": [["water", 50.0]],
|
||||
"liquid_history": []
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
],
|
||||
"links": []
|
||||
}
|
||||
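The experiment graphs in this test set all share the same node-link layout: each node carries `id`, `name`, `parent`, `children`, `position`, `config`, and `data`, and the top-level object holds `nodes` plus an empty `links` list. A minimal consistency check over that structure is sketched below; the file path is only an example taken from this compare view, and the helper is not part of the repository.

```python
import json


def check_resource_graph(path: str) -> None:
    """Verify parent/children back-references in a node-link resource graph."""
    with open(path, "r", encoding="utf-8") as f:
        graph = json.load(f)
    nodes = {node["id"]: node for node in graph["nodes"]}
    for node in graph["nodes"]:
        parent = node.get("parent")
        if parent is not None:
            # The parent must exist and must list this node among its children.
            assert parent in nodes, f"{node['id']}: unknown parent {parent}"
            assert node["id"] in nodes[parent]["children"], (
                f"{node['id']} missing from children of {parent}"
            )
        for child in node.get("children", []):
            assert child in nodes, f"{node['id']}: unknown child {child}"


# Example usage with one of the fixtures added in this change:
# check_resource_graph("test/experiments/test_laiyu_v.json")
```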
383 test/experiments/test_laiyu_v.json Normal file
@@ -0,0 +1,383 @@
|
||||
{
|
||||
"nodes": [
|
||||
{
|
||||
"id": "liquid_handler",
|
||||
"name": "liquid_handler",
|
||||
"parent": null,
|
||||
"type": "device",
|
||||
"class": "liquid_handler",
|
||||
"position": {
|
||||
"x": 0,
|
||||
"y": 0,
|
||||
"z": 0
|
||||
},
|
||||
"data": {},
|
||||
"children": [
|
||||
"deck"
|
||||
],
|
||||
"config": {
|
||||
"deck": {
|
||||
"_resource_child_name": "deck",
|
||||
"_resource_type": "unilabos.devices.liquid_handling.laiyu.laiyu:TransformXYZDeck",
|
||||
"name": "deck"
|
||||
},
|
||||
"backend": {
|
||||
"type": "UniLiquidHandlerRvizBackend"
|
||||
},
|
||||
"simulator": true,
|
||||
"total_height": 300,
|
||||
"joint_config": "TransformXYZDeck",
|
||||
"simulate_rviz": true
|
||||
}
|
||||
},
|
||||
{
|
||||
"id": "deck",
|
||||
"name": "deck",
|
||||
|
||||
"children": [
|
||||
"tip_rack",
|
||||
"plate_well",
|
||||
"tube_rack",
|
||||
"bottle_rack"
|
||||
],
|
||||
"parent": "liquid_handler",
|
||||
"type": "deck",
|
||||
"class": "TransformXYZDeck",
|
||||
"position": {
|
||||
"x": 0,
|
||||
"y": 0,
|
||||
"z": 18
|
||||
},
|
||||
"config": {
|
||||
"type": "TransformXYZDeck",
|
||||
"size_x": 624.3,
|
||||
"size_y": 565.2,
|
||||
"size_z": 900,
|
||||
"rotation": {
|
||||
"x": 0,
|
||||
"y": 0,
|
||||
"z": 0,
|
||||
"type": "Rotation"
|
||||
}
|
||||
},
|
||||
"data": {}
|
||||
},
|
||||
{
|
||||
"id": "tip_rack",
|
||||
"name": "tip_rack",
|
||||
|
||||
"children": [
|
||||
"tip_rack_A1"
|
||||
],
|
||||
"parent": "deck",
|
||||
"type": "tip_rack",
|
||||
"class": "tiprack_box",
|
||||
"position": {
|
||||
"x": 150,
|
||||
"y": 7,
|
||||
"z": 103
|
||||
},
|
||||
"config": {
|
||||
"type": "TipRack",
|
||||
"size_x": 134,
|
||||
"size_y": 96,
|
||||
"size_z": 7.0,
|
||||
"rotation": {
|
||||
"x": 0,
|
||||
"y": 0,
|
||||
"z": 0,
|
||||
"type": "Rotation"
|
||||
},
|
||||
"category": "tip_rack",
|
||||
"model": "tiprack_box",
|
||||
"ordering": [
|
||||
"A1"
|
||||
]
|
||||
},
|
||||
"data": {}
|
||||
},
{
|
||||
"id": "tip_rack_A1",
|
||||
"name": "tip_rack_A1",
|
||||
|
||||
"children": [],
|
||||
"parent": "tip_rack",
|
||||
"type": "container",
|
||||
"class": "",
|
||||
"position": {
|
||||
"x": 11.12,
|
||||
"y": 75,
|
||||
"z": -91.54
|
||||
},
|
||||
"config": {
|
||||
"type": "TipSpot",
|
||||
"size_x": 9,
|
||||
"size_y": 9,
|
||||
"size_z": 95,
|
||||
"rotation": {
|
||||
"x": 0,
|
||||
"y": 0,
|
||||
"z": 0,
|
||||
"type": "Rotation"
|
||||
},
|
||||
"category": "tip_spot",
|
||||
"model": null,
|
||||
"prototype_tip": {
|
||||
"type": "Tip",
|
||||
"total_tip_length": 95,
|
||||
"has_filter": false,
|
||||
"maximal_volume": 1000.0,
|
||||
"fitting_depth": 3.29
|
||||
}
|
||||
},
|
||||
"data": {
|
||||
"tip": null,
|
||||
"tip_state": null,
|
||||
"pending_tip": null
|
||||
}
|
||||
},
|
||||
|
||||
|
||||
{
|
||||
"id": "plate_well",
|
||||
"name": "plate_well",
|
||||
|
||||
"children": [
|
||||
"plate_well_A1"
|
||||
],
|
||||
"parent": "deck",
|
||||
"type": "plate",
|
||||
"class": "plate_96",
|
||||
"position": {
|
||||
"x": 161,
|
||||
"y": 116,
|
||||
"z": 48.5
|
||||
},
|
||||
"config": {
|
||||
"type": "Plate",
|
||||
"size_x": 127.76,
|
||||
"size_y": 85.48,
|
||||
"size_z": 45.5,
|
||||
"rotation": {
|
||||
"x": 0,
|
||||
"y": 0,
|
||||
"z": 0,
|
||||
"type": "Rotation"
|
||||
},
|
||||
"category": "plate",
|
||||
"model": "plate_96",
|
||||
"ordering": [
|
||||
"A1"
|
||||
]
|
||||
},
|
||||
"data": {}
|
||||
},
{
|
||||
"id": "plate_well_A1",
|
||||
"name": "plate_well_A1",
|
||||
|
||||
"children": [],
|
||||
"parent": "plate_well",
|
||||
"type": "device",
|
||||
"class": "",
|
||||
"position": {
|
||||
"x": 10.1,
|
||||
"y": 70,
|
||||
"z": 6.1
|
||||
},
|
||||
"config": {
|
||||
"type": "Well",
|
||||
"size_x": 8.2,
|
||||
"size_y": 8.2,
|
||||
"size_z": 38,
|
||||
"rotation": {
|
||||
"x": 0,
|
||||
"y": 0,
|
||||
"z": 0,
|
||||
"type": "Rotation"
|
||||
},
|
||||
"category": "well",
|
||||
"model": null,
|
||||
"max_volume": 2000,
|
||||
"material_z_thickness": null,
|
||||
"compute_volume_from_height": null,
|
||||
"compute_height_from_volume": null,
|
||||
"bottom_type": "unknown",
|
||||
"cross_section_type": "rectangle"
|
||||
},
|
||||
"data": {
|
||||
"liquids": [["water", 50.0]],
|
||||
"pending_liquids": [["water", 50.0]],
|
||||
"liquid_history": []
|
||||
}
|
||||
},
|
||||
|
||||
|
||||
{
|
||||
"id": "tube_rack",
|
||||
"name": "tube_rack",
|
||||
|
||||
"children": [
|
||||
"tube_rack_A1"
|
||||
],
|
||||
"parent": "deck",
|
||||
"type": "container",
|
||||
"class": "tube_container",
|
||||
"position": {
|
||||
"x": 0,
|
||||
"y": 127,
|
||||
"z": 0
|
||||
},
|
||||
"config": {
|
||||
"type": "Plate",
|
||||
"size_x": 151,
|
||||
"size_y": 75,
|
||||
"size_z": 75,
|
||||
"rotation": {
|
||||
"x": 0,
|
||||
"y": 0,
|
||||
"z": 0,
|
||||
"type": "Rotation"
|
||||
},
|
||||
"model": "tube_container",
|
||||
"ordering": [
|
||||
"A1"
|
||||
]
|
||||
},
|
||||
"data": {}
|
||||
},
|
||||
|
||||
|
||||
{
|
||||
"id": "tube_rack_A1",
|
||||
"name": "tube_rack_A1",
|
||||
|
||||
"children": [],
|
||||
"parent": "tube_rack",
|
||||
"type": "device",
|
||||
"class": "",
|
||||
"position": {
|
||||
"x": 6,
|
||||
"y": 38,
|
||||
"z": 10
|
||||
},
|
||||
"config": {
|
||||
"type": "Well",
|
||||
"size_x": 34,
|
||||
"size_y": 34,
|
||||
"size_z": 117,
|
||||
"rotation": {
|
||||
"x": 0,
|
||||
"y": 0,
|
||||
"z": 0,
|
||||
"type": "Rotation"
|
||||
},
|
||||
"category": "tube",
|
||||
"model": null,
|
||||
"max_volume": 2000,
|
||||
"material_z_thickness": null,
|
||||
"compute_volume_from_height": null,
|
||||
"compute_height_from_volume": null,
|
||||
"bottom_type": "unknown",
|
||||
"cross_section_type": "rectangle"
|
||||
},
|
||||
"data": {
|
||||
"liquids": [["water", 50.0]],
|
||||
"pending_liquids": [["water", 50.0]],
|
||||
"liquid_history": []
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
,
|
||||
|
||||
|
||||
{
|
||||
"id": "bottle_rack",
|
||||
"name": "bottle_rack",
|
||||
|
||||
"children": [
|
||||
"bottle_rack_A1"
|
||||
],
|
||||
"parent": "deck",
|
||||
"type": "container",
|
||||
"class": "bottle_container",
|
||||
"position": {
|
||||
"x": 0,
|
||||
"y": 0,
|
||||
"z": 0
|
||||
},
|
||||
"config": {
|
||||
"type": "Plate",
|
||||
"size_x": 130,
|
||||
"size_y": 117,
|
||||
"size_z": 8,
|
||||
"rotation": {
|
||||
"x": 0,
|
||||
"y": 0,
|
||||
"z": 0,
|
||||
"type": "Rotation"
|
||||
},
|
||||
"category": "tube_rack",
|
||||
"model": "bottle_container",
|
||||
"ordering": [
|
||||
"A1"
|
||||
]
|
||||
},
|
||||
"data": {}
|
||||
},
|
||||
|
||||
|
||||
{
|
||||
"id": "bottle_rack_A1",
|
||||
"name": "bottle_rack_A1",
|
||||
|
||||
"children": [],
|
||||
"parent": "bottle_rack",
|
||||
"type": "device",
|
||||
"class": "",
|
||||
"position": {
|
||||
"x": 25,
|
||||
"y": 18.5,
|
||||
"z": 8
|
||||
},
|
||||
"config": {
|
||||
"type": "Well",
|
||||
"size_x": 80,
|
||||
"size_y": 80,
|
||||
"size_z": 117,
|
||||
"rotation": {
|
||||
"x": 0,
|
||||
"y": 0,
|
||||
"z": 0,
|
||||
"type": "Rotation"
|
||||
},
|
||||
"category": "tube",
|
||||
"model": null,
|
||||
"max_volume": 2000,
|
||||
"material_z_thickness": null,
|
||||
"compute_volume_from_height": null,
|
||||
"compute_height_from_volume": null,
|
||||
"bottom_type": "unknown",
|
||||
"cross_section_type": "rectangle"
|
||||
},
|
||||
"data": {
|
||||
"liquids": [["water", 50.0]],
|
||||
"pending_liquids": [["water", 50.0]],
|
||||
"liquid_history": []
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
],
|
||||
"links": []
|
||||
}
|
||||
@@ -3,7 +3,8 @@
|
||||
"""
|
||||
|
||||
import asyncio
|
||||
from typing import Dict, Any, Optional, List
|
||||
from typing import Dict, Any, List
|
||||
from unilabos.ros.nodes.base_device_node import BaseROS2DeviceNode
|
||||
|
||||
|
||||
class SmartPumpController:
|
||||
@@ -14,6 +15,8 @@ class SmartPumpController:
|
||||
适用于实验室自动化系统中的液体处理任务。
|
||||
"""
|
||||
|
||||
_ros_node: BaseROS2DeviceNode
|
||||
|
||||
def __init__(self, device_id: str = "smart_pump_01", port: str = "/dev/ttyUSB0"):
|
||||
"""
|
||||
初始化智能泵控制器
|
||||
@@ -30,6 +33,9 @@ class SmartPumpController:
|
||||
self.calibration_factor = 1.0
|
||||
self.pump_mode = "continuous" # continuous, volume, rate
|
||||
|
||||
def post_init(self, ros_node: BaseROS2DeviceNode):
|
||||
self._ros_node = ros_node
|
||||
|
||||
def connect_device(self, timeout: int = 10) -> bool:
|
||||
"""
|
||||
连接到泵设备
|
||||
@@ -90,7 +96,7 @@ class SmartPumpController:
|
||||
pump_time = (volume / flow_rate) * 60 # 转换为秒
|
||||
|
||||
self.current_flow_rate = flow_rate
|
||||
await asyncio.sleep(min(pump_time, 3.0)) # 模拟泵送过程
|
||||
await self._ros_node.sleep(min(pump_time, 3.0)) # 模拟泵送过程
|
||||
|
||||
self.total_volume_pumped += volume
|
||||
self.current_flow_rate = 0.0
|
||||
@@ -170,6 +176,8 @@ class AdvancedTemperatureController:
|
||||
适用于需要精确温度控制的化学反应和材料处理过程。
|
||||
"""
|
||||
|
||||
_ros_node: BaseROS2DeviceNode
|
||||
|
||||
def __init__(self, controller_id: str = "temp_controller_01"):
|
||||
"""
|
||||
初始化温度控制器
|
||||
@@ -185,6 +193,9 @@ class AdvancedTemperatureController:
|
||||
self.pid_enabled = True
|
||||
self.temperature_history: List[Dict] = []
|
||||
|
||||
def post_init(self, ros_node: BaseROS2DeviceNode):
|
||||
self._ros_node = ros_node
|
||||
|
||||
def set_target_temperature(self, temperature: float, rate: float = 10.0) -> bool:
|
||||
"""
|
||||
设置目标温度
|
||||
@@ -238,7 +249,7 @@ class AdvancedTemperatureController:
|
||||
}
|
||||
)
|
||||
|
||||
await asyncio.sleep(step_time)
|
||||
await self._ros_node.sleep(step_time)
|
||||
|
||||
# 保持历史记录不超过100条
|
||||
if len(self.temperature_history) > 100:
|
||||
@@ -330,6 +341,8 @@ class MultiChannelAnalyzer:
|
||||
常用于光谱分析、电化学测量等应用场景。
|
||||
"""
|
||||
|
||||
_ros_node: BaseROS2DeviceNode
|
||||
|
||||
def __init__(self, analyzer_id: str = "analyzer_01", channels: int = 8):
|
||||
"""
|
||||
初始化多通道分析仪
|
||||
@@ -344,6 +357,9 @@ class MultiChannelAnalyzer:
|
||||
self.is_measuring = False
|
||||
self.sample_rate = 1000 # Hz
|
||||
|
||||
def post_init(self, ros_node: BaseROS2DeviceNode):
|
||||
self._ros_node = ros_node
|
||||
|
||||
def configure_channel(self, channel: int, enabled: bool = True, unit: str = "V") -> bool:
|
||||
"""
|
||||
配置通道
|
||||
@@ -376,7 +392,7 @@ class MultiChannelAnalyzer:
|
||||
|
||||
# 模拟数据采集
|
||||
measurements = []
|
||||
for second in range(duration):
|
||||
for _ in range(duration):
|
||||
timestamp = asyncio.get_event_loop().time()
|
||||
frame_data = {}
|
||||
|
||||
@@ -391,7 +407,7 @@ class MultiChannelAnalyzer:
|
||||
|
||||
measurements.append({"timestamp": timestamp, "data": frame_data})
|
||||
|
||||
await asyncio.sleep(1.0) # 每秒采集一次
|
||||
await self._ros_node.sleep(1.0) # 每秒采集一次
|
||||
|
||||
self.is_measuring = False
|
||||
|
||||
@@ -465,6 +481,8 @@ class AutomatedDispenser:
|
||||
集成称重功能,确保分配精度和重现性。
|
||||
"""
|
||||
|
||||
_ros_node: BaseROS2DeviceNode
|
||||
|
||||
def __init__(self, dispenser_id: str = "dispenser_01"):
|
||||
"""
|
||||
初始化自动分配器
|
||||
@@ -479,6 +497,9 @@ class AutomatedDispenser:
|
||||
self.container_capacity = 1000.0 # mL
|
||||
self.precision_mode = True
|
||||
|
||||
def post_init(self, ros_node: BaseROS2DeviceNode):
|
||||
self._ros_node = ros_node
|
||||
|
||||
def move_to_position(self, x: float, y: float, z: float) -> bool:
|
||||
"""
|
||||
移动到指定位置
|
||||
@@ -517,7 +538,7 @@ class AutomatedDispenser:
|
||||
if viscosity == "high":
|
||||
dispense_time *= 2 # 高粘度液体需要更长时间
|
||||
|
||||
await asyncio.sleep(min(dispense_time, 5.0)) # 最多等待5秒
|
||||
await self._ros_node.sleep(min(dispense_time, 5.0)) # 最多等待5秒
|
||||
|
||||
self.dispensed_total += volume
|
||||
|
||||
|
||||
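The hunks above all apply the same refactor: each controller gains a `_ros_node: BaseROS2DeviceNode` attribute set through `post_init`, and simulated waits switch from `asyncio.sleep` to `await self._ros_node.sleep(...)` so delays run through the owning ROS node. The sketch below shows the pattern in isolation; only the awaited `sleep(seconds)` call is taken from the diff, and the stand-in node class is an assumption used to keep the example runnable without the project.

```python
import asyncio


class _FakeNode:
    """Stand-in for BaseROS2DeviceNode; only the awaited sleep() seen above is assumed."""

    async def sleep(self, seconds: float) -> None:
        await asyncio.sleep(seconds)


class DemoDevice:
    def post_init(self, ros_node) -> None:
        # Same hook as in the controllers above: the node is injected after construction.
        self._ros_node = ros_node

    async def pump(self, seconds: float) -> None:
        # Waits go through the node instead of calling asyncio.sleep directly.
        await self._ros_node.sleep(seconds)


async def _demo() -> None:
    device = DemoDevice()
    device.post_init(_FakeNode())
    await device.pump(0.1)


if __name__ == "__main__":
    asyncio.run(_demo())
```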
181 test/resources/bioyond_materials_liquidhandling_1.json Normal file
@@ -0,0 +1,181 @@
|
||||
[
|
||||
{
|
||||
"id": "3a1c62c4-c3d2-b803-b72d-7f1153ffef3b",
|
||||
"typeName": "试剂瓶",
|
||||
"code": "0004-00050",
|
||||
"barCode": "",
|
||||
"name": "NMP",
|
||||
"quantity": 287.16699029126215,
|
||||
"lockQuantity": 285.16699029126215,
|
||||
"unit": "毫升",
|
||||
"status": 1,
|
||||
"isUse": false,
|
||||
"locations": [
|
||||
{
|
||||
"id": "3a14198c-c2d0-efce-0939-69ca5a7dfd39",
|
||||
"whid": "3a14198c-c2cc-0290-e086-44a428fba248",
|
||||
"whName": "试剂堆栈",
|
||||
"code": "0001-0008",
|
||||
"x": 2,
|
||||
"y": 4,
|
||||
"z": 1,
|
||||
"quantity": 0
|
||||
}
|
||||
],
|
||||
"detail": []
|
||||
},
|
||||
{
|
||||
"id": "3a1cdefe-0e03-1bc1-1296-dae1905c4108",
|
||||
"typeName": "试剂瓶",
|
||||
"code": "0004-00052",
|
||||
"barCode": "",
|
||||
"name": "NMP",
|
||||
"quantity": 386.8990291262136,
|
||||
"lockQuantity": 45.89902912621359,
|
||||
"unit": "毫升",
|
||||
"status": 1,
|
||||
"isUse": false,
|
||||
"locations": [
|
||||
{
|
||||
"id": "3a14198c-c2d0-f3e7-871a-e470d144296f",
|
||||
"whid": "3a14198c-c2cc-0290-e086-44a428fba248",
|
||||
"whName": "试剂堆栈",
|
||||
"code": "0001-0005",
|
||||
"x": 2,
|
||||
"y": 1,
|
||||
"z": 1,
|
||||
"quantity": 0
|
||||
}
|
||||
],
|
||||
"detail": []
|
||||
},
|
||||
{
|
||||
"id": "3a1cdefe-0e03-68a4-bcb3-02fc6ba72d1b",
|
||||
"typeName": "试剂瓶",
|
||||
"code": "0004-00053",
|
||||
"barCode": "",
|
||||
"name": "NMP",
|
||||
"quantity": 400.0,
|
||||
"lockQuantity": 0.0,
|
||||
"unit": "",
|
||||
"status": 1,
|
||||
"isUse": false,
|
||||
"locations": [
|
||||
{
|
||||
"id": "3a14198c-c2d0-2070-efc8-44e245f10c6f",
|
||||
"whid": "3a14198c-c2cc-0290-e086-44a428fba248",
|
||||
"whName": "试剂堆栈",
|
||||
"code": "0001-0006",
|
||||
"x": 2,
|
||||
"y": 2,
|
||||
"z": 1,
|
||||
"quantity": 0
|
||||
}
|
||||
],
|
||||
"detail": []
|
||||
},
|
||||
{
|
||||
"id": "3a1cdefe-d5e0-d850-5439-4499f20f07fe",
|
||||
"typeName": "分装板",
|
||||
"code": "0007-00185",
|
||||
"barCode": "",
|
||||
"name": "1010",
|
||||
"quantity": 1.0,
|
||||
"lockQuantity": 2.0,
|
||||
"unit": "块",
|
||||
"status": 1,
|
||||
"isUse": false,
|
||||
"locations": [
|
||||
{
|
||||
"id": "3a14198e-6929-46fe-841e-03dd753f1e4a",
|
||||
"whid": "3a14198e-6928-121f-7ca6-88ad3ae7e6a0",
|
||||
"whName": "粉末堆栈",
|
||||
"code": "0002-0009",
|
||||
"x": 3,
|
||||
"y": 1,
|
||||
"z": 1,
|
||||
"quantity": 0
|
||||
}
|
||||
],
|
||||
"detail": [
|
||||
{
|
||||
"id": "3a1cdefe-d5e0-28a4-f5d0-f7e2436c575f",
|
||||
"detailMaterialId": "3a1cdefe-d5e0-94ae-f770-27847e73ad38",
|
||||
"code": null,
|
||||
"name": "90%分装小瓶",
|
||||
"quantity": "1",
|
||||
"lockQuantity": "1",
|
||||
"unit": "个",
|
||||
"x": 2,
|
||||
"y": 3,
|
||||
"z": 1,
|
||||
"associateId": null
|
||||
},
|
||||
{
|
||||
"id": "3a1cdefe-d5e0-3ed6-3607-133df89baf5b",
|
||||
"detailMaterialId": "3a1cdefe-d5e0-f2fa-66bf-94c565d852fb",
|
||||
"code": null,
|
||||
"name": "10%分装小瓶",
|
||||
"quantity": "1",
|
||||
"lockQuantity": "1",
|
||||
"unit": "个",
|
||||
"x": 1,
|
||||
"y": 3,
|
||||
"z": 1,
|
||||
"associateId": null
|
||||
},
|
||||
{
|
||||
"id": "3a1cdefe-d5e0-72b6-e015-be7b93cf09eb",
|
||||
"detailMaterialId": "3a1cdefe-d5e0-81cf-7dad-2e51cab9ffd6",
|
||||
"code": null,
|
||||
"name": "90%分装小瓶",
|
||||
"quantity": "1",
|
||||
"lockQuantity": "1",
|
||||
"unit": "个",
|
||||
"x": 2,
|
||||
"y": 1,
|
||||
"z": 1,
|
||||
"associateId": null
|
||||
},
|
||||
{
|
||||
"id": "3a1cdefe-d5e0-81d3-ad30-48134afc9ce7",
|
||||
"detailMaterialId": "3a1cdefe-d5e0-3fa1-cc72-fda6276ae38d",
|
||||
"code": null,
|
||||
"name": "10%分装小瓶",
|
||||
"quantity": "1",
|
||||
"lockQuantity": "1",
|
||||
"unit": "个",
|
||||
"x": 1,
|
||||
"y": 1,
|
||||
"z": 1,
|
||||
"associateId": null
|
||||
},
|
||||
{
|
||||
"id": "3a1cdefe-d5e0-dbdf-d966-9a8926fe1e06",
|
||||
"detailMaterialId": "3a1cdefe-d5e0-c632-c7da-02d385b18628",
|
||||
"code": null,
|
||||
"name": "10%分装小瓶",
|
||||
"quantity": "1",
|
||||
"lockQuantity": "1",
|
||||
"unit": "个",
|
||||
"x": 1,
|
||||
"y": 2,
|
||||
"z": 1,
|
||||
"associateId": null
|
||||
},
|
||||
{
|
||||
"id": "3a1cdefe-d5e0-f099-b260-e3089a2d08c3",
|
||||
"detailMaterialId": "3a1cdefe-d5e0-561f-73b6-f8501f814dbb",
|
||||
"code": null,
|
||||
"name": "90%分装小瓶",
|
||||
"quantity": "1",
|
||||
"lockQuantity": "1",
|
||||
"unit": "个",
|
||||
"x": 2,
|
||||
"y": 2,
|
||||
"z": 1,
|
||||
"associateId": null
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
216 test/resources/bioyond_materials_liquidhandling_2.json Normal file
@@ -0,0 +1,216 @@
|
||||
[
|
||||
{
|
||||
"id": "3a1cde21-a4f4-4f95-6221-eaafc2ae6a8d",
|
||||
"typeName": "样品瓶",
|
||||
"code": "0002-00407",
|
||||
"barCode": "",
|
||||
"name": "ODA",
|
||||
"quantity": 25.0,
|
||||
"lockQuantity": 2.0,
|
||||
"unit": "克",
|
||||
"status": 1,
|
||||
"isUse": false,
|
||||
"locations": [],
|
||||
"detail": []
|
||||
},
|
||||
{
|
||||
"id": "3a1cde21-a4f4-7887-9258-e8f8ab7c8a7a",
|
||||
"typeName": "样品板",
|
||||
"code": "0008-00160",
|
||||
"barCode": "",
|
||||
"name": "1010sample",
|
||||
"quantity": 1.0,
|
||||
"lockQuantity": 27.69187,
|
||||
"unit": "块",
|
||||
"status": 1,
|
||||
"isUse": false,
|
||||
"locations": [
|
||||
{
|
||||
"id": "3a14198e-6929-4379-affa-9a2935c17f99",
|
||||
"whid": "3a14198e-6928-121f-7ca6-88ad3ae7e6a0",
|
||||
"whName": "粉末堆栈",
|
||||
"code": "0002-0002",
|
||||
"x": 1,
|
||||
"y": 2,
|
||||
"z": 1,
|
||||
"quantity": 0
|
||||
}
|
||||
],
|
||||
"detail": [
|
||||
{
|
||||
"id": "3a1cde21-a4f4-0339-f2b6-8e680ad7e8c7",
|
||||
"detailMaterialId": "3a1cde21-a4f4-ab37-f7a2-ecc3bc083e7c",
|
||||
"code": null,
|
||||
"name": "MPDA",
|
||||
"quantity": "10.505",
|
||||
"lockQuantity": "-0.0174",
|
||||
"unit": "克",
|
||||
"x": 2,
|
||||
"y": 1,
|
||||
"z": 1,
|
||||
"associateId": null
|
||||
},
|
||||
{
|
||||
"id": "3a1cde21-a4f4-a21a-23cf-bb7857b41947",
|
||||
"detailMaterialId": "3a1cde21-a4f4-99c7-55e7-c80c7320e300",
|
||||
"code": null,
|
||||
"name": "ODA",
|
||||
"quantity": "1.795",
|
||||
"lockQuantity": "2.0093",
|
||||
"unit": "克",
|
||||
"x": 1,
|
||||
"y": 1,
|
||||
"z": 1,
|
||||
"associateId": null
|
||||
},
|
||||
{
|
||||
"id": "3a1cde21-a4f4-af1b-ba0b-2874836800e9",
|
||||
"detailMaterialId": "3a1cde21-a4f4-4f95-6221-eaafc2ae6a8d",
|
||||
"code": null,
|
||||
"name": "ODA",
|
||||
"quantity": "25",
|
||||
"lockQuantity": "2",
|
||||
"unit": "克",
|
||||
"x": 1,
|
||||
"y": 2,
|
||||
"z": 1,
|
||||
"associateId": null
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"id": "3a1cde21-a4f4-99c7-55e7-c80c7320e300",
|
||||
"typeName": "样品瓶",
|
||||
"code": "0002-00406",
|
||||
"barCode": "",
|
||||
"name": "ODA",
|
||||
"quantity": 1.795,
|
||||
"lockQuantity": 2.00927,
|
||||
"unit": "克",
|
||||
"status": 1,
|
||||
"isUse": false,
|
||||
"locations": [],
|
||||
"detail": []
|
||||
},
|
||||
{
|
||||
"id": "3a1cde21-a4f4-ab37-f7a2-ecc3bc083e7c",
|
||||
"typeName": "样品瓶",
|
||||
"code": "0002-00408",
|
||||
"barCode": "",
|
||||
"name": "MPDA",
|
||||
"quantity": 10.505,
|
||||
"lockQuantity": -0.0174,
|
||||
"unit": "克",
|
||||
"status": 1,
|
||||
"isUse": false,
|
||||
"locations": [],
|
||||
"detail": []
|
||||
},
|
||||
{
|
||||
"id": "3a1cdeff-c92a-08f6-c822-732ab734154c",
|
||||
"typeName": "样品板",
|
||||
"code": "0008-00161",
|
||||
"barCode": "",
|
||||
"name": "1010sample2",
|
||||
"quantity": 1.0,
|
||||
"lockQuantity": 3.0,
|
||||
"unit": "块",
|
||||
"status": 1,
|
||||
"isUse": false,
|
||||
"locations": [
|
||||
{
|
||||
"id": "3a14198e-6929-31f0-8a22-0f98f72260df",
|
||||
"whid": "3a14198e-6928-121f-7ca6-88ad3ae7e6a0",
|
||||
"whName": "粉末堆栈",
|
||||
"code": "0002-0001",
|
||||
"x": 1,
|
||||
"y": 1,
|
||||
"z": 1,
|
||||
"quantity": 0
|
||||
}
|
||||
],
|
||||
"detail": [
|
||||
{
|
||||
"id": "3a1cdeff-c92b-3ace-9623-0bcdef6fa07d",
|
||||
"detailMaterialId": "3a1cdeff-c92b-d084-2a96-5d62746d9321",
|
||||
"code": null,
|
||||
"name": "BTDA1",
|
||||
"quantity": "0.362",
|
||||
"lockQuantity": "14.494",
|
||||
"unit": "克",
|
||||
"x": 1,
|
||||
"y": 1,
|
||||
"z": 1,
|
||||
"associateId": null
|
||||
},
|
||||
{
|
||||
"id": "3a1cdeff-c92b-856e-f481-792b91b6dbde",
|
||||
"detailMaterialId": "3a1cdeff-c92b-30f2-f907-8f5e2fe0586b",
|
||||
"code": null,
|
||||
"name": "BTDA3",
|
||||
"quantity": "1.935",
|
||||
"lockQuantity": "13.067",
|
||||
"unit": "克",
|
||||
"x": 1,
|
||||
"y": 2,
|
||||
"z": 1,
|
||||
"associateId": null
|
||||
},
|
||||
{
|
||||
"id": "3a1cdeff-c92b-d144-c5e5-ab9d94e21187",
|
||||
"detailMaterialId": "3a1cdeff-c92b-519f-a70f-0bb71af537a7",
|
||||
"code": null,
|
||||
"name": "BTDA2",
|
||||
"quantity": "1.903",
|
||||
"lockQuantity": "13.035",
|
||||
"unit": "克",
|
||||
"x": 2,
|
||||
"y": 1,
|
||||
"z": 1,
|
||||
"associateId": null
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"id": "3a1cdeff-c92b-30f2-f907-8f5e2fe0586b",
|
||||
"typeName": "样品瓶",
|
||||
"code": "0002-00411",
|
||||
"barCode": "",
|
||||
"name": "BTDA3",
|
||||
"quantity": 1.935,
|
||||
"lockQuantity": 13.067,
|
||||
"unit": "克",
|
||||
"status": 1,
|
||||
"isUse": false,
|
||||
"locations": [],
|
||||
"detail": []
|
||||
},
|
||||
{
|
||||
"id": "3a1cdeff-c92b-519f-a70f-0bb71af537a7",
|
||||
"typeName": "样品瓶",
|
||||
"code": "0002-00410",
|
||||
"barCode": "",
|
||||
"name": "BTDA2",
|
||||
"quantity": 1.903,
|
||||
"lockQuantity": 13.035,
|
||||
"unit": "克",
|
||||
"status": 1,
|
||||
"isUse": false,
|
||||
"locations": [],
|
||||
"detail": []
|
||||
},
|
||||
{
|
||||
"id": "3a1cdeff-c92b-d084-2a96-5d62746d9321",
|
||||
"typeName": "样品瓶",
|
||||
"code": "0002-00409",
|
||||
"barCode": "",
|
||||
"name": "BTDA1",
|
||||
"quantity": 0.362,
|
||||
"lockQuantity": 14.494,
|
||||
"unit": "克",
|
||||
"status": 1,
|
||||
"isUse": false,
|
||||
"locations": [],
|
||||
"detail": []
|
||||
}
|
||||
]
|
||||
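In the material dumps above, the vials listed inside a plate's `detail` array point at standalone entries through `detailMaterialId`, so the same vial can appear both nested and at the top level. A short sketch that resolves those links follows; the field names come from the JSON above, while the file path is only an example of how the fixture might be read.

```python
import json

with open("test/resources/bioyond_materials_liquidhandling_2.json", "r", encoding="utf-8") as f:
    materials = json.load(f)

by_id = {m["id"]: m for m in materials}
for plate in materials:
    for slot in plate.get("detail", []):
        target = by_id.get(slot["detailMaterialId"])
        # Not every detail entry has a matching top-level record in these fixtures.
        if target is not None:
            print(f"{plate['name']} ({slot['x']},{slot['y']}) -> "
                  f"{target['name']} {target['quantity']}{target['unit']}")
```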
@@ -1,5 +1,4 @@
|
||||
{
|
||||
"data": [
|
||||
[
|
||||
{
|
||||
"id": "3a1c67a9-aed7-b94d-9e24-bfdf10c8baa9",
|
||||
"typeName": "烧杯",
|
||||
@@ -191,8 +190,4 @@
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"code": 1,
|
||||
"message": "",
|
||||
"timestamp": 1758560573511
|
||||
}
|
||||
]
|
||||
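The hunk above strips the HTTP-response wrapper (`data`, `code`, `message`, `timestamp`) from the reaction material fixture, which is why the test below now calls `json.load(f)` instead of `json.load(f)["data"]`. A loader that tolerates both shapes is sketched here as an illustration only, in case older wrapped dumps are still around.

```python
import json
from typing import Any


def load_materials(path: str) -> list[Any]:
    """Load a BioYond material dump, tolerating the old API-response wrapper."""
    with open(path, "r", encoding="utf-8") as f:
        payload = json.load(f)
    if isinstance(payload, dict) and "data" in payload:
        # Old shape: {"data": [...], "code": 1, "message": "", "timestamp": ...}
        return payload["data"]
    return payload
```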
@@ -2,6 +2,7 @@ import pytest
|
||||
import json
|
||||
import os
|
||||
|
||||
from pylabrobot.resources import Resource as ResourcePLR
|
||||
from unilabos.resources.graphio import resource_bioyond_to_plr
|
||||
from unilabos.registry.registry import lab_registry
|
||||
|
||||
@@ -11,25 +12,55 @@ lab_registry.setup()
|
||||
|
||||
|
||||
type_mapping = {
|
||||
"烧杯": "BIOYOND_PolymerStation_1FlaskCarrier",
|
||||
"试剂瓶": "BIOYOND_PolymerStation_1BottleCarrier",
|
||||
"样品板": "BIOYOND_PolymerStation_6VialCarrier",
|
||||
"烧杯": ("BIOYOND_PolymerStation_1FlaskCarrier", "3a14196b-24f2-ca49-9081-0cab8021bf1a"),
|
||||
"试剂瓶": ("BIOYOND_PolymerStation_1BottleCarrier", ""),
|
||||
"样品板": ("BIOYOND_PolymerStation_6StockCarrier", "3a14196e-b7a0-a5da-1931-35f3000281e9"),
|
||||
"分装板": ("BIOYOND_PolymerStation_6VialCarrier", "3a14196e-5dfe-6e21-0c79-fe2036d052c4"),
|
||||
"样品瓶": ("BIOYOND_PolymerStation_Solid_Stock", "3a14196a-cf7d-8aea-48d8-b9662c7dba94"),
|
||||
"90%分装小瓶": ("BIOYOND_PolymerStation_Solid_Vial", "3a14196c-cdcf-088d-dc7d-5cf38f0ad9ea"),
|
||||
"10%分装小瓶": ("BIOYOND_PolymerStation_Liquid_Vial", "3a14196c-76be-2279-4e22-7310d69aed68"),
|
||||
}
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def bioyond_materials() -> list[dict]:
|
||||
def bioyond_materials_reaction() -> list[dict]:
|
||||
print("加载 BioYond 物料数据...")
|
||||
print(os.getcwd())
|
||||
with open("bioyond_materials.json", "r", encoding="utf-8") as f:
|
||||
data = json.load(f)["data"]
|
||||
with open("bioyond_materials_reaction.json", "r", encoding="utf-8") as f:
|
||||
data = json.load(f)
|
||||
print(f"加载了 {len(data)} 条物料数据")
|
||||
return data
|
||||
|
||||
|
||||
def test_bioyond_to_plr(bioyond_materials) -> list[dict]:
|
||||
@pytest.fixture
|
||||
def bioyond_materials_liquidhandling_1() -> list[dict]:
|
||||
print("加载 BioYond 物料数据...")
|
||||
print(os.getcwd())
|
||||
with open("bioyond_materials_liquidhandling_1.json", "r", encoding="utf-8") as f:
|
||||
data = json.load(f)
|
||||
print(f"加载了 {len(data)} 条物料数据")
|
||||
return data
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def bioyond_materials_liquidhandling_2() -> list[dict]:
|
||||
print("加载 BioYond 物料数据...")
|
||||
print(os.getcwd())
|
||||
with open("bioyond_materials_liquidhandling_2.json", "r", encoding="utf-8") as f:
|
||||
data = json.load(f)
|
||||
print(f"加载了 {len(data)} 条物料数据")
|
||||
return data
|
||||
|
||||
|
||||
@pytest.mark.parametrize("materials_fixture", [
|
||||
"bioyond_materials_reaction",
|
||||
"bioyond_materials_liquidhandling_1",
|
||||
])
|
||||
def test_bioyond_to_plr(materials_fixture, request) -> list[dict]:
|
||||
materials = request.getfixturevalue(materials_fixture)
|
||||
deck = BIOYOND_PolymerReactionStation_Deck("test_deck")
|
||||
print("将 BioYond 物料数据转换为 PLR 格式...")
|
||||
output = resource_bioyond_to_plr(bioyond_materials, type_mapping=type_mapping, deck=deck)
|
||||
output = resource_bioyond_to_plr(materials, type_mapping=type_mapping, deck=deck)
|
||||
print(deck.summary())
|
||||
print([resource.serialize() for resource in output])
|
||||
print([resource.serialize_all_state() for resource in output])
|
||||
json.dump(deck.serialize(), open("test.json", "w", encoding="utf-8"), indent=4)
|
||||
|
||||
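The rewritten test above drives a single test body with several fixtures selected by name via `request.getfixturevalue`, while `type_mapping` values become `(carrier class name, template uuid)` pairs instead of bare strings. The fixture-by-name pattern on its own looks like this; the example uses plain pytest and hypothetical fixture names, nothing project-specific.

```python
import pytest


@pytest.fixture
def small_payload() -> list[int]:
    return [1, 2, 3]


@pytest.fixture
def large_payload() -> list[int]:
    return list(range(100))


@pytest.mark.parametrize("payload_fixture", ["small_payload", "large_payload"])
def test_payload(payload_fixture: str, request: pytest.FixtureRequest) -> None:
    # Resolve the fixture chosen by the parametrize id at runtime.
    payload = request.getfixturevalue(payload_fixture)
    assert len(payload) > 0
```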
115 test/resources/test_itemized_carrier.py Normal file
@@ -0,0 +1,115 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
测试修改后的 get_child_identifier 函数
|
||||
"""
|
||||
|
||||
from unilabos.resources.itemized_carrier import ItemizedCarrier, Bottle
|
||||
from pylabrobot.resources.coordinate import Coordinate
|
||||
|
||||
def test_get_child_identifier_with_indices():
|
||||
"""测试返回x,y,z索引的 get_child_identifier 函数"""
|
||||
|
||||
# 创建一些测试瓶子
|
||||
bottle1 = Bottle("bottle1", diameter=25.0, height=50.0, max_volume=15.0)
|
||||
bottle1.location = Coordinate(10, 20, 5)
|
||||
|
||||
bottle2 = Bottle("bottle2", diameter=25.0, height=50.0, max_volume=15.0)
|
||||
bottle2.location = Coordinate(50, 20, 5)
|
||||
|
||||
bottle3 = Bottle("bottle3", diameter=25.0, height=50.0, max_volume=15.0)
|
||||
bottle3.location = Coordinate(90, 20, 5)
|
||||
|
||||
# 创建载架,指定维度
|
||||
sites = {
|
||||
"A1": bottle1,
|
||||
"A2": bottle2,
|
||||
"A3": bottle3,
|
||||
"B1": None, # 空位
|
||||
"B2": None,
|
||||
"B3": None
|
||||
}
|
||||
|
||||
carrier = ItemizedCarrier(
|
||||
name="test_carrier",
|
||||
size_x=150,
|
||||
size_y=100,
|
||||
size_z=30,
|
||||
num_items_x=3, # 3列
|
||||
num_items_y=2, # 2行
|
||||
num_items_z=1, # 1层
|
||||
sites=sites
|
||||
)
|
||||
|
||||
print("测试载架维度:")
|
||||
print(f"num_items_x: {carrier.num_items_x}")
|
||||
print(f"num_items_y: {carrier.num_items_y}")
|
||||
print(f"num_items_z: {carrier.num_items_z}")
|
||||
print()
|
||||
|
||||
# 测试获取bottle1的标识符信息 (A1 = idx:0, x:0, y:0, z:0)
|
||||
result1 = carrier.get_child_identifier(bottle1)
|
||||
print("测试bottle1 (A1):")
|
||||
print(f" identifier: {result1['identifier']}")
|
||||
print(f" idx: {result1['idx']}")
|
||||
print(f" x index: {result1['x']}")
|
||||
print(f" y index: {result1['y']}")
|
||||
print(f" z index: {result1['z']}")
|
||||
|
||||
# Assert 验证 bottle1 (A1) 的结果
|
||||
assert result1['identifier'] == 'A1', f"Expected identifier 'A1', got '{result1['identifier']}'"
|
||||
assert result1['idx'] == 0, f"Expected idx 0, got {result1['idx']}"
|
||||
assert result1['x'] == 0, f"Expected x index 0, got {result1['x']}"
|
||||
assert result1['y'] == 0, f"Expected y index 0, got {result1['y']}"
|
||||
assert result1['z'] == 0, f"Expected z index 0, got {result1['z']}"
|
||||
print(" ✓ bottle1 (A1) 测试通过")
|
||||
print()
|
||||
|
||||
# 测试获取bottle2的标识符信息 (A2 = idx:1, x:1, y:0, z:0)
|
||||
result2 = carrier.get_child_identifier(bottle2)
|
||||
print("测试bottle2 (A2):")
|
||||
print(f" identifier: {result2['identifier']}")
|
||||
print(f" idx: {result2['idx']}")
|
||||
print(f" x index: {result2['x']}")
|
||||
print(f" y index: {result2['y']}")
|
||||
print(f" z index: {result2['z']}")
|
||||
|
||||
# Assert 验证 bottle2 (A2) 的结果
|
||||
assert result2['identifier'] == 'A2', f"Expected identifier 'A2', got '{result2['identifier']}'"
|
||||
assert result2['idx'] == 1, f"Expected idx 1, got {result2['idx']}"
|
||||
assert result2['x'] == 1, f"Expected x index 1, got {result2['x']}"
|
||||
assert result2['y'] == 0, f"Expected y index 0, got {result2['y']}"
|
||||
assert result2['z'] == 0, f"Expected z index 0, got {result2['z']}"
|
||||
print(" ✓ bottle2 (A2) 测试通过")
|
||||
print()
|
||||
|
||||
# 测试获取bottle3的标识符信息 (A3 = idx:2, x:2, y:0, z:0)
|
||||
result3 = carrier.get_child_identifier(bottle3)
|
||||
print("测试bottle3 (A3):")
|
||||
print(f" identifier: {result3['identifier']}")
|
||||
print(f" idx: {result3['idx']}")
|
||||
print(f" x index: {result3['x']}")
|
||||
print(f" y index: {result3['y']}")
|
||||
print(f" z index: {result3['z']}")
|
||||
|
||||
# Assert 验证 bottle3 (A3) 的结果
|
||||
assert result3['identifier'] == 'A3', f"Expected identifier 'A3', got '{result3['identifier']}'"
|
||||
assert result3['idx'] == 2, f"Expected idx 2, got {result3['idx']}"
|
||||
assert result3['x'] == 2, f"Expected x index 2, got {result3['x']}"
|
||||
assert result3['y'] == 0, f"Expected y index 0, got {result3['y']}"
|
||||
assert result3['z'] == 0, f"Expected z index 0, got {result3['z']}"
|
||||
print(" ✓ bottle3 (A3) 测试通过")
|
||||
print()
|
||||
|
||||
# 测试错误情况:查找不存在的资源
|
||||
bottle_not_exists = Bottle("bottle_not_exists", diameter=25.0, height=50.0, max_volume=15.0)
|
||||
try:
|
||||
carrier.get_child_identifier(bottle_not_exists)
|
||||
assert False, "应该抛出 ValueError 异常"
|
||||
except ValueError as e:
|
||||
print("✓ 正确抛出了 ValueError 异常:", str(e))
|
||||
assert "is not assigned to this carrier" in str(e), "异常消息应该包含预期的文本"
|
||||
|
||||
print("\n🎉 所有测试都通过了!")
|
||||
|
||||
if __name__ == "__main__":
|
||||
test_get_child_identifier_with_indices()
|
||||
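From the assertions above, the carrier indexing convention can be inferred: sites are ordered row-major over a `num_items_x` by `num_items_y` grid, with `idx = y * num_items_x + x` and identifiers such as "A1" built from the row letter and the 1-based column. The tiny helpers below reproduce that convention; they are derived from the asserts, not taken from `ItemizedCarrier` itself.

```python
def site_identifier(x: int, y: int) -> str:
    """Row letter plus 1-based column, matching the A1/A2/A3 cases asserted above."""
    return f"{chr(ord('A') + y)}{x + 1}"


def site_idx(x: int, y: int, num_items_x: int) -> int:
    """Row-major flat index, matching idx 0/1/2 for A1/A2/A3 above."""
    return y * num_items_x + x


assert site_identifier(0, 0) == "A1" and site_idx(0, 0, 3) == 0
assert site_identifier(1, 0) == "A2" and site_idx(1, 0, 3) == 1
assert site_identifier(2, 0) == "A3" and site_idx(2, 0, 3) == 2
```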
68 test/resources/test_resourcetreeset.py Normal file
@@ -0,0 +1,68 @@
|
||||
import pytest
|
||||
import json
|
||||
import os
|
||||
|
||||
from pylabrobot.resources import Resource as ResourcePLR
|
||||
from unilabos.resources.graphio import resource_bioyond_to_plr
|
||||
from unilabos.ros.nodes.resource_tracker import ResourceTreeSet
|
||||
from unilabos.registry.registry import lab_registry
|
||||
|
||||
from unilabos.resources.bioyond.decks import BIOYOND_PolymerReactionStation_Deck
|
||||
|
||||
lab_registry.setup()
|
||||
|
||||
|
||||
type_mapping = {
|
||||
"烧杯": ("BIOYOND_PolymerStation_1FlaskCarrier", "3a14196b-24f2-ca49-9081-0cab8021bf1a"),
|
||||
"试剂瓶": ("BIOYOND_PolymerStation_1BottleCarrier", ""),
|
||||
"样品板": ("BIOYOND_PolymerStation_6StockCarrier", "3a14196e-b7a0-a5da-1931-35f3000281e9"),
|
||||
"分装板": ("BIOYOND_PolymerStation_6VialCarrier", "3a14196e-5dfe-6e21-0c79-fe2036d052c4"),
|
||||
"样品瓶": ("BIOYOND_PolymerStation_Solid_Stock", "3a14196a-cf7d-8aea-48d8-b9662c7dba94"),
|
||||
"90%分装小瓶": ("BIOYOND_PolymerStation_Solid_Vial", "3a14196c-cdcf-088d-dc7d-5cf38f0ad9ea"),
|
||||
"10%分装小瓶": ("BIOYOND_PolymerStation_Liquid_Vial", "3a14196c-76be-2279-4e22-7310d69aed68"),
|
||||
}
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def bioyond_materials_reaction() -> list[dict]:
|
||||
print("加载 BioYond 物料数据...")
|
||||
print(os.getcwd())
|
||||
with open("bioyond_materials_reaction.json", "r", encoding="utf-8") as f:
|
||||
data = json.load(f)
|
||||
print(f"加载了 {len(data)} 条物料数据")
|
||||
return data
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def bioyond_materials_liquidhandling_1() -> list[dict]:
|
||||
print("加载 BioYond 物料数据...")
|
||||
print(os.getcwd())
|
||||
with open("bioyond_materials_liquidhandling_1.json", "r", encoding="utf-8") as f:
|
||||
data = json.load(f)
|
||||
print(f"加载了 {len(data)} 条物料数据")
|
||||
return data
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def bioyond_materials_liquidhandling_2() -> list[dict]:
|
||||
print("加载 BioYond 物料数据...")
|
||||
print(os.getcwd())
|
||||
with open("bioyond_materials_liquidhandling_2.json", "r", encoding="utf-8") as f:
|
||||
data = json.load(f)
|
||||
print(f"加载了 {len(data)} 条物料数据")
|
||||
return data
|
||||
|
||||
|
||||
@pytest.mark.parametrize("materials_fixture", [
|
||||
"bioyond_materials_reaction",
|
||||
"bioyond_materials_liquidhandling_1",
|
||||
])
|
||||
def test_resourcetreeset_from_plr(materials_fixture, request) -> list[dict]:
|
||||
materials = request.getfixturevalue(materials_fixture)
|
||||
deck = BIOYOND_PolymerReactionStation_Deck("test_deck")
|
||||
output = resource_bioyond_to_plr(materials, type_mapping=type_mapping, deck=deck)
|
||||
print(deck.summary())
|
||||
|
||||
r = ResourceTreeSet.from_plr_resources([deck])
|
||||
print(r.dump())
|
||||
# json.dump(deck.serialize(), open("test.json", "w", encoding="utf-8"), indent=4)
|
||||
186 test/workflow/example_bio.json Normal file
@@ -0,0 +1,186 @@
|
||||
{
|
||||
"workflow": [
|
||||
{
|
||||
"action": "transfer_liquid",
|
||||
"action_args": {
|
||||
"sources": "Liquid_1",
|
||||
"targets": "Liquid_2",
|
||||
"asp_vol": 66.0,
|
||||
"dis_vol": 66.0,
|
||||
"asp_flow_rate": 94.0,
|
||||
"dis_flow_rate": 94.0
|
||||
}
|
||||
},
|
||||
{
|
||||
"action": "transfer_liquid",
|
||||
"action_args": {
|
||||
"sources": "Liquid_2",
|
||||
"targets": "Liquid_3",
|
||||
"asp_vol": 58.0,
|
||||
"dis_vol": 96.0,
|
||||
"asp_flow_rate": 94.0,
|
||||
"dis_flow_rate": 94.0
|
||||
}
|
||||
},
|
||||
{
|
||||
"action": "transfer_liquid",
|
||||
"action_args": {
|
||||
"sources": "Liquid_4",
|
||||
"targets": "Liquid_2",
|
||||
"asp_vol": 85.0,
|
||||
"dis_vol": 170.0,
|
||||
"asp_flow_rate": 94.0,
|
||||
"dis_flow_rate": 94.0
|
||||
}
|
||||
},
|
||||
{
|
||||
"action": "transfer_liquid",
|
||||
"action_args": {
|
||||
"sources": "Liquid_4",
|
||||
"targets": "Liquid_2",
|
||||
"asp_vol": 63.333333333333336,
|
||||
"dis_vol": 170.0,
|
||||
"asp_flow_rate": 94.0,
|
||||
"dis_flow_rate": 94.0
|
||||
}
|
||||
},
|
||||
{
|
||||
"action": "transfer_liquid",
|
||||
"action_args": {
|
||||
"sources": "Liquid_2",
|
||||
"targets": "Liquid_3",
|
||||
"asp_vol": 72.0,
|
||||
"dis_vol": 150.0,
|
||||
"asp_flow_rate": 94.0,
|
||||
"dis_flow_rate": 94.0
|
||||
}
|
||||
},
|
||||
{
|
||||
"action": "transfer_liquid",
|
||||
"action_args": {
|
||||
"sources": "Liquid_4",
|
||||
"targets": "Liquid_2",
|
||||
"asp_vol": 85.0,
|
||||
"dis_vol": 170.0,
|
||||
"asp_flow_rate": 94.0,
|
||||
"dis_flow_rate": 94.0
|
||||
}
|
||||
},
|
||||
{
|
||||
"action": "transfer_liquid",
|
||||
"action_args": {
|
||||
"sources": "Liquid_4",
|
||||
"targets": "Liquid_2",
|
||||
"asp_vol": 63.333333333333336,
|
||||
"dis_vol": 170.0,
|
||||
"asp_flow_rate": 94.0,
|
||||
"dis_flow_rate": 94.0
|
||||
}
|
||||
},
|
||||
{
|
||||
"action": "transfer_liquid",
|
||||
"action_args": {
|
||||
"sources": "Liquid_2",
|
||||
"targets": "Liquid_3",
|
||||
"asp_vol": 72.0,
|
||||
"dis_vol": 150.0,
|
||||
"asp_flow_rate": 94.0,
|
||||
"dis_flow_rate": 94.0
|
||||
}
|
||||
},
|
||||
{
|
||||
"action": "transfer_liquid",
|
||||
"action_args": {
|
||||
"sources": "Liquid_2",
|
||||
"targets": "Liquid_3",
|
||||
"asp_vol": 20.0,
|
||||
"dis_vol": 20.0,
|
||||
"asp_flow_rate": 7.6,
|
||||
"dis_flow_rate": 7.6
|
||||
}
|
||||
},
|
||||
{
|
||||
"action": "transfer_liquid",
|
||||
"action_args": {
|
||||
"sources": "Liquid_5",
|
||||
"targets": "Liquid_2",
|
||||
"asp_vol": 6.0,
|
||||
"dis_vol": 12.0,
|
||||
"asp_flow_rate": 7.6,
|
||||
"dis_flow_rate": 7.6
|
||||
}
|
||||
},
|
||||
{
|
||||
"action": "transfer_liquid",
|
||||
"action_args": {
|
||||
"sources": "Liquid_5",
|
||||
"targets": "Liquid_2",
|
||||
"asp_vol": 10.666666666666666,
|
||||
"dis_vol": 12.0,
|
||||
"asp_flow_rate": 7.599999999999999,
|
||||
"dis_flow_rate": 7.6
|
||||
}
|
||||
},
|
||||
{
|
||||
"action": "transfer_liquid",
|
||||
"action_args": {
|
||||
"sources": "Liquid_2",
|
||||
"targets": "Liquid_6",
|
||||
"asp_vol": 12.0,
|
||||
"dis_vol": 10.0,
|
||||
"asp_flow_rate": 7.6,
|
||||
"dis_flow_rate": 7.6
|
||||
}
|
||||
}
|
||||
],
|
||||
"reagent": {
|
||||
"Liquid_6": {
|
||||
"slot": 1,
|
||||
"well": [
|
||||
"A2"
|
||||
],
|
||||
"labware": "elution plate"
|
||||
},
|
||||
"Liquid_1": {
|
||||
"slot": 2,
|
||||
"well": [
|
||||
"A1",
|
||||
"A2",
|
||||
"A4"
|
||||
],
|
||||
"labware": "reagent reservoir"
|
||||
},
|
||||
"Liquid_4": {
|
||||
"slot": 2,
|
||||
"well": [
|
||||
"A1",
|
||||
"A2",
|
||||
"A4"
|
||||
],
|
||||
"labware": "reagent reservoir"
|
||||
},
|
||||
"Liquid_5": {
|
||||
"slot": 2,
|
||||
"well": [
|
||||
"A1",
|
||||
"A2",
|
||||
"A4"
|
||||
],
|
||||
"labware": "reagent reservoir"
|
||||
},
|
||||
"Liquid_2": {
|
||||
"slot": 4,
|
||||
"well": [
|
||||
"A2"
|
||||
],
|
||||
"labware": "TAG1 plate on Magnetic Module GEN2"
|
||||
},
|
||||
"Liquid_3": {
|
||||
"slot": 12,
|
||||
"well": [
|
||||
"A1"
|
||||
],
|
||||
"labware": "Opentrons Fixed Trash"
|
||||
}
|
||||
}
|
||||
}
|
||||
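Every workflow entry above is a `transfer_liquid` step with `sources`, `targets`, and aspirate/dispense volumes, and the `reagent` block maps those labels to slots, wells, and labware. A quick aggregation over the same structure is sketched below (file path as used in this change, volume units as given in the file).

```python
import json
from collections import defaultdict

with open("test/workflow/example_bio.json", "r", encoding="utf-8") as f:
    protocol = json.load(f)

aspirated = defaultdict(float)
for step in protocol["workflow"]:
    if step["action"] == "transfer_liquid":
        args = step["action_args"]
        aspirated[args["sources"]] += args["asp_vol"]

for liquid, volume in sorted(aspirated.items()):
    slot = protocol["reagent"][liquid]["slot"]
    print(f"{liquid} (slot {slot}): {volume:.1f} aspirated in total")
```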
BIN test/workflow/example_bio_graph.png Normal file (After: 148 KiB)
63 test/workflow/example_prcxi.json Normal file
@@ -0,0 +1,63 @@
|
||||
{
|
||||
"steps_info": [
|
||||
{
|
||||
"step_number": 1,
|
||||
"action": "transfer_liquid",
|
||||
"parameters": {
|
||||
"source": "sample supernatant",
|
||||
"target": "antibody-coated well",
|
||||
"volume": 100
|
||||
}
|
||||
},
|
||||
{
|
||||
"step_number": 2,
|
||||
"action": "transfer_liquid",
|
||||
"parameters": {
|
||||
"source": "washing buffer",
|
||||
"target": "antibody-coated well",
|
||||
"volume": 200
|
||||
}
|
||||
},
|
||||
{
|
||||
"step_number": 3,
|
||||
"action": "transfer_liquid",
|
||||
"parameters": {
|
||||
"source": "washing buffer",
|
||||
"target": "antibody-coated well",
|
||||
"volume": 200
|
||||
}
|
||||
},
|
||||
{
|
||||
"step_number": 4,
|
||||
"action": "transfer_liquid",
|
||||
"parameters": {
|
||||
"source": "washing buffer",
|
||||
"target": "antibody-coated well",
|
||||
"volume": 200
|
||||
}
|
||||
},
|
||||
{
|
||||
"step_number": 5,
|
||||
"action": "transfer_liquid",
|
||||
"parameters": {
|
||||
"source": "TMB substrate",
|
||||
"target": "antibody-coated well",
|
||||
"volume": 100
|
||||
}
|
||||
}
|
||||
],
|
||||
"labware_info": [
|
||||
{"reagent_name": "sample supernatant", "material_name": "96深孔板", "positions": 1},
|
||||
{"reagent_name": "washing buffer", "material_name": "储液槽", "positions": 2},
|
||||
{"reagent_name": "TMB substrate", "material_name": "储液槽", "positions": 3},
|
||||
{"reagent_name": "antibody-coated well", "material_name": "96 细胞培养皿", "positions": 4},
|
||||
{"reagent_name": "", "material_name": "300μL Tip头", "positions": 5},
|
||||
{"reagent_name": "", "material_name": "300μL Tip头", "positions": 6},
|
||||
{"reagent_name": "", "material_name": "300μL Tip头", "positions": 7},
|
||||
{"reagent_name": "", "material_name": "300μL Tip头", "positions": 8},
|
||||
{"reagent_name": "", "material_name": "300μL Tip头", "positions": 9},
|
||||
{"reagent_name": "", "material_name": "300μL Tip头", "positions": 10},
|
||||
{"reagent_name": "", "material_name": "300μL Tip头", "positions": 11},
|
||||
{"reagent_name": "", "material_name": "300μL Tip头", "positions": 13}
|
||||
]
|
||||
}
|
||||
BIN test/workflow/example_prcxi_graph.png Normal file (After: 140 KiB)
BIN test/workflow/example_prcxi_graph_20251022_1359.png Normal file (After: 117 KiB)
94 test/workflow/merge_workflow.py Normal file
@@ -0,0 +1,94 @@
|
||||
import json
|
||||
import sys
|
||||
from datetime import datetime
|
||||
from pathlib import Path
|
||||
|
||||
ROOT_DIR = Path(__file__).resolve().parents[2]
|
||||
if str(ROOT_DIR) not in sys.path:
|
||||
sys.path.insert(0, str(ROOT_DIR))
|
||||
|
||||
import pytest
|
||||
|
||||
from scripts.workflow import build_protocol_graph, draw_protocol_graph, draw_protocol_graph_with_ports
|
||||
|
||||
|
||||
ROOT_DIR = Path(__file__).resolve().parents[2]
|
||||
if str(ROOT_DIR) not in sys.path:
|
||||
sys.path.insert(0, str(ROOT_DIR))
|
||||
|
||||
|
||||
def _normalize_steps(data):
|
||||
normalized = []
|
||||
for step in data:
|
||||
action = step.get("action") or step.get("operation")
|
||||
if not action:
|
||||
continue
|
||||
raw_params = step.get("parameters") or step.get("action_args") or {}
|
||||
params = dict(raw_params)
|
||||
|
||||
if "source" in raw_params and "sources" not in raw_params:
|
||||
params["sources"] = raw_params["source"]
|
||||
if "target" in raw_params and "targets" not in raw_params:
|
||||
params["targets"] = raw_params["target"]
|
||||
|
||||
description = step.get("description") or step.get("purpose")
|
||||
step_dict = {"action": action, "parameters": params}
|
||||
if description:
|
||||
step_dict["description"] = description
|
||||
normalized.append(step_dict)
|
||||
return normalized
|
||||
|
||||
|
||||
def _normalize_labware(data):
|
||||
labware = {}
|
||||
for item in data:
|
||||
reagent_name = item.get("reagent_name")
|
||||
key = reagent_name or item.get("material_name") or item.get("name")
|
||||
if not key:
|
||||
continue
|
||||
key = str(key)
|
||||
idx = 1
|
||||
original_key = key
|
||||
while key in labware:
|
||||
idx += 1
|
||||
key = f"{original_key}_{idx}"
|
||||
|
||||
labware[key] = {
|
||||
"slot": item.get("positions") or item.get("slot"),
|
||||
"labware": item.get("material_name") or item.get("labware"),
|
||||
"well": item.get("well", []),
|
||||
"type": item.get("type", "reagent"),
|
||||
"role": item.get("role", ""),
|
||||
"name": key,
|
||||
}
|
||||
return labware
|
||||
|
||||
|
||||
@pytest.mark.parametrize("protocol_name", [
|
||||
"example_bio",
|
||||
# "bioyond_materials_liquidhandling_1",
|
||||
"example_prcxi",
|
||||
])
|
||||
def test_build_protocol_graph(protocol_name):
|
||||
data_path = Path(__file__).with_name(f"{protocol_name}.json")
|
||||
with data_path.open("r", encoding="utf-8") as fp:
|
||||
d = json.load(fp)
|
||||
|
||||
if "workflow" in d and "reagent" in d:
|
||||
protocol_steps = d["workflow"]
|
||||
labware_info = d["reagent"]
|
||||
elif "steps_info" in d and "labware_info" in d:
|
||||
protocol_steps = _normalize_steps(d["steps_info"])
|
||||
labware_info = _normalize_labware(d["labware_info"])
|
||||
else:
|
||||
raise ValueError("Unsupported protocol format")
|
||||
|
||||
graph = build_protocol_graph(
|
||||
labware_info=labware_info,
|
||||
protocol_steps=protocol_steps,
|
||||
workstation_name="PRCXi",
|
||||
)
|
||||
timestamp = datetime.now().strftime("%Y%m%d_%H%M")
|
||||
output_path = data_path.with_name(f"{protocol_name}_graph_{timestamp}.png")
|
||||
draw_protocol_graph_with_ports(graph, str(output_path))
|
||||
print(graph)
|
||||
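`_normalize_steps` and `_normalize_labware` above bridge the two protocol formats in this directory: `example_bio.json` already uses `workflow`/`reagent`, while `example_prcxi.json` uses `steps_info`/`labware_info` with singular `source`/`target` keys. Applied to the first PRCXi step, the normalization yields roughly the following; this is a worked illustration, not captured test output.

```python
step = {
    "step_number": 1,
    "action": "transfer_liquid",
    "parameters": {"source": "sample supernatant", "target": "antibody-coated well", "volume": 100},
}
# After _normalize_steps([step]) the singular keys are duplicated into the plural ones
# and step_number is dropped:
normalized = {
    "action": "transfer_liquid",
    "parameters": {
        "source": "sample supernatant",
        "target": "antibody-coated well",
        "volume": 100,
        "sources": "sample supernatant",
        "targets": "antibody-coated well",
    },
}
```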
@@ -0,0 +1 @@
|
||||
__version__ = "0.10.10"
|
||||
|
||||
@@ -1,18 +1,19 @@
|
||||
import threading
|
||||
|
||||
from unilabos.ros.nodes.resource_tracker import ResourceTreeSet
|
||||
from unilabos.utils import logger
|
||||
|
||||
|
||||
# 根据选择的 backend 启动相应的功能
|
||||
def start_backend(
|
||||
backend: str,
|
||||
devices_config: dict = {},
|
||||
resources_config: list = [],
|
||||
resources_edge_config: list = [],
|
||||
devices_config: ResourceTreeSet,
|
||||
resources_config: ResourceTreeSet,
|
||||
resources_edge_config: list[dict] = [],
|
||||
graph=None,
|
||||
controllers_config: dict = {},
|
||||
bridges=[],
|
||||
without_host: bool = False,
|
||||
is_slave: bool = False,
|
||||
visual: str = "None",
|
||||
resources_mesh_config: dict = {},
|
||||
**kwargs,
|
||||
@@ -31,7 +32,7 @@ def start_backend(
|
||||
raise ValueError(f"Unsupported backend: {backend}")
|
||||
|
||||
backend_thread = threading.Thread(
|
||||
target=main if not without_host else slave,
|
||||
target=main if not is_slave else slave,
|
||||
args=(
|
||||
devices_config,
|
||||
resources_config,
|
||||
|
||||
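The signature change above replaces the old `devices_config: dict` / `resources_config: list` pair with `ResourceTreeSet` instances and renames the host/slave switch to `is_slave`. A rough sketch of a caller after the change follows, assuming only what these hunks and the `main()` diff below show (`ResourceTreeSet.from_raw_list` building a tree from raw node dicts, and the same tree passed for both arguments); the backend name, the node fields required by `from_raw_list`, and the import of `start_backend` itself are placeholders not visible in this hunk.

```python
from unilabos.ros.nodes.resource_tracker import ResourceTreeSet  # import path as used elsewhere in this diff

# Placeholder node list; real nodes look like the JSON fixtures earlier in this compare view.
raw_nodes = [{"id": "liquid_handler", "name": "liquid_handler", "parent": None, "children": []}]
tree = ResourceTreeSet.from_raw_list(raw_nodes)

start_backend(
    backend="simple",          # backend name is deployment-specific; "simple" is mentioned in main() below
    devices_config=tree,       # was: nested dict of devices
    resources_config=tree,     # was: flat list of resource dicts
    resources_edge_config=[],  # unchanged: list of edge dicts
    is_slave=False,            # was: without_host
)
```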
@@ -6,21 +6,19 @@ import signal
|
||||
import sys
|
||||
import threading
|
||||
import time
|
||||
from copy import deepcopy
|
||||
from typing import Dict, Any, List
|
||||
|
||||
import networkx as nx
|
||||
import yaml
|
||||
|
||||
|
||||
# 首先添加项目根目录到路径
|
||||
current_dir = os.path.dirname(os.path.abspath(__file__))
|
||||
unilabos_dir = os.path.dirname(os.path.dirname(current_dir))
|
||||
if unilabos_dir not in sys.path:
|
||||
sys.path.append(unilabos_dir)
|
||||
|
||||
from unilabos.config.config import load_config, BasicConfig, HTTPConfig
|
||||
from unilabos.utils.banner_print import print_status, print_unilab_banner
|
||||
from unilabos.resources.graphio import modify_to_backend_format
|
||||
|
||||
from unilabos.config.config import load_config, BasicConfig, HTTPConfig
|
||||
|
||||
def load_config_from_file(config_path):
|
||||
if config_path is None:
|
||||
@@ -43,7 +41,7 @@ def convert_argv_dashes_to_underscores(args: argparse.ArgumentParser):
|
||||
for i, arg in enumerate(sys.argv):
|
||||
for option_string in option_strings:
|
||||
if arg.startswith(option_string):
|
||||
new_arg = arg[:2] + arg[2 : len(option_string)].replace("-", "_") + arg[len(option_string) :]
|
||||
new_arg = arg[:2] + arg[2:len(option_string)].replace("-", "_") + arg[len(option_string):]
|
||||
sys.argv[i] = new_arg
|
||||
break
|
||||
|
||||
@@ -182,6 +180,7 @@ def main():
|
||||
working_dir = os.path.abspath(os.getcwd())
|
||||
else:
|
||||
working_dir = os.path.abspath(os.path.join(os.getcwd(), "unilabos_data"))
|
||||
|
||||
if args_dict.get("working_dir"):
|
||||
working_dir = args_dict.get("working_dir", "")
|
||||
if config_path and not os.path.exists(config_path):
|
||||
@@ -213,6 +212,14 @@ def main():
|
||||
# 加载配置文件
|
||||
print_status(f"当前工作目录为 {working_dir}", "info")
|
||||
load_config_from_file(config_path)
|
||||
|
||||
# 根据配置重新设置日志级别
|
||||
from unilabos.utils.log import configure_logger, logger
|
||||
|
||||
if hasattr(BasicConfig, "log_level"):
|
||||
logger.info(f"Log level set to '{BasicConfig.log_level}' from config file.")
|
||||
configure_logger(loglevel=BasicConfig.log_level)
|
||||
|
||||
if args_dict["addr"] == "test":
|
||||
print_status("使用测试环境地址", "info")
|
||||
HTTPConfig.remote_addr = "https://uni-lab.test.bohrium.com/api/v1"
|
||||
@@ -225,6 +232,15 @@ def main():
|
||||
else:
|
||||
HTTPConfig.remote_addr = args_dict.get("addr", "")
|
||||
|
||||
# 设置BasicConfig参数
|
||||
if args_dict.get("ak", ""):
|
||||
BasicConfig.ak = args_dict.get("ak", "")
|
||||
print_status("传入了ak参数,优先采用传入参数!", "info")
|
||||
if args_dict.get("sk", ""):
|
||||
BasicConfig.sk = args_dict.get("sk", "")
|
||||
print_status("传入了sk参数,优先采用传入参数!", "info")
|
||||
|
||||
# 使用远程资源启动
|
||||
if args_dict["use_remote_resource"]:
|
||||
print_status("使用远程资源启动", "info")
|
||||
from unilabos.app.web import http_client
|
||||
@@ -236,13 +252,6 @@ def main():
|
||||
else:
|
||||
print_status("远程资源不存在,本地将进行首次上报!", "info")
|
||||
|
||||
# 设置BasicConfig参数
|
||||
if args_dict.get("ak", ""):
|
||||
BasicConfig.ak = args_dict.get("ak", "")
|
||||
print_status("传入了ak参数,优先采用传入参数!", "info")
|
||||
if args_dict.get("sk", ""):
|
||||
BasicConfig.sk = args_dict.get("sk", "")
|
||||
print_status("传入了sk参数,优先采用传入参数!", "info")
|
||||
BasicConfig.working_dir = working_dir
|
||||
BasicConfig.is_host_mode = not args_dict.get("is_slave", False)
|
||||
BasicConfig.slave_no_host = args_dict.get("slave_no_host", False)
|
||||
@@ -257,8 +266,6 @@ def main():
|
||||
read_node_link_json,
|
||||
read_graphml,
|
||||
dict_from_graph,
|
||||
dict_to_nested_dict,
|
||||
initialize_resources,
|
||||
)
|
||||
from unilabos.app.communication import get_communication_client
|
||||
from unilabos.registry.registry import build_registry
|
||||
@@ -266,6 +273,8 @@ def main():
|
||||
from unilabos.app.web import http_client
|
||||
from unilabos.app.web import start_server
|
||||
from unilabos.app.register import register_devices_and_resources
|
||||
from unilabos.resources.graphio import modify_to_backend_format
|
||||
from unilabos.ros.nodes.resource_tracker import ResourceTreeSet, ResourceDict
|
||||
|
||||
# 显示启动横幅
|
||||
print_unilab_banner(args_dict)
|
||||
@@ -278,8 +287,11 @@ def main():
|
||||
if not BasicConfig.ak or not BasicConfig.sk:
|
||||
print_status("后续运行必须拥有一个实验室,请前往 https://uni-lab.bohrium.com 注册实验室!", "warning")
|
||||
os._exit(1)
|
||||
graph: nx.Graph
|
||||
resource_tree_set: ResourceTreeSet
|
||||
resource_links: List[Dict[str, Any]]
|
||||
request_startup_json = http_client.request_startup_json()
|
||||
if args_dict["graph"] is None:
|
||||
request_startup_json = http_client.request_startup_json()
|
||||
if not request_startup_json:
|
||||
print_status(
|
||||
"未指定设备加载文件路径,尝试从HTTP获取失败,请检查网络或者使用-g参数指定设备加载文件路径", "error"
|
||||
@@ -287,61 +299,64 @@ def main():
|
||||
os._exit(1)
|
||||
else:
|
||||
print_status("联网获取设备加载文件成功", "info")
|
||||
graph, data = read_node_link_json(request_startup_json)
|
||||
graph, resource_tree_set, resource_links = read_node_link_json(request_startup_json)
|
||||
else:
|
||||
file_path = args_dict["graph"]
|
||||
if file_path.endswith(".json"):
|
||||
graph, data = read_node_link_json(file_path)
|
||||
graph, resource_tree_set, resource_links = read_node_link_json(file_path)
|
||||
else:
|
||||
graph, data = read_graphml(file_path)
|
||||
graph, resource_tree_set, resource_links = read_graphml(file_path)
|
||||
import unilabos.resources.graphio as graph_res
|
||||
|
||||
graph_res.physical_setup_graph = graph
|
||||
resource_edge_info = modify_to_backend_format(data["links"])
|
||||
resource_edge_info = modify_to_backend_format(resource_links)
|
||||
materials = lab_registry.obtain_registry_resource_info()
|
||||
materials.extend(lab_registry.obtain_registry_device_info())
|
||||
materials = {k["id"]: k for k in materials}
|
||||
nodes = {k["id"]: k for k in data["nodes"]}
|
||||
# 从 ResourceTreeSet 中获取节点信息
|
||||
nodes = {node.res_content.id: node.res_content for node in resource_tree_set.all_nodes}
|
||||
edge_info = len(resource_edge_info)
|
||||
for ind, i in enumerate(resource_edge_info[::-1]):
|
||||
source_node = nodes[i["source"]]
|
||||
target_node = nodes[i["target"]]
|
||||
source_node: ResourceDict = nodes[i["source"]]
|
||||
target_node: ResourceDict = nodes[i["target"]]
|
||||
source_handle = i["sourceHandle"]
|
||||
target_handle = i["targetHandle"]
|
||||
source_handler_keys = [
|
||||
h["handler_key"] for h in materials[source_node["class"]]["handles"] if h["io_type"] == "source"
|
||||
h["handler_key"] for h in materials[source_node.klass]["handles"] if h["io_type"] == "source"
|
||||
]
|
||||
target_handler_keys = [
|
||||
h["handler_key"] for h in materials[target_node["class"]]["handles"] if h["io_type"] == "target"
|
||||
h["handler_key"] for h in materials[target_node.klass]["handles"] if h["io_type"] == "target"
|
||||
]
|
||||
if source_handle not in source_handler_keys:
|
||||
print_status(
|
||||
f"节点 {source_node['id']} 的source端点 {source_handle} 不存在,请检查,支持的端点 {source_handler_keys}",
|
||||
f"节点 {source_node.id} 的source端点 {source_handle} 不存在,请检查,支持的端点 {source_handler_keys}",
|
||||
"error",
|
||||
)
|
||||
resource_edge_info.pop(edge_info - ind - 1)
|
||||
continue
|
||||
if target_handle not in target_handler_keys:
|
||||
print_status(
|
||||
f"节点 {target_node['id']} 的target端点 {target_handle} 不存在,请检查,支持的端点 {target_handler_keys}",
|
||||
f"节点 {target_node.id} 的target端点 {target_handle} 不存在,请检查,支持的端点 {target_handler_keys}",
|
||||
"error",
|
||||
)
|
||||
resource_edge_info.pop(edge_info - ind - 1)
|
||||
continue
|
||||
|
||||
devices_and_resources = dict_from_graph(graph_res.physical_setup_graph)
|
||||
# args_dict["resources_config"] = initialize_resources(list(deepcopy(devices_and_resources).values()))
|
||||
args_dict["resources_config"] = list(devices_and_resources.values())
|
||||
args_dict["devices_config"] = dict_to_nested_dict(deepcopy(devices_and_resources), devices_only=False)
|
||||
args_dict["graph"] = graph_res.physical_setup_graph
|
||||
# 如果从远端获取了物料信息,则与本地物料进行同步
|
||||
if request_startup_json and "nodes" in request_startup_json:
|
||||
print_status("开始同步远端物料到本地...", "info")
|
||||
remote_tree_set = ResourceTreeSet.from_raw_list(request_startup_json["nodes"])
|
||||
resource_tree_set.merge_remote_resources(remote_tree_set)
|
||||
print_status("远端物料同步完成", "info")
|
||||
|
||||
print_status(f"{len(args_dict['resources_config'])} Resources loaded:", "info")
|
||||
for i in args_dict["resources_config"]:
|
||||
print_status(f"DeviceId: {i['id']}, Class: {i['class']}", "info")
|
||||
# 使用 ResourceTreeSet 代替 list
|
||||
args_dict["resources_config"] = resource_tree_set
|
||||
args_dict["devices_config"] = resource_tree_set
|
||||
args_dict["graph"] = graph_res.physical_setup_graph
|
||||
|
||||
if BasicConfig.upload_registry:
|
||||
# 设备注册到服务端 - 需要 ak 和 sk
|
||||
if args_dict.get("ak") and args_dict.get("sk"):
|
||||
if BasicConfig.ak and BasicConfig.sk:
|
||||
print_status("开始注册设备到服务端...", "info")
|
||||
try:
|
||||
register_devices_and_resources(lab_registry)
|
||||
@@ -351,9 +366,7 @@ def main():
|
||||
else:
|
||||
print_status("未提供 ak 和 sk,跳过设备注册", "info")
|
||||
else:
|
||||
print_status(
|
||||
"本次启动注册表不报送云端,如果您需要联网调试,请在启动命令增加--upload_registry", "warning"
|
||||
)
|
||||
print_status("本次启动注册表不报送云端,如果您需要联网调试,请在启动命令增加--upload_registry", "warning")
|
||||
|
||||
if args_dict["controllers"] is not None:
|
||||
args_dict["controllers_config"] = yaml.safe_load(open(args_dict["controllers"], encoding="utf-8"))
|
||||
@@ -362,34 +375,38 @@ def main():
|
||||
|
||||
args_dict["bridges"] = []
|
||||
|
||||
# 获取通信客户端(仅支持WebSocket)
|
||||
comm_client = get_communication_client()
|
||||
|
||||
if "websocket" in args_dict["app_bridges"]:
|
||||
args_dict["bridges"].append(comm_client)
|
||||
if "fastapi" in args_dict["app_bridges"]:
|
||||
args_dict["bridges"].append(http_client)
|
||||
if "websocket" in args_dict["app_bridges"]:
|
||||
# 获取通信客户端(仅支持WebSocket)
|
||||
if BasicConfig.is_host_mode:
|
||||
comm_client = get_communication_client()
|
||||
if "websocket" in args_dict["app_bridges"]:
|
||||
args_dict["bridges"].append(comm_client)
|
||||
def _exit(signum, frame):
|
||||
comm_client.stop()
|
||||
sys.exit(0)
|
||||
|
||||
def _exit(signum, frame):
|
||||
comm_client.stop()
|
||||
sys.exit(0)
|
||||
signal.signal(signal.SIGINT, _exit)
|
||||
signal.signal(signal.SIGTERM, _exit)
|
||||
comm_client.start()
|
||||
else:
|
||||
print_status("SlaveMode跳过Websocket连接")
|
||||
|
||||
signal.signal(signal.SIGINT, _exit)
|
||||
signal.signal(signal.SIGTERM, _exit)
|
||||
comm_client.start()
|
||||
args_dict["resources_mesh_config"] = {}
|
||||
args_dict["resources_edge_config"] = resource_edge_info
|
||||
# web visiualize 2D
|
||||
if args_dict["visual"] != "disable":
|
||||
enable_rviz = args_dict["visual"] == "rviz"
|
||||
devices_and_resources = dict_from_graph(graph_res.physical_setup_graph)
|
||||
if devices_and_resources is not None:
|
||||
from unilabos.device_mesh.resource_visalization import (
|
||||
ResourceVisualization,
|
||||
) # 此处开启后,logger会变更为INFO,有需要请调整
|
||||
|
||||
resource_visualization = ResourceVisualization(
|
||||
devices_and_resources, args_dict["resources_config"], enable_rviz=enable_rviz
|
||||
devices_and_resources,
|
||||
[n.res_content for n in args_dict["resources_config"].all_nodes], # type: ignore # FIXME
|
||||
enable_rviz=enable_rviz,
|
||||
)
|
||||
args_dict["resources_mesh_config"] = resource_visualization.resource_model
|
||||
start_backend(**args_dict)
|
||||
@@ -402,7 +419,23 @@ def main():
|
||||
)
|
||||
server_thread.start()
|
||||
asyncio.set_event_loop(asyncio.new_event_loop())
|
||||
resource_visualization.start()
|
||||
try:
|
||||
resource_visualization.start()
|
||||
except OSError as e:
|
||||
if "AMENT_PREFIX_PATH" in str(e):
|
||||
print_status(
|
||||
f"ROS 2环境未正确设置,跳过3D可视化启动。错误详情: {e}",
|
||||
"warning"
|
||||
)
|
||||
print_status(
|
||||
"建议解决方案:\n"
|
||||
"1. 激活Conda环境: conda activate unilab\n"
|
||||
"2. 或使用 --backend simple 参数\n"
|
||||
"3. 或使用 --visual disable 参数禁用可视化",
|
||||
"info"
|
||||
)
|
||||
else:
|
||||
raise
|
||||
while True:
|
||||
time.sleep(1)
|
||||
else:
|
||||
|
||||
@@ -1,16 +1,12 @@
|
||||
import argparse
|
||||
import json
|
||||
import time
|
||||
from typing import Optional, Tuple, Dict, Any
|
||||
|
||||
from unilabos.config.config import BasicConfig
|
||||
from unilabos.registry.registry import build_registry
|
||||
|
||||
from unilabos.app.main import load_config_from_file
|
||||
from unilabos.utils.log import logger
|
||||
from unilabos.utils.type_check import TypeEncoder
|
||||
|
||||
|
||||
def register_devices_and_resources(lab_registry):
|
||||
def register_devices_and_resources(lab_registry, gather_only=False) -> Optional[Tuple[Dict[str, Any], Dict[str, Any]]]:
|
||||
"""
|
||||
Register devices and resources with the server (HTTP only)
|
||||
"""
|
||||
@@ -33,6 +29,8 @@ def register_devices_and_resources(lab_registry):
|
||||
resources_to_register[resource_info["id"]] = resource_info
|
||||
logger.debug(f"[UniLab Register] 收集资源: {resource_info['id']}")
|
||||
|
||||
if gather_only:
|
||||
return devices_to_register, resources_to_register
|
||||
# Register devices
|
||||
if devices_to_register:
|
||||
try:
|
||||
|
||||
@@ -6,9 +6,12 @@ HTTP client module
|
||||
|
||||
import json
|
||||
import os
|
||||
import time
|
||||
from threading import Thread
|
||||
from typing import List, Dict, Any, Optional
|
||||
|
||||
import requests
|
||||
from unilabos.ros.nodes.resource_tracker import ResourceTreeSet
|
||||
from unilabos.utils.log import info
|
||||
from unilabos.config.config import HTTPConfig, BasicConfig
|
||||
from unilabos.utils import logger
|
||||
@@ -46,7 +49,7 @@ class HTTPClient:
|
||||
Response: API response object
|
||||
"""
|
||||
response = requests.post(
|
||||
f"{self.remote_addr}/lab/material/edge",
|
||||
f"{self.remote_addr}/edge/material/edge",
|
||||
json={
|
||||
"edges": resources,
|
||||
},
|
||||
@@ -61,6 +64,91 @@ class HTTPClient:
|
||||
logger.error(f"添加物料关系失败: {response.status_code}, {response.text}")
|
||||
return response
|
||||
|
||||
def resource_tree_add(self, resources: ResourceTreeSet, mount_uuid: str, first_add: bool) -> Dict[str, str]:
|
||||
"""
|
||||
添加资源
|
||||
|
||||
Args:
|
||||
resources: 要添加的资源树集合(ResourceTreeSet)
|
||||
mount_uuid: 要挂载的资源的uuid
|
||||
first_add: 是否为首次添加资源,可以是host也可以是slave来的
|
||||
Returns:
|
||||
Dict[str, str]: 旧UUID到新UUID的映射关系 {old_uuid: new_uuid}
|
||||
"""
|
||||
with open(os.path.join(BasicConfig.working_dir, "req_resource_tree_add.json"), "w", encoding="utf-8") as f:
|
||||
f.write(json.dumps({"nodes": [x for xs in resources.dump() for x in xs], "mount_uuid": mount_uuid}, indent=4))
|
||||
# Extract all node UUIDs from the serialized data (keep the old UUIDs)
|
||||
old_uuids = {n.res_content.uuid: n for n in resources.all_nodes}
|
||||
if not self.initialized or first_add:
|
||||
self.initialized = True
|
||||
info(f"首次添加资源,当前远程地址: {self.remote_addr}")
|
||||
response = requests.post(
|
||||
f"{self.remote_addr}/edge/material",
|
||||
json={"nodes": [x for xs in resources.dump() for x in xs], "mount_uuid": mount_uuid},
|
||||
headers={"Authorization": f"Lab {self.auth}"},
|
||||
timeout=60,
|
||||
)
|
||||
else:
|
||||
response = requests.put(
|
||||
f"{self.remote_addr}/edge/material",
|
||||
json={"nodes": [x for xs in resources.dump() for x in xs], "mount_uuid": mount_uuid},
|
||||
headers={"Authorization": f"Lab {self.auth}"},
|
||||
timeout=10,
|
||||
)
|
||||
|
||||
with open(os.path.join(BasicConfig.working_dir, "res_resource_tree_add.json"), "w", encoding="utf-8") as f:
|
||||
f.write(f"{response.status_code}" + "\n" + response.text)
|
||||
# Process the response and build the UUID mapping
|
||||
uuid_mapping = {}
|
||||
if response.status_code == 200:
|
||||
res = response.json()
|
||||
if "code" in res and res["code"] != 0:
|
||||
logger.error(f"添加物料失败: {response.text}")
|
||||
else:
|
||||
data = res["data"]
|
||||
for i in data:
|
||||
uuid_mapping[i["uuid"]] = i["cloud_uuid"]
|
||||
else:
|
||||
logger.error(f"添加物料失败: {response.text}")
|
||||
for u, n in old_uuids.items():
|
||||
if u in uuid_mapping:
|
||||
n.res_content.uuid = uuid_mapping[u]
|
||||
for c in n.children:
|
||||
c.res_content.parent_uuid = n.res_content.uuid
|
||||
else:
|
||||
logger.warning(f"资源UUID未更新: {u}")
|
||||
return uuid_mapping
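
A hedged usage sketch of the two tree endpoints defined here: the `http_client` instance and the raw node list are assumptions for illustration; only `ResourceTreeSet.from_raw_list` and the method signatures shown in this diff are taken from the source.

# Illustrative only: push a local tree to the cloud, then read selected nodes back.
from unilabos.ros.nodes.resource_tracker import ResourceTreeSet

def push_and_reload(http_client, raw_nodes, mount_uuid=""):
    tree_set = ResourceTreeSet.from_raw_list(raw_nodes)
    # resource_tree_add rewrites node UUIDs in place and returns {old_uuid: new_uuid}
    mapping = http_client.resource_tree_add(tree_set, mount_uuid, first_add=True)
    # resource_tree_get returns the raw node dicts for the (new) cloud UUIDs
    nodes = http_client.resource_tree_get(list(mapping.values()), with_children=True)
    return mapping, nodes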
|
||||
|
||||
def resource_tree_get(self, uuid_list: List[str], with_children: bool) -> List[Dict[str, Any]]:
|
||||
"""
|
||||
添加资源
|
||||
|
||||
Args:
|
||||
uuid_list: List[str]
|
||||
Returns:
|
||||
Dict[str, str]: 旧UUID到新UUID的映射关系 {old_uuid: new_uuid}
|
||||
"""
|
||||
with open(os.path.join(BasicConfig.working_dir, "req_resource_tree_get.json"), "w", encoding="utf-8") as f:
|
||||
f.write(json.dumps({"uuids": uuid_list, "with_children": with_children}, indent=4))
|
||||
response = requests.post(
|
||||
f"{self.remote_addr}/edge/material/query",
|
||||
json={"uuids": uuid_list, "with_children": with_children},
|
||||
headers={"Authorization": f"Lab {self.auth}"},
|
||||
timeout=100,
|
||||
)
|
||||
with open(os.path.join(BasicConfig.working_dir, "res_resource_tree_get.json"), "w", encoding="utf-8") as f:
|
||||
f.write(f"{response.status_code}" + "\n" + response.text)
|
||||
if response.status_code == 200:
|
||||
res = response.json()
|
||||
if "code" in res and res["code"] != 0:
|
||||
logger.error(f"查询物料失败: {response.text}")
|
||||
else:
|
||||
data = res["data"]["nodes"]
|
||||
return data
|
||||
else:
|
||||
logger.error(f"查询物料失败: {response.text}")
|
||||
return []
|
||||
|
||||
def resource_add(self, resources: List[Dict[str, Any]]) -> requests.Response:
|
||||
"""
|
||||
Add resources
|
||||
@@ -105,12 +193,16 @@ class HTTPClient:
|
||||
Returns:
|
||||
Dict: the returned resource data
|
||||
"""
|
||||
with open(os.path.join(BasicConfig.working_dir, "req_resource_get.json"), "w", encoding="utf-8") as f:
|
||||
f.write(json.dumps({"id": id, "with_children": with_children}, indent=4))
|
||||
response = requests.get(
|
||||
f"{self.remote_addr}/lab/material",
|
||||
params={"id": id, "with_children": with_children},
|
||||
headers={"Authorization": f"Lab {self.auth}"},
|
||||
timeout=20,
|
||||
)
|
||||
with open(os.path.join(BasicConfig.working_dir, "res_resource_get.json"), "w", encoding="utf-8") as f:
|
||||
f.write(f"{response.status_code}" + "\n" + response.text)
|
||||
return response.json()
|
||||
|
||||
def resource_del(self, id: str) -> requests.Response:
|
||||
@@ -220,7 +312,7 @@ class HTTPClient:
|
||||
Response: API response object
|
||||
"""
|
||||
response = requests.get(
|
||||
f"{self.remote_addr}/lab/resource/graph_info/",
|
||||
f"{self.remote_addr}/edge/material/download",
|
||||
headers={"Authorization": f"Lab {self.auth}"},
|
||||
timeout=(3, 30),
|
||||
)
|
||||
|
||||
@@ -19,9 +19,12 @@ import websockets
|
||||
import ssl as ssl_module
|
||||
from queue import Queue, Empty
|
||||
from dataclasses import dataclass, field
|
||||
from typing import Optional, Dict, Any, Callable, List, Set
|
||||
from typing import Optional, Dict, Any, List
|
||||
from urllib.parse import urlparse
|
||||
from enum import Enum
|
||||
|
||||
from typing import TypedDict
|
||||
|
||||
from unilabos.app.model import JobAddReq
|
||||
from unilabos.ros.nodes.presets.host_node import HostNode
|
||||
from unilabos.utils.type_check import serialize_result_info
|
||||
@@ -96,6 +99,14 @@ class WebSocketMessage:
|
||||
timestamp: float = field(default_factory=time.time)
|
||||
|
||||
|
||||
class WSResourceChatData(TypedDict):
|
||||
uuid: str
|
||||
device_uuid: str
|
||||
device_id: str
|
||||
device_old_uuid: str
|
||||
device_old_id: str
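
For orientation, a hedged example of one item in this shape as it is later consumed by _handle_resource_tree_update; every value below is invented.

example_item = {
    "uuid": "res-0001",
    "device_uuid": "dev-0002",
    "device_id": "pump_1",          # device that holds the resource now
    "device_old_uuid": "dev-0001",
    "device_old_id": "deck_1",      # differs from device_id, so an update is treated as a migration
}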
|
||||
|
||||
|
||||
class DeviceActionManager:
|
||||
"""设备动作管理器 - 管理每个device_action_key的任务队列"""
|
||||
|
||||
@@ -250,29 +261,28 @@ class DeviceActionManager:
|
||||
device_key = job_info.device_action_key
|
||||
|
||||
# If this is the currently executing job
|
||||
if (
|
||||
device_key in self.active_jobs and self.active_jobs[device_key].job_id == job_id
|
||||
):  # This should later be coordinated with cancel_goal instead of being handled here; for now we wait for the job to finish by default
|
||||
# del self.active_jobs[device_key]
|
||||
# job_info.status = JobStatus.ENDED
|
||||
# # Remove from all_jobs
|
||||
# del self.all_jobs[job_id]
|
||||
# job_log = format_job_log(job_info.job_id, job_info.task_id, job_info.device_id, job_info.action_name)
|
||||
# logger.info(f"[DeviceActionManager] Active job {job_log} cancelled for {device_key}")
|
||||
if device_key in self.active_jobs and self.active_jobs[device_key].job_id == job_id:
|
||||
# Clean up the active job state
|
||||
del self.active_jobs[device_key]
|
||||
job_info.status = JobStatus.ENDED
|
||||
# Remove from all_jobs
|
||||
del self.all_jobs[job_id]
|
||||
job_log = format_job_log(job_info.job_id, job_info.task_id, job_info.device_id, job_info.action_name)
|
||||
logger.info(f"[DeviceActionManager] Active job {job_log} cancelled for {device_key}")
|
||||
|
||||
# # Start the next job
|
||||
# if device_key in self.device_queues and self.device_queues[device_key]:
|
||||
# next_job = self.device_queues[device_key].pop(0)
|
||||
# # Set the next job to READY and put it into active_jobs
|
||||
# next_job.status = JobStatus.READY
|
||||
# next_job.update_timestamp()
|
||||
# next_job.set_ready_timeout(10)
|
||||
# self.active_jobs[device_key] = next_job
|
||||
# next_job_log = format_job_log(next_job.job_id, next_job.task_id,
|
||||
# next_job.device_id, next_job.action_name)
|
||||
# logger.info(f"[DeviceActionManager] Next job {next_job_log} can start after cancel")
|
||||
# return True
|
||||
pass
|
||||
# Start the next job
|
||||
if device_key in self.device_queues and self.device_queues[device_key]:
|
||||
next_job = self.device_queues[device_key].pop(0)
|
||||
# Set the next job to READY and put it into active_jobs
|
||||
next_job.status = JobStatus.READY
|
||||
next_job.update_timestamp()
|
||||
next_job.set_ready_timeout(10)
|
||||
self.active_jobs[device_key] = next_job
|
||||
next_job_log = format_job_log(
|
||||
next_job.job_id, next_job.task_id, next_job.device_id, next_job.action_name
|
||||
)
|
||||
logger.info(f"[DeviceActionManager] Next job {next_job_log} can start after cancel")
|
||||
return True
|
||||
|
||||
# If this is a queued job
|
||||
elif device_key in self.device_queues:
|
||||
@@ -543,7 +553,7 @@ class MessageProcessor:
|
||||
async def _process_message(self, data: Dict[str, Any]):
|
||||
"""处理收到的消息"""
|
||||
message_type = data.get("action", "")
|
||||
message_data = data.get("data", {})
|
||||
message_data = data.get("data")
|
||||
|
||||
logger.debug(f"[MessageProcessor] Processing message: {message_type}")
|
||||
|
||||
@@ -556,8 +566,12 @@ class MessageProcessor:
|
||||
await self._handle_job_start(message_data)
|
||||
elif message_type == "cancel_action" or message_type == "cancel_task":
|
||||
await self._handle_cancel_action(message_data)
|
||||
elif message_type == "":
|
||||
return
|
||||
elif message_type == "add_material":
|
||||
await self._handle_resource_tree_update(message_data, "add")
|
||||
elif message_type == "update_material":
|
||||
await self._handle_resource_tree_update(message_data, "update")
|
||||
elif message_type == "remove_material":
|
||||
await self._handle_resource_tree_update(message_data, "remove")
|
||||
else:
|
||||
logger.debug(f"[MessageProcessor] Unknown message type: {message_type}")
|
||||
|
||||
@@ -574,6 +588,7 @@ class MessageProcessor:
|
||||
async def _handle_query_action_state(self, data: Dict[str, Any]):
|
||||
"""处理query_action_state消息"""
|
||||
device_id = data.get("device_id", "")
|
||||
device_uuid = data.get("device_uuid", "")
|
||||
action_name = data.get("action_name", "")
|
||||
task_id = data.get("task_id", "")
|
||||
job_id = data.get("job_id", "")
|
||||
@@ -725,31 +740,51 @@ class MessageProcessor:
|
||||
job_info.action_name if job_info else "",
|
||||
)
|
||||
|
||||
# Cancel a single job by job_id
|
||||
# First notify HostNode to cancel the ROS2 action (if it exists)
|
||||
host_node = HostNode.get_instance(0)
|
||||
ros_cancel_success = False
|
||||
if host_node:
|
||||
ros_cancel_success = host_node.cancel_goal(job_id)
|
||||
if ros_cancel_success:
|
||||
logger.info(f"[MessageProcessor] ROS2 cancel request sent for job {job_log}")
|
||||
else:
|
||||
logger.debug(
|
||||
f"[MessageProcessor] Job {job_log} not in ROS2 goals " "(may be queued or already finished)"
|
||||
)
|
||||
|
||||
# Cancel a single job by job_id (clean up the state machine)
|
||||
success = self.device_manager.cancel_job(job_id)
|
||||
if success:
|
||||
# Notify HostNode to cancel
|
||||
host_node = HostNode.get_instance(0)
|
||||
if host_node:
|
||||
host_node.cancel_goal(job_id)
|
||||
logger.info(f"[MessageProcessor] Job {job_log} cancelled")
|
||||
logger.info(f"[MessageProcessor] Job {job_log} cancelled from queue/active list")
|
||||
|
||||
# Notify QueueProcessor that the queue was updated
|
||||
if self.queue_processor:
|
||||
self.queue_processor.notify_queue_update()
|
||||
else:
|
||||
logger.warning(f"[MessageProcessor] Failed to cancel job {job_log}")
|
||||
logger.warning(f"[MessageProcessor] Failed to cancel job {job_log} from queue")
|
||||
|
||||
elif task_id:
|
||||
# Cancel all related jobs by task_id
|
||||
# First notify HostNode to cancel all ROS2 actions
|
||||
# Need to collect all related job_ids first
|
||||
jobs_to_cancel = []
|
||||
with self.device_manager.lock:
|
||||
jobs_to_cancel = [
|
||||
job_info for job_info in self.device_manager.all_jobs.values() if job_info.task_id == task_id
|
||||
]
|
||||
|
||||
host_node = HostNode.get_instance(0)
|
||||
if host_node and jobs_to_cancel:
|
||||
ros_cancelled_count = 0
|
||||
for job_info in jobs_to_cancel:
|
||||
if host_node.cancel_goal(job_info.job_id):
|
||||
ros_cancelled_count += 1
|
||||
logger.info(
|
||||
f"[MessageProcessor] Sent ROS2 cancel for " f"{ros_cancelled_count}/{len(jobs_to_cancel)} jobs"
|
||||
)
|
||||
|
||||
# Cancel all related jobs by task_id (clean up the state machine)
|
||||
cancelled_job_ids = self.device_manager.cancel_jobs_by_task_id(task_id)
|
||||
if cancelled_job_ids:
|
||||
# Notify HostNode to cancel all jobs
|
||||
host_node = HostNode.get_instance(0)
|
||||
if host_node:
|
||||
for cancelled_job_id in cancelled_job_ids:
|
||||
host_node.cancel_goal(cancelled_job_id)
|
||||
|
||||
logger.info(f"[MessageProcessor] Cancelled {len(cancelled_job_ids)} jobs for task_id: {task_id}")
|
||||
|
||||
# Notify QueueProcessor that the queue was updated
|
||||
@@ -760,6 +795,92 @@ class MessageProcessor:
|
||||
else:
|
||||
logger.warning("[MessageProcessor] Cancel request missing both task_id and job_id")
|
||||
|
||||
async def _handle_resource_tree_update(self, resource_uuid_list: List[WSResourceChatData], action: str):
|
||||
"""处理资源树更新消息(add_material/update_material/remove_material)"""
|
||||
if not resource_uuid_list:
|
||||
return
|
||||
|
||||
# Group by device_id and action
|
||||
# device_action_groups: {(device_id, action): [uuid_list]}
|
||||
device_action_groups = {}
|
||||
|
||||
for item in resource_uuid_list:
|
||||
device_id = item["device_id"]
|
||||
if not device_id:
|
||||
device_id = "host_node"
|
||||
|
||||
# Special handling for the update action: check whether the resource migrated between devices
|
||||
if action == "update":
|
||||
device_old_id = item.get("device_old_id", "")
|
||||
if not device_old_id:
|
||||
device_old_id = "host_node"
|
||||
|
||||
# Device migration: device_id != device_old_id
|
||||
if device_id != device_old_id:
|
||||
# Send remove to the old device
|
||||
key_remove = (device_old_id, "remove")
|
||||
if key_remove not in device_action_groups:
|
||||
device_action_groups[key_remove] = []
|
||||
device_action_groups[key_remove].append(item["uuid"])
|
||||
|
||||
# Send add to the new device
|
||||
key_add = (device_id, "add")
|
||||
if key_add not in device_action_groups:
|
||||
device_action_groups[key_add] = []
|
||||
device_action_groups[key_add].append(item["uuid"])
|
||||
|
||||
logger.info(
|
||||
f"[MessageProcessor] Resource migrated: {item['uuid'][:8]} from {device_old_id} to {device_id}"
|
||||
)
|
||||
else:
|
||||
# Normal update
|
||||
key = (device_id, "update")
|
||||
if key not in device_action_groups:
|
||||
device_action_groups[key] = []
|
||||
device_action_groups[key].append(item["uuid"])
|
||||
else:
|
||||
# add or remove action, group directly
|
||||
key = (device_id, action)
|
||||
if key not in device_action_groups:
|
||||
device_action_groups[key] = []
|
||||
device_action_groups[key].append(item["uuid"])
|
||||
|
||||
logger.info(f"触发物料更新 {action} 分组数量: {len(device_action_groups)}, 总数量: {len(resource_uuid_list)}")
|
||||
|
||||
# Create a separate update thread for each (device_id, action)
|
||||
for (device_id, actual_action), items in device_action_groups.items():
|
||||
logger.info(f"设备 {device_id} 物料更新 {actual_action} 数量: {len(items)}")
|
||||
|
||||
def _notify_resource_tree(dev_id, act, item_list):
|
||||
try:
|
||||
host_node = HostNode.get_instance(timeout=5)
|
||||
if not host_node:
|
||||
logger.error(f"[MessageProcessor] HostNode instance not available for {act}")
|
||||
return
|
||||
|
||||
success = host_node.notify_resource_tree_update(dev_id, act, item_list)
|
||||
|
||||
if success:
|
||||
logger.info(
|
||||
f"[MessageProcessor] Resource tree {act} completed for device {dev_id}, "
|
||||
f"items: {len(item_list)}"
|
||||
)
|
||||
else:
|
||||
logger.warning(f"[MessageProcessor] Resource tree {act} failed for device {dev_id}")
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"[MessageProcessor] Error in resource tree {act} for device {dev_id}: {str(e)}")
|
||||
logger.error(traceback.format_exc())
|
||||
|
||||
# Run the notification in a new thread
|
||||
thread = threading.Thread(
|
||||
target=_notify_resource_tree,
|
||||
args=(device_id, actual_action, items),
|
||||
daemon=True,
|
||||
name=f"ResourceTreeUpdate-{actual_action}-{device_id}",
|
||||
)
|
||||
thread.start()
|
||||
|
||||
async def _send_action_state_response(
|
||||
self, device_id: str, action_name: str, task_id: str, job_id: str, typ: str, free: bool, need_more: int
|
||||
):
|
||||
@@ -954,11 +1075,19 @@ class QueueProcessor:
|
||||
"""处理任务完成"""
|
||||
# Get job info for logging
|
||||
job_info = self.device_manager.get_job_info(job_id)
|
||||
|
||||
# If the job does not exist, it may have been cancelled manually
|
||||
if not job_info:
|
||||
logger.debug(
|
||||
f"[QueueProcessor] Job {job_id[:8]} not found in manager " "(may have been cancelled manually)"
|
||||
)
|
||||
return
|
||||
|
||||
job_log = format_job_log(
|
||||
job_id,
|
||||
job_info.task_id if job_info else "",
|
||||
job_info.device_id if job_info else "",
|
||||
job_info.action_name if job_info else "",
|
||||
job_info.task_id,
|
||||
job_info.device_id,
|
||||
job_info.action_name,
|
||||
)
|
||||
|
||||
logger.info(f"[QueueProcessor] Job {job_log} completed with status: {status}")
|
||||
@@ -1008,6 +1137,8 @@ class WebSocketClient(BaseCommunicationClient):
|
||||
|
||||
# Build the WebSocket URL
|
||||
self.websocket_url = self._build_websocket_url()
|
||||
if not self.websocket_url:
|
||||
self.websocket_url = "" # 默认空字符串,避免None
|
||||
|
||||
# Two core threads
|
||||
self.message_processor = MessageProcessor(self.websocket_url, self.send_queue, self.device_manager)
|
||||
|
||||
@@ -2,7 +2,7 @@ import base64
|
||||
import traceback
|
||||
import os
|
||||
import importlib.util
|
||||
from typing import Optional
|
||||
from typing import Optional, Literal
|
||||
from unilabos.utils import logger
|
||||
|
||||
|
||||
@@ -18,6 +18,7 @@ class BasicConfig:
|
||||
vis_2d_enable = False
|
||||
enable_resource_load = True
|
||||
communication_protocol = "websocket"
|
||||
log_level: Literal['TRACE', 'DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'] = "DEBUG"
|
||||
|
||||
@classmethod
|
||||
def auth_secret(cls):
|
||||
|
||||
19
unilabos/device_comms/opcua_client/README.md
Normal file
@@ -0,0 +1,19 @@
|
||||
# OPC UA Generic Client

This module provides a generic OPC UA client implementation: nodes are defined through external configuration (a CSV file), and workflows are executed through a JSON configuration.

## Features

- Configure OPC UA nodes via a CSV file (only the name, type and data type are needed; Chinese node names are supported, in which case NodeLanguage must be specified)
- Automatically locates nodes on the server, no need to know the exact node IDs
- Provides a workflow mechanism
- Supports creating workflows from a JSON configuration

## Usage

step1: Prepare the opcua_nodes.csv file
step2: Write opcua_workflow_example.json to define the workflow; it references opcua_nodes.csv
step3: Write the action corresponding to the workflow
step4: Write the opcua_example.yaml registry entry
step5: Write the opcua_example.json setup graph; it references opcua_workflow_example.json as the workflow definition file

A minimal client sketch based on the pieces above follows.
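
This is a hedged sketch, assuming a reachable server at the default endpoint and an invented node id; it only relies on the Variable and DataType classes exported by this module and on opcua.Client.

from opcua import Client
from unilabos.device_comms.opcua_client import Variable, DataType

client = Client("opc.tcp://localhost:4840/freeopcua/server/")  # assumed endpoint
client.connect()
try:
    temperature = Variable(client, "Temperature", "ns=2;i=1001", DataType.DOUBLE)  # invented node id
    value, err = temperature.read()  # read() returns (value, error_flag)
    if not err:
        print("temperature =", value)
finally:
    client.disconnect()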
|
||||
|
||||
9
unilabos/device_comms/opcua_client/__init__.py
Normal file
@@ -0,0 +1,9 @@
|
||||
from unilabos.device_comms.opcua_client.node.uniopcua import Variable, Method, Object, NodeType, DataType
|
||||
|
||||
__all__ = [
|
||||
'Variable',
|
||||
'Method',
|
||||
'Object',
|
||||
'NodeType',
|
||||
'DataType',
|
||||
]
|
||||
1380
unilabos/device_comms/opcua_client/client.py
Normal file
10
unilabos/device_comms/opcua_client/node/__init__.py
Normal file
@@ -0,0 +1,10 @@
|
||||
from unilabos.device_comms.opcua_client.node.uniopcua import Variable, Method, Object, NodeType, DataType, Base
|
||||
|
||||
__all__ = [
|
||||
'Variable',
|
||||
'Method',
|
||||
'Object',
|
||||
'NodeType',
|
||||
'DataType',
|
||||
'Base',
|
||||
]
|
||||
180
unilabos/device_comms/opcua_client/node/uniopcua.py
Normal file
@@ -0,0 +1,180 @@
|
||||
# coding=utf-8
|
||||
from enum import Enum
|
||||
from abc import ABC, abstractmethod
|
||||
from typing import Tuple, Union, Optional, Any, List
|
||||
|
||||
from opcua import Client, Node
|
||||
from opcua.ua import NodeId, NodeClass, VariantType
|
||||
|
||||
|
||||
class DataType(Enum):
|
||||
BOOLEAN = VariantType.Boolean
|
||||
SBYTE = VariantType.SByte
|
||||
BYTE = VariantType.Byte
|
||||
INT16 = VariantType.Int16
|
||||
UINT16 = VariantType.UInt16
|
||||
INT32 = VariantType.Int32
|
||||
UINT32 = VariantType.UInt32
|
||||
INT64 = VariantType.Int64
|
||||
UINT64 = VariantType.UInt64
|
||||
FLOAT = VariantType.Float
|
||||
DOUBLE = VariantType.Double
|
||||
STRING = VariantType.String
|
||||
DATETIME = VariantType.DateTime
|
||||
BYTESTRING = VariantType.ByteString
|
||||
|
||||
|
||||
class NodeType(Enum):
|
||||
VARIABLE = NodeClass.Variable
|
||||
OBJECT = NodeClass.Object
|
||||
METHOD = NodeClass.Method
|
||||
OBJECTTYPE = NodeClass.ObjectType
|
||||
VARIABLETYPE = NodeClass.VariableType
|
||||
REFERENCETYPE = NodeClass.ReferenceType
|
||||
DATATYPE = NodeClass.DataType
|
||||
VIEW = NodeClass.View
|
||||
|
||||
|
||||
class Base(ABC):
|
||||
def __init__(self, client: Client, name: str, node_id: str, typ: NodeType, data_type: DataType):
|
||||
self._node_id: str = node_id
|
||||
self._client = client
|
||||
self._name = name
|
||||
self._type = typ
|
||||
self._data_type = data_type
|
||||
self._node: Optional[Node] = None
|
||||
|
||||
def _get_node(self) -> Node:
|
||||
if self._node is None:
|
||||
try:
|
||||
# Check whether this is a NumericNodeId (ns=X;i=Y) style id
|
||||
if "NumericNodeId" in self._node_id:
|
||||
# Extract the namespace and identifier from the string
|
||||
import re
|
||||
match = re.search(r'ns=(\d+);i=(\d+)', self._node_id)
|
||||
if match:
|
||||
ns = int(match.group(1))
|
||||
identifier = int(match.group(2))
|
||||
node_id = NodeId(identifier, ns)
|
||||
self._node = self._client.get_node(node_id)
|
||||
else:
|
||||
raise ValueError(f"无法解析节点ID: {self._node_id}")
|
||||
else:
|
||||
# Use the node id string directly
|
||||
self._node = self._client.get_node(self._node_id)
|
||||
except Exception as e:
|
||||
print(f"获取节点失败: {self._node_id}, 错误: {e}")
|
||||
raise
|
||||
return self._node
|
||||
|
||||
@abstractmethod
|
||||
def read(self) -> Tuple[Any, bool]:
|
||||
"""读取节点值,返回(值, 是否出错)"""
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
def write(self, value: Any) -> bool:
|
||||
"""写入节点值,返回是否出错"""
|
||||
pass
|
||||
|
||||
@property
|
||||
def type(self) -> NodeType:
|
||||
return self._type
|
||||
|
||||
@property
|
||||
def node_id(self) -> str:
|
||||
return self._node_id
|
||||
|
||||
@property
|
||||
def name(self) -> str:
|
||||
return self._name
|
||||
|
||||
|
||||
class Variable(Base):
|
||||
def __init__(self, client: Client, name: str, node_id: str, data_type: DataType):
|
||||
super().__init__(client, name, node_id, NodeType.VARIABLE, data_type)
|
||||
|
||||
def read(self) -> Tuple[Any, bool]:
|
||||
try:
|
||||
value = self._get_node().get_value()
|
||||
return value, False
|
||||
except Exception as e:
|
||||
print(f"读取变量 {self._name} 失败: {e}")
|
||||
return None, True
|
||||
|
||||
def write(self, value: Any) -> bool:
|
||||
try:
|
||||
self._get_node().set_value(value)
|
||||
return False
|
||||
except Exception as e:
|
||||
print(f"写入变量 {self._name} 失败: {e}")
|
||||
return True
|
||||
|
||||
|
||||
class Method(Base):
|
||||
def __init__(self, client: Client, name: str, node_id: str, parent_node_id: str, data_type: DataType):
|
||||
super().__init__(client, name, node_id, NodeType.METHOD, data_type)
|
||||
self._parent_node_id = parent_node_id
|
||||
self._parent_node = None
|
||||
|
||||
def _get_parent_node(self) -> Node:
|
||||
if self._parent_node is None:
|
||||
try:
|
||||
# 检查是否是NumericNodeId(ns=X;i=Y)格式
|
||||
if "NumericNodeId" in self._parent_node_id:
|
||||
# 从字符串中提取命名空间和标识符
|
||||
import re
|
||||
match = re.search(r'ns=(\d+);i=(\d+)', self._parent_node_id)
|
||||
if match:
|
||||
ns = int(match.group(1))
|
||||
identifier = int(match.group(2))
|
||||
node_id = NodeId(identifier, ns)
|
||||
self._parent_node = self._client.get_node(node_id)
|
||||
else:
|
||||
raise ValueError(f"无法解析父节点ID: {self._parent_node_id}")
|
||||
else:
|
||||
# 直接使用节点ID字符串
|
||||
self._parent_node = self._client.get_node(self._parent_node_id)
|
||||
except Exception as e:
|
||||
print(f"获取父节点失败: {self._parent_node_id}, 错误: {e}")
|
||||
raise
|
||||
return self._parent_node
|
||||
|
||||
def read(self) -> Tuple[Any, bool]:
|
||||
"""方法节点不支持读取操作"""
|
||||
return None, True
|
||||
|
||||
def write(self, value: Any) -> bool:
|
||||
"""方法节点不支持写入操作"""
|
||||
return True
|
||||
|
||||
def call(self, *args) -> Tuple[Any, bool]:
|
||||
"""调用方法,返回(返回值, 是否出错)"""
|
||||
try:
|
||||
result = self._get_parent_node().call_method(self._get_node(), *args)
|
||||
return result, False
|
||||
except Exception as e:
|
||||
print(f"调用方法 {self._name} 失败: {e}")
|
||||
return None, True
|
||||
|
||||
|
||||
class Object(Base):
|
||||
def __init__(self, client: Client, name: str, node_id: str):
|
||||
super().__init__(client, name, node_id, NodeType.OBJECT, None)
|
||||
|
||||
def read(self) -> Tuple[Any, bool]:
|
||||
"""对象节点不支持直接读取操作"""
|
||||
return None, True
|
||||
|
||||
def write(self, value: Any) -> bool:
|
||||
"""对象节点不支持直接写入操作"""
|
||||
return True
|
||||
|
||||
def get_children(self) -> Tuple[List[Node], bool]:
|
||||
"""获取子节点列表,返回(子节点列表, 是否出错)"""
|
||||
try:
|
||||
children = self._get_node().get_children()
|
||||
return children, False
|
||||
except Exception as e:
|
||||
print(f"获取对象 {self._name} 的子节点失败: {e}")
|
||||
return [], True
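
A hedged sketch of the Method and Object wrappers defined above; the endpoint and node ids are invented for illustration, only the constructor signatures and return conventions come from this file.

from opcua import Client
from unilabos.device_comms.opcua_client.node import Method, Object, DataType

client = Client("opc.tcp://localhost:4840/freeopcua/server/")  # assumed endpoint
client.connect()
try:
    device = Object(client, "TestDevice", "ns=2;i=2000")  # invented node id
    children, err = device.get_children()                 # (children, error_flag)
    reset = Method(client, "ResetAlarm", "ns=2;i=2001", "ns=2;i=2000", DataType.BOOLEAN)
    result, err = reset.call()                            # delegates to parent.call_method(...)
finally:
    client.disconnect()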
|
||||
98
unilabos/device_comms/opcua_client/opcua_config.json
Normal file
@@ -0,0 +1,98 @@
|
||||
{
|
||||
"register_node_list_from_csv_path": {
|
||||
"path": "simple_opcua_nodes.csv"
|
||||
},
|
||||
"create_flow": [
|
||||
{
|
||||
"name": "温度控制流程",
|
||||
"action": [
|
||||
{
|
||||
"name": "温度控制动作",
|
||||
"node_function_to_create": [
|
||||
{
|
||||
"func_name": "read_temperature",
|
||||
"node_name": "Temperature",
|
||||
"mode": "read"
|
||||
},
|
||||
{
|
||||
"func_name": "read_heating_status",
|
||||
"node_name": "HeatingStatus",
|
||||
"mode": "read"
|
||||
},
|
||||
{
|
||||
"func_name": "set_heating",
|
||||
"node_name": "HeatingEnabled",
|
||||
"mode": "write",
|
||||
"value": true
|
||||
}
|
||||
],
|
||||
"create_init_function": {
|
||||
"func_name": "init_setpoint",
|
||||
"node_name": "Setpoint",
|
||||
"mode": "write",
|
||||
"value": 25.0
|
||||
},
|
||||
"create_start_function": {
|
||||
"func_name": "start_heating_control",
|
||||
"node_name": "HeatingEnabled",
|
||||
"mode": "write",
|
||||
"write_functions": [
|
||||
"set_heating"
|
||||
],
|
||||
"condition_functions": [
|
||||
"read_temperature",
|
||||
"read_heating_status"
|
||||
],
|
||||
"stop_condition_expression": "read_temperature >= 25.0 and read_heating_status"
|
||||
},
|
||||
"create_stop_function": {
|
||||
"func_name": "stop_heating",
|
||||
"node_name": "HeatingEnabled",
|
||||
"mode": "write",
|
||||
"value": false
|
||||
},
|
||||
"create_cleanup_function": null
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "报警重置流程",
|
||||
"action": [
|
||||
{
|
||||
"name": "报警重置动作",
|
||||
"node_function_to_create": [
|
||||
{
|
||||
"func_name": "reset_alarm",
|
||||
"node_name": "ResetAlarm",
|
||||
"mode": "call",
|
||||
"value": []
|
||||
}
|
||||
],
|
||||
"create_init_function": null,
|
||||
"create_start_function": {
|
||||
"func_name": "start_reset_alarm",
|
||||
"node_name": "ResetAlarm",
|
||||
"mode": "call",
|
||||
"write_functions": [],
|
||||
"condition_functions": [
|
||||
"reset_alarm"
|
||||
],
|
||||
"stop_condition_expression": "True"
|
||||
},
|
||||
"create_stop_function": null,
|
||||
"create_cleanup_function": null
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "完整控制流程",
|
||||
"action": [
|
||||
"温度控制流程",
|
||||
"报警重置流程"
|
||||
]
|
||||
}
|
||||
],
|
||||
"execute_flow": [
|
||||
"完整控制流程"
|
||||
]
|
||||
}
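
A hedged sketch of reading this configuration with the standard library; how client.py actually consumes it is not reproduced here.

import json

with open("opcua_config.json", encoding="utf-8") as f:
    cfg = json.load(f)

print("node csv:", cfg["register_node_list_from_csv_path"]["path"])
print("flows defined:", [flow["name"] for flow in cfg["create_flow"]])
print("flows to execute:", cfg["execute_flow"])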
|
||||
@@ -0,0 +1,2 @@
|
||||
Name,EnglishName,NodeType,DataType,NodeLanguage
|
||||
中文名,EnglishName,VARIABLE,INT32,Chinese
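
A hedged sketch of parsing a node CSV in this format and mapping the NodeType/DataType columns onto the enums from uniopcua.py; the file name is an assumption and the real loader lives in client.py.

import csv
from unilabos.device_comms.opcua_client import NodeType, DataType

with open("opcua_nodes_example.csv", encoding="utf-8") as f:  # assumed file name
    for row in csv.DictReader(f):
        node_type = DataType  # placeholder removed below
        node_type = NodeType[row["NodeType"]]   # e.g. "VARIABLE" -> NodeType.VARIABLE
        data_type = DataType[row["DataType"]]   # e.g. "INT32"    -> DataType.INT32
        print(row["Name"], row["EnglishName"], node_type, data_type, row["NodeLanguage"])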
|
||||
|
@@ -0,0 +1,30 @@
|
||||
{
|
||||
"register_node_list_from_csv_path": {
|
||||
"path": "opcua_nodes_example.csv"
|
||||
},
|
||||
"create_flow": [
|
||||
{
|
||||
"name": "name",
|
||||
"description": "description",
|
||||
"parameters": ["parameter1", "parameter2"],
|
||||
"action": [
|
||||
{
|
||||
"init_function": {
|
||||
"func_name": "init_grab_params",
|
||||
"write_nodes": ["parameter1", "parameter2"]
|
||||
},
|
||||
"start_function": {
|
||||
"func_name": "start_grab",
|
||||
"write_nodes": {"parameter_start": true},
|
||||
"condition_nodes": ["parameter_condition"],
|
||||
"stop_condition_expression": "parameter_condition == True"
|
||||
},
|
||||
"stop_function": {
|
||||
"func_name": "stop_grab",
|
||||
"write_nodes": {"parameter_start": false}
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
311
unilabos/device_comms/opcua_client/server.py
Normal file
@@ -0,0 +1,311 @@
|
||||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
OPC UA测试服务器
|
||||
用于测试OPC UA客户端功能,特别是temperature_control和valve_control工作流
|
||||
"""
|
||||
|
||||
import sys
|
||||
import time
|
||||
import logging
|
||||
from opcua import Server, ua
|
||||
import threading
|
||||
|
||||
# 设置日志
|
||||
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
class OpcUaTestServer:
|
||||
"""OPC UA测试服务器类"""
|
||||
|
||||
def __init__(self, endpoint="opc.tcp://localhost:4840/freeopcua/server/"):
|
||||
"""
|
||||
初始化OPC UA服务器
|
||||
|
||||
Args:
|
||||
endpoint: 服务器端点URL
|
||||
"""
|
||||
self.server = Server()
|
||||
self.server.set_endpoint(endpoint)
|
||||
|
||||
# 设置服务器名称
|
||||
self.server.set_server_name("UniLabOS OPC UA Test Server")
|
||||
|
||||
# 设置服务器命名空间
|
||||
self.idx = self.server.register_namespace("http://unilabos.com/opcua/test")
|
||||
|
||||
# 获取Objects节点
|
||||
self.objects = self.server.get_objects_node()
|
||||
|
||||
# 创建设备对象
|
||||
self.device = self.objects.add_object(self.idx, "TestDevice")
|
||||
|
||||
# 存储所有节点的字典
|
||||
self.nodes = {}
|
||||
|
||||
# 初始化标志
|
||||
self.running = False
|
||||
|
||||
# 控制标志
|
||||
self.simulation_active = True
|
||||
|
||||
def add_variable(self, name, value, data_type=None):
|
||||
"""
|
||||
添加变量节点
|
||||
|
||||
Args:
|
||||
name: 变量名称
|
||||
value: 初始值
|
||||
data_type: 数据类型 (可选)
|
||||
"""
|
||||
if data_type is None:
|
||||
var = self.device.add_variable(self.idx, name, value)
|
||||
else:
|
||||
var = self.device.add_variable(self.idx, name, value, data_type)
|
||||
|
||||
# 设置变量可写
|
||||
var.set_writable()
|
||||
|
||||
# 存储节点
|
||||
self.nodes[name] = var
|
||||
logger.info(f"添加变量节点: {name}, 初始值: {value}")
|
||||
return var
|
||||
|
||||
def add_method(self, name, callback, inputs=None, outputs=None):
|
||||
"""
|
||||
添加方法节点
|
||||
|
||||
Args:
|
||||
name: 方法名称
|
||||
callback: 回调函数
|
||||
inputs: 输入参数列表 [(name, type), ...]
|
||||
outputs: 输出参数列表 [(name, type), ...]
|
||||
"""
|
||||
if inputs is None:
|
||||
inputs = []
|
||||
if outputs is None:
|
||||
outputs = []
|
||||
|
||||
# 创建输入参数
|
||||
input_args = []
|
||||
for arg_name, arg_type in inputs:
|
||||
input_args.append(ua.Argument())
|
||||
input_args[-1].Name = arg_name
|
||||
input_args[-1].DataType = arg_type
|
||||
input_args[-1].ValueRank = -1
|
||||
|
||||
# 创建输出参数
|
||||
output_args = []
|
||||
for arg_name, arg_type in outputs:
|
||||
output_args.append(ua.Argument())
|
||||
output_args[-1].Name = arg_name
|
||||
output_args[-1].DataType = arg_type
|
||||
output_args[-1].ValueRank = -1
|
||||
|
||||
# 添加方法
|
||||
method = self.device.add_method(
|
||||
self.idx,
|
||||
name,
|
||||
callback,
|
||||
input_args,
|
||||
output_args
|
||||
)
|
||||
|
||||
# 存储节点
|
||||
self.nodes[name] = method
|
||||
logger.info(f"添加方法节点: {name}")
|
||||
return method
|
||||
|
||||
def start(self):
|
||||
"""启动服务器"""
|
||||
if not self.running:
|
||||
self.server.start()
|
||||
self.running = True
|
||||
logger.info("OPC UA服务器已启动")
|
||||
|
||||
# 启动模拟线程
|
||||
self.simulation_thread = threading.Thread(target=self.run_simulation)
|
||||
self.simulation_thread.daemon = True
|
||||
self.simulation_thread.start()
|
||||
|
||||
def stop(self):
|
||||
"""停止服务器"""
|
||||
if self.running:
|
||||
self.simulation_active = False
|
||||
if hasattr(self, 'simulation_thread'):
|
||||
self.simulation_thread.join(timeout=2)
|
||||
self.server.stop()
|
||||
self.running = False
|
||||
logger.info("OPC UA服务器已停止")
|
||||
|
||||
def get_node(self, name):
|
||||
"""获取节点"""
|
||||
if name in self.nodes:
|
||||
return self.nodes[name]
|
||||
return None
|
||||
|
||||
def update_variable(self, name, value):
|
||||
"""更新变量值"""
|
||||
if name in self.nodes:
|
||||
self.nodes[name].set_value(value)
|
||||
logger.debug(f"更新变量 {name} = {value}")
|
||||
return True
|
||||
logger.warning(f"变量 {name} 不存在")
|
||||
return False
|
||||
|
||||
def run_simulation(self):
|
||||
"""运行模拟线程"""
|
||||
logger.info("启动模拟线程")
|
||||
|
||||
temp = 20.0
|
||||
valve_position = 0.0
|
||||
flow_rate = 0.0
|
||||
|
||||
while self.simulation_active and self.running:
|
||||
try:
|
||||
# 温度控制模拟
|
||||
heating_enabled = self.get_node("HeatingEnabled").get_value()
|
||||
setpoint = self.get_node("Setpoint").get_value()
|
||||
|
||||
if heating_enabled:
|
||||
self.update_variable("HeatingStatus", True)
|
||||
if temp < setpoint:
|
||||
temp += 0.5 # 加快温度上升速度
|
||||
else:
|
||||
temp -= 0.1
|
||||
else:
|
||||
self.update_variable("HeatingStatus", False)
|
||||
if temp > 20.0:
|
||||
temp -= 0.2
|
||||
|
||||
# 更新温度
|
||||
self.update_variable("Temperature", round(temp, 2))
|
||||
|
||||
# 阀门控制模拟
|
||||
valve_control = self.get_node("ValveControl").get_value()
|
||||
valve_setpoint = self.get_node("ValveSetpoint").get_value()
|
||||
|
||||
if valve_control:
|
||||
if valve_position < valve_setpoint:
|
||||
valve_position += 5.0 # 加快阀门开启速度
|
||||
if valve_position > valve_setpoint:
|
||||
valve_position = valve_setpoint
|
||||
else:
|
||||
valve_position -= 1.0
|
||||
if valve_position < 0:
|
||||
valve_position = 0
|
||||
else:
|
||||
if valve_position > 0:
|
||||
valve_position -= 5.0
|
||||
if valve_position < 0:
|
||||
valve_position = 0
|
||||
|
||||
# 更新阀门位置
|
||||
self.update_variable("ValvePosition", round(valve_position, 2))
|
||||
|
||||
# 流量模拟 - 与阀门位置成正比
|
||||
flow_rate = valve_position * 0.2 # 简单线性关系
|
||||
self.update_variable("FlowRate", round(flow_rate, 2))
|
||||
|
||||
# 更新系统状态
|
||||
status = []
|
||||
if heating_enabled:
|
||||
status.append("Heating")
|
||||
if valve_control:
|
||||
status.append("Valve_Open")
|
||||
|
||||
if status:
|
||||
self.update_variable("SystemStatus", "_".join(status))
|
||||
else:
|
||||
self.update_variable("SystemStatus", "Idle")
|
||||
|
||||
# 每200毫秒更新一次
|
||||
time.sleep(0.2)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"模拟线程错误: {e}")
|
||||
time.sleep(1) # 出错时稍等一会再继续
|
||||
|
||||
logger.info("模拟线程已停止")
|
||||
|
||||
def reset_alarm_callback(parent, *args):
|
||||
"""重置报警的回调函数"""
|
||||
logger.info("调用了重置报警方法")
|
||||
return True
|
||||
|
||||
def start_process_callback(parent, *args):
|
||||
"""启动流程的回调函数"""
|
||||
process_id = args[0] if args else 0
|
||||
logger.info(f"启动流程 ID: {process_id}")
|
||||
return process_id
|
||||
|
||||
def stop_process_callback(parent, *args):
|
||||
"""停止流程的回调函数"""
|
||||
process_id = args[0] if args else 0
|
||||
logger.info(f"停止流程 ID: {process_id}")
|
||||
return True
|
||||
|
||||
def main():
|
||||
"""主函数"""
|
||||
try:
|
||||
# 创建服务器
|
||||
server = OpcUaTestServer()
|
||||
|
||||
# 添加变量节点 - 温度控制相关
|
||||
server.add_variable("Temperature", 20.0, ua.VariantType.Float)
|
||||
server.add_variable("Setpoint", 22.0, ua.VariantType.Float)
|
||||
server.add_variable("HeatingEnabled", False, ua.VariantType.Boolean)
|
||||
server.add_variable("HeatingStatus", False, ua.VariantType.Boolean)
|
||||
|
||||
# 添加变量节点 - 阀门控制相关
|
||||
server.add_variable("ValvePosition", 0.0, ua.VariantType.Float)
|
||||
server.add_variable("ValveSetpoint", 0.0, ua.VariantType.Float)
|
||||
server.add_variable("ValveControl", False, ua.VariantType.Boolean)
|
||||
server.add_variable("FlowRate", 0.0, ua.VariantType.Float)
|
||||
|
||||
# 其他状态变量
|
||||
server.add_variable("SystemStatus", "Idle", ua.VariantType.String)
|
||||
|
||||
# 添加方法节点
|
||||
server.add_method(
|
||||
"ResetAlarm",
|
||||
reset_alarm_callback,
|
||||
[],
|
||||
[("Result", ua.VariantType.Boolean)]
|
||||
)
|
||||
|
||||
server.add_method(
|
||||
"StartProcess",
|
||||
start_process_callback,
|
||||
[("ProcessId", ua.VariantType.Int32)],
|
||||
[("Result", ua.VariantType.Int32)]
|
||||
)
|
||||
|
||||
server.add_method(
|
||||
"StopProcess",
|
||||
stop_process_callback,
|
||||
[("ProcessId", ua.VariantType.Int32)],
|
||||
[("Result", ua.VariantType.Boolean)]
|
||||
)
|
||||
|
||||
# 启动服务器
|
||||
server.start()
|
||||
logger.info("服务器已启动,按Ctrl+C停止")
|
||||
|
||||
# 保持服务器运行
|
||||
try:
|
||||
while True:
|
||||
time.sleep(1)
|
||||
except KeyboardInterrupt:
|
||||
logger.info("收到键盘中断,正在停止服务器...")
|
||||
|
||||
# 停止服务器
|
||||
server.stop()
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"服务器错误: {e}")
|
||||
import traceback
|
||||
traceback.print_exc()
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
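
A hedged smoke test against this server, using only the opcua library; the endpoint, namespace URI and node names follow the values registered above, while the script itself is illustrative.

from opcua import Client

client = Client("opc.tcp://localhost:4840/freeopcua/server/")
client.connect()
try:
    idx = client.get_namespace_index("http://unilabos.com/opcua/test")
    device = client.get_objects_node().get_child([f"{idx}:TestDevice"])
    print("Temperature:", device.get_child([f"{idx}:Temperature"]).get_value())
    print("SystemStatus:", device.get_child([f"{idx}:SystemStatus"]).get_value())
finally:
    client.disconnect()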
|
||||
@@ -0,0 +1,25 @@
|
||||
dummy2_robot:
|
||||
kinematics:
|
||||
# DH parameters for Dummy2 6-DOF robot arm
|
||||
# [theta, d, a, alpha] for each joint
|
||||
joint_1: [0.0, 0.1, 0.0, 1.5708] # Base rotation
|
||||
joint_2: [0.0, 0.0, 0.2, 0.0] # Shoulder
|
||||
joint_3: [0.0, 0.0, 0.15, 0.0] # Elbow
|
||||
joint_4: [0.0, 0.1, 0.0, 1.5708] # Wrist roll
|
||||
joint_5: [0.0, 0.0, 0.0, -1.5708] # Wrist pitch
|
||||
joint_6: [0.0, 0.06, 0.0, 0.0] # Wrist yaw
|
||||
|
||||
# Tool center point offset from last joint
|
||||
tcp_offset:
|
||||
x: 0.0
|
||||
y: 0.0
|
||||
z: 0.04
|
||||
|
||||
# Workspace limits
|
||||
workspace:
|
||||
x_min: -0.5
|
||||
x_max: 0.5
|
||||
y_min: -0.5
|
||||
y_max: 0.5
|
||||
z_min: 0.0
|
||||
z_max: 0.6
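
The rows above are standard [theta, d, a, alpha] DH parameters; a hedged, dependency-free sketch of how they could be chained into a forward-kinematics pose (the helper is illustrative and not part of the repository).

import math

def dh_matrix(theta, d, a, alpha):
    ct, st, ca, sa = math.cos(theta), math.sin(theta), math.cos(alpha), math.sin(alpha)
    return [
        [ct, -st * ca,  st * sa, a * ct],
        [st,  ct * ca, -ct * sa, a * st],
        [0.0,      sa,       ca,      d],
        [0.0,     0.0,      0.0,    1.0],
    ]

def mat_mul(m1, m2):
    return [[sum(m1[i][k] * m2[k][j] for k in range(4)) for j in range(4)] for i in range(4)]

dh_rows = [  # [theta, d, a, alpha] copied from the kinematics section above
    [0.0, 0.1, 0.0, 1.5708], [0.0, 0.0, 0.2, 0.0], [0.0, 0.0, 0.15, 0.0],
    [0.0, 0.1, 0.0, 1.5708], [0.0, 0.0, 0.0, -1.5708], [0.0, 0.06, 0.0, 0.0],
]
pose = [[1.0 if i == j else 0.0 for j in range(4)] for i in range(4)]  # identity
for row in dh_rows:
    pose = mat_mul(pose, dh_matrix(*row))
print("end-effector position:", [round(pose[i][3], 4) for i in range(3)])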
|
||||
45
unilabos/device_mesh/devices/dummy2_robot/config/dummy2.srdf
Normal file
@@ -0,0 +1,45 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!--This does not replace URDF, and is not an extension of URDF.
|
||||
This is a format for representing semantic information about the robot structure.
|
||||
A URDF file must exist for this robot as well, where the joints and the links that are referenced are defined
|
||||
-->
|
||||
<robot name="dummy2">
|
||||
<!--GROUPS: Representation of a set of joints and links. This can be useful for specifying DOF to plan for, defining arms, end effectors, etc-->
|
||||
<!--LINKS: When a link is specified, the parent joint of that link (if it exists) is automatically included-->
|
||||
<!--JOINTS: When a joint is specified, the child link of that joint (which will always exist) is automatically included-->
|
||||
<!--CHAINS: When a chain is specified, all the links along the chain (including endpoints) are included in the group. Additionally, all the joints that are parents to included links are also included. This means that joints along the chain and the parent joint of the base link are included in the group-->
|
||||
<!--SUBGROUPS: Groups can also be formed by referencing to already defined group names-->
|
||||
<group name="dummy2_arm">
|
||||
<joint name="virtual_joint"/>
|
||||
<joint name="Joint1"/>
|
||||
<joint name="Joint2"/>
|
||||
<joint name="Joint3"/>
|
||||
<joint name="Joint4"/>
|
||||
<joint name="Joint5"/>
|
||||
<joint name="Joint6"/>
|
||||
</group>
|
||||
<!--GROUP STATES: Purpose: Define a named state for a particular group, in terms of joint values. This is useful to define states like 'folded arms'-->
|
||||
<group_state name="home" group="dummy2_arm">
|
||||
<joint name="Joint1" value="0"/>
|
||||
<joint name="Joint2" value="0"/>
|
||||
<joint name="Joint3" value="0"/>
|
||||
<joint name="Joint4" value="0"/>
|
||||
<joint name="Joint5" value="0"/>
|
||||
<joint name="Joint6" value="0"/>
|
||||
</group_state>
|
||||
<!--VIRTUAL JOINT: Purpose: this element defines a virtual joint between a robot link and an external frame of reference (considered fixed with respect to the robot)-->
|
||||
<virtual_joint name="virtual_joint" type="fixed" parent_frame="world" child_link="base_link"/>
|
||||
<!--DISABLE COLLISIONS: By default it is assumed that any link of the robot could potentially come into collision with any other link in the robot. This tag disables collision checking between a specified pair of links. -->
|
||||
<disable_collisions link1="J1_1" link2="J2_1" reason="Adjacent"/>
|
||||
<disable_collisions link1="J1_1" link2="J3_1" reason="Never"/>
|
||||
<disable_collisions link1="J1_1" link2="J4_1" reason="Never"/>
|
||||
<disable_collisions link1="J1_1" link2="base_link" reason="Adjacent"/>
|
||||
<disable_collisions link1="J2_1" link2="J3_1" reason="Adjacent"/>
|
||||
<disable_collisions link1="J3_1" link2="J4_1" reason="Adjacent"/>
|
||||
<disable_collisions link1="J3_1" link2="J5_1" reason="Never"/>
|
||||
<disable_collisions link1="J3_1" link2="J6_1" reason="Never"/>
|
||||
<disable_collisions link1="J3_1" link2="base_link" reason="Never"/>
|
||||
<disable_collisions link1="J4_1" link2="J5_1" reason="Adjacent"/>
|
||||
<disable_collisions link1="J4_1" link2="J6_1" reason="Never"/>
|
||||
<disable_collisions link1="J5_1" link2="J6_1" reason="Adjacent"/>
|
||||
</robot>
|
||||
@@ -0,0 +1,70 @@
|
||||
<?xml version="1.0" ?>
|
||||
<robot name="dummy2" xmlns:xacro="http://www.ros.org/wiki/xacro" >
|
||||
|
||||
<transmission name="Joint1_tran">
|
||||
<type>transmission_interface/SimpleTransmission</type>
|
||||
<joint name="Joint1">
|
||||
<hardwareInterface>hardware_interface/EffortJointInterface</hardwareInterface>
|
||||
</joint>
|
||||
<actuator name="Joint1_actr">
|
||||
<hardwareInterface>hardware_interface/EffortJointInterface</hardwareInterface>
|
||||
<mechanicalReduction>1</mechanicalReduction>
|
||||
</actuator>
|
||||
</transmission>
|
||||
|
||||
<transmission name="Joint2_tran">
|
||||
<type>transmission_interface/SimpleTransmission</type>
|
||||
<joint name="Joint2">
|
||||
<hardwareInterface>hardware_interface/EffortJointInterface</hardwareInterface>
|
||||
</joint>
|
||||
<actuator name="Joint2_actr">
|
||||
<hardwareInterface>hardware_interface/EffortJointInterface</hardwareInterface>
|
||||
<mechanicalReduction>1</mechanicalReduction>
|
||||
</actuator>
|
||||
</transmission>
|
||||
|
||||
<transmission name="Joint3_tran">
|
||||
<type>transmission_interface/SimpleTransmission</type>
|
||||
<joint name="Joint3">
|
||||
<hardwareInterface>hardware_interface/EffortJointInterface</hardwareInterface>
|
||||
</joint>
|
||||
<actuator name="Joint3_actr">
|
||||
<hardwareInterface>hardware_interface/EffortJointInterface</hardwareInterface>
|
||||
<mechanicalReduction>1</mechanicalReduction>
|
||||
</actuator>
|
||||
</transmission>
|
||||
|
||||
<transmission name="Joint4_tran">
|
||||
<type>transmission_interface/SimpleTransmission</type>
|
||||
<joint name="Joint4">
|
||||
<hardwareInterface>hardware_interface/EffortJointInterface</hardwareInterface>
|
||||
</joint>
|
||||
<actuator name="Joint4_actr">
|
||||
<hardwareInterface>hardware_interface/EffortJointInterface</hardwareInterface>
|
||||
<mechanicalReduction>1</mechanicalReduction>
|
||||
</actuator>
|
||||
</transmission>
|
||||
|
||||
<transmission name="Joint5_tran">
|
||||
<type>transmission_interface/SimpleTransmission</type>
|
||||
<joint name="Joint5">
|
||||
<hardwareInterface>hardware_interface/EffortJointInterface</hardwareInterface>
|
||||
</joint>
|
||||
<actuator name="Joint5_actr">
|
||||
<hardwareInterface>hardware_interface/EffortJointInterface</hardwareInterface>
|
||||
<mechanicalReduction>1</mechanicalReduction>
|
||||
</actuator>
|
||||
</transmission>
|
||||
|
||||
<transmission name="Joint6_tran">
|
||||
<type>transmission_interface/SimpleTransmission</type>
|
||||
<joint name="Joint6">
|
||||
<hardwareInterface>hardware_interface/EffortJointInterface</hardwareInterface>
|
||||
</joint>
|
||||
<actuator name="Joint6_actr">
|
||||
<hardwareInterface>hardware_interface/EffortJointInterface</hardwareInterface>
|
||||
<mechanicalReduction>1</mechanicalReduction>
|
||||
</actuator>
|
||||
</transmission>
|
||||
|
||||
</robot>
|
||||
@@ -0,0 +1,14 @@
|
||||
<?xml version="1.0"?>
|
||||
<robot xmlns:xacro="http://www.ros.org/wiki/xacro" name="dummy2">
|
||||
<xacro:arg name="initial_positions_file" default="initial_positions.yaml" />
|
||||
|
||||
<!-- Import dummy2 urdf file -->
|
||||
<xacro:include filename="$(find dummy2_description)/urdf/dummy2.xacro" />
|
||||
|
||||
<!-- Import control_xacro -->
|
||||
<xacro:include filename="dummy2.ros2_control.xacro" />
|
||||
|
||||
|
||||
<xacro:dummy2_ros2_control name="FakeSystem" initial_positions_file="$(arg initial_positions_file)"/>
|
||||
|
||||
</robot>
|
||||
@@ -0,0 +1,73 @@
|
||||
###############################################
|
||||
# Modify all parameters related to servoing here
|
||||
###############################################
|
||||
# adapt to dummy2 by Muzhxiaowen, check out the details on bilibili.com
|
||||
|
||||
use_gazebo: false # Whether the robot is started in a Gazebo simulation environment
|
||||
|
||||
## Properties of incoming commands
|
||||
command_in_type: "unitless" # "unitless"> in the range [-1:1], as if from joystick. "speed_units"> cmds are in m/s and rad/s
|
||||
scale:
|
||||
# Scale parameters are only used if command_in_type=="unitless"
|
||||
linear: 0.4 # Max linear velocity. Unit is [m/s]. Only used for Cartesian commands.
|
||||
rotational: 0.8 # Max angular velocity. Unit is [rad/s]. Only used for Cartesian commands.
|
||||
# Max joint angular/linear velocity. Only used for joint commands on joint_command_in_topic.
|
||||
joint: 0.5
|
||||
|
||||
# Optionally override Servo's internal velocity scaling when near singularity or collision (0.0 = use internal velocity scaling)
|
||||
# override_velocity_scaling_factor = 0.0 # valid range [0.0:1.0]
|
||||
|
||||
## Properties of outgoing commands
|
||||
publish_period: 0.034 # 1/Nominal publish rate [seconds]
|
||||
low_latency_mode: false # Set this to true to publish as soon as an incoming Twist command is received (publish_period is ignored)
|
||||
|
||||
# What type of topic does your robot driver expect?
|
||||
# Currently supported are std_msgs/Float64MultiArray or trajectory_msgs/JointTrajectory
|
||||
command_out_type: trajectory_msgs/JointTrajectory
|
||||
|
||||
# What to publish? Can save some bandwidth as most robots only require positions or velocities
|
||||
publish_joint_positions: true
|
||||
publish_joint_velocities: true
|
||||
publish_joint_accelerations: false
|
||||
|
||||
## Plugins for smoothing outgoing commands
|
||||
smoothing_filter_plugin_name: "online_signal_smoothing::ButterworthFilterPlugin"
|
||||
|
||||
# If is_primary_planning_scene_monitor is set to true, the Servo server's PlanningScene advertises the /get_planning_scene service,
|
||||
# which other nodes can use as a source for information about the planning environment.
|
||||
# NOTE: If a different node in your system is responsible for the "primary" planning scene instance (e.g. the MoveGroup node),
|
||||
# then is_primary_planning_scene_monitor needs to be set to false.
|
||||
is_primary_planning_scene_monitor: true
|
||||
|
||||
## MoveIt properties
|
||||
move_group_name: dummy2_arm # Often 'manipulator' or 'arm'
|
||||
planning_frame: base_link # The MoveIt planning frame. Often 'base_link' or 'world'
|
||||
|
||||
## Other frames
|
||||
ee_frame_name: J6_1 # The name of the end effector link, used to return the EE pose
|
||||
robot_link_command_frame: base_link # commands must be given in the frame of a robot link. Usually either the base or end effector
|
||||
|
||||
## Stopping behaviour
|
||||
incoming_command_timeout: 0.1 # Stop servoing if X seconds elapse without a new command
|
||||
# If 0, republish commands forever even if the robot is stationary. Otherwise, specify num. to publish.
|
||||
# Important because ROS may drop some messages and we need the robot to halt reliably.
|
||||
num_outgoing_halt_msgs_to_publish: 4
|
||||
|
||||
## Configure handling of singularities and joint limits
|
||||
lower_singularity_threshold: 170.0 # Start decelerating when the condition number hits this (close to singularity)
|
||||
hard_stop_singularity_threshold: 3000.0 # Stop when the condition number hits this
|
||||
joint_limit_margin: 0.1 # added as a buffer to joint limits [radians]. If moving quickly, make this larger.
|
||||
leaving_singularity_threshold_multiplier: 2.0 # Multiply the hard stop limit by this when leaving singularity (see https://github.com/ros-planning/moveit2/pull/620)
|
||||
|
||||
## Topic names
|
||||
cartesian_command_in_topic: ~/delta_twist_cmds # Topic for incoming Cartesian twist commands
|
||||
joint_command_in_topic: ~/delta_joint_cmds # Topic for incoming joint angle commands
|
||||
joint_topic: /joint_states
|
||||
status_topic: ~/status # Publish status to this topic
|
||||
command_out_topic: /dummy2_arm_controller/joint_trajectory # Publish outgoing commands here
|
||||
|
||||
## Collision checking for the entire robot body
|
||||
check_collisions: true # Check collisions?
|
||||
collision_check_rate: 10.0 # [Hz] Collision-checking can easily bog down a CPU if done too often.
|
||||
self_collision_proximity_threshold: 0.001 # Start decelerating when a self-collision is this far [m]
|
||||
scene_collision_proximity_threshold: 0.002 # Start decelerating when a scene collision is this far [m]
|
||||
@@ -0,0 +1,9 @@
|
||||
# Default initial positions for dummy2's ros2_control fake system
|
||||
|
||||
initial_positions:
|
||||
Joint1: 0
|
||||
Joint2: 0
|
||||
Joint3: 0
|
||||
Joint4: 0
|
||||
Joint5: 0
|
||||
Joint6: 0
|
||||
@@ -0,0 +1,40 @@
|
||||
# joint_limits.yaml allows the dynamics properties specified in the URDF to be overwritten or augmented as needed
|
||||
|
||||
# For beginners, we downscale velocity and acceleration limits.
|
||||
# You can always specify higher scaling factors (<= 1.0) in your motion requests. # Increase the values below to 1.0 to always move at maximum speed.
|
||||
default_velocity_scaling_factor: 0.1
|
||||
default_acceleration_scaling_factor: 0.1
|
||||
|
||||
# Specific joint properties can be changed with the keys [max_position, min_position, max_velocity, max_acceleration]
|
||||
# Joint limits can be turned off with [has_velocity_limits, has_acceleration_limits]
|
||||
joint_limits:
|
||||
joint_1:
|
||||
has_velocity_limits: true
|
||||
max_velocity: 2.0
|
||||
has_acceleration_limits: false
|
||||
max_acceleration: 0
|
||||
joint_2:
|
||||
has_velocity_limits: true
|
||||
max_velocity: 2.0
|
||||
has_acceleration_limits: false
|
||||
max_acceleration: 0
|
||||
joint_3:
|
||||
has_velocity_limits: true
|
||||
max_velocity: 2.0
|
||||
has_acceleration_limits: false
|
||||
max_acceleration: 0
|
||||
joint_4:
|
||||
has_velocity_limits: true
|
||||
max_velocity: 2.0
|
||||
has_acceleration_limits: false
|
||||
max_acceleration: 0
|
||||
joint_5:
|
||||
has_velocity_limits: true
|
||||
max_velocity: 2.0
|
||||
has_acceleration_limits: false
|
||||
max_acceleration: 0
|
||||
joint_6:
|
||||
has_velocity_limits: true
|
||||
max_velocity: 2.0
|
||||
has_acceleration_limits: false
|
||||
max_acceleration: 0
|
||||
@@ -0,0 +1,4 @@
|
||||
dummy2_arm:
|
||||
kinematics_solver: kdl_kinematics_plugin/KDLKinematicsPlugin
|
||||
kinematics_solver_search_resolution: 0.005
|
||||
kinematics_solver_timeout: 0.5
|
||||
@@ -0,0 +1,60 @@
|
||||
<?xml version="1.0"?>
|
||||
<robot xmlns:xacro="http://www.ros.org/wiki/xacro">
|
||||
<xacro:macro name="dummy2_robot_ros2_control" params="device_name mesh_path">
|
||||
<xacro:property name="initial_positions" value="${load_yaml(mesh_path + '/devices/dummy2_robot/config/initial_positions.yaml')['initial_positions']}"/>
|
||||
|
||||
<ros2_control name="${device_name}dummy2" type="system">
|
||||
<hardware>
|
||||
<!-- By default, set up controllers for simulation. This won't work on real hardware -->
|
||||
<plugin>mock_components/GenericSystem</plugin>
|
||||
</hardware>
|
||||
|
||||
<!-- <plugin>mock_components/GenericSystem</plugin> -->
|
||||
<joint name="${device_name}Joint1">
|
||||
<command_interface name="position"/>
|
||||
<state_interface name="position">
|
||||
<param name="initial_value">${initial_positions['Joint1']}</param>
|
||||
</state_interface>
|
||||
<state_interface name="velocity"/>
|
||||
</joint>
|
||||
<joint name="${device_name}Joint2">
|
||||
<command_interface name="position"/>
|
||||
<state_interface name="position">
|
||||
<param name="initial_value">${initial_positions['Joint2']}</param>
|
||||
</state_interface>
|
||||
<state_interface name="velocity"/>
|
||||
</joint>
|
||||
<joint name="${device_name}Joint3">
|
||||
<command_interface name="position"/>
|
||||
<state_interface name="position">
|
||||
<param name="initial_value">${initial_positions['Joint3']}</param>
|
||||
</state_interface>
|
||||
<state_interface name="velocity"/>
|
||||
</joint>
|
||||
<joint name="${device_name}Joint4">
|
||||
<command_interface name="position"/>
|
||||
<state_interface name="position">
|
||||
<param name="initial_value">${initial_positions['Joint4']}</param>
|
||||
</state_interface>
|
||||
<state_interface name="velocity"/>
|
||||
</joint>
|
||||
<joint name="${device_name}Joint5">
|
||||
<command_interface name="position"/>
|
||||
<state_interface name="position">
|
||||
<param name="initial_value">${initial_positions['Joint5']}</param>
|
||||
</state_interface>
|
||||
<state_interface name="velocity"/>
|
||||
</joint>
|
||||
<joint name="${device_name}Joint6">
|
||||
<command_interface name="position"/>
|
||||
<state_interface name="position">
|
||||
<param name="initial_value">${initial_positions['Joint6']}</param>
|
||||
</state_interface>
|
||||
<state_interface name="velocity"/>
|
||||
</joint>
|
||||
|
||||
|
||||
</ros2_control>
|
||||
</xacro:macro>
|
||||
|
||||
</robot>
|
||||
@@ -0,0 +1,49 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<robot xmlns:xacro="http://www.ros.org/wiki/xacro">
|
||||
<xacro:macro name="dummy2_robot_srdf" params="device_name">
|
||||
|
||||
<!--GROUPS: Representation of a set of joints and links. This can be useful for specifying DOF to plan for, defining arms, end effectors, etc-->
|
||||
<!--LINKS: When a link is specified, the parent joint of that link (if it exists) is automatically included-->
|
||||
<!--JOINTS: When a joint is specified, the child link of that joint (which will always exist) is automatically included-->
|
||||
<!--CHAINS: When a chain is specified, all the links along the chain (including endpoints) are included in the group. Additionally, all the joints that are parents to included links are also included. This means that joints along the chain and the parent joint of the base link are included in the group-->
|
||||
<!--SUBGROUPS: Groups can also be formed by referencing to already defined group names
|
||||
This is a format for representing semantic information about the robot structure.
|
||||
A URDF file must exist for this robot as well, where the joints and the links that are referenced are defined
|
||||
-->
|
||||
<group name="${device_name}dummy2_arm">
|
||||
<joint name="${device_name}virtual_joint"/>
|
||||
<joint name="${device_name}Joint1"/>
|
||||
<joint name="${device_name}Joint2"/>
|
||||
<joint name="${device_name}Joint3"/>
|
||||
<joint name="${device_name}Joint4"/>
|
||||
<joint name="${device_name}Joint5"/>
|
||||
<joint name="${device_name}Joint6"/>
|
||||
</group>
|
||||
<!--GROUP STATES: Purpose: Define a named state for a particular group, in terms of joint values. This is useful to define states like 'folded arms'-->
|
||||
<group_state name="home" group="${device_name}dummy2_arm">
|
||||
<joint name="${device_name}Joint1" value="0"/>
|
||||
<joint name="${device_name}Joint2" value="0"/>
|
||||
<joint name="${device_name}Joint3" value="0"/>
|
||||
<joint name="${device_name}Joint4" value="0"/>
|
||||
<joint name="${device_name}Joint5" value="0"/>
|
||||
<joint name="${device_name}Joint6" value="0"/>
|
||||
</group_state>
|
||||
<!--VIRTUAL JOINT: Purpose: this element defines a virtual joint between a robot link and an external frame of reference (considered fixed with respect to the robot)-->
|
||||
<virtual_joint name="${device_name}virtual_joint" type="fixed" parent_frame="world" child_link="${device_name}base_link"/>
|
||||
<!--DISABLE COLLISIONS: By default it is assumed that any link of the robot could potentially come into collision with any other link in the robot. This tag disables collision checking between a specified pair of links. -->
|
||||
<disable_collisions link1="${device_name}J1_1" link2="${device_name}J2_1" reason="Adjacent"/>
|
||||
<disable_collisions link1="${device_name}J1_1" link2="${device_name}J3_1" reason="Never"/>
|
||||
<disable_collisions link1="${device_name}J1_1" link2="${device_name}J4_1" reason="Never"/>
|
||||
<disable_collisions link1="${device_name}J1_1" link2="${device_name}base_link" reason="Adjacent"/>
|
||||
<disable_collisions link1="${device_name}J2_1" link2="${device_name}J3_1" reason="Adjacent"/>
|
||||
<disable_collisions link1="${device_name}J3_1" link2="${device_name}J4_1" reason="Adjacent"/>
|
||||
<disable_collisions link1="${device_name}J3_1" link2="${device_name}J5_1" reason="Never"/>
|
||||
<disable_collisions link1="${device_name}J3_1" link2="${device_name}J6_1" reason="Never"/>
|
||||
<disable_collisions link1="${device_name}J3_1" link2="${device_name}base_link" reason="Never"/>
|
||||
<disable_collisions link1="${device_name}J4_1" link2="${device_name}J5_1" reason="Adjacent"/>
|
||||
<disable_collisions link1="${device_name}J4_1" link2="${device_name}J6_1" reason="Never"/>
|
||||
<disable_collisions link1="${device_name}J5_1" link2="${device_name}J6_1" reason="Adjacent"/>
|
||||
|
||||
</xacro:macro>
|
||||
|
||||
</robot>
|
||||