diff --git a/.commitlintrc.yml b/.commitlintrc.yml
new file mode 100644
index 0000000..dab71d5
--- /dev/null
+++ b/.commitlintrc.yml
@@ -0,0 +1,17 @@
+# SPDX-FileCopyrightText: Copyright DB InfraGO AG and contributors
+# SPDX-License-Identifier: CC0-1.0
+rules:
+ body-leading-blank: [2, always]
+ footer-leading-blank: [2, always]
+ scope-case: [2, always, lower-case]
+ subject-case: [2, always, sentence-case]
+ subject-empty: [2, never]
+ subject-full-stop: [2, never, .]
+ subject-max-length: [2, always, 72]
+ type-empty: [2, never]
+ type-enum:
+ [
+ 2,
+ always,
+ [build, chore, ci, docs, feat, fix, merge, perf, refactor, revert, test],
+ ]
diff --git a/.git_archival.txt b/.git_archival.txt
index 1c1d2e8..5a4899c 100644
--- a/.git_archival.txt
+++ b/.git_archival.txt
@@ -1,4 +1,4 @@
-Copyright DB Netz AG and contributors
+Copyright DB InfraGO AG and contributors
SPDX-License-Identifier: CC0-1.0
node: $Format:%H$
diff --git a/.gitattributes b/.gitattributes
index de62da1..e9dd5d5 100644
--- a/.gitattributes
+++ b/.gitattributes
@@ -1,4 +1,4 @@
-# Copyright DB Netz AG and contributors
+# Copyright DB InfraGO AG and contributors
# SPDX-License-Identifier: CC0-1.0
* text=auto
diff --git a/.github/workflows/build-test-publish.yml b/.github/workflows/build-test-publish.yml
index 7e7b3af..a7e547a 100644
--- a/.github/workflows/build-test-publish.yml
+++ b/.github/workflows/build-test-publish.yml
@@ -1,75 +1,74 @@
-# Copyright DB Netz AG and contributors
+# Copyright DB InfraGO AG and contributors
# SPDX-License-Identifier: CC0-1.0
name: Build
on:
- push:
- branches: ["*"]
- pull_request: [master]
- tags: ["v*.*.*"]
+ push:
+ branches: ["*"]
+ pull_request: [master]
+ tags: ["v*.*.*"]
jobs:
- test:
- name: Test with Python ${{matrix.python_version}} on ${{matrix.os}}
- runs-on: ${{matrix.os}}
- strategy:
- fail-fast: false
- matrix:
- os: [ubuntu-latest]
- python_version:
- - "3.10"
- include:
- - os: windows-latest
- python_version: "3.10"
- steps:
- - uses: actions/checkout@v2
- - name: Set up Python ${{matrix.python_version}}
- uses: actions/setup-python@v2
- with:
- python-version: ${{matrix.python_version}}
- - uses: actions/cache@v2
- with:
- path: ~/.cache/pip
- key: ${{runner.os}}-pip-${{hashFiles('pyproject.toml')}}
- restore-keys: |
- ${{runner.os}}-pip-
- ${{runner.os}}-
- - name: Upgrade Pip
- run: |-
- python -m pip install -U pip
- - name: Install test dependencies
- run: |-
- python -m pip install '.[test]'
- - name: Run unit tests
- run: |-
- python -m pytest --cov-report=term --cov=rosidl2capella --rootdir=.
+ test:
+ name: Test with Python ${{matrix.python_version}} on ${{matrix.os}}
+ runs-on: ${{matrix.os}}
+ strategy:
+ fail-fast: false
+ matrix:
+ os: [ubuntu-latest]
+ python_version:
+ - "3.10"
+ - "3.11"
+ - "3.12"
+ steps:
+ - uses: actions/checkout@v2
+ - name: Set up Python ${{matrix.python_version}}
+ uses: actions/setup-python@v2
+ with:
+ python-version: ${{matrix.python_version}}
+ - uses: actions/cache@v2
+ with:
+ path: ~/.cache/pip
+ key: ${{runner.os}}-pip-${{hashFiles('pyproject.toml')}}
+ restore-keys: |
+ ${{runner.os}}-pip-
+ ${{runner.os}}-
+ - name: Upgrade Pip
+ run: |-
+ python -m pip install -U pip
+ - name: Install test dependencies
+ run: |-
+ python -m pip install '.[test]'
+ - name: Run unit tests
+ run: |-
+ python -m pytest --cov-report=term --cov=capella_ros_tools --rootdir=.
- publish:
- name: Publish artifacts
- runs-on: ubuntu-latest
- needs: test
- steps:
- - uses: actions/checkout@v2
- - name: Setup Python
- uses: actions/setup-python@v2
- with:
- python-version: "3.10"
- - name: Install dependencies
- run: |-
- python -m pip install -U pip
- python -m pip install build twine
- - name: Build packages
- run: |-
- python -m build
- - name: Verify packages
- run: |-
- python -m twine check dist/*
- - name: Upload artifacts
- uses: actions/upload-artifact@v2
- with:
- name: Artifacts
- path: 'dist/*'
- - name: Publish to PyPI (release only)
- if: startsWith(github.ref, 'refs/tags/v')
- run: python -m twine upload -u __token__ -p ${{ secrets.PYPI_TOKEN }} --non-interactive dist/*
+ publish:
+ name: Publish artifacts
+ runs-on: ubuntu-latest
+ needs: test
+ steps:
+ - uses: actions/checkout@v2
+ - name: Setup Python
+ uses: actions/setup-python@v2
+ with:
+ python-version: "3.10"
+ - name: Install dependencies
+ run: |-
+ python -m pip install -U pip
+ python -m pip install build twine
+ - name: Build packages
+ run: |-
+ python -m build
+ - name: Verify packages
+ run: |-
+ python -m twine check dist/*
+ - name: Upload artifacts
+ uses: actions/upload-artifact@v2
+ with:
+ name: Artifacts
+ path: "dist/*"
+ - name: Publish to PyPI (release only)
+ if: startsWith(github.ref, 'refs/tags/v')
+ run: python -m twine upload -u __token__ -p ${{ secrets.PYPI_TOKEN }} --non-interactive dist/*
diff --git a/.github/workflows/commit-check.yml b/.github/workflows/commit-check.yml
new file mode 100644
index 0000000..47e1be6
--- /dev/null
+++ b/.github/workflows/commit-check.yml
@@ -0,0 +1,57 @@
+# SPDX-FileCopyrightText: Copyright DB InfraGO AG and contributors
+# SPDX-License-Identifier: CC0-1.0
+
+name: Conventional Commits
+
+on:
+ pull_request:
+ branches: [master]
+
+jobs:
+ conventional-commits:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v3
+ with:
+ fetch-depth: 0
+ - name: Install commitlint
+ run: npm install -g @commitlint/cli
+ - name: Validate commit messages
+ id: conventional-commits
+ env:
+ SHA_FROM: ${{ github.event.pull_request.base.sha }}
+ SHA_TO: ${{ github.event.pull_request.head.sha }}
+ run: |
+ delim="_EOF_$(uuidgen)"
+ echo "validation-result<<$delim" >> "$GITHUB_OUTPUT"
+ r=0
+ commitlint --from "$SHA_FROM" --to "$SHA_TO" >> "$GITHUB_OUTPUT" 2>&1 || r=$?
+ echo "$delim" >> "$GITHUB_OUTPUT"
+ exit $r
+ - name: Post comment if validation failed
+ if: always() && steps.conventional-commits.outcome == 'failure'
+ uses: actions/github-script@v6
+ env:
+ TEXT: |-
+ The pull request does not conform to the conventional commit specification. Please ensure that your commit messages follow the spec: <https://www.conventionalcommits.org/>.
+ We also strongly recommend that you set up your development environment with pre-commit, as described in our [CONTRIBUTING guidelines](https://github.com/DSD-DBS/capella-ros-tools/blob/master/CONTRIBUTING.md). This will run all the important checks right before you commit your changes, and avoids lengthy CI wait time and round trips.
+
+ This is the commit validation log:
+ ```
+ ${{ steps.conventional-commits.outputs.validation-result }}
+ ```
+
+ Here are some examples of valid commit messages:
+ ```
+ build: Bump dependency versions
+ docs(user): Add model creation workflow
+ feat: Add a monitoring dashboard
+ ```
+ with:
+ script: |
+ github.rest.issues.createComment({
+ issue_number: context.issue.number,
+ owner: context.repo.owner,
+ repo: context.repo.repo,
+ body: process.env.TEXT
+ })
diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml
index 7d57a32..e3ba8be 100644
--- a/.github/workflows/docs.yml
+++ b/.github/workflows/docs.yml
@@ -1,39 +1,41 @@
-# Copyright DB Netz AG and contributors
+# Copyright DB InfraGO AG and contributors
# SPDX-License-Identifier: CC0-1.0
name: Docs
on:
- push:
- branches: ["master"]
+ workflow_dispatch:
+ push:
+ branches: [master]
jobs:
- sphinx:
- runs-on: ubuntu-latest
- permissions:
- contents: write
- steps:
- - uses: actions/checkout@v2
- with:
- fetch-depth: 0
- - uses: actions/setup-python@v2
- with:
- python-version: "3.10"
- - name: Upgrade pip
- run: |
- python -m pip install -U pip
- - name: Install dependencies
- run: |
- python -m pip install '.[docs]'
- - name: Auto-generate APIDOC sources
- run: |-
- sphinx-apidoc --output-dir docs/source/code --force .
- - name: Create docs
- run: |
- make -C docs html
- - name: Deploy
- uses: peaceiris/actions-gh-pages@v3
- with:
- force_orphan: true
- github_token: ${{ secrets.GITHUB_TOKEN }}
- publish_dir: ./docs/build/html
+ sphinx:
+ runs-on: ubuntu-latest
+ permissions:
+ contents: write
+ steps:
+ - uses: actions/checkout@v3
+ with:
+ fetch-depth: 0
+ - uses: actions/setup-python@v4
+ with:
+ cache: pip
+ cache-dependency-path: pyproject.toml
+ python-version: "3.12"
+ - name: Upgrade pip
+ run: python -m pip install -U pip
+ - name: Install dependencies
+ run: |
+ sudo apt-get install -y pandoc
+ python -m pip install '.[docs]'
+ - name: Auto-generate APIDOC sources
+ run: make -C docs apidoc
+ - name: Create docs
+ run: make -C docs html
+ - name: Deploy
+ if: github.ref == 'refs/heads/master'
+ uses: peaceiris/actions-gh-pages@v3
+ with:
+ force_orphan: true
+ github_token: ${{ secrets.GITHUB_TOKEN }}
+ publish_dir: ./docs/build/html
diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml
index 3a3b97d..e0335ea 100644
--- a/.github/workflows/lint.yml
+++ b/.github/workflows/lint.yml
@@ -1,4 +1,4 @@
-# Copyright DB Netz AG and contributors
+# Copyright DB InfraGO AG and contributors
# SPDX-License-Identifier: CC0-1.0
name: Lint
@@ -39,4 +39,4 @@ jobs:
python -m pip install pylint
- name: Run pylint
run: |-
- pylint -dfixme rosidl2capella || exit $(($? & ~24))
+ pylint -dfixme capella_ros_tools || exit $(($? & ~24))
diff --git a/.gitignore b/.gitignore
index 975287e..6dddc2e 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,4 +1,4 @@
-# Copyright DB Netz AG and contributors
+# Copyright DB InfraGO AG and contributors
# SPDX-License-Identifier: CC0-1.0
# Byte-compiled / optimized / DLL files
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index caf8bff..e92677f 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -1,103 +1,105 @@
-# Copyright DB Netz AG and contributors
+# Copyright DB InfraGO AG and contributors
# SPDX-License-Identifier: CC0-1.0
default_install_hook_types: [commit-msg, pre-commit]
default_stages: [commit, merge-commit]
repos:
- - repo: https://github.com/pre-commit/pre-commit-hooks
- rev: v4.4.0
- hooks:
- - id: check-added-large-files
- - id: check-ast
- - id: check-builtin-literals
- - id: check-case-conflict
- - id: check-executables-have-shebangs
- - id: check-json
- - id: check-merge-conflict
- - id: check-shebang-scripts-are-executable
- - id: check-symlinks
- - id: check-toml
- - id: check-vcs-permalinks
- - id: check-xml
- - id: check-yaml
- - id: debug-statements
- - id: destroyed-symlinks
- - id: end-of-file-fixer
- - id: fix-byte-order-marker
- - id: trailing-whitespace
- - repo: https://github.com/psf/black-pre-commit-mirror
- rev: 23.9.1
- hooks:
- - id: black
- - repo: https://github.com/PyCQA/isort
- rev: 5.12.0
- hooks:
- - id: isort
- - repo: https://github.com/PyCQA/docformatter
- rev: v1.7.5
- hooks:
- - id: docformatter
- additional_dependencies:
- - docformatter[tomli]
- - repo: https://github.com/PyCQA/pydocstyle
- rev: 6.3.0
- hooks:
- - id: pydocstyle
- exclude: '^tests/'
- additional_dependencies:
- - pydocstyle[toml]
- - repo: https://github.com/pre-commit/mirrors-mypy
- rev: v1.5.1
- hooks:
- - id: mypy
- - repo: https://github.com/Lucas-C/pre-commit-hooks
- rev: v1.4.2
- hooks:
- - id: insert-license
- name: Insert license headers (shell-style comments)
- files: '(?:^|/)(?:.*\.(?:py|sh|toml|ya?ml)|Dockerfile|Makefile)$'
- exclude: '(?:^|/)\..+|^docs/Makefile$'
- args:
- - --detect-license-in-X-top-lines=15
- - --license-filepath
- - LICENSES/.license_header.txt
- - --comment-style
- - '#'
- - id: insert-license
- name: Insert license headers (XML-style comments)
- files: '\.(?:html|md|xml)$'
- exclude: '(?:^|/)\..+'
- args:
- - --detect-license-in-X-top-lines=15
- - --license-filepath
- - LICENSES/.license_header.txt
- - --comment-style
- - ''
- - id: insert-license
- name: Insert license headers (C-style comments)
- files: '\.(?:css|js|ts)$'
- exclude: '(?:^|/)\..+'
- args:
- - --detect-license-in-X-top-lines=15
- - --license-filepath
- - LICENSES/.license_header.txt
- - --comment-style
- - '/*| *| */'
- - id: insert-license
- name: Insert license headers (reST comments)
- files: '\.rst$'
- exclude: '(?:^|/)\..+'
- args:
- - --detect-license-in-X-top-lines=15
- - --license-filepath
- - LICENSES/.license_header.txt
- - --comment-style
- - '..| |'
- - repo: https://github.com/fsfe/reuse-tool
- rev: v2.1.0
- hooks:
- - id: reuse
- - repo: https://github.com/qoomon/git-conventional-commits
- rev: v2.6.5
- hooks:
- - id: conventional-commits
+ - repo: https://github.com/pre-commit/pre-commit-hooks
+ rev: v4.5.0
+ hooks:
+ - id: check-added-large-files
+ - id: check-ast
+ - id: check-builtin-literals
+ - id: check-case-conflict
+ - id: check-executables-have-shebangs
+ - id: check-json
+ - id: check-merge-conflict
+ - id: check-shebang-scripts-are-executable
+ - id: check-symlinks
+ - id: check-toml
+ - id: check-vcs-permalinks
+ - id: check-xml
+ - id: check-yaml
+ exclude: '^tests/data/data_model/[^/]+\.ya?ml$'
+ - id: debug-statements
+ - id: destroyed-symlinks
+ - id: end-of-file-fixer
+ - id: fix-byte-order-marker
+ - id: trailing-whitespace
+ - repo: https://github.com/psf/black-pre-commit-mirror
+ rev: 24.1.1
+ hooks:
+ - id: black
+ - repo: https://github.com/PyCQA/isort
+ rev: 5.13.2
+ hooks:
+ - id: isort
+ - repo: https://github.com/PyCQA/docformatter
+ rev: v1.7.5
+ hooks:
+ - id: docformatter
+ additional_dependencies:
+ - docformatter[tomli]
+ - repo: https://github.com/PyCQA/pydocstyle
+ rev: 6.3.0
+ hooks:
+ - id: pydocstyle
+ exclude: "^tests/"
+ additional_dependencies:
+ - pydocstyle[toml]
+ - repo: https://github.com/pre-commit/mirrors-mypy
+ rev: v1.8.0
+ hooks:
+ - id: mypy
+ - repo: https://github.com/Lucas-C/pre-commit-hooks
+ rev: v1.5.4
+ hooks:
+ - id: insert-license
+ name: Insert license headers (shell-style comments)
+ files: '(?:^|/)(?:.*\.(?:py|sh|toml|ya?ml)|Dockerfile|Makefile)$'
+ exclude: '(?:^|/)\..+|^docs/Makefile$'
+ args:
+ - --detect-license-in-X-top-lines=15
+ - --license-filepath
+ - LICENSES/.license_header.txt
+ - --comment-style
+ - "#"
+ - id: insert-license
+ name: Insert license headers (XML-style comments)
+ files: '\.(?:html|md|xml)$'
+ exclude: '(?:^|/)\..+'
+ args:
+ - --detect-license-in-X-top-lines=15
+ - --license-filepath
+ - LICENSES/.license_header.txt
+ - --comment-style
+ - ""
+ - id: insert-license
+ name: Insert license headers (C-style comments)
+ files: '\.(?:css|js|ts)$'
+ exclude: '(?:^|/)\..+'
+ args:
+ - --detect-license-in-X-top-lines=15
+ - --license-filepath
+ - LICENSES/.license_header.txt
+ - --comment-style
+ - "/*| *| */"
+ - id: insert-license
+ name: Insert license headers (reST comments)
+ files: '\.rst$'
+ exclude: '(?:^|/)\..+'
+ args:
+ - --detect-license-in-X-top-lines=15
+ - --license-filepath
+ - LICENSES/.license_header.txt
+ - --comment-style
+ - "..| |"
+ - repo: https://github.com/fsfe/reuse-tool
+ rev: v3.0.1
+ hooks:
+ - id: reuse
+ - repo: https://github.com/alessandrojcm/commitlint-pre-commit-hook
+ rev: v9.11.0
+ hooks:
+ - id: commitlint
+ stages: [commit-msg]
diff --git a/.vscode/launch.json b/.vscode/launch.json
new file mode 100644
index 0000000..df92bd3
--- /dev/null
+++ b/.vscode/launch.json
@@ -0,0 +1,70 @@
+{
+ "version": "0.2.0",
+ "configurations": [
+ {
+ "name": "Export local",
+ "type": "debugpy",
+ "request": "launch",
+ "module": "capella_ros_tools",
+ "justMyCode": false,
+ "args": [
+ "export",
+ "-m",
+ "tests/data/melody_model_60",
+ "-l",
+ "la",
+ "-o",
+ "tests/data/melody_msgs"
+ ]
+ },
+ {
+ "name": "Export git",
+ "type": "debugpy",
+ "request": "launch",
+ "module": "capella_ros_tools",
+ "justMyCode": false,
+ "args": [
+ "export",
+ "-m",
+ "git+https://github.com/DSD-DBS/coffee-machine",
+ "-l",
+ "sa",
+ "-o",
+ "tests/data/coffee_msgs"
+ ]
+ },
+ {
+ "name": "Import local",
+ "type": "debugpy",
+ "request": "launch",
+ "module": "capella_ros_tools",
+ "justMyCode": false,
+ "args": [
+ "import",
+ "-i",
+ "tests/data/data_model/example_msgs",
+ "-m",
+ "tests/data/empty_project_60",
+ "-l",
+ "la",
+ "--no-deps"
+ ]
+ },
+ {
+ "name": "Import git",
+ "type": "debugpy",
+ "request": "launch",
+ "module": "capella_ros_tools",
+ "justMyCode": false,
+ "args": [
+ "import",
+ "-i",
+ "git+https://github.com/DSD-DBS/dsd-ros-msg-definitions-oss",
+ "-m",
+ "tests/data/empty_project_60",
+ "-l",
+ "la"
+ ]
+ }
+ ]
+}
diff --git a/.vscode/launch.json.license b/.vscode/launch.json.license
new file mode 100644
index 0000000..544def3
--- /dev/null
+++ b/.vscode/launch.json.license
@@ -0,0 +1,2 @@
+SPDX-FileCopyrightText: Copyright DB InfraGO AG
+SPDX-License-Identifier: CC0-1.0
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 79d4dca..7997c85 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -1,5 +1,5 @@
@@ -16,8 +16,8 @@ reduce the burden on our maintainers, please make sure that your code follows
our style guidelines outlined below.
-[open an issue]: https://github.com/DSD-DBS/rosidl2capella/issues
-[open a pull request]: https://github.com/DSD-DBS/rosidl2capella/pulls
+[open an issue]: https://github.com/DSD-DBS/capella-ros-tools/issues
+[open a pull request]: https://github.com/DSD-DBS/capella-ros-tools/pulls
## Developing
diff --git a/LICENSES/.license_header.txt b/LICENSES/.license_header.txt
index c3fb022..02c8c23 100644
--- a/LICENSES/.license_header.txt
+++ b/LICENSES/.license_header.txt
@@ -1,2 +1,2 @@
-Copyright DB Netz AG and contributors
+Copyright DB InfraGO AG and contributors
SPDX-License-Identifier: Apache-2.0
diff --git a/README.md b/README.md
index 6fc2661..4469f33 100644
--- a/README.md
+++ b/README.md
@@ -1,33 +1,79 @@
-# rosidl2capella
+# Capella ROS Tools
-![image](https://github.com/DSD-DBS/rosidl2capella/actions/workflows/build-test-publish.yml/badge.svg)
-![image](https://github.com/DSD-DBS/rosidl2capella/actions/workflows/lint.yml/badge.svg)
+![image](https://github.com/DSD-DBS/capella-ros-tools/actions/workflows/build-test-publish.yml/badge.svg)
+![image](https://github.com/DSD-DBS/capella-ros-tools/actions/workflows/lint.yml/badge.svg)
-Tool for parsing ROS IDL (.msg files) and converting them to Capella model.
+Tools for importing ROS .msg files into Capella `DataPackage`, `DataType` and
+`Class` objects, or exporting those objects to .msg files.
+
+![Showcase](https://i.imgur.com/hs4EUnL.gif)
# Documentation
-Read the [full documentation on Github pages](https://dsd-dbs.github.io/rosidl2capella).
+Read the [full documentation on Github pages](https://dsd-dbs.github.io/capella-ros-tools).
+
+# Examples
+
+Import local ROS .msg files to Capella model layer's root data package:
+
+```sh
+python -m capella_ros_tools \
+import \
+-i tests/data/data_model/example_msgs \
+-m tests/data/empty_project_60 \
+-l la \
+--no-deps
+```
+
+Import remote ROS .msg files to Capella model layer's root data package:
+
+```sh
+python -m capella_ros_tools \
+import \
+-i git+https://github.com/DSD-DBS/dsd-ros-msg-definitions-oss \
+-m tests/data/empty_project_60 \
+-l la
+```
+
+Export local Capella model layer's root data package as ROS .msg files:
+
+```sh
+python -m capella_ros_tools \
+export \
+-m tests/data/melody_model_60 \
+-l la \
+-o tests/data/melody_msgs
+```
+
+Export remote Capella model layer's root data package as ROS .msg files:
+
+```sh
+python -m capella_ros_tools \
+export \
+-m git+https://github.com/DSD-DBS/coffee-machine \
+-l sa \
+-o tests/data/coffee_msgs
+```
# Installation
You can install the latest released version directly from PyPI.
```sh
-pip install rosidl2capella
+pip install capella-ros-tools
```
To set up a development environment, clone the project and install it into a
virtual environment.
```sh
-git clone https://github.com/DSD-DBS/rosidl2capella
-cd rosidl2capella
+git clone https://github.com/DSD-DBS/capella-ros-tools
+cd capella-ros-tools
python -m venv .venv
source .venv/bin/activate.sh # for Linux / Mac
@@ -48,7 +94,7 @@ look at our [guidelines for contributors](CONTRIBUTING.md) for details.
This project is compliant with the
[REUSE Specification Version 3.0](https://git.fsfe.org/reuse/docs/src/commit/d173a27231a36e1a2a3af07421f5e557ae0fec46/spec.md).
-Copyright DB Netz AG, licensed under Apache 2.0 (see full text in
+Copyright DB InfraGO AG, licensed under Apache 2.0 (see full text in
[LICENSES/Apache-2.0.txt](LICENSES/Apache-2.0.txt))
Dot-files are licensed under CC0-1.0 (see full text in
diff --git a/capella_ros_tools/.license_header.txt b/capella_ros_tools/.license_header.txt
new file mode 100644
index 0000000..8c17559
--- /dev/null
+++ b/capella_ros_tools/.license_header.txt
@@ -0,0 +1,2 @@
+# SPDX-FileCopyrightText: Copyright DB Netz AG
+# SPDX-License-Identifier: Apache-2.0
diff --git a/capella_ros_tools/__init__.py b/capella_ros_tools/__init__.py
new file mode 100644
index 0000000..3d8be91
--- /dev/null
+++ b/capella_ros_tools/__init__.py
@@ -0,0 +1,14 @@
+# Copyright DB InfraGO AG and contributors
+# SPDX-License-Identifier: Apache-2.0
+"""The capella_ros_tools package."""
+import logging
+from importlib import metadata
+
+try:
+ __version__ = metadata.version("capella_ros_tools")
+except metadata.PackageNotFoundError: # pragma: no cover
+ __version__ = "0.0.0+unknown"
+del metadata
+
+logging.basicConfig(level=logging.INFO)
+logger = logging.getLogger(__name__)
diff --git a/capella_ros_tools/__main__.py b/capella_ros_tools/__main__.py
new file mode 100644
index 0000000..0cda4d0
--- /dev/null
+++ b/capella_ros_tools/__main__.py
@@ -0,0 +1,155 @@
+# Copyright DB InfraGO AG and contributors
+# SPDX-License-Identifier: Apache-2.0
+"""Main entry point into Capella ROS Tools."""
+
+import io
+import pathlib
+import uuid
+
+import capellambse
+import click
+from capellambse import cli_helpers, decl
+
+import capella_ros_tools
+from capella_ros_tools import exporter, importer
+
+from . import logger
+
+
+@click.group()
+@click.version_option(
+ version=capella_ros_tools.__version__,
+ prog_name="capella-ros-tools",
+ message="%(prog)s %(version)s",
+)
+def cli():
+ """Console script for Capella ROS Tools."""
+
+
+@cli.command("import")
+@click.option(
+ "-i",
+ "--input",
+ type=str,
+ required=True,
+ help="Path to the ROS message package.",
+)
+@click.option(
+ "-m",
+ "--model",
+ type=cli_helpers.ModelCLI(),
+ required=True,
+ help="Path to the Capella model.",
+)
+@click.option(
+ "-l",
+ "--layer",
+ type=click.Choice(["oa", "la", "sa", "pa"], case_sensitive=False),
+ help="The layer to import the messages to.",
+)
+@click.option(
+ "-r",
+ "--root",
+ type=click.UUID,
+ help="The UUID of the root package to import the messages to.",
+)
+@click.option(
+ "-t",
+ "--types",
+ type=click.UUID,
+ help="The UUID of the types package to import the created data types to.",
+)
+@click.option(
+ "--no-deps",
+ "no_deps",
+ is_flag=True,
+ help="Don’t install message dependencies.",
+)
+@click.option(
+ "-o",
+ "--output",
+ type=click.Path(path_type=pathlib.Path, dir_okay=False),
+ help="Output file path for decl YAML.",
+)
+def import_msgs(
+ input: str,
+ model: capellambse.MelodyModel,
+ layer: str,
+ root: uuid.UUID,
+ types: uuid.UUID,
+ no_deps: bool,
+ output: pathlib.Path,
+) -> None:
+ """Import ROS messages into a Capella data package."""
+
+ if root:
+ root_uuid = str(root)
+ elif layer:
+ root_uuid = getattr(model, layer).data_package.uuid
+ else:
+ raise click.UsageError("Either --root or --layer must be provided")
+
+ if types:
+ params = {"types_uuid": str(types)}
+ else:
+ params = {"types_parent_uuid": model.sa.data_package.uuid}
+
+ parsed = importer.Importer(input, no_deps)
+ logger.info("Loaded %d packages", len(parsed.messages.packages))
+
+ yml = parsed.to_yaml(root_uuid, **params)
+ if output:
+ logger.info("Writing to file %s", output)
+ output.write_text(yml, encoding="utf-8")
+ else:
+ logger.info("Writing to model %s", model.name)
+ decl.apply(model, io.StringIO(yml))
+ model.save()
+
+
+@cli.command("export")
+@click.option(
+ "-m",
+ "--model",
+ type=cli_helpers.ModelCLI(),
+ required=True,
+ help="Path to the Capella model.",
+)
+@click.option(
+ "-l",
+ "--layer",
+ type=click.Choice(["oa", "la", "sa", "pa"], case_sensitive=False),
+ help="The layer to export the model objects from.",
+)
+@click.option(
+ "-r",
+ "--root",
+ type=click.UUID,
+ help="The UUID of the root package to import the messages from.",
+)
+@click.option(
+ "-o",
+ "--output",
+ type=click.Path(path_type=pathlib.Path, file_okay=False),
+ default=pathlib.Path.cwd() / "data-package",
+ help="Output directory for the .msg files.",
+)
+def export_capella(
+ model: capellambse.MelodyModel,
+ layer: str,
+ root: uuid.UUID,
+ output: pathlib.Path,
+):
+ """Export Capella data package to ROS messages."""
+ if root:
+ current_pkg = model.search("DataPkg").by_uuid(str(root))
+ elif layer:
+ current_pkg = getattr(model, layer).data_package
+ else:
+ raise click.UsageError("Either --root or --layer must be provided")
+
+ exporter.export(current_pkg, output) # type: ignore
+
+
+if __name__ == "__main__":
+ cli()
diff --git a/capella_ros_tools/data_model.py b/capella_ros_tools/data_model.py
new file mode 100644
index 0000000..4777e4a
--- /dev/null
+++ b/capella_ros_tools/data_model.py
@@ -0,0 +1,401 @@
+# Copyright DB InfraGO AG and contributors
+# SPDX-License-Identifier: Apache-2.0
+"""Tool for parsing ROS messages."""
+
+from __future__ import annotations
+
+import os
+import pathlib
+import re
+import typing as t
+from dataclasses import dataclass
+
+from capellambse.filehandler import abc
+
+LICENSE_HEADER = (
+ pathlib.Path(__file__)
+ .parent.joinpath(".license_header.txt")
+ .read_text(encoding="utf-8")
+)
+PACKAGE_NAME_MESSAGE_TYPE_SEPARATOR = "/"
+COMMENT_DELIMITER = "#"
+CONSTANT_SEPARATOR = "="
+UPPER_BOUND_TOKEN = "<="
+
+VALID_MESSAGE_NAME_PATTERN = "[A-Z][A-Za-z0-9]*"
+VALID_CONSTANT_NAME_PATTERN = "[A-Z](?:[A-Z0-9_]*[A-Z0-9])?"
+VALID_REF_COMMENT_PATTERN = re.compile(
+ r".*cf\.\s*"
+ rf"({VALID_MESSAGE_NAME_PATTERN})"
+ r"(?:,\s*"
+ rf"({VALID_CONSTANT_NAME_PATTERN}))?"
+ r"\s*.*"
+)
+
+HTML_TAG_PATTERN = re.compile("<.*?>")
+
+
+def _clean_html(raw_html: str):
+ return re.sub(HTML_TAG_PATTERN, "", raw_html)
+
+
+def _clean_comment(comment: str) -> str:
+ return comment.strip(COMMENT_DELIMITER).strip()
+
+
+class Range(t.NamedTuple):
+ """Define range of values."""
+
+ min: str
+ max: str
+
+
+@dataclass
+class TypeDef:
+ """Type definition."""
+
+ name: str
+ card: Range
+ package: str | None = None
+
+ def __str__(self) -> str:
+ """Return string representation of the type."""
+ out = self.name
+ if self.card.min == self.card.max:
+ out += f"[{self.card.max}]" if self.card.max != "1" else ""
+ else:
+ out += (
+ f"[{UPPER_BOUND_TOKEN}{self.card.max}]"
+ if self.card.max != "*"
+ else "[]"
+ )
+ if self.package:
+ out = f"{self.package}{PACKAGE_NAME_MESSAGE_TYPE_SEPARATOR}{out}"
+ return out
+
+ @classmethod
+ def from_string(cls, type_str: str) -> TypeDef:
+ """Create a type definition from a string."""
+ name = type_str
+ card = Range("1", "1")
+ if type_str.endswith("]"):
+ name, _, max_card = type_str.partition("[")
+ max_card = max_card.removesuffix("]")
+ if max_card.startswith(UPPER_BOUND_TOKEN):
+ max_card = max_card.removeprefix(UPPER_BOUND_TOKEN)
+ card = Range("0", max_card)
+ else:
+ card = (
+ Range(max_card, max_card) if max_card else Range("0", "*")
+ )
+
+ if len(temp := name.split(PACKAGE_NAME_MESSAGE_TYPE_SEPARATOR)) == 2:
+ package, name = temp
+ else:
+ package = None
+
+ return cls(name, card, package)
+
+
+@dataclass
+class FieldDef:
+ """Definition of a field in a ROS message."""
+
+ type: TypeDef
+ name: str
+ description: str
+
+ def __str__(self) -> str:
+ """Return string representation of the field."""
+ out = f"{self.type} {self.name}"
+ if self.description:
+ out += f" # {_clean_html(self.description)}"
+ return out
+
+
+@dataclass
+class ConstantDef:
+ """Definition of a constant in a ROS message."""
+
+ type: TypeDef
+ name: str
+ value: str
+ description: str
+
+ def __str__(self) -> str:
+ """Return string representation of the constant."""
+ out = f"{self.type} {self.name} = {self.value}"
+ if self.description:
+ out += f" # {_clean_html(self.description)}"
+ return out
+
+
+@dataclass
+class EnumDef:
+ """Definition of an enum in a ROS message."""
+
+ name: str
+ literals: list[ConstantDef]
+ description: str
+
+ def __str__(self) -> str:
+ """Return string representation of the enum."""
+ out = f"# {_clean_html(self.description)}" if self.description else ""
+ for literal in self.literals:
+ out += f"\n{literal}"
+ return out
+
+ def __eq__(self, other: object) -> bool:
+ """Return whether the enum is equal to another."""
+ if not isinstance(other, EnumDef):
+ return NotImplemented
+ return (
+ other.name == self.name
+ and all(literal in self.literals for literal in other.literals)
+ and other.description == self.description
+ )
+
+
+def _process_block_comment(line: str) -> str:
+ if comment := _clean_comment(line):
+ return f"{comment} "
+ return "\n"
+
+
+def _extract_file_level_comments(
+ msg_string: str,
+) -> t.Tuple[str, list[str]]:
+ """Extract comments at the beginning of the message."""
+ lines = msg_string.lstrip("\n").splitlines()
+ lines.append("")
+ file_level_comments = ""
+ i = 0
+ for i, line in enumerate(lines):
+ line = line.strip()
+ if not line.startswith(COMMENT_DELIMITER):
+ if line:
+ return "", lines
+ else:
+ break
+ file_level_comments += _process_block_comment(line)
+
+ file_content = lines[i:]
+ return file_level_comments, file_content
+
+
+@dataclass
+class MessageDef:
+ """Definition of a ROS message."""
+
+ name: str
+ fields: list[FieldDef]
+ enums: list[EnumDef]
+ description: str
+
+ def __str__(self) -> str:
+ """Return string representation of the message."""
+ if self.description:
+ out = f"# {_clean_html(self.description)}\n\n"
+ else:
+ out = ""
+ for enum in self.enums:
+ out += f"{enum}\n\n"
+ for field in self.fields:
+ out += f"{field}\n"
+ return out
+
+ def __eq__(self, other: object) -> bool:
+ """Return whether the message is equal to another."""
+ if not isinstance(other, MessageDef):
+ return NotImplemented
+ return (
+ other.name == self.name
+ and all(field in self.fields for field in other.fields)
+ and all(enum in self.enums for enum in other.enums)
+ and other.description == self.description
+ )
+
+ @classmethod
+ def from_file(
+ cls, file: abc.AbstractFilePath | pathlib.Path
+ ) -> MessageDef:
+ """Create message definition from a .msg file."""
+ msg_name = file.stem
+ msg_string = file.read_text()
+ msg_string = msg_string.removeprefix(LICENSE_HEADER)
+ return cls.from_string(msg_name, msg_string)
+
+ @classmethod
+ def from_string(cls, msg_name: str, msg_string: str) -> MessageDef:
+ """Create message definition from a string."""
+ msg_comments, lines = _extract_file_level_comments(msg_string)
+ msg = cls(msg_name, [], [], msg_comments)
+ last_element: t.Any = None
+ block_comments = ""
+ index = -1
+ values: list[str] = []
+
+ for line in lines:
+ line = line.rstrip()
+ if not line:
+ # new block
+ if index != 0:
+ block_comments = ""
+ continue
+
+ last_index = index
+ index = line.find(COMMENT_DELIMITER)
+ if index == -1:
+ # no comment
+ comment = ""
+ elif index == 0:
+ # block comment
+ if last_index > 0:
+ # block comments were used
+ block_comments = ""
+ block_comments += _process_block_comment(line)
+ continue
+ else:
+ # inline comment
+ comment = _clean_comment(line[index:])
+ line = line[:index].rstrip()
+ if not line:
+ # indented comment
+ last_element.description += (
+ f"{comment} " if comment else "\n"
+ )
+ continue
+ comment = f"{comment} "
+
+ type_string, _, rest = line.partition(" ")
+ name, _, value = rest.partition(CONSTANT_SEPARATOR)
+ name = name.strip()
+ value = value.strip()
+ if value:
+ # constant
+ if (
+ value in values
+ or not msg.enums
+ or not isinstance(last_element, ConstantDef)
+ ):
+ # new enum
+ enum_def = EnumDef("", [], block_comments)
+ block_comments = ""
+ msg.enums.append(enum_def)
+ values = []
+ constant_def = ConstantDef(
+ TypeDef.from_string(type_string),
+ name,
+ value,
+ block_comments + comment,
+ )
+ msg.enums[-1].literals.append(constant_def)
+ values.append(value)
+ last_element = constant_def
+ else:
+ # field
+ field_def = FieldDef(
+ TypeDef.from_string(type_string),
+ name,
+ block_comments + comment,
+ )
+ msg.fields.append(field_def)
+ last_element = field_def
+
+ if not msg.fields and len(msg.enums) == 1:
+ enum = msg.enums[0]
+ _process_enums(enum)
+ enum.name = msg_name
+ return msg
+
+ for field in msg.fields:
+ _process_comment(field)
+
+ for enum in msg.enums:
+
+ common_prefix = _process_enums(enum)
+
+ if common_prefix:
+ enum.name = _get_enum_identifier(common_prefix)
+ else:
+ enum.name = msg_name if not msg.fields else msg_name + "Type"
+
+ matched_field = None
+ for field in msg.fields:
+ if field.type.name == enum.literals[0].type.name:
+ matched_field = matched_field or field
+ if field.name.lower() == enum.name.lower():
+ field.type.name = enum.name
+ field.type.package = msg_name
+ break
+ else:
+ if matched_field:
+ enum.name = msg_name + matched_field.name.capitalize()
+ matched_field.type.name = enum.name
+ matched_field.type.package = msg_name
+
+ return msg
+
+
+def _process_enums(enum: EnumDef) -> str:
+ common_prefix = os.path.commonprefix(
+ [literal.name for literal in enum.literals]
+ )
+ if not common_prefix.endswith("_"):
+ if index := common_prefix.rfind("_"):
+ common_prefix = common_prefix[: index + 1]
+ else:
+ common_prefix = ""
+
+ for literal in enum.literals:
+ literal.name = literal.name.removeprefix(common_prefix)
+
+ return common_prefix
+
+
+def _process_comment(field: FieldDef) -> None:
+ """Process comment of a field."""
+ if match := VALID_REF_COMMENT_PATTERN.match(field.description):
+ ref_msg_name, ref_const_name = match.groups()
+ field.type.package = ref_msg_name
+ if ref_const_name:
+ field.type.name = _get_enum_identifier(
+ ref_const_name.rstrip("_XXX")
+ )
+ else:
+ field.type.name = ref_msg_name
+
+
+def _get_enum_identifier(common_prefix: str) -> str:
+ """Get the identifier of an enum."""
+ return "".join([x.capitalize() for x in common_prefix.split("_")])
+
+
+@dataclass
+class MessagePkgDef:
+ """Definition of a ROS message package."""
+
+ name: str
+ messages: list[MessageDef]
+ packages: list[MessagePkgDef]
+
+ def __eq__(self, other: object) -> bool:
+ """Return whether the message package is equal to another."""
+ if not isinstance(other, MessagePkgDef):
+ return NotImplemented
+ return (
+ other.name == self.name
+ and all(message in self.messages for message in other.messages)
+ and all(package in self.packages for package in other.packages)
+ )
+
+ @classmethod
+ def from_msg_folder(
+ cls, pkg_name: str, msg_path: abc.AbstractFilePath | pathlib.Path
+ ) -> MessagePkgDef:
+ """Create a message package definition from a folder."""
+ out = cls(pkg_name, [], [])
+ for msg_file in sorted(msg_path.rglob("*.msg"), key=os.fspath):
+ msg_def = MessageDef.from_file(msg_file)
+ out.messages.append(msg_def)
+ return out
diff --git a/capella_ros_tools/exporter.py b/capella_ros_tools/exporter.py
new file mode 100644
index 0000000..68983c1
--- /dev/null
+++ b/capella_ros_tools/exporter.py
@@ -0,0 +1,81 @@
+# Copyright DB InfraGO AG and contributors
+# SPDX-License-Identifier: Apache-2.0
+"""Tool for exporting a Capella data package to ROS messages."""
+
+import pathlib
+import re
+
+from capellambse.model.crosslayer import information
+
+from capella_ros_tools import data_model
+
+from . import logger
+
+
+def _clean_name(name: str) -> str:
+ return re.sub(r"\W", "", name)
+
+
+def export(current_pkg: information.DataPkg, current_path: pathlib.Path):
+ """Export a Capella data package to ROS messages."""
+ current_path.mkdir(parents=True, exist_ok=True)
+ for cls_obj in current_pkg.classes:
+ fields = []
+ for prop_obj in cls_obj.owned_properties:
+ try:
+ card = data_model.Range(
+ prop_obj.min_card.value, prop_obj.max_card.value
+ )
+ except AttributeError:
+ card = data_model.Range("1", "1")
+ type_def = data_model.TypeDef(name=prop_obj.type.name, card=card)
+ prop_def = data_model.FieldDef(
+ type=type_def,
+ name=prop_obj.name,
+ description=prop_obj.description or "",
+ )
+ fields.append(prop_def)
+ cls_def = data_model.MessageDef(
+ name=cls_obj.name,
+ fields=fields,
+ enums=[],
+ description=cls_obj.description or "",
+ )
+ (current_path / f"{_clean_name(cls_obj.name)}.msg").write_text(
+ str(cls_def)
+ )
+
+ for enum_obj in current_pkg.enumerations:
+ literals = []
+ for i, lit_obj in enumerate(enum_obj.owned_literals):
+ try:
+ type_name = lit_obj.value.type.name
+ except AttributeError:
+ type_name = "uint8"
+ try:
+ literal_value = lit_obj.value.value
+ except AttributeError:
+ literal_value = i
+ type_def = data_model.TypeDef(
+ type_name, data_model.Range("1", "1")
+ )
+ lit_def = data_model.ConstantDef(
+ type=type_def,
+ name=lit_obj.name,
+ value=literal_value,
+ description=lit_obj.description or "",
+ )
+ literals.append(lit_def)
+ enum_def = data_model.EnumDef(
+ name=enum_obj.name,
+ literals=literals,
+ description=enum_obj.description or "",
+ )
+ (current_path / f"{_clean_name(enum_obj.name)}.msg").write_text(
+ str(enum_def)
+ )
+
+ for pkg_obj in current_pkg.packages:
+ pkg_path = current_path / _clean_name(pkg_obj.name)
+ export(pkg_obj, pkg_path)
+ logger.info("Exported package %s to %s", pkg_obj.name, pkg_path)
diff --git a/capella_ros_tools/importer.py b/capella_ros_tools/importer.py
new file mode 100644
index 0000000..d546773
--- /dev/null
+++ b/capella_ros_tools/importer.py
@@ -0,0 +1,267 @@
+# Copyright DB InfraGO AG and contributors
+# SPDX-License-Identifier: Apache-2.0
+"""Tool for importing ROS messages to a Capella data package."""
+
+import os
+import typing as t
+
+from capellambse import decl, filehandler, helpers
+
+from capella_ros_tools import data_model
+
+from . import logger
+
+ROS2_INTERFACES = {
+ "common_interfaces": "git+https://github.com/ros2/common_interfaces",
+ "rcl_interfaces": "git+https://github.com/ros2/rcl_interfaces",
+ "unique_identifier_msgs": (
+ "git+https://github.com/ros2/unique_identifier_msgs"
+ ),
+}
+
+
+class Importer:
+ """Class for importing ROS messages to a Capella data package."""
+
+ def __init__(
+ self,
+ msg_path: str,
+ no_deps: bool,
+ ):
+ self.messages = data_model.MessagePkgDef("root", [], [])
+ self._promise_ids: dict[str, None] = {}
+ self._promise_id_refs: dict[str, None] = {}
+
+ self._add_packages("ros_msgs", msg_path)
+ if no_deps:
+ return
+
+ for interface_name, interface_url in ROS2_INTERFACES.items():
+ self._add_packages(interface_name, interface_url)
+
+    def _add_packages(self, name: str, path: str) -> None:
+        root = filehandler.get_filehandler(path).rootdir
+        for msg_dir in sorted(root.rglob("msg"), key=os.fspath):  # msg_dir: avoid shadowing builtin dir()
+            pkg_name = msg_dir.parent.name or name
+            pkg_def = data_model.MessagePkgDef.from_msg_folder(pkg_name, msg_dir)
+            self.messages.packages.append(pkg_def)
+            logger.info("Loaded package %s from %s", pkg_name, msg_dir)
+
+ def _convert_datatype(self, promise_id: str) -> dict[str, t.Any]:
+ name = promise_id.split(".", 1)[-1]
+ if any(t in name for t in ["char", "str"]):
+ _type = "StringType"
+ elif any(t in name for t in ["bool", "byte"]):
+ _type = "BooleanType"
+ else:
+ _type = "NumericType"
+ yml = {
+ "promise_id": promise_id,
+ "find": {
+ "name": name,
+ "_type": _type,
+ },
+ }
+ return yml
+
+ def _convert_package(
+ self,
+ pkg_def: data_model.MessagePkgDef,
+ ) -> dict[str, t.Any]:
+ classes = []
+ enums = []
+ packages = []
+ associations = []
+
+ for msg_def in pkg_def.messages:
+ if msg_def.fields:
+ cls_yml, cls_associations = self._convert_class(
+ pkg_def.name, msg_def
+ )
+ classes.append(cls_yml)
+ associations.extend(cls_associations)
+ for enum_def in msg_def.enums:
+ enums.append(self._convert_enum(msg_def.name, enum_def))
+
+ for new_pkg in pkg_def.packages:
+ new_yml = {
+ "find": {
+ "name": new_pkg.name,
+ },
+ } | self._convert_package(new_pkg)
+ packages.append(new_yml)
+
+ sync = {}
+ if classes:
+ sync["classes"] = classes
+ if enums:
+ sync["enumerations"] = enums
+ if packages:
+ sync["packages"] = packages
+ if associations:
+ sync["owned_associations"] = associations
+
+ yml = {}
+ if sync:
+ yml["sync"] = sync
+
+ return yml
+
+ def _convert_class(
+ self, pkg_name: str, msg_def: data_model.MessageDef
+ ) -> tuple[dict[str, t.Any], list[dict[str, t.Any]]]:
+ promise_id = f"{pkg_name}.{msg_def.name}"
+ self._promise_ids[promise_id] = None
+ props = []
+ associations = []
+ for field_def in msg_def.fields:
+ prop_promise_id = f"{promise_id}.{field_def.name}"
+ promise_ref = (
+ f"{field_def.type.package or pkg_name}.{field_def.type.name}"
+ )
+ self._promise_id_refs[promise_ref] = None
+ prop_yml = {
+ "promise_id": prop_promise_id,
+ "find": {
+ "name": field_def.name,
+ },
+ "set": {
+ "type": decl.Promise(promise_ref),
+ "kind": "COMPOSITION",
+ "description": field_def.description,
+ "min_card": decl.NewObject(
+ "LiteralNumericValue", value=field_def.type.card.min
+ ),
+ "max_card": decl.NewObject(
+ "LiteralNumericValue", value=field_def.type.card.max
+ ),
+ },
+ }
+ props.append(prop_yml)
+
+ associations.append(
+ {
+ "find": {
+ "navigable_members": [decl.Promise(prop_promise_id)],
+ },
+ "sync": {
+ "members": [
+ {
+ "find": {
+ "type": decl.Promise(promise_id),
+ },
+ "set": {
+ "_type": "Property",
+ "kind": "ASSOCIATION",
+ "min_card": decl.NewObject(
+ "LiteralNumericValue", value="1"
+ ),
+ "max_card": decl.NewObject(
+ "LiteralNumericValue", value="1"
+ ),
+ },
+ }
+ ],
+ },
+ }
+ )
+
+ yml = {
+ "promise_id": promise_id,
+ "find": {
+ "name": msg_def.name,
+ },
+ "set": {
+ "description": msg_def.description,
+ },
+ "sync": {
+ "properties": props,
+ },
+ }
+ return yml, associations
+
+ def _convert_enum(
+ self, pkg_name: str, enum_def: data_model.EnumDef
+ ) -> dict[str, t.Any]:
+ promise_id = f"{pkg_name}.{enum_def.name}"
+ self._promise_ids[promise_id] = None
+ literals = []
+ for literal in enum_def.literals:
+ literal_yml = {
+ "find": {
+ "name": literal.name,
+ },
+ "set": {
+ "description": literal.description,
+ "value": decl.NewObject(
+ "LiteralNumericValue", value=literal.value
+ ),
+ },
+ }
+ literals.append(literal_yml)
+ yml = {
+ "promise_id": promise_id,
+ "find": {
+ "name": enum_def.name,
+ },
+ "set": {
+ "description": enum_def.description,
+ },
+ "sync": {
+ "literals": literals,
+ },
+ }
+
+ return yml
+
+ def to_yaml(
+ self,
+ root_uuid: str,
+ types_parent_uuid: str = "",
+ types_uuid: str = "",
+ ) -> str:
+ """Import ROS messages into a Capella data package."""
+ logger.info("Generating decl YAML")
+ instructions = [
+ {"parent": decl.UUIDReference(helpers.UUIDString(root_uuid))}
+ | self._convert_package(self.messages),
+ ]
+ needed_types = [
+ p for p in self._promise_id_refs if p not in self._promise_ids
+ ]
+ if not needed_types:
+ return decl.dump(instructions)
+
+ datatypes = [
+ self._convert_datatype(promise_id) for promise_id in needed_types
+ ]
+ if types_uuid:
+ instructions.append(
+ {
+ "parent": decl.UUIDReference(
+ helpers.UUIDString(types_uuid)
+ ),
+ "sync": {"datatypes": datatypes},
+ }
+ )
+ elif types_parent_uuid:
+ instructions.append(
+ {
+ "parent": decl.UUIDReference(
+ helpers.UUIDString(types_parent_uuid)
+ ),
+ "sync": {
+ "packages": [
+ {
+ "find": {"name": "Data Types"},
+ "sync": {"datatypes": datatypes},
+ }
+ ],
+ },
+ }
+ )
+ else:
+ raise ValueError(
+ "Either types_parent_uuid or types_uuid must be provided"
+ )
+ return decl.dump(instructions)
diff --git a/docs/Makefile b/docs/Makefile
index fdfe666..3a2dfee 100644
--- a/docs/Makefile
+++ b/docs/Makefile
@@ -1,4 +1,4 @@
-# Copyright DB Netz AG and contributors
+# Copyright DB InfraGO AG and contributors
# SPDX-License-Identifier: CC0-1.0
# Minimal makefile for Sphinx documentation
@@ -21,3 +21,19 @@ help:
# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
%: Makefile
@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
+
+
+# Auto-generate API documentation
+apidoc:
+ sphinx-apidoc --module-first --output-dir source/code --force ..
+
+clean-apidoc:
+ rm -rfv source/code
+
+.PHONY: apidoc clean-apidoc
+clean: clean-apidoc
+
+.PHONY: serve
+html: apidoc
+serve: html
+ cd build/html && exec python -m http.server --bind 127.0.0.1
diff --git a/docs/make.bat b/docs/make.bat
index ab614db..9f24710 100644
--- a/docs/make.bat
+++ b/docs/make.bat
@@ -1,5 +1,5 @@
@ECHO OFF
-REM Copyright DB Netz AG and contributors
+REM Copyright DB InfraGO AG and contributors
REM SPDX-License-Identifier: CC0-1.0
pushd %~dp0
diff --git a/docs/source/_static/github-logo.svg b/docs/source/_static/github-logo.svg
index a407b96..2843ea7 100644
--- a/docs/source/_static/github-logo.svg
+++ b/docs/source/_static/github-logo.svg
@@ -1,9 +1,8 @@
+~ SPDX-FileCopyrightText: Copyright DB InfraGO AG and contributors
+~ SPDX-License-Identifier: Apache-2.0
+-->
diff --git a/docs/source/conf.py b/docs/source/conf.py
index e3f086b..f114da7 100644
--- a/docs/source/conf.py
+++ b/docs/source/conf.py
@@ -1,4 +1,4 @@
-# Copyright DB Netz AG and contributors
+# Copyright DB InfraGO AG and contributors
# SPDX-License-Identifier: Apache-2.0
"""Configuration file for Sphinx."""
@@ -14,7 +14,7 @@
sys.path.insert(0, os.path.abspath("../.."))
-import rosidl2capella
+import capella_ros_tools
# -- Project information -----------------------------------------------------
@@ -25,7 +25,7 @@
with open("../../pyproject.toml", "rb") as f:
_metadata = tomllib.load(f)["project"]
-project = "rosidl2capella"
+project = "capella-ros-tools"
author = _metadata["authors"][0]["name"]
copyright = f"{author} and the {_metadata['name']} contributors"
@@ -36,6 +36,7 @@
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
+ "nbsphinx",
"sphinx.ext.autodoc",
"sphinx.ext.intersphinx",
"sphinx.ext.napoleon",
@@ -58,7 +59,7 @@
# built documents.
# The full version, including alpha/beta/rc tags.
-version = rosidl2capella.__version__
+version = capella_ros_tools.__version__
rst_epilog = f"""
.. |Project| replace:: {project}
.. |Version| replace:: {version}
@@ -95,7 +96,7 @@
"footer_icons": [
{
"name": "GitHub",
- "url": "https://github.com/DSD-DBS/rosidl2capella",
+ "url": "https://github.com/DSD-DBS/capella-ros-tools",
"html": '',
"class": "",
},
diff --git a/docs/source/howtos.rst b/docs/source/howtos.rst
new file mode 100644
index 0000000..697a831
--- /dev/null
+++ b/docs/source/howtos.rst
@@ -0,0 +1,58 @@
+..
+ Copyright DB InfraGO AG and contributors
+ SPDX-License-Identifier: Apache-2.0
+
+.. _howtos:
+
+********
+Examples
+********
+
+This section contains a collection of examples that demonstrate how to use the library.
+
+Using the CLI
+=============
+
+Import ROS2 Messages:
+---------------------
+.. code-block:: bash
+
+ python -m capella_ros_tools \
+ import \
+ -i tests/data/data_model/example_msgs \
+ -m tests/data/empty_project_60 \
+ -l la \
+ --no-deps
+
+Import ROS2 Messages from Git Repository:
+-----------------------------------------
+.. code-block:: bash
+
+ python -m capella_ros_tools \
+ import \
+ -i git+https://github.com/DSD-DBS/dsd-ros-msg-definitions-oss \
+ -m tests/data/empty_project_60 \
+ -l la
+
+Export Capella data package:
+------------------------------------
+.. code-block:: bash
+
+ python -m capella_ros_tools \
+ export \
+ -m tests/data/melody_model_60 \
+ -l la \
+ -o tests/data/melody_msgs
+
+Export Capella data package from Git Repository:
+--------------------------------------------------------
+.. code-block:: bash
+
+ python -m capella_ros_tools \
+ export \
+ -m git+https://github.com/DSD-DBS/coffee-machine \
+ -l oa \
+ -o tests/data/coffee_msgs
+
+.. note::
+   When exporting Capella enumerations, if the enumeration literal values are not defined in the Capella model, the values will be assumed to be 0, 1, 2, 3, etc. and the value's type will be set to uint8.
diff --git a/docs/source/index.rst b/docs/source/index.rst
index 963fdcb..d4c6365 100644
--- a/docs/source/index.rst
+++ b/docs/source/index.rst
@@ -1,17 +1,39 @@
..
- Copyright DB Netz AG and contributors
+ Copyright DB InfraGO AG and contributors
SPDX-License-Identifier: Apache-2.0
-Welcome to rosidl2capella's documentation!
-==========================================
+
+***********************************************
+Welcome to the Capella ROS Tools documentation!
+***********************************************
+
+Overview
+========
+
+.. image:: https://img.shields.io/badge/code%20style-black-000000.svg
+ :target: https://github.com/psf/black
+ :alt: Black
+
+**Date**: |today| **Version**: |Version|
+
+Capella ROS Tools is a command-line application written in Python, designed to facilitate the seamless integration of ROS2 and Capella MBSE tools. Key features include:
+
+* Export Capella model elements as ROS2 message (.msg) files.
+* Import ROS2 message (.msg) files as Capella model elements.
+* Works with local and remote message files/Capella projects.
+
.. toctree::
:maxdepth: 2
:caption: Contents:
+ usage
+ howtos
+ messages
+
.. toctree::
:maxdepth: 3
- :caption: API reference
+ :caption: API reference:
code/modules
diff --git a/docs/source/messages.rst b/docs/source/messages.rst
new file mode 100644
index 0000000..c987a43
--- /dev/null
+++ b/docs/source/messages.rst
@@ -0,0 +1,128 @@
+..
+ Copyright DB InfraGO AG and contributors
+ SPDX-License-Identifier: Apache-2.0
+
+.. _messages:
+
+*******************
+ROS2 Message Layout
+*******************
+
+The Capella ROS Tools API expects ROS2 messages to be organized in a specific way:
+
+Package Definition
+==================
+* A package is a directory containing a `msg` directory.
+* The `msg` directory contains `.msg` files which contain class and enum definitions.
+
+::
+
+ folders
+ ├── package1
+ │ └── msg
+ │ ├── class1.msg
+ │ └── types
+ │ └── enum1.msg
+ └── package2
+ └── msg
+ └── class2.msg
+
+The above folder structure would translate to the following package definition (assuming class1.msg, class2.msg contain class definitions and enum1.msg contains an enum definition):
+
+::
+
+ packages
+ ├── Package: package1
+ │ ├── Class: class1
+ │ └── Enum: enum1
+ └── Package: package2
+        └── Class: class2
+
+
+Class Definition
+================
+* A `.msg` file can contain one class definition.
+* The comment at the top of the file followed by an empty line is added to the class description.
+* **Inline Comments:** Comments on the same line as a property definition are directly added to that property's description.
+* **Indented Comment Lines:** Comments on a line of their own but indented are added to the description of the last encountered property.
+* **Block Comments:** Comments on a line of their own and not indented are added to the description of the next properties until an empty line and the block comment has been used.
+
+.. literalinclude:: ../../tests/data/data_model/example_msgs/package1/msg/SampleClass.msg
+ :language: python
+
+
+Enum definition
+===============
+* A `.msg` file can contain multiple enum definitions.
+* Enum names are determined based on the common prefix of all enum literals in the enum definition.
+* If no common prefix exists, the enum name is derived from the file name (excluding the extension).
+* Two or more enums must not have literal names without a common prefix.
+* **Inline Comments:** Comments on the same line as an enum literal definition are directly added to that enum literal's description.
+* **Indented Comment Lines:** Comments on a line of their own but indented are added to the description of the last encountered enum literal.
+* **Block Comments:** Comments on a line of their own and not indented are added to the description of the next enum definition or the next enum literal definitions until an empty line and the block comment has been used.
+
+.. literalinclude:: ../../tests/data/data_model/example_msgs/package1/msg/types/SampleEnum.msg
+ :language: python
+
+Enum and Class Definition
+=========================
+* A `.msg` file can contain one class definition and multiple enum definitions.
+* Enums without a common literal name prefix are named using the file name plus the suffix "Type".
+* There can only be one or no enum whose literal names do not share a common prefix.
+* Comments at the top of the file are added to the class description.
+* **Inline Comments:** Comments on the same line as a property or enum literal are directly added to the description of that element.
+* **Indented Comment Lines:** Comments on a line of their own but indented are added to the description of the last encountered property or enum literal.
+* **Block Comments:** Comments on a line of their own and not indented are added to the descriptions of the next properties, enum or enum literal until an empty line and the block comment has been used.
+
+.. code-block:: python
+
+ # SampleClassEnum.msg
+ # Properties in SampleClassEnum can reference
+ # enums in the same file.
+
+ # This block comment is added to the
+ # enum description of SampleClassEnumType.
+ byte OK = 0
+ byte WARN = 1
+ byte ERROR = 2
+ byte STALE = 3
+
+ # This block comment is added to the
+ # enum description of Color.
+ byte COLOR_RED = 0
+ byte COLOR_BLUE = 1
+ byte COLOR_YELLOW = 2
+
+ uint8 field1 # This inline comment is added to
+ # the description of field1.
+ uint8 field2
+
+
+Referencing enums
+=================
+
+In the Same File
+----------------
+* In files that define a class along with enums, the class properties can reference enums defined in the same file. This can be achieved in two ways:
+
+ * **Name Match:** The property name matches the enum name.
+ * **Type Match:** The property type matches the enum literals type, in which case the updated enum name is derived from the file name plus the property name.
+
+* Name matching takes precedence over type matching.
+
+.. literalinclude:: ../../tests/data/data_model/example_msgs/package2/msg/SampleClassEnum.msg
+ :language: python
+
+In another file
+---------------
+* If a property definition references an enum in the comments, the property type is updated based on this reference.
+* The reference should follow either of the following formats:
+
+  * **cf. <file name>:** The enum name was derived from the file name (excluding the extension).
+  * **cf. <file name>, <common prefix>_XXX:** The enum name was derived from the longest common prefix of all enum literals in the definition.
+
+.. literalinclude:: ../../tests/data/data_model/example_msgs/package1/msg/SampleEnum.msg
+ :language: python
+
+.. literalinclude:: ../../tests/data/data_model/example_msgs/package1/msg/SampleClass.msg
+ :language: python
diff --git a/docs/source/usage.rst b/docs/source/usage.rst
new file mode 100644
index 0000000..ab61512
--- /dev/null
+++ b/docs/source/usage.rst
@@ -0,0 +1,36 @@
+..
+ Copyright DB InfraGO AG and contributors
+ SPDX-License-Identifier: Apache-2.0
+
+.. _usage:
+
+*****
+Usage
+*****
+
+This section describes how to use the Capella ROS Tools CLI.
+
+Import ROS2 Messages:
+----------------------
+.. code-block:: bash
+
+   python -m capella_ros_tools import -i <INPUT> -m <MODEL> -l <LAYER> -o <OUTPUT>