Skip to content
Open
28 changes: 28 additions & 0 deletions .github/workflows/list-packages.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,28 @@
---
# Weekly report of local package versions vs. what is published on
# PyPI / Test PyPI, written to the workflow run's step summary.
name: List package versions (PyPI / Test PyPI)

on:
  workflow_dispatch:
  schedule:
    # Run every Monday at 9:00 AM UTC
    - cron: '0 9 * * 1'

# Read-only job: it only inspects the repo and queries PyPI.
permissions:
  contents: read

jobs:
  list-packages:
    name: List local and PyPI versions
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4

      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: '3.10'

      - name: Install dependencies
        run: |
          python -m pip install packaging

      - name: Discover local packages and check PyPI versions
        # Markdown output is appended to the run's step summary page.
        run: |
          python scripts/discover_packages.py --output-format json > packages.json
          python scripts/list_package_versions.py --output-format markdown --packages-json packages.json >> "$GITHUB_STEP_SUMMARY"
300 changes: 300 additions & 0 deletions .github/workflows/pkg-publish.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,300 @@
---
# Manually-triggered release pipeline: discover/validate the requested
# packages, build wheels + sdists in a matrix, then publish to either
# Test PyPI or PyPI (mutually exclusive via `publish_target`) using
# OIDC trusted publishing (id-token: write, no stored API tokens).
name: Publish package to PyPI / Test PyPI

on:
  workflow_dispatch:
    inputs:
      package:
        description: "Package(s) to publish. Single package: 'rai_core'. Multiple packages (comma-separated): 'rai_core,rai-perception'."
        required: true
        type: string
      publish_target:
        description: "Publish target"
        required: true
        type: choice
        options:
          - test-pypi
          - pypi

jobs:
  discover-packages:
    name: Discover packages and validate
    runs-on: ubuntu-latest
    outputs:
      packages_json: ${{ steps.discover.outputs.packages_json }}
    steps:
      - uses: actions/checkout@v4

      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: '3.10'

      - name: Discover and validate packages
        id: discover
        env:
          # Free-text workflow_dispatch input: pass via env instead of
          # interpolating ${{ }} into the script to prevent shell injection.
          # publish_target is a fixed-choice input, so inlining it is safe.
          PACKAGE_INPUT: ${{ inputs.package }}
        run: |
          python scripts/discover_packages.py --output-format json > packages.json
          python scripts/validate_packages.py validate packages.json "$PACKAGE_INPUT" "${{ inputs.publish_target }}" "$GITHUB_OUTPUT"

  build_wheels:
    name: Build wheels
    runs-on: ubuntu-latest
    needs: [discover-packages]
    strategy:
      # Build every requested package even if one of them fails.
      fail-fast: false
      matrix:
        package: ${{ fromJson(needs.discover-packages.outputs.packages_json) }}
    steps:
      - uses: actions/checkout@v4

      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: '3.10'

      - name: Install build tools
        # once we move to python 3.11+, we can upgrade cibuildwheel to 3.3.0
        run: |
          python -m pip install --upgrade pip wheel build
          python -m pip install cibuildwheel==2.21.3 poetry

      - name: Check if package has C extensions
        id: check_extensions
        run: |
          python scripts/validate_packages.py check-c-ext
        env:
          PACKAGE_JSON: ${{ toJson(matrix.package) }}

      - name: Build wheels (pure Python)
        if: steps.check_extensions.outputs.has_c_extensions == 'false'
        working-directory: ${{ matrix.package.path }}
        run: |
          # Build universal wheels (py3-none-any.whl) for pure Python packages
          # This ensures PyPI accepts the wheels without platform tag issues
          python -m build --wheel
          mkdir -p wheelhouse
          cp dist/*.whl wheelhouse/

      - name: Build wheels (with C extensions)
        if: steps.check_extensions.outputs.has_c_extensions == 'true'
        working-directory: ${{ matrix.package.path }}
        env:
          # NOTE(review): cp311 is skipped although requires-python allows
          # 3.11 — 3.11 users will fall back to the sdist. Confirm intentional.
          CIBW_BUILD: cp310-* cp312-*
          # Alpine Linux (musllinux) is not supported due to Rust compilation issues on musl-based systems
          CIBW_SKIP: pp* *-win32 *-manylinux_i686 *-musllinux*
          CIBW_BEFORE_BUILD: |
            set -e
            # Upgrade pip first to ensure it can find pre-built wheels
            python -m pip install --upgrade pip wheel setuptools
            # Install Rust compiler for packages like tiktoken that need to build from source
            if ! command -v rustc &> /dev/null; then
              curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y --default-toolchain stable
              export PATH="$HOME/.cargo/bin:$PATH"
            fi
            # Install Poetry (already installed in main step, but needed in container)
            python -m pip install poetry
            # Configure pip to prefer binary wheels over source builds
            export PIP_PREFER_BINARY=1
            # Only use Poetry if poetry.lock exists
            if [ -f poetry.lock ]; then
              poetry install --no-interaction --no-root --only main
            fi
          CIBW_BUILD_FRONTEND: build
          CIBW_PROJECT_REQUIRES_PYTHON: ">=3.10,<3.13"
          CIBW_MANYLINUX_X86_64_IMAGE: manylinux_2_28
          CIBW_MANYLINUX_AARCH64_IMAGE: manylinux_2_28
          # add Rust's bin directory for compiling pkg like tiktoken
          CIBW_ENVIRONMENT: 'PATH="$HOME/.cargo/bin:$PATH"'
        run: |
          python -m cibuildwheel --output-dir wheelhouse

      - name: Upload wheels
        uses: actions/upload-artifact@v4
        with:
          name: wheels-${{ matrix.package.name }}
          path: ${{ matrix.package.path }}/wheelhouse/*.whl
          retention-days: 7

  build_sdist:
    name: Build source distributions
    runs-on: ubuntu-latest
    needs: [discover-packages]
    strategy:
      fail-fast: false
      matrix:
        package: ${{ fromJson(needs.discover-packages.outputs.packages_json) }}
    steps:
      - uses: actions/checkout@v4

      - name: Install Poetry
        uses: snok/install-poetry@v1
        with:
          version: 2.1.1

      - name: Build source distribution
        working-directory: ${{ matrix.package.path }}
        run: |
          poetry build --format sdist

      - name: Upload source distribution
        uses: actions/upload-artifact@v4
        with:
          name: sdist-${{ matrix.package.name }}
          path: ${{ matrix.package.path }}/dist/*.tar.gz
          retention-days: 7

  publish_test_pypi:
    name: Publish to Test PyPI
    if: inputs.publish_target == 'test-pypi'
    runs-on: ubuntu-latest
    needs: [discover-packages, build_wheels, build_sdist]
    environment: test-pypi
    permissions:
      # Required for OIDC trusted publishing with pypa/gh-action-pypi-publish.
      id-token: write

    steps:
      - uses: actions/checkout@v4

      # Downloads are best-effort (continue-on-error): a package may produce
      # only wheels or only an sdist; the "Organize artifacts" step fails the
      # job if nothing at all was downloaded.
      - name: Download wheel artifacts
        uses: actions/download-artifact@v4
        continue-on-error: true
        with:
          pattern: wheels-*
          merge-multiple: true
          path: artifacts/wheels

      - name: Download sdist artifacts
        uses: actions/download-artifact@v4
        continue-on-error: true
        with:
          pattern: sdist-*
          merge-multiple: true
          path: artifacts/sdist

      - name: Organize artifacts
        run: |
          mkdir -p dist
          # Debug: show artifacts structure
          echo "Artifacts directory structure:"
          find artifacts -type f 2>/dev/null | head -20 || echo "No artifacts found"
          # One pass collects every wheel and sdist from both artifact dirs.
          find artifacts -type f \( -name "*.whl" -o -name "*.tar.gz" \) -exec cp {} dist/ \;
          # List contents for debugging
          echo "Contents of dist directory:"
          ls -la dist/ || true
          # Fail if no packages found
          if [ -z "$(ls -A dist/ 2>/dev/null)" ]; then
            echo "Error: No distribution packages found in dist/"
            exit 1
          fi

      - name: Publish to Test PyPI
        uses: pypa/gh-action-pypi-publish@release/v1
        with:
          packages-dir: dist
          repository-url: https://test.pypi.org/legacy/

      - name: Verify installation from Test PyPI
        env:
          # Pass the JSON through env: a single-quoted inline ${{ }} breaks
          # (or injects) if the JSON ever contains a single quote.
          PACKAGES_JSON: ${{ needs.discover-packages.outputs.packages_json }}
        run: |
          PACKAGE_NAMES=$(python scripts/validate_packages.py extract-names "$PACKAGES_JSON")
          pip install --index-url https://test.pypi.org/simple/ --extra-index-url https://pypi.org/simple/ $PACKAGE_NAMES
          echo "Packages published to Test PyPI: $PACKAGE_NAMES"

  publish_pypi:
    name: Publish to PyPI
    if: inputs.publish_target == 'pypi'
    runs-on: ubuntu-latest
    needs: [discover-packages, build_wheels, build_sdist]
    environment: pypi
    permissions:
      # Required for OIDC trusted publishing with pypa/gh-action-pypi-publish.
      id-token: write

    steps:
      - uses: actions/checkout@v4

      - name: Download wheel artifacts
        uses: actions/download-artifact@v4
        continue-on-error: true
        with:
          pattern: wheels-*
          merge-multiple: true
          path: artifacts/wheels

      - name: Download sdist artifacts
        uses: actions/download-artifact@v4
        continue-on-error: true
        with:
          pattern: sdist-*
          merge-multiple: true
          path: artifacts/sdist

      - name: Organize artifacts
        run: |
          mkdir -p dist
          # Debug: show artifacts structure
          echo "Artifacts directory structure:"
          find artifacts -type f 2>/dev/null | head -20 || echo "No artifacts found"
          # One pass collects every wheel and sdist from both artifact dirs.
          find artifacts -type f \( -name "*.whl" -o -name "*.tar.gz" \) -exec cp {} dist/ \;
          # List contents for debugging
          echo "Contents of dist directory:"
          ls -la dist/ || true
          # Fail if no packages found
          if [ -z "$(ls -A dist/ 2>/dev/null)" ]; then
            echo "Error: No distribution packages found in dist/"
            exit 1
          fi

      - name: Publish to PyPI
        uses: pypa/gh-action-pypi-publish@release/v1
        with:
          packages-dir: dist

      - name: Verify installation from PyPI
        env:
          # See publish_test_pypi: inline JSON in single quotes is fragile.
          PACKAGES_JSON: ${{ needs.discover-packages.outputs.packages_json }}
        run: |
          PACKAGE_NAMES=$(python scripts/validate_packages.py extract-names "$PACKAGES_JSON")
          pip install $PACKAGE_NAMES
          echo "Packages published to PyPI: $PACKAGE_NAMES"
21 changes: 21 additions & 0 deletions CONTRIBUTING.md
Original file line number Diff line number Diff line change
Expand Up @@ -59,6 +59,27 @@ colcon build --symlink-install
pytest tests/
```

### Package naming for PyPI releases

When creating a new package for PyPI release, please use **hyphens** (`-`) in the package name defined in `pyproject.toml`. For example, use `rai-awesomepackage` rather than `rai_awesomepackage`.

Define the package name in your `pyproject.toml` like this:

```toml
[tool.poetry]
name = "rai-awesomepackage"
version = "0.1.0"
# ...
```

This convention:

- Aligns with PyPI best practices and improves readability
- Works seamlessly with our publishing workflow (which supports both formats for backward compatibility)
- Makes package names more user-friendly in URLs and command-line usage

Note: The Python import name (in the `packages` field) typically uses underscores to match Python module naming conventions, which is perfectly fine. The distinction is between the PyPI package name (hyphens) and the Python module name (underscores).

### Starting the discussion

Always try to engage in discussion first. Browse Issues and RFCs or start a discussion on
Expand Down
Loading
Loading