Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .gitignore
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
/.idea/
.venv/
build
/build/
cmake/sodium_version
cmake/curl_version
cmake/zlib_version
Expand Down
1 change: 1 addition & 0 deletions .gitlab-ci.yml
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,7 @@ include:
- local: .gitlab/stage_test.yml
- local: .gitlab/stage_unit.yml
- local: .gitlab/end_to_end.yml
- local: .gitlab/stage_deploy_pypi.yml

stages:
- pipeline-serialize
Expand Down
48 changes: 48 additions & 0 deletions .gitlab/build/build_python_client_image.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,48 @@
---
# Child-pipeline definition that builds (or retags) the DataFed python-client
# Docker image. Presumably delivered as the python_client_image.yml trigger
# artifact selected by check-python-client-image — confirm against that job.
stages:
  - build

include:
  - local: .gitlab/common.yml

# Build the python-client image only when files that feed the Python package
# changed in this pipeline.
build-python-client:
  extends: .docker_build_script
  stage: build
  variables:
    PROJECT: "datafed"
    COMPONENT: "python-client"
    GIT_STRATEGY: clone
    DOCKER_FILE_PATH: "python/datafed_pkg/docker/Dockerfile"
    DATAFED_HARBOR_REGISTRY: "$REGISTRY" # needed by c_harbor_artifact_count
    BUILD_INTERMEDIATE: "FALSE"
  tags:
    - docker
  rules:
    # Run the build when any python-client input changed.
    - changes:
        - python/**/*
        - common/**/*
        - CMakeLists.txt
        - cmake/**/*
        - .gitlab-ci.yml
      when: on_success

# Complement of build-python-client: when no relevant input changed, skip the
# build and retag the existing image instead.
retag-image:
  extends: .docker_retag_image
  stage: build
  variables:
    PROJECT: "datafed"
    COMPONENT: "python-client"
    GIT_STRATEGY: clone
    DATAFED_HARBOR_REGISTRY: "$REGISTRY" # needed by c_harbor_artifact_count
    BUILD_INTERMEDIATE: "FALSE"
  tags:
    - docker
  rules:
    # Inputs changed -> build-python-client runs instead, so never retag.
    - changes:
        - python/**/*
        - common/**/*
        - CMakeLists.txt
        - cmake/**/*
        - .gitlab-ci.yml
      when: never
    # No relevant changes -> retag the previously built image.
    - when: on_success
19 changes: 19 additions & 0 deletions .gitlab/build/force_build_python_client_image.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,19 @@
---
# Unconditional variant of build_python_client_image.yml: always builds the
# python-client image (no change-based rules, no retag fallback). Presumably
# emitted by the image-check stage when a rebuild is forced — confirm.
stages:
  - build

include:
  - local: .gitlab/common.yml

build-python-client:
  extends: .docker_build_script
  stage: build
  variables:
    PROJECT: "datafed"
    COMPONENT: "python-client"
    GIT_STRATEGY: clone
    DOCKER_FILE_PATH: "python/datafed_pkg/docker/Dockerfile"
    DATAFED_HARBOR_REGISTRY: "$REGISTRY" # needed by c_harbor_artifact_count
    BUILD_INTERMEDIATE: "FALSE"
  tags:
    - docker
16 changes: 16 additions & 0 deletions .gitlab/stage_build.yml
Original file line number Diff line number Diff line change
Expand Up @@ -102,3 +102,19 @@ run-foxx-build-job:
REGISTRY: "${REGISTRY}"
HARBOR_USER: "${HARBOR_USER}"
HARBOR_DATAFED_GITLAB_CI_REGISTRY_TOKEN: "${HARBOR_DATAFED_GITLAB_CI_REGISTRY_TOKEN}"

# Triggers the child pipeline that produces the python-client image. The
# pipeline definition is the python_client_image.yml artifact generated by
# check-python-client-image (presumably selecting between the change-gated
# and force build variants — confirm against .gitlab/stage_image_check.yml).
run-python-client-build-job:
  needs:
    - job: run-build-dependencies
    - job: check-python-client-image
      # Fetch the generated python_client_image.yml artifact.
      artifacts: true
  stage: build
  trigger:
    include:
      - artifact: python_client_image.yml
        job: check-python-client-image
    # Mirror the child pipeline's status in this job.
    strategy: depend
  variables:
    REGISTRY: "${REGISTRY}"
    HARBOR_USER: "${HARBOR_USER}"
    HARBOR_DATAFED_GITLAB_CI_REGISTRY_TOKEN: "${HARBOR_DATAFED_GITLAB_CI_REGISTRY_TOKEN}"
92 changes: 92 additions & 0 deletions .gitlab/stage_deploy_pypi.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,92 @@
# GitLab CI configuration for deploying DataFed Python package to PyPI
#
# This job builds the Python package fresh and deploys it to PyPI using twine.
# The version numbers are automatically determined from cmake/Version.cmake.
#
# Required GitLab CI/CD Variables:
# - PYPI_PASSWORD: PyPI password or API token
#
# Optional Variables (with defaults):
# - PYPI_USERNAME: PyPI username (default: "__token__" for API token auth)
# - PYPI_PRERELEASE: If "true", append pre-release identifier (default: "true")
# - PYPI_REPOSITORY_URL: PyPI repository URL (default: "https://upload.pypi.org/legacy/" for production PyPI)
# Use "https://test.pypi.org/legacy/" for TestPyPI
# - DATAFED_PYPI_REPO: PyPI package name (default: "datafed")
# Use a different name like "datafed-dev" to upload to a different package on PyPI

# Hidden base job: builds the Python distributions inside Docker and uploads
# them to PyPI with twine. Concrete jobs (e.g. deploy:pypi:manual) extend it.
# NOTE(review): the stage "deploy-pypi-package" must be declared in the
# parent pipeline's top-level `stages:` list — confirm.
.deploy_pypi_base:
  stage: deploy-pypi-package
  variables:
    PYPI_USERNAME: "__token__"
    PYPI_PRERELEASE: "true"
    PYPI_REPOSITORY_URL: "https://upload.pypi.org/legacy/"
    DATAFED_PYPI_REPO: "datafed"
  before_script:
    - docker login "${REGISTRY}" -u "${HARBOR_USER}" -p "${HARBOR_DATAFED_GITLAB_CI_REGISTRY_TOKEN}"
  script:
    # Resolve the dependencies-image tag from the submodule SHA; a leading
    # "-" (uninitialized submodule) is stripped.
    - DOWNSTREAM_SHA=$( git submodule status ./external/DataFedDependencies/ | awk '{print $1}' )
    - DOWNSTREAM_SHA=${DOWNSTREAM_SHA#-}
    - mkdir -p dist

    # Modify Version.cmake to add pre-release identifier if needed
    # NOTE(review): the sed patterns must match cmake/Version.cmake verbatim
    # (including the "IDENTIFER" spelling); on a mismatch the sed is a silent
    # no-op and a full-release version would be uploaded — confirm.
    - |
      if [ "${PYPI_PRERELEASE}" = "true" ]; then
        echo "Building package with pre-release identifier (rc${CI_PIPELINE_IID})..."
        sed -i "s/set(DATAFED_PYTHON_CLIENT_RELEASE_TYPE \"\")/set(DATAFED_PYTHON_CLIENT_RELEASE_TYPE \"rc\")/" cmake/Version.cmake
        sed -i "s/set(DATAFED_PYTHON_CLIENT_PRE_RELEASE_IDENTIFER \"\")/set(DATAFED_PYTHON_CLIENT_PRE_RELEASE_IDENTIFER \"${CI_PIPELINE_IID}\")/" cmake/Version.cmake
      else
        echo "Building package as full release..."
      fi

    # Build the Python package using existing Dockerfile
    # (the possibly-modified cmake/ tree is copied into the image, so the rc
    # identifier is baked into the built distributions)
    - |
      docker build \
        --build-arg DEPENDENCIES="${REGISTRY}/datafed/dependencies:$DOWNSTREAM_SHA" \
        --build-arg DATAFED_PYPI_REPO="${DATAFED_PYPI_REPO}" \
        -f python/datafed_pkg/docker/Dockerfile \
        -t datafed-python-client-deploy:latest \
        .

    # Extract packages
    - docker run --rm -v "$(pwd)/dist:/output" datafed-python-client-deploy:latest sh -c "cp /dist/* /output/"

    # Restore Version.cmake to original state if modified
    - |
      if [ "${PYPI_PRERELEASE}" = "true" ]; then
        git checkout cmake/Version.cmake
      fi

    # Verify packages exist
    - ls -lh dist/

    # Upload to PyPI using twine
    # (credentials are passed as env vars and expanded by the container
    # shell — the \$ escapes keep the values out of the CI job log)
    - |
      docker run --rm \
        -v "$(pwd)/dist:/dist" \
        -e PYPI_USERNAME="${PYPI_USERNAME}" \
        -e PYPI_PASSWORD="${PYPI_PASSWORD}" \
        -e PYPI_REPOSITORY_URL="${PYPI_REPOSITORY_URL}" \
        datafed-python-client-deploy:latest \
        sh -c "twine upload --non-interactive --repository-url \$PYPI_REPOSITORY_URL --username \$PYPI_USERNAME --password \$PYPI_PASSWORD /dist/*"
  # Keep the built distributions for a week for inspection / manual re-upload.
  artifacts:
    paths:
      - dist/
    expire_in: 1 week
  tags:
    - docker

# Manual deployment job - can be triggered via button in GitLab UI.
# Fix: a job-level `when:` key may not be combined with `rules:` — GitLab
# rejects the configuration ("key may not be used with `rules`: when").
# The manual gating belongs solely inside `rules`.
deploy:pypi:manual:
  extends: .deploy_pypi_base
  rules:
    - when: manual
Comment on lines +78 to +83
Copy link
Copy Markdown
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I think this is the best choice too.


# Automatic deployment on tags (optional - commented out by default)
# Uncomment to enable automatic deployment when tags are pushed
# deploy:pypi:tag:
# extends: .deploy_pypi_base
# needs:
# - job: run-python-client-build-job
# only:
# - tags
8 changes: 8 additions & 0 deletions .gitlab/stage_image_check.yml
Original file line number Diff line number Diff line change
Expand Up @@ -50,3 +50,11 @@ check-foxx-image:
PROJECT: "datafed"
COMPONENT: "foxx"
BUILD_INTERMEDIATE: "FALSE"

# Checks whether a current python-client image already exists in the
# registry; its output (presumably the python_client_image.yml child-pipeline
# artifact consumed by run-python-client-build-job) selects build vs. retag.
# NOTE(review): COMPONENT uses an underscore ("python_client") while every
# build job uses "python-client" — confirm the underscore is required to
# produce the python_client_image.yml artifact name; otherwise align them.
check-python-client-image:
  extends: .image_check
  stage: image-check
  variables:
    PROJECT: "datafed"
    COMPONENT: "python_client"
    BUILD_INTERMEDIATE: "FALSE"
2 changes: 1 addition & 1 deletion CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -200,7 +200,7 @@ endif()

if( BUILD_PYTHON_CLIENT )
# make target = pydatafed
file(COPY ${PROJECT_SOURCE_DIR}/external/DataFedDependencies/python/datafed_pkg/requirements.txt DESTINATION ${PROJECT_SOURCE_DIR}/python/datafed_pkg/requirements.txt)
file(COPY ${PROJECT_SOURCE_DIR}/external/DataFedDependencies/python/datafed_pkg/requirements.txt DESTINATION ${PROJECT_SOURCE_DIR}/python/datafed_pkg/)
add_subdirectory( python EXCLUDE_FROM_ALL )
endif()

Expand Down
3 changes: 0 additions & 3 deletions docs/_static/js/html5shiv-printshiv.min.js
Original file line number Diff line number Diff line change
Expand Up @@ -88,7 +88,6 @@
f = RegExp("^(?:" + d().join("|") + ")$", "i"),
g = [];
e--;

)
((b = c[e]), f.test(b.nodeName) && g.push(b.applyElement(l(b))));
return g;
Expand All @@ -100,7 +99,6 @@
d = c.length,
e = a.ownerDocument.createElement(A + ":" + a.nodeName);
d--;

)
((b = c[d]), b.specified && e.setAttribute(b.nodeName, b.nodeValue));
return ((e.style.cssText = a.style.cssText), e);
Expand All @@ -113,7 +111,6 @@
f = RegExp("(^|[\\s,>+~])(" + d().join("|") + ")(?=[[\\s,>+~#.:]|$)", "gi"),
g = "$1" + A + "\\:$2";
e--;

)
((b = c[e] = c[e].split("}")),
(b[b.length - 1] = b[b.length - 1].replace(f, g)),
Expand Down
93 changes: 93 additions & 0 deletions python/datafed_pkg/docker/Dockerfile
Original file line number Diff line number Diff line change
@@ -0,0 +1,93 @@
# NOTE this image must be built with respect to the base of the project i.e.
# cd ${PROJECT_ROOT} or cd DataFed
# docker build -f python/datafed_pkg/docker/Dockerfile .
#
# Two stages:
#   python-client-build  - configures the CMake python-client target against
#                          the pre-built dependencies image and produces
#                          sdist + wheel distributions.
#   python-client-deploy - minimal image carrying only the distributions and
#                          twine; CI runs the upload command in it.

ARG BUILD_BASE="debian:bookworm-slim"
ARG DEPENDENCIES="dependencies"
ARG DATAFED_DIR="/datafed"
ARG DATAFED_INSTALL_PATH="/opt/datafed"
ARG DATAFED_DEPENDENCIES_INSTALL_PATH="/opt/datafed/dependencies"
ARG BUILD_DIR="$DATAFED_DIR/source"
ARG DATAFED_DEPENDENCIES_ROOT="$BUILD_DIR/external/DataFedDependencies"
ARG DATAFED_PYPI_REPO="datafed"

FROM ${DEPENDENCIES} AS python-client-build

SHELL ["/bin/bash", "-c"]

# Re-declare the global ARGs used by this stage (ARG values do not cross a
# FROM boundary unless re-declared).
ARG DATAFED_DIR
ARG BUILD_DIR
ARG DATAFED_INSTALL_PATH
ARG DATAFED_DEPENDENCIES_INSTALL_PATH
ARG DATAFED_DEPENDENCIES_ROOT
ARG DATAFED_PYPI_REPO

ENV DATAFED_INSTALL_PATH="${DATAFED_INSTALL_PATH}"
ENV DATAFED_DEPENDENCIES_INSTALL_PATH="${DATAFED_DEPENDENCIES_INSTALL_PATH}"
ENV DATAFED_PYPI_REPO="${DATAFED_PYPI_REPO}"

# Relocate helper scripts shipped in the dependencies image into the layout
# the DataFed build scripts expect.
# NOTE(review): assumes the dependencies image's working directory contains
# ./scripts — confirm against the dependencies Dockerfile.
RUN mkdir -p ${DATAFED_DEPENDENCIES_ROOT}/scripts/ && \
    mv ./scripts/dependency_versions.sh ${DATAFED_DEPENDENCIES_ROOT}/scripts/ && \
    mv ./scripts/generate_dependencies_config.sh ${DATAFED_DEPENDENCIES_ROOT}/scripts/

COPY ./common ${BUILD_DIR}/common
COPY ./python ${BUILD_DIR}/python
COPY ./CMakeLists.txt ${BUILD_DIR}
COPY ./scripts/generate_datafed.sh ${BUILD_DIR}/scripts/
COPY ./cmake ${BUILD_DIR}/cmake
COPY ./external/DataFedDependencies ${BUILD_DIR}/external/DataFedDependencies

# Configure and build the Python package (generates protobuf files and
# VERSION.py); only the python-client target is enabled.
# NOTE(review): `cmake -S.` relies on the base image's WORKDIR being
# ${BUILD_DIR} — confirm against the dependencies Dockerfile.
RUN ${DATAFED_DEPENDENCIES_ROOT}/scripts/generate_dependencies_config.sh && \
    ${BUILD_DIR}/scripts/generate_datafed.sh && \
    ${DATAFED_DEPENDENCIES_INSTALL_PATH}/bin/cmake -S. -B build \
      -DBUILD_REPO_SERVER=False \
      -DBUILD_AUTHZ=False \
      -DBUILD_CORE_SERVER=False \
      -DBUILD_WEB_SERVER=False \
      -DBUILD_DOCS=False \
      -DBUILD_PYTHON_CLIENT=True \
      -DBUILD_FOXX=False \
      -DENABLE_INTEGRATION_TESTS=False

RUN ${DATAFED_DEPENDENCIES_INSTALL_PATH}/bin/cmake --build build --target pydatafed

# Install Python build tools; --break-system-packages is required on Debian
# bookworm to install outside a virtualenv (PEP 668).
RUN apt-get update && apt-get install -y python3-pip && rm -rf /var/lib/apt/lists/*
RUN pip3 install --no-cache-dir --upgrade pip setuptools wheel twine --break-system-packages

# Install package dependencies from requirements.txt
WORKDIR ${BUILD_DIR}/python/datafed_pkg
RUN pip3 install --no-cache-dir -r requirements.txt --break-system-packages

# Move to the CMake-generated package directory and bring requirements.txt
# along so it ships next to the generated setup files.
WORKDIR ${BUILD_DIR}/build/python/datafed_pkg
RUN cp ${BUILD_DIR}/python/datafed_pkg/requirements.txt .

# Clean any previous builds and build the distributions.
# NOTE(review): `setup.py sdist bdist_wheel` is deprecated upstream; consider
# migrating to `python3 -m build` (needs the `build` package) in a follow-up.
RUN rm -rf dist/ build/ *.egg-info/ && \
    python3 setup.py sdist bdist_wheel

# Create output directory for artifacts
RUN mkdir -p /output && cp -r dist/* /output/

# Final stage for deployment - minimal image with just the built packages.
# (Removed the unused ARG DATAFED_DIR / ARG BUILD_DIR re-declarations:
# nothing in this stage references them.)
FROM ${BUILD_BASE} AS python-client-deploy

# Install Python and twine for PyPI upload
RUN apt-get update && \
    apt-get install -y python3-pip && \
    rm -rf /var/lib/apt/lists/*

RUN pip3 install --no-cache-dir --upgrade pip twine --break-system-packages

# Copy built distributions from build stage
COPY --from=python-client-build /output /dist

WORKDIR /dist

# The deploy command will be run via CI
CMD ["/bin/bash"]
Loading