# ========================================================================================= # Copyright (C) 2021 Orange & contributors # # This program is free software; you can redistribute it and/or modify it under the terms # of the GNU Lesser General Public License as published by the Free Software Foundation; # either version 3 of the License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; # without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. # See the GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public License along with this # program; if not, write to the Free Software Foundation, Inc., 51 Franklin Street, Fifth # Floor, Boston, MA 02110-1301, USA. # ========================================================================================= variables: # Change pip's cache directory to be inside the project directory since we can # only cache local items. PIP_CACHE_DIR: "$CI_PROJECT_DIR/.cache/pip" # Poetry support: force virtualenv not in project dir & use local cache dir POETRY_CACHE_DIR: "$CI_PROJECT_DIR/.cache/poetry" POETRY_VIRTUALENVS_IN_PROJECT: "false" PIPENV_CACHE_DIR: "$CI_PROJECT_DIR/.cache/pipenv" PYTHON_IMAGE: python:3 # Default Python project root directory PYTHON_PROJECT_DIR: . REQUIREMENTS_FILE: requirements.txt TEST_REQUIREMENTS_FILE: test-requirements.txt SETUP_PY_DIR: "." # default production ref name (pattern) PROD_REF: '/^(master|main)$/' # default integration ref name (pattern) INTEG_REF: '/^develop$/' # compileall PYTHON_COMPILE_ARGS: "*" BANDIT_ARGS: "--recursive ." # Safety tool SAFETY_ARGS: "--full-report" # Trivy tool PYTHON_TRIVY_IMAGE: aquasec/trivy:latest PYTHON_TRIVY_ARGS: "--vuln-type library" RELEASE_VERSION_PART: "minor" # By default, publish on the Packages registry of the project # https://docs.gitlab.com/ee/user/packages/pypi_repository/#authenticate-with-a-ci-job-token TWINE_REPOSITORY_URL: ${CI_SERVER_URL}/api/v4/projects/${CI_PROJECT_ID}/packages/pypi TWINE_USERNAME: 'gitlab-ci-token' TWINE_PASSWORD: $CI_JOB_TOKEN .python-scripts: &python-scripts | # BEGSCRIPT set -e function log_info() { echo -e "[\\e[1;94mINFO\\e[0m] $*" } function log_warn() { echo -e "[\\e[1;93mWARN\\e[0m] $*" } function log_error() { echo -e "[\\e[1;91mERROR\\e[0m] $*" } function assert_defined() { if [[ -z "$1" ]] then log_error "$2" exit 1 fi } function install_ca_certs() { certs=$1 if [[ -z "$certs" ]] then return fi # import in system if echo "$certs" >> /etc/ssl/certs/ca-certificates.crt then log_info "CA certificates imported in \\e[33;1m/etc/ssl/certs/ca-certificates.crt\\e[0m" fi if echo "$certs" >> /etc/ssl/cert.pem then log_info "CA certificates imported in \\e[33;1m/etc/ssl/cert.pem\\e[0m" fi # import in Java keystore (if keytool command found) if command -v keytool > /dev/null then # shellcheck disable=SC2046 javahome=${JAVA_HOME:-$(dirname $(readlink -f $(command -v java)))/..} # shellcheck disable=SC2086 keystore=${JAVA_KEYSTORE_PATH:-$(ls -1 $javahome/jre/lib/security/cacerts 2>/dev/null || ls -1 $javahome/lib/security/cacerts 2>/dev/null || echo "")} if [[ -f "$keystore" ]] then storepass=${JAVA_KEYSTORE_PASSWORD:-changeit} nb_certs=$(echo "$certs" | grep -c 'END CERTIFICATE') log_info "importing $nb_certs certificates in Java keystore \\e[33;1m$keystore\\e[0m..." for idx in $(seq 0 $((nb_certs - 1))) do # TODO: use keytool option -trustcacerts ? if echo "$certs" | awk "n==$idx { print }; /END CERTIFICATE/ { n++ }" | keytool -noprompt -import -alias "imported CA Cert $idx" -keystore "$keystore" -storepass "$storepass" then log_info "... CA certificate [$idx] successfully imported" else log_warn "... Failed importing CA certificate [$idx]: abort" return fi done else log_warn "Java keystore \\e[33;1m$keystore\\e[0m not found: could not import CA certificates" fi fi # variable REQUESTS_CA_BUNDLE for Python if Python installed if command -v python > /dev/null then export REQUESTS_CA_BUNDLE=/etc/ssl/certs/ca-certificates.crt log_info "Python requests \\e[33;1m\$REQUESTS_CA_BUNDLE\\e[0m variable set" fi } function unscope_variables() { _scoped_vars=$(env | awk -F '=' "/^scoped__[a-zA-Z0-9_]+=/ {print \$1}" | sort) if [[ -z "$_scoped_vars" ]]; then return; fi log_info "Processing scoped variables..." for _scoped_var in $_scoped_vars do _fields=${_scoped_var//__/:} _condition=$(echo "$_fields" | cut -d: -f3) case "$_condition" in if) _not="";; ifnot) _not=1;; *) log_warn "... unrecognized condition \\e[1;91m$_condition\\e[0m in \\e[33;1m${_scoped_var}\\e[0m" continue ;; esac _target_var=$(echo "$_fields" | cut -d: -f2) _cond_var=$(echo "$_fields" | cut -d: -f4) _cond_val=$(eval echo "\$${_cond_var}") _test_op=$(echo "$_fields" | cut -d: -f5) case "$_test_op" in defined) if [[ -z "$_not" ]] && [[ -z "$_cond_val" ]]; then continue; elif [[ "$_not" ]] && [[ "$_cond_val" ]]; then continue; fi ;; equals|startswith|endswith|contains|in|equals_ic|startswith_ic|endswith_ic|contains_ic|in_ic) # comparison operator # sluggify actual value _cond_val=$(echo "$_cond_val" | tr '[:punct:]' '_') # retrieve comparison value _cmp_val_prefix="scoped__${_target_var}__${_condition}__${_cond_var}__${_test_op}__" _cmp_val=${_scoped_var#"$_cmp_val_prefix"} # manage 'ignore case' if [[ "$_test_op" == *_ic ]] then # lowercase everything _cond_val=$(echo "$_cond_val" | tr '[:upper:]' '[:lower:]') _cmp_val=$(echo "$_cmp_val" | tr '[:upper:]' '[:lower:]') fi case "$_test_op" in equals*) if [[ -z "$_not" ]] && [[ "$_cond_val" != "$_cmp_val" ]]; then continue; elif [[ "$_not" ]] && [[ "$_cond_val" == "$_cmp_val" ]]; then continue; fi ;; startswith*) if [[ -z "$_not" ]] && [[ "$_cond_val" != "$_cmp_val"* ]]; then continue; elif [[ "$_not" ]] && [[ "$_cond_val" == "$_cmp_val"* ]]; then continue; fi ;; endswith*) if [[ -z "$_not" ]] && [[ "$_cond_val" != *"$_cmp_val" ]]; then continue; elif [[ "$_not" ]] && [[ "$_cond_val" == *"$_cmp_val" ]]; then continue; fi ;; contains*) if [[ -z "$_not" ]] && [[ "$_cond_val" != *"$_cmp_val"* ]]; then continue; elif [[ "$_not" ]] && [[ "$_cond_val" == *"$_cmp_val"* ]]; then continue; fi ;; in*) if [[ -z "$_not" ]] && [[ "__${_cmp_val}__" != *"__${_cond_val}__"* ]]; then continue; elif [[ "$_not" ]] && [[ "__${_cmp_val}__" == *"__${_cond_val}__"* ]]; then continue; fi ;; esac ;; *) log_warn "... unrecognized test operator \\e[1;91m${_test_op}\\e[0m in \\e[33;1m${_scoped_var}\\e[0m" continue ;; esac # matches _val=$(eval echo "\$${_target_var}") log_info "... apply \\e[32m${_target_var}\\e[0m from \\e[32m\$${_scoped_var}\\e[0m${_val:+ (\\e[33;1moverwrite\\e[0m)}" _val=$(eval echo "\$${_scoped_var}") export "${_target_var}"="${_val}" done log_info "... done" } function guess_build_system() { if [[ "$PYTHON_BUILD_SYSTEM" ]] then case "$PYTHON_BUILD_SYSTEM" in poetry) log_info "--- Build system explictly declared: Poetry" return ;; setuptools) log_info "--- Build system explictly declared: Setuptools" return ;; pipenv) log_info "--- Build system explictly declared: Pipenv" return ;; reqfile) log_info "--- Build system explictly declared: requirements file" return ;; *) log_warn "--- Unknown declared build system: \\e[33;1m${PYTHON_BUILD_SYSTEM}\\e[0m: please read template doc" ;; esac fi if [[ -f "pyproject.toml" ]] then # that might be PEP 517 if a build-backend is specified # otherwise it might be only used as configuration file for development tools... build_backend=$(sed -rn 's/^build-backend *= *"([^"]*)".*/\1/p' pyproject.toml) if [[ "$build_backend" ]] then case "$build_backend" in poetry.core.masonry.api) log_info "--- Build system auto-detected: PEP 517 with Poetry backend" export PYTHON_BUILD_SYSTEM="poetry" return ;; setuptools.build_meta) log_info "--- Build system auto-detected: PEP 517 with Setuptools backend" export PYTHON_BUILD_SYSTEM="setuptools" return ;; *) log_error "--- Build system auto-detected: PEP 517 with unsupported backend \\e[33;1m${build_backend}\\e[0m: please read template doc" exit 1 ;; esac fi fi if [[ -f "setup.py" ]] then log_info "--- Build system auto-detected: Setuptools (legacy)" export PYTHON_BUILD_SYSTEM="setuptools" elif [[ -f "Pipfile" ]] then log_info "--- Build system auto-detected: Pipenv" export PYTHON_BUILD_SYSTEM="pipenv" elif [[ -f "${REQUIREMENTS_FILE}" ]] then log_info "--- Build system auto-detected: requirements file" export PYTHON_BUILD_SYSTEM="reqfile" else log_error "--- Build system auto-detect failed: please read template doc" exit 1 fi } # install requirements function install_requirements() { case "$PYTHON_BUILD_SYSTEM" in poetry) if [[ ! -f "poetry.lock" ]]; then log_warn "Using Poetry but \\e[33;1mpoetry.lock\\e[0m file not found: you shall commit it with your project files" fi # shellcheck disable=SC2086 pip install ${PIP_OPTS} poetry poetry install ${PYTHON_EXTRA_DEPS:+--extras "$PYTHON_EXTRA_DEPS"} ;; setuptools) # shellcheck disable=SC2086 pip install ${PIP_OPTS} setuptools # shellcheck disable=SC2086 pip install ${PIP_OPTS} ".${PYTHON_EXTRA_DEPS:+[$PYTHON_EXTRA_DEPS]}" ;; pipenv) # shellcheck disable=SC2086 pip install ${PIP_OPTS} pipenv if [[ ! -f "Pipfile.lock" ]]; then log_warn "Using Pipenv but \\e[33;1mPipfile.lock\\e[0m file not found: you shall commit it with your project files" pipenv install --dev --system else pipenv sync --dev --system fi ;; reqfile) if [[ -f "${REQUIREMENTS_FILE}" ]]; then log_info "--- installing build requirements from \\e[33;1m${REQUIREMENTS_FILE}\\e[0m" # shellcheck disable=SC2086 pip install ${PIP_OPTS} -r "${REQUIREMENTS_FILE}" if [[ -f "${TEST_REQUIREMENTS_FILE}" ]]; then log_info "--- installing test requirements from \\e[33;1m${TEST_REQUIREMENTS_FILE}\\e[0m" # shellcheck disable=SC2086 pip install ${PIP_OPTS} -r "${TEST_REQUIREMENTS_FILE}" fi else log_warn "--- requirements build system defined, but no ${REQUIREMENTS_FILE} file found" fi ;; esac } function _run() { if [[ "${PYTHON_BUILD_SYSTEM}" == "poetry" ]] then # shellcheck disable=SC2086 if ! command -v poetry > /dev/null; then pip install ${PIP_OPTS} poetry; fi poetry run "$@" else "$@" fi } function _python() { _run python "$@" } function _pip() { # shellcheck disable=SC2086 _run pip ${PIP_OPTS} "$@" } function _package() { case "$PYTHON_BUILD_SYSTEM" in poetry) # shellcheck disable=SC2086 if ! command -v poetry > /dev/null; then pip install ${PIP_OPTS} poetry; fi poetry build ;; setuptools) # shellcheck disable=SC2086 pip install ${PIP_OPTS} setuptools build python -m build ;; *) log_error "--- packaging is unsupported with $PYTHON_BUILD_SYSTEM build system: read template doc" exit 1 ;; esac } function _publish() { case "$PYTHON_BUILD_SYSTEM" in poetry) # shellcheck disable=SC2086 if ! command -v poetry > /dev/null; then pip install ${PIP_OPTS} poetry; fi poetry config repositories.user_defined "$TWINE_REPOSITORY_URL" poetry publish --username "$TWINE_USERNAME" --password "$TWINE_PASSWORD" --repository user_defined ;; setuptools) # shellcheck disable=SC2086 pip install ${PIP_OPTS} twine twine upload --verbose dist/*.tar.gz twine upload --verbose dist/*.whl ;; *) log_error "--- publish is unsupported with $PYTHON_BUILD_SYSTEM build system: read template doc" exit 1 ;; esac } function _release() { if [[ "${PYTHON_BUILD_SYSTEM}" == "poetry" ]] then # shellcheck disable=SC2086 if ! command -v poetry > /dev/null; then pip install ${PIP_OPTS} poetry; fi poetry version "${RELEASE_VERSION_PART}" else # shellcheck disable=SC2086 pip install ${PIP_OPTS} bumpversion if [[ -f ".bumpversion.cfg" ]]; then log_info "--- .bumpversion.cfg file found " export bumpversion_args="${RELEASE_VERSION_PART} --verbose" else log_info "--- No .bumpversion.cfg file found " if [[ -f "setup.py" ]]; then log_info "--- Getting current version of setup.py file " current_version=$(python setup.py --version) export bumpversion_args=" --verbose --current-version ${current_version} --tag --tag-name {new_version} --commit ${RELEASE_VERSION_PART} setup.py" else log_warn "--- No setup.py file found. Cannot perform release." fi fi log_info "--- Release args: ${bumpversion_args}" bumpversion "${bumpversion_args}" fi } function get_latest_template_version() { tag_json=$(wget -T 5 -q -O - "$CI_API_V4_URL/projects/to-be-continuous%2F$1/repository/tags?per_page=1" || echo "") echo "$tag_json" | sed -rn 's/^.*"name":"([^"]*)".*$/\1/p' } function check_for_update() { template="$1" actual="$2" latest=$(get_latest_template_version "$template") if [[ -n "$latest" ]] && [[ "$latest" != "$actual" ]] then log_warn "\\e[1;93m=======================================================================================================\\e[0m" log_warn "\\e[93mThe template \\e[32m$template\\e[93m:\\e[33m$actual\\e[93m you're using is not up-to-date: consider upgrading to version \\e[32m$latest\\e[0m" log_warn "\\e[93m(set \$TEMPLATE_CHECK_UPDATE_DISABLED to disable this message)\\e[0m" log_warn "\\e[1;93m=======================================================================================================\\e[0m" fi } if [[ "$TEMPLATE_CHECK_UPDATE_DISABLED" != "true" ]]; then check_for_update python "3.2.1"; fi unscope_variables # ENDSCRIPT ############################################################################################### # Generic python job # ############################################################################################### .python-base: image: $PYTHON_IMAGE services: - name: "$CI_REGISTRY/to-be-continuous/tools/tracking:master" command: ["--service", "python", "3.2.1"] # Cache downloaded dependencies and plugins between builds. # To keep cache across branches add 'key: "$CI_JOB_NAME"' cache: key: "$CI_COMMIT_REF_SLUG-python" paths: - ${PIP_CACHE_DIR} - ${POETRY_CACHE_DIR} - ${PIPENV_CACHE_DIR} before_script: - *python-scripts - install_ca_certs "${CUSTOM_CA_CERTS:-$DEFAULT_CA_CERTS}" - cd ${PYTHON_PROJECT_DIR} - guess_build_system ############################################################################################### # stages definition # ############################################################################################### stages: - build - test - package-build - publish ############################################################################################### # build stage # ############################################################################################### py-lint: extends: .python-base stage: build script: - mkdir -p reports - chmod o+rwx reports - install_requirements - _pip install pylint_gitlab - | if ! _run pylint --ignore=.cache --output-format=text ${PYLINT_ARGS} ${PYLINT_FILES:-$(find -type f -name "*.py")} then # failed: also generate codeclimate report _run pylint --ignore=.cache --output-format=pylint_gitlab.GitlabCodeClimateReporter ${PYLINT_ARGS} ${PYLINT_FILES:-$(find -type f -name "*.py")} > reports/pylint-codeclimate.json exit 1 else # success: generate empty codeclimate report (required by GitLab :( ) echo "[]" > reports/pylint-codeclimate.json fi artifacts: name: "$CI_JOB_NAME artifacts from $CI_PROJECT_NAME on $CI_COMMIT_REF_SLUG" expire_in: 1 day when: always reports: codequality: $PYTHON_PROJECT_DIR/reports/pylint-codeclimate.json paths: - $PYTHON_PROJECT_DIR/reports/ rules: # exclude merge requests - if: $CI_MERGE_REQUEST_ID when: never # on production branch(es): if $PYLINT_ENABLED is set - if: '$PYLINT_ENABLED == "true" && $CI_COMMIT_REF_NAME =~ $PROD_REF' # on integration branch(es): if $PYLINT_ENABLED is set - if: '$PYLINT_ENABLED == "true" && $CI_COMMIT_REF_NAME =~ $INTEG_REF' # on non-production, non-integration branches, with $PYLINT_ENABLED set: auto & non-blocking - if: '$PYLINT_ENABLED == "true"' allow_failure: true py-compile: extends: .python-base stage: build script: - install_requirements - _python -m compileall $PYTHON_COMPILE_ARGS rules: # exclude merge requests - if: $CI_MERGE_REQUEST_ID when: never # on any branch: only when none of supported unit test framework is enabled - if: '$UNITTEST_ENABLED != "true" && $PYTEST_ENABLED != "true" && $NOSETESTS_ENABLED != "true"' ############################################################################################### # test stage # ############################################################################################### py-unittest: extends: .python-base stage: build script: - mkdir -p reports - chmod o+rwx reports - install_requirements # code coverage - _pip install coverage # JUnit XML report - _pip install unittest-xml-reporting - _run coverage run -m xmlrunner discover -o "reports/" $UNITTEST_ARGS - _run coverage report -m - _run coverage xml -o "reports/coverage.xml" coverage: /^TOTAL.+?(\d+\%)$/ artifacts: name: "$CI_JOB_NAME artifacts from $CI_PROJECT_NAME on $CI_COMMIT_REF_SLUG" expire_in: 1 day when: always reports: junit: - $PYTHON_PROJECT_DIR/reports/TEST-*.xml cobertura: $PYTHON_PROJECT_DIR/reports/coverage.xml paths: - $PYTHON_PROJECT_DIR/reports/ rules: # exclude merge requests - if: $CI_MERGE_REQUEST_ID when: never # on any branch: when $UNITTEST_ENABLED is set - if: '$UNITTEST_ENABLED == "true"' py-pytest: extends: .python-base stage: build script: - mkdir -p reports - chmod o+rwx reports - install_requirements - _pip install pytest pytest-cov coverage - _python -m pytest --junit-xml=reports/TEST-pytests.xml --cov --cov-report term --cov-report xml:reports/coverage.xml ${PYTEST_ARGS} coverage: /^TOTAL.+?(\d+\%)$/ artifacts: name: "$CI_JOB_NAME artifacts from $CI_PROJECT_NAME on $CI_COMMIT_REF_SLUG" expire_in: 1 day when: always reports: junit: - $PYTHON_PROJECT_DIR/reports/TEST-*.xml cobertura: $PYTHON_PROJECT_DIR/reports/coverage.xml paths: - $PYTHON_PROJECT_DIR/reports/ rules: # exclude merge requests - if: $CI_MERGE_REQUEST_ID when: never # on any branch: when $PYTEST_ENABLED is set - if: '$PYTEST_ENABLED == "true"' py-nosetests: extends: .python-base stage: build script: - mkdir -p reports - chmod o+rwx reports - install_requirements - _run nosetests --with-xunit --xunit-file=reports/TEST-nosetests.xml --with-coverage --cover-erase --cover-xml --cover-xml-file=reports/coverage.xml --cover-html --cover-html-dir=reports/coverage ${NOSETESTS_ARGS} coverage: /^TOTAL.+?(\d+\%)$/ artifacts: name: "$CI_JOB_NAME artifacts from $CI_PROJECT_NAME on $CI_COMMIT_REF_SLUG" expire_in: 1 day when: always reports: junit: - $PYTHON_PROJECT_DIR/reports/TEST-*.xml cobertura: $PYTHON_PROJECT_DIR/reports/coverage.xml paths: - $PYTHON_PROJECT_DIR/reports/ rules: # exclude merge requests - if: $CI_MERGE_REQUEST_ID when: never # on any branch: when $NOSETESTS_ENABLED is set - if: '$NOSETESTS_ENABLED == "true"' # Bandit (SAST) py-bandit: extends: .python-base stage: test # force no dependencies dependencies: [] script: - mkdir -p reports - chmod o+rwx reports - install_requirements - _pip install bandit - | if ! _run bandit ${TRACE+--verbose} ${BANDIT_ARGS} then # failed: also generate JSON report _run bandit ${TRACE+--verbose} --format json --output reports/bandit.json ${BANDIT_ARGS} exit 1 fi artifacts: when: always name: "$CI_JOB_NAME artifacts from $CI_PROJECT_NAME on $CI_COMMIT_REF_SLUG" expire_in: 1 day paths: - $PYTHON_PROJECT_DIR/reports/ rules: # exclude merge requests - if: $CI_MERGE_REQUEST_ID when: never # on production branch(es): if $BANDIT_ENABLED is set - if: '$BANDIT_ENABLED == "true" && $CI_COMMIT_REF_NAME =~ $PROD_REF' # on integration branch(es): if $BANDIT_ENABLED is set - if: '$BANDIT_ENABLED == "true" && $CI_COMMIT_REF_NAME =~ $INTEG_REF' # on non-production, non-integration branches, with $BANDIT_ENABLED set: manual & non-blocking - if: '$BANDIT_ENABLED == "true"' when: manual allow_failure: true # Safety (dependency check) py-safety: extends: .python-base stage: test # force no dependencies dependencies: [] script: - mkdir -p reports - chmod o+rwx reports - install_requirements - _pip install safety - | if ! _pip freeze | _run safety check --stdin ${SAFETY_ARGS} then # failed: also generate JSON report _pip freeze | _run safety check --stdin --json --output reports/safety.json ${SAFETY_ARGS} exit 1 fi artifacts: name: "$CI_JOB_NAME artifacts from $CI_PROJECT_NAME on $CI_COMMIT_REF_SLUG" expire_in: 1 day when: always paths: - $PYTHON_PROJECT_DIR/reports/ rules: # exclude merge requests - if: $CI_MERGE_REQUEST_ID when: never # on production branch(es): if $SAFETY_ENABLED is set - if: '$SAFETY_ENABLED == "true" && $CI_COMMIT_REF_NAME =~ $PROD_REF' # on integration branch(es): if $SAFETY_ENABLED is set - if: '$SAFETY_ENABLED == "true" && $CI_COMMIT_REF_NAME =~ $INTEG_REF' # on non-production, non-integration branches, with $SAFETY_ENABLED set: manual & non-blocking - if: '$SAFETY_ENABLED == "true"' when: manual allow_failure: true # Trivy (dependency check) # Trivy only works if all dependencies are pinned to specific versions (e.g. with a poetry.lock file or a requirements.txt with all versions pinned) py-trivy: extends: .python-base image: name: $PYTHON_TRIVY_IMAGE entrypoint: [""] stage: test # force no dependencies dependencies: [] script: - mkdir -p reports - chmod o+rwx reports - | if [ $(trivy fs ${PYTHON_TRIVY_ARGS} --format table --exit-code 0 $PYTHON_PROJECT_DIR | grep -c "Number of language-specific files: 0") -eq 1 ]; then log_error "Could not find a file listing all dependencies with their versions." exit 1 fi trivy fs ${PYTHON_TRIVY_ARGS} --format table --exit-code 0 $PYTHON_PROJECT_DIR trivy fs ${PYTHON_TRIVY_ARGS} --format json --output reports/trivy-python.json --exit-code 1 $PYTHON_PROJECT_DIR artifacts: name: "$CI_JOB_NAME artifacts from $CI_PROJECT_NAME on $CI_COMMIT_REF_SLUG" expire_in: 1 day when: always paths: - $PYTHON_PROJECT_DIR/reports/ rules: # exclude merge requests - if: $CI_MERGE_REQUEST_ID when: never # on production branch(es): if $TRIVY_ENABLED is set # exclude if $PYTHON_TRIVY_ENABLED not set - if: '$PYTHON_TRIVY_ENABLED != "true"' when: never # on production or integration branches: auto - if: '$CI_COMMIT_REF_NAME =~ $PROD_REF || $CI_COMMIT_REF_NAME =~ $INTEG_REF' # on non-production, non-integration branches: manual & non-blocking - if: '$PYTHON_TRIVY_ENABLED == "true"' # useless but prevents GitLab warning when: manual allow_failure: true ############################################################################################### # package stage # ############################################################################################### # (on tag creation): create packages as artifacts py-package: extends: .python-base stage: package-build script: - _package artifacts: paths: - $PYTHON_PROJECT_DIR/dist/*.tar.gz - $PYTHON_PROJECT_DIR/dist/*.whl rules: # on tags - if: '$CI_COMMIT_TAG' - if: '$PYTHON_FORCE_PACKAGE == "true"' ############################################################################################### # publish stage # ############################################################################################### # (on tag creation): performs a release py-publish: extends: .python-base stage: publish script: - assert_defined "$TWINE_USERNAME" 'Missing required env $TWINE_USERNAME' - assert_defined "$TWINE_PASSWORD" 'Missing required env $TWINE_PASSWORD' - _publish rules: # on tags with $PYTHON_PUBLISH_ENABLED set - if: '$PYTHON_PUBLISH_ENABLED == "true" && $CI_COMMIT_TAG' # (manual from master branch): triggers a release (tag creation) py-release: extends: .python-base stage: publish script: - git config --global user.email '$GITLAB_USER_EMAIL' - git config --global user.name '$GITLAB_USER_LOGIN' - git checkout -B $CI_BUILD_REF_NAME - _release - git_url_base=`echo ${CI_REPOSITORY_URL} | cut -d\@ -f2` - git push https://${RELEASE_USERNAME}:${RELEASE_ACCESS_TOKEN}@${git_url_base} --tags - git push https://${RELEASE_USERNAME}:${RELEASE_ACCESS_TOKEN}@${git_url_base} $CI_BUILD_REF_NAME rules: # exclude merge requests - if: $CI_MERGE_REQUEST_ID when: never # on production branch(es): manual & non-blocking if $RELEASE_USERNAME is set - if: '$RELEASE_USERNAME && $CI_COMMIT_REF_NAME =~ $PROD_REF' when: manual allow_failure: true # on integration branch(es): manual & non-blocking if $RELEASE_USERNAME is set - if: '$RELEASE_USERNAME && $CI_COMMIT_REF_NAME =~ $INTEG_REF' when: manual allow_failure: true