# =========================================================================================
# Copyright (C) 2021 Orange & contributors
#
# This program is free software; you can redistribute it and/or modify it under the terms
# of the GNU Lesser General Public License as published by the Free Software Foundation;
# either version 3 of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
# without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License along with this
# program; if not, write to the Free Software Foundation, Inc., 51 Franklin Street, Fifth
# Floor, Boston, MA  02110-1301, USA.
# =========================================================================================
spec:
  inputs:
    # -- general configuration
    image:
      description: The Docker image used to run Python - **set the version required by your project**
      default: registry.hub.docker.com/library/python:3-slim
    project-dir:
      description: Python project root directory
      default: .
    build-system:
      description: Python build-system to use to install dependencies, build and package the project
      options:
      - auto
      - setuptools
      - poetry
      - pipenv
      - reqfile
      - uv
      default: auto
    reqs-file:
      description: |-
        Main requirements file _(relative to `$PYTHON_PROJECT_DIR`)_

        For [Requirements Files](https://pip.pypa.io/en/stable/user_guide/#requirements-files) build-system only
      default: requirements.txt
    extra-reqs-files:
      description: |-
        Extra dev requirements file(s) to install _(relative to `$PYTHON_PROJECT_DIR`)_

        For [Requirements Files](https://pip.pypa.io/en/stable/user_guide/#requirements-files) build-system only
      default: requirements-dev.txt
    compile-args:
      description: '[`compileall` CLI options](https://docs.python.org/3/library/compileall.html)'
      default: '*'
    pip-opts:
      description: pip extra [options](https://pip.pypa.io/en/stable/cli/pip/#general-options)
      default: ''
    extra-deps:
      description: |-
        Extra sets of dependencies to install

        For [Setuptools](https://setuptools.pypa.io/en/latest/userguide/dependency_management.html?highlight=extras#optional-dependencies) or [Poetry](https://python-poetry.org/docs/pyproject/#extras) only
      default: ''
    package-enabled:
      description: Enable package
      type: boolean
      default: false
    # -- lint & test tools
    pylint-enabled:
      description: Enable pylint
      type: boolean
      default: false
    pylint-args:
      description: Additional [pylint CLI options](http://pylint.pycqa.org/en/latest/user_guide/run.html#command-line-options)
      default: ''
    pylint-files:
      description: Files or directories to analyse
      default: ''
    unittest-enabled:
      description: Enable unittest
      type: boolean
      default: false
    unittest-args:
      description: Additional xmlrunner/unittest CLI options
      default: ''
    pytest-enabled:
      description: Enable pytest
      type: boolean
      default: false
    pytest-args:
      description: Additional [pytest](https://docs.pytest.org/en/stable/usage.html) or [pytest-cov](https://github.com/pytest-dev/pytest-cov#usage) CLI options
      default: ''
    nosetests-enabled:
      description: Enable nose
      type: boolean
      default: false
    nosetests-args:
      description: Additional [nose CLI options](https://nose.readthedocs.io/en/latest/usage.html#options)
      default: ''
    # -- security tools
    bandit-enabled:
      description: Enable Bandit
      type: boolean
      default: false
    bandit-args:
      description: Additional [Bandit CLI options](https://github.com/PyCQA/bandit#usage)
      default: --recursive .
    trivy-disabled:
      description: Disable Trivy
      type: boolean
      default: false
    trivy-dist-url:
      description: |-
        Url to the `tar.gz` package for `linux_amd64` of Trivy to use

        _When unset, the latest version will be used_
      default: ''
    trivy-args:
      description: Additional [Trivy CLI options](https://aquasecurity.github.io/trivy/latest/docs/references/configuration/cli/trivy_filesystem/)
      default: --ignore-unfixed --pkg-types library --detection-priority comprehensive
    sbom-disabled:
      description: Disable Software Bill of Materials
      type: boolean
      default: false
    sbom-syft-url:
      description: |-
        Url to the `tar.gz` package for `linux_amd64` of Syft to use

        _When unset, the latest version will be used_
      default: ''
    sbom-name:
      description: Component name of the emitted SBOM
      default: $CI_PROJECT_PATH/$PYTHON_PROJECT_DIR
    sbom-opts:
      description: Options for syft used for SBOM analysis
      default: --override-default-catalogers python-package-cataloger
    # -- release & publish
    release-enabled:
      description: Enable Release
      type: boolean
      default: false
    auto-release-enabled:
      description: When set the job start automatically on production branch. When not set (default), the job is manual. Note that this behavior also depends on release-enabled being set.
      type: boolean
      default: false
    publish-enabled:
      description: Enable Publish Package
      type: boolean
      default: false
    release-next:
      description: 'The part of the version to increase (one of: `major`, `minor`, `patch`)'
      options:
      - ''
      - major
      - minor
      - patch
      default: minor
    semrel-release-disabled:
      description: Disable semantic-release integration
      type: boolean
      default: false
    release-commit-message:
      description: The Git commit message to use on the release commit. This is templated using the [Python Format String Syntax](http://docs.python.org/2/library/string.html#format-string-syntax). Available in the template context are current_version and new_version.
      default: "chore(python-release): {current_version} \u2192 {new_version}"
    repository-url:
      description: |-
        Target PyPI repository to publish packages.

        _defaults to [GitLab project's packages repository](https://docs.gitlab.com/ee/user/packages/pypi_repository/)_
      default: ${CI_SERVER_URL}/api/v4/projects/${CI_PROJECT_ID}/packages/pypi
    # -- formatters & type checking
    black-enabled:
      description: Enable black
      type: boolean
      default: false
    isort-enabled:
      description: Enable isort
      type: boolean
      default: false
    ruff-enabled:
      description: Enable Ruff
      type: boolean
      default: false
    ruff-args:
      description: Additional [Ruff Linter CLI options](https://docs.astral.sh/ruff/configuration/#full-command-line-interface)
      default: ''
    ruff-format-enabled:
      description: Enable Ruff formatter
      type: boolean
      default: false
    mypy-enabled:
      description: Enable mypy
      type: boolean
      default: false
    mypy-args:
      description: Additional [mypy CLI options](https://mypy.readthedocs.io/en/stable/command_line.html)
      default: ''
    mypy-files:
      description: Files or directories to analyse
      default: ''
---
# default workflow rules: Merge Request pipelines
# (rules are evaluated in order: the first matching rule applies)
workflow:
  rules:
    # prevent MR pipeline originating from production or integration branch(es)
    - if: '$CI_MERGE_REQUEST_SOURCE_BRANCH_NAME =~ $PROD_REF || $CI_MERGE_REQUEST_SOURCE_BRANCH_NAME =~ $INTEG_REF'
      when: never
    # on non-prod, non-integration branches: prefer MR pipeline over branch pipeline
    - if: '$CI_COMMIT_BRANCH && $CI_OPEN_MERGE_REQUESTS && $CI_COMMIT_REF_NAME !~ $PROD_REF && $CI_COMMIT_REF_NAME !~ $INTEG_REF'
      when: never
    # skip tag pipelines when commit message contains '[ci skip on tag]' (or '[skip ci on ..., tag, ...]')
    - if: '$CI_COMMIT_MESSAGE =~ "/\[(ci skip|skip ci) on ([^],]*,)*tag(,[^],]*)*\]/" && $CI_COMMIT_TAG'
      when: never
    # skip branch pipelines when commit message contains '[ci skip on branch]'
    - if: '$CI_COMMIT_MESSAGE =~ "/\[(ci skip|skip ci) on ([^],]*,)*branch(,[^],]*)*\]/" && $CI_COMMIT_BRANCH'
      when: never
    # skip MR pipelines when commit message contains '[ci skip on mr]'
    - if: '$CI_COMMIT_MESSAGE =~ "/\[(ci skip|skip ci) on ([^],]*,)*mr(,[^],]*)*\]/" && $CI_MERGE_REQUEST_ID'
      when: never
    # skip default-branch pipelines when commit message contains '[ci skip on default]'
    - if: '$CI_COMMIT_MESSAGE =~ "/\[(ci skip|skip ci) on ([^],]*,)*default(,[^],]*)*\]/" && $CI_COMMIT_REF_NAME =~ $CI_DEFAULT_BRANCH'
      when: never
    # skip production-branch pipelines when commit message contains '[ci skip on prod]'
    - if: '$CI_COMMIT_MESSAGE =~ "/\[(ci skip|skip ci) on ([^],]*,)*prod(,[^],]*)*\]/" && $CI_COMMIT_REF_NAME =~ $PROD_REF'
      when: never
    # skip integration-branch pipelines when commit message contains '[ci skip on integ]'
    - if: '$CI_COMMIT_MESSAGE =~ "/\[(ci skip|skip ci) on ([^],]*,)*integ(,[^],]*)*\]/" && $CI_COMMIT_REF_NAME =~ $INTEG_REF'
      when: never
    # skip development-branch pipelines when commit message contains '[ci skip on dev]'
    - if: '$CI_COMMIT_MESSAGE =~ "/\[(ci skip|skip ci) on ([^],]*,)*dev(,[^],]*)*\]/" && $CI_COMMIT_REF_NAME !~ $PROD_REF && $CI_COMMIT_REF_NAME !~ $INTEG_REF'
      when: never
    # else: run the pipeline
    - when: always

# test job prototype: implement adaptive pipeline rules
# (rules are evaluated in order: the first matching rule decides
# whether the test job is auto/manual and failing/non-failing)
.test-policy:
  rules:
    # on tag: auto & failing
    - if: $CI_COMMIT_TAG
    # on ADAPTIVE_PIPELINE_DISABLED: auto & failing
    - if: '$ADAPTIVE_PIPELINE_DISABLED == "true"'
    # on production or integration branch(es): auto & failing
    - if: '$CI_COMMIT_REF_NAME =~ $PROD_REF || $CI_COMMIT_REF_NAME =~ $INTEG_REF'
    # early stage (dev branch, no MR): manual & non-failing
    - if: '$CI_MERGE_REQUEST_ID == null && $CI_OPEN_MERGE_REQUESTS == null'
      when: manual
      allow_failure: true
    # Draft MR: auto & non-failing
    - if: '$CI_MERGE_REQUEST_TITLE =~ /^Draft:.*/'
      allow_failure: true
    # else (Ready MR): auto & failing
    - when: on_success

variables:
  # variabilized tracking image
  TBC_TRACKING_IMAGE: "registry.gitlab.com/to-be-continuous/tools/tracking:master"

  # PYTHON_IMAGE: "registry.hub.docker.com/library/python:3"
  PYTHON_IMAGE: $[[ inputs.image ]]
  # Default Python project root directory
  PYTHON_PROJECT_DIR: $[[ inputs.project-dir ]]

  # requirements files ('reqfile' build system only)
  PYTHON_REQS_FILE: $[[ inputs.reqs-file ]]
  PYTHON_EXTRA_REQS_FILES: $[[ inputs.extra-reqs-files ]]

  # default production ref name (pattern)
  PROD_REF: '/^(master|main)$/'
  # default integration ref name (pattern)
  INTEG_REF: '/^develop$/'
  # default release tag name (pattern)
  RELEASE_REF: '/^v?[0-9]+\.[0-9]+\.[0-9]+(-[a-zA-Z0-9-\.]+)?(\+[a-zA-Z0-9-\.]+)?$/'

  # compileall
  PYTHON_COMPILE_ARGS: $[[ inputs.compile-args ]]

  # Bandit (SAST) options
  BANDIT_ARGS: $[[ inputs.bandit-args ]]

  # Trivy tool
  PYTHON_TRIVY_DISABLED: $[[ inputs.trivy-disabled ]]
  PYTHON_TRIVY_DIST_URL: $[[ inputs.trivy-dist-url ]]
  PYTHON_TRIVY_ARGS: $[[ inputs.trivy-args ]]

  # SBOM (Syft) options
  PYTHON_SBOM_NAME: $[[ inputs.sbom-name ]]
  PYTHON_SBOM_OPTS: $[[ inputs.sbom-opts ]]

  # release options
  PYTHON_RELEASE_NEXT: $[[ inputs.release-next ]]
  PYTHON_RELEASE_COMMIT_MESSAGE: $[[ inputs.release-commit-message ]]

  # By default, publish on the Packages registry of the project
  # https://docs.gitlab.com/ee/user/packages/pypi_repository/#authenticate-with-a-ci-job-token
  PYTHON_REPOSITORY_URL: $[[ inputs.repository-url ]]
  PYTHON_REPOSITORY_USERNAME: gitlab-ci-token
  PYTHON_REPOSITORY_PASSWORD: $CI_JOB_TOKEN

  # build system & tools enabling/options
  PYTHON_BUILD_SYSTEM: $[[ inputs.build-system ]]
  PIP_OPTS: $[[ inputs.pip-opts ]]
  PYTHON_EXTRA_DEPS: $[[ inputs.extra-deps ]]
  PYTHON_PACKAGE_ENABLED: $[[ inputs.package-enabled ]]
  PYLINT_ENABLED: $[[ inputs.pylint-enabled ]]
  PYLINT_ARGS: $[[ inputs.pylint-args ]]
  PYLINT_FILES: $[[ inputs.pylint-files ]]
  UNITTEST_ENABLED: $[[ inputs.unittest-enabled ]]
  UNITTEST_ARGS: $[[ inputs.unittest-args ]]
  PYTEST_ENABLED: $[[ inputs.pytest-enabled ]]
  PYTEST_ARGS: $[[ inputs.pytest-args ]]
  NOSETESTS_ARGS: $[[ inputs.nosetests-args ]]

  PYTHON_SBOM_SYFT_URL: $[[ inputs.sbom-syft-url ]]

  PYTHON_SEMREL_RELEASE_DISABLED: $[[ inputs.semrel-release-disabled ]]

  # feature flags
  NOSETESTS_ENABLED: $[[ inputs.nosetests-enabled ]]
  BANDIT_ENABLED: $[[ inputs.bandit-enabled ]]
  PYTHON_SBOM_DISABLED: $[[ inputs.sbom-disabled ]]
  PYTHON_RELEASE_ENABLED: $[[ inputs.release-enabled ]]
  PYTHON_PUBLISH_ENABLED: $[[ inputs.publish-enabled ]]
  PYTHON_AUTO_RELEASE_ENABLED: $[[ inputs.auto-release-enabled ]]

  # formatters & type checking
  PYTHON_BLACK_ENABLED: $[[ inputs.black-enabled ]]
  PYTHON_ISORT_ENABLED: $[[ inputs.isort-enabled ]]
  RUFF_ENABLED: $[[ inputs.ruff-enabled ]]
  RUFF_ARGS: $[[ inputs.ruff-args ]]
  RUFF_FORMAT_ENABLED: $[[ inputs.ruff-format-enabled ]]
  MYPY_ENABLED: $[[ inputs.mypy-enabled ]]
  MYPY_ARGS: $[[ inputs.mypy-args ]]
  MYPY_FILES: $[[ inputs.mypy-files ]]


.python-scripts: &python-scripts |
  # BEGSCRIPT
  set -e

  # log an informational message ('[INFO]' prefix, in blue)
  function log_info() {
      echo -e "[\\e[1;94mINFO\\e[0m] $*"
  }

  # log a warning message ('[WARN]' prefix, in yellow)
  function log_warn() {
      echo -e "[\\e[1;93mWARN\\e[0m] $*"
  }

  # log an error message ('[ERROR]' prefix, in red); does not abort
  function log_error() {
      echo -e "[\\e[1;91mERROR\\e[0m] $*"
  }

  # log an error message and abort the job
  function fail() {
    log_error "$*"
    exit 1
  }

  # Abort the job when the given value is empty.
  # - $1: value to test
  # - $2: error message logged when $1 is empty
  function assert_defined() {
    if [[ -n "$1" ]]
    then
      return
    fi
    log_error "$2"
    exit 1
  }

  # Append the given CA certificate bundle to every trust store found in the
  # image: system PEM bundles, the Java keystore (when keytool is available)
  # and Python requests (via $REQUESTS_CA_BUNDLE).
  # - $1: CA certificates in PEM format (may hold several certificates; empty value is a no-op)
  function install_ca_certs() {
    certs=$1
    if [[ -z "$certs" ]]
    then
      return
    fi

    # import in system
    if echo "$certs" >> /etc/ssl/certs/ca-certificates.crt
    then
      log_info "CA certificates imported in \\e[33;1m/etc/ssl/certs/ca-certificates.crt\\e[0m"
    fi
    if echo "$certs" >> /etc/ssl/cert.pem
    then
      log_info "CA certificates imported in \\e[33;1m/etc/ssl/cert.pem\\e[0m"
    fi

    # import in Java keystore (if keytool command found)
    if command -v keytool > /dev/null
    then
      # locate JAVA_HOME (fallback: resolve from the java binary path)
      # shellcheck disable=SC2046
      javahome=${JAVA_HOME:-$(dirname $(readlink -f $(command -v java)))/..}
      # locate the keystore ($JAVA_KEYSTORE_PATH overrides; else probe known locations)
      # shellcheck disable=SC2086
      keystore=${JAVA_KEYSTORE_PATH:-$(ls -1 $javahome/jre/lib/security/cacerts 2>/dev/null || ls -1 $javahome/lib/security/cacerts 2>/dev/null || echo "")}
      if [[ -f "$keystore" ]]
      then
        storepass=${JAVA_KEYSTORE_PASSWORD:-changeit}
        # import each certificate of the bundle separately (keytool imports one at a time)
        nb_certs=$(echo "$certs" | grep -c 'END CERTIFICATE')
        log_info "importing $nb_certs certificates in Java keystore \\e[33;1m$keystore\\e[0m..."
        for idx in $(seq 0 $((nb_certs - 1)))
        do
          # TODO: use keytool option -trustcacerts ?
          if echo "$certs" | awk "n==$idx { print }; /END CERTIFICATE/ { n++ }" | keytool -noprompt -import -alias "imported CA Cert $idx" -keystore "$keystore" -storepass "$storepass"
          then
            log_info "... CA certificate [$idx] successfully imported"
          else
            log_warn "... Failed importing CA certificate [$idx]: abort"
            return
          fi
        done
      else
        log_warn "Java keystore \\e[33;1m$keystore\\e[0m not found: could not import CA certificates"
      fi
    fi

    # variable REQUESTS_CA_BUNDLE for Python if Python installed
    if command -v python > /dev/null
    then
      export REQUESTS_CA_BUNDLE=/etc/ssl/certs/ca-certificates.crt
      log_info "Python requests \\e[33;1m\$REQUESTS_CA_BUNDLE\\e[0m variable set"
    fi
  }

  # Apply 'scoped' variables to the environment.
  # A scoped variable is named:
  #   scoped__<target_var>__<if|ifnot>__<cond_var>__<test_op>[__<cmp_value>]
  # where <test_op> is one of: defined, equals, startswith, endswith, contains, in
  # (each optionally suffixed with '_ic' for a case-insensitive comparison).
  # When the condition holds, <target_var> is exported with the scoped variable's
  # value (overwriting any previous value).
  function unscope_variables() {
    _scoped_vars=$(env | awk -F '=' "/^scoped__[a-zA-Z0-9_]+=/ {print \$1}" | sort)
    if [[ -z "$_scoped_vars" ]]; then return; fi
    log_info "Processing scoped variables..."
    for _scoped_var in $_scoped_vars
    do
      # split name fields on '__' (turned into ':' separators)
      _fields=${_scoped_var//__/:}
      # field 3: the condition kind (if / ifnot)
      _condition=$(echo "$_fields" | cut -d: -f3)
      case "$_condition" in
      if) _not="";;
      ifnot) _not=1;;
      *)
        log_warn "... unrecognized condition \\e[1;91m$_condition\\e[0m in \\e[33;1m${_scoped_var}\\e[0m"
        continue
      ;;
      esac
      # field 2: target variable; field 4: condition variable; field 5: test operator
      _target_var=$(echo "$_fields" | cut -d: -f2)
      _cond_var=$(echo "$_fields" | cut -d: -f4)
      _cond_val=$(eval echo "\$${_cond_var}")
      _test_op=$(echo "$_fields" | cut -d: -f5)
      case "$_test_op" in
      defined)
        if [[ -z "$_not" ]] && [[ -z "$_cond_val" ]]; then continue;
        elif [[ "$_not" ]] && [[ "$_cond_val" ]]; then continue;
        fi
        ;;
      equals|startswith|endswith|contains|in|equals_ic|startswith_ic|endswith_ic|contains_ic|in_ic)
        # comparison operator
        # sluggify actual value
        _cond_val=$(echo "$_cond_val" | tr '[:punct:]' '_')
        # retrieve comparison value
        _cmp_val_prefix="scoped__${_target_var}__${_condition}__${_cond_var}__${_test_op}__"
        _cmp_val=${_scoped_var#"$_cmp_val_prefix"}
        # manage 'ignore case'
        if [[ "$_test_op" == *_ic ]]
        then
          # lowercase everything
          _cond_val=$(echo "$_cond_val" | tr '[:upper:]' '[:lower:]')
          _cmp_val=$(echo "$_cmp_val" | tr '[:upper:]' '[:lower:]')
        fi
        case "$_test_op" in
        equals*)
          if [[ -z "$_not" ]] && [[ "$_cond_val" != "$_cmp_val" ]]; then continue;
          elif [[ "$_not" ]] && [[ "$_cond_val" == "$_cmp_val" ]]; then continue;
          fi
          ;;
        startswith*)
          if [[ -z "$_not" ]] && [[ "$_cond_val" != "$_cmp_val"* ]]; then continue;
          elif [[ "$_not" ]] && [[ "$_cond_val" == "$_cmp_val"* ]]; then continue;
          fi
          ;;
        endswith*)
          if [[ -z "$_not" ]] && [[ "$_cond_val" != *"$_cmp_val" ]]; then continue;
          elif [[ "$_not" ]] && [[ "$_cond_val" == *"$_cmp_val" ]]; then continue;
          fi
          ;;
        contains*)
          if [[ -z "$_not" ]] && [[ "$_cond_val" != *"$_cmp_val"* ]]; then continue;
          elif [[ "$_not" ]] && [[ "$_cond_val" == *"$_cmp_val"* ]]; then continue;
          fi
          ;;
        in*)
          # 'in' test: the actual value must be one of the '__'-separated comparison values
          if [[ -z "$_not" ]] && [[ "__${_cmp_val}__" != *"__${_cond_val}__"* ]]; then continue;
          elif [[ "$_not" ]] && [[ "__${_cmp_val}__" == *"__${_cond_val}__"* ]]; then continue;
          fi
          ;;
        esac
        ;;
      *)
        log_warn "... unrecognized test operator \\e[1;91m${_test_op}\\e[0m in \\e[33;1m${_scoped_var}\\e[0m"
        continue
        ;;
      esac
      # matches
      _val=$(eval echo "\$${_target_var}")
      log_info "... apply \\e[32m${_target_var}\\e[0m from \\e[32m\$${_scoped_var}\\e[0m${_val:+ (\\e[33;1moverwrite\\e[0m)}"
      _val=$(eval echo "\$${_scoped_var}")
      export "${_target_var}"="${_val}"
    done
    log_info "... done"
  }

  # evaluate and export a secret
  # - $1: secret variable name
  # Supported value encodings (determined by prefix):
  # - '@b64@...': base64-encoded value
  # - '@hex@...': hexadecimal-encoded value
  # - '@url@...': value downloaded from the given url (curl, wget or python3)
  # Values without a recognized prefix are left untouched.
  function eval_secret() {
    name=$1
    value=$(eval echo "\$${name}")
    case "$value" in
    @b64@*)
      # base64-encoded secret: decoding failure is fatal
      decoded=$(mktemp)
      errors=$(mktemp)
      if echo "$value" | cut -c6- | base64 -d > "${decoded}" 2> "${errors}"
      then
        # shellcheck disable=SC2086
        export ${name}="$(cat ${decoded})"
        log_info "Successfully decoded base64 secret \\e[33;1m${name}\\e[0m"
      else
        fail "Failed decoding base64 secret \\e[33;1m${name}\\e[0m:\\n$(sed 's/^/... /g' "${errors}")"
      fi
      ;;
    @hex@*)
      # hexadecimal-encoded secret: decoding failure is fatal
      decoded=$(mktemp)
      errors=$(mktemp)
      if echo "$value" | cut -c6- | sed 's/\([0-9A-F]\{2\}\)/\\\\x\1/gI' | xargs printf > "${decoded}" 2> "${errors}"
      then
        # shellcheck disable=SC2086
        export ${name}="$(cat ${decoded})"
        log_info "Successfully decoded hexadecimal secret \\e[33;1m${name}\\e[0m"
      else
        fail "Failed decoding hexadecimal secret \\e[33;1m${name}\\e[0m:\\n$(sed 's/^/... /g' "${errors}")"
      fi
      ;;
    @url@*)
      # url-fetched secret: tries curl, then wget, then python3; fetch failure is only a warning
      url=$(echo "$value" | cut -c6-)
      if command -v curl > /dev/null
      then
        decoded=$(mktemp)
        errors=$(mktemp)
        if curl -s -S -f --connect-timeout 5 -o "${decoded}" "$url" 2> "${errors}"
        then
          # shellcheck disable=SC2086
          export ${name}="$(cat ${decoded})"
          log_info "Successfully curl'd secret \\e[33;1m${name}\\e[0m"
        else
          log_warn "Failed getting secret \\e[33;1m${name}\\e[0m:\\n$(sed 's/^/... /g' "${errors}")"
        fi
      elif command -v wget > /dev/null
      then
        decoded=$(mktemp)
        errors=$(mktemp)
        if wget -T 5 -O "${decoded}" "$url" 2> "${errors}"
        then
          # shellcheck disable=SC2086
          export ${name}="$(cat ${decoded})"
          log_info "Successfully wget'd secret \\e[33;1m${name}\\e[0m"
        else
          log_warn "Failed getting secret \\e[33;1m${name}\\e[0m:\\n$(sed 's/^/... /g' "${errors}")"
        fi
      elif command -v python3 > /dev/null
      then
        decoded=$(mktemp)
        errors=$(mktemp)
        # shellcheck disable=SC2086
        if python3 -c "import urllib.request ; urllib.request.urlretrieve(\"$url\",\"${decoded}\")" > "${errors}" 2>&1
        then
          export ${name}="$(cat ${decoded})"
          log_info "Successfully fetched secret \\e[33;1m${name}\\e[0m"
        else
          log_warn "Failed getting secret \\e[33;1m${name}\\e[0m:\\n$(sed 's/^/... /g' "${errors}")"
        fi
      else
        log_warn "Couldn't get secret \\e[33;1m${name}\\e[0m: no http client found"
      fi
      ;;
    esac
  }

  # Scan the environment for variables holding encoded secrets (values starting
  # with @b64@, @hex@ or @url@) and decode each one with eval_secret.
  # Scoped variables (scoped__* names) are deliberately skipped.
  function eval_all_secrets() {
    for _encoded_var in $(env | grep -v '^scoped__' | awk -F '=' '/^[a-zA-Z0-9_]*=@(b64|hex|url)@/ {print $1}')
    do
      eval_secret "$_encoded_var"
    done
  }

  # Install the given system packages if not already installed, using whichever
  # supported package manager is available (apt-get on Debian, apk on Alpine).
  # Aborts the job when no supported package manager is found.
  # - $@: packages to install
  function maybe_install_packages() {
    if command -v apt-get > /dev/null
    then
      # Debian
      if ! dpkg --status "$@" > /dev/null
      then
        apt-get update
        apt-get install --no-install-recommends --yes --quiet "$@"
      fi
    elif command -v apk > /dev/null
    then
      # Alpine
      if ! apk info --installed "$@" > /dev/null
      then
        apk add --no-cache "$@"
      fi
    else
      log_error "... didn't find any supported package manager to install $*"
      exit 1
    fi
  }

  # Determine the Python build system to use and export it as $PYTHON_BUILD_SYSTEM.
  # An explicitly declared system (poetry/setuptools/pipenv/uv/reqfile) is honored
  # as-is; otherwise the system is auto-detected from project files, in order:
  # requirements file, uv.lock, pyproject.toml 'build-backend', setup.py, Pipfile.
  # Aborts the job when nothing can be detected or the backend is unsupported.
  function guess_build_system() {
    case "${PYTHON_BUILD_SYSTEM:-auto}" in
    auto)
      ;;
    poetry*|setuptools*|pipenv*|uv*)
      log_info "--- Build system explicitly declared: ${PYTHON_BUILD_SYSTEM}"
      return
      ;;
    reqfile)
      log_info "--- Build system explicitly declared: requirements file"
      return
      ;;
    *)
      log_warn "--- Unknown declared build system: \\e[33;1m${PYTHON_BUILD_SYSTEM}\\e[0m: please read template doc"
      ;;
    esac

    if [[ -f "${PYTHON_REQS_FILE}" ]]
    then
      log_info "--- Build system auto-detected: requirements file"
      export PYTHON_BUILD_SYSTEM="reqfile"
      return
    fi

    if [[ -f "uv.lock" ]]
    then
      if [[ -f "pyproject.toml" ]]
      then
        log_info "--- Build system auto-detected: uv (uv.lock and pyproject.toml)"
        export PYTHON_BUILD_SYSTEM="uv"
        return
      fi
      log_error "--- Build system auto-detected: uv (uv.lock) but no pyproject.toml found: please read template doc"
    fi

    if [[ -f "pyproject.toml" ]]
    then
      # that might be PEP 517 if a build-backend is specified
      # otherwise it might be only used as configuration file for development tools...
      build_backend=$(sed -rn 's/^build-backend *= *"([^"]*)".*/\1/p' pyproject.toml)
      case "$build_backend" in
      "")
        log_info "--- Build system auto-detection... pyproject.toml found but no 'build-backend' specified: continue..."
        ;;
      poetry.core.masonry.api)
        log_info "--- Build system auto-detected: PEP 517 with Poetry backend"
        export PYTHON_BUILD_SYSTEM="poetry"
        return
        ;;
      setuptools.build_meta)
        log_info "--- Build system auto-detected: PEP 517 with Setuptools backend"
        export PYTHON_BUILD_SYSTEM="setuptools"
        return
        ;;
      *)
        log_error "--- Build system auto-detected: PEP 517 with unsupported backend \\e[33;1m${build_backend}\\e[0m: please read template doc"
        exit 1
        ;;
      esac
    fi

    if [[ -f "setup.py" ]]
    then
      log_info "--- Build system auto-detected: Setuptools (legacy)"
      export PYTHON_BUILD_SYSTEM="setuptools"
    elif [[ -f "Pipfile" ]]
    then
      log_info "--- Build system auto-detected: Pipenv"
      export PYTHON_BUILD_SYSTEM="pipenv"
    else
      log_error "--- Build system auto-detect failed: please read template doc"
      exit 1
    fi
  }

  # install Poetry if the configured build system requires it and the 'poetry'
  # command is not already available
  # (pip-installs $PYTHON_BUILD_SYSTEM itself, which may carry a version spec, e.g. 'poetry==1.8')
  function maybe_install_poetry() {
    if [[ "$PYTHON_BUILD_SYSTEM" =~ ^poetry ]] && ! command -v poetry > /dev/null
    then
      # shellcheck disable=SC2086
      pip install ${PIP_OPTS} "$PYTHON_BUILD_SYSTEM"
    fi
  }
  # install uv if the configured build system requires it and the 'uv'
  # command is not already available
  # (pip-installs $PYTHON_BUILD_SYSTEM itself, which may carry a version spec, e.g. 'uv==0.4')
  function maybe_install_uv() {
    if [[ "$PYTHON_BUILD_SYSTEM" =~ ^uv ]] && ! command -v uv > /dev/null
    then
      # shellcheck disable=SC2086
      pip install ${PIP_OPTS} "$PYTHON_BUILD_SYSTEM"
    fi
  }

  # install project dependencies using the configured build system
  # (reads $PYTHON_BUILD_SYSTEM, $PIP_OPTS, $PYTHON_EXTRA_DEPS,
  #  $PYTHON_REQS_FILE and $PYTHON_EXTRA_REQS_FILES)
  function install_requirements() {
    case "$PYTHON_BUILD_SYSTEM" in
    poetry*)
      if  [[ ! -f "poetry.lock" ]]; then
        log_warn "Using Poetry but \\e[33;1mpoetry.lock\\e[0m file not found: you shall commit it with your project files"
      fi
      maybe_install_poetry
      poetry install ${PYTHON_EXTRA_DEPS:+--extras "$PYTHON_EXTRA_DEPS"}
      ;;
    setuptools*)
      # shellcheck disable=SC2086
      pip install ${PIP_OPTS} "$PYTHON_BUILD_SYSTEM"
      # install the project itself (with optional extras)
      # shellcheck disable=SC2086
      pip install ${PIP_OPTS} ".${PYTHON_EXTRA_DEPS:+[$PYTHON_EXTRA_DEPS]}"
      ;;
    pipenv*)
      # shellcheck disable=SC2086
      pip install ${PIP_OPTS} "$PYTHON_BUILD_SYSTEM"
      if  [[ ! -f "Pipfile.lock" ]]; then
        log_warn "Using Pipenv but \\e[33;1mPipfile.lock\\e[0m file not found: you shall commit it with your project files"
        pipenv install --dev --system
      else
        pipenv sync --dev --system
      fi
      ;;
    reqfile)
      if [[ -f "${PYTHON_REQS_FILE}" ]]; then
        log_info "--- installing main requirements from \\e[33;1m${PYTHON_REQS_FILE}\\e[0m"
        # shellcheck disable=SC2086
        pip install ${PIP_OPTS} -r "${PYTHON_REQS_FILE}"
        # $PYTHON_EXTRA_REQS_FILES may contain glob patterns: expand via ls
        # shellcheck disable=SC2086
        found_reqs_files=$(eval ls -1 $PYTHON_EXTRA_REQS_FILES 2>/dev/null || echo "")
        # shellcheck disable=SC2116
        for extrareqsfile in $(echo "$found_reqs_files"); do
          log_info "--- installing extra requirements from \\e[33;1m${extrareqsfile}\\e[0m"
          # shellcheck disable=SC2086
          pip install ${PIP_OPTS} -r "${extrareqsfile}"
        done
      else
        log_warn "--- requirements build system defined, but no ${PYTHON_REQS_FILE} file found"
      fi
      ;;
    uv*)
      if  [[ ! -f "uv.lock" ]]; then
        log_warn "Using uv but \\e[33;1muv.lock\\e[0m file not found: you shall commit it with your project files"
      fi
      maybe_install_uv
      uv sync --frozen ${PYTHON_EXTRA_DEPS:+--extra "$PYTHON_EXTRA_DEPS"}
      ;;
    esac
  }

  # run a command within the project's Python environment:
  # through 'poetry run' or 'uv run' when the matching build system is configured,
  # directly otherwise
  # - $@: command and arguments
  function _run() {
    if [[ "$PYTHON_BUILD_SYSTEM" =~ ^poetry ]]
    then
      maybe_install_poetry
      poetry run "$@"
    elif [[ "$PYTHON_BUILD_SYSTEM" =~ ^uv ]]
    then
      maybe_install_uv
      uv run "$@"
    else
      "$@"
    fi
  }

  # run the Python interpreter (through _run) with the given arguments
  function _python() {
    _run python "$@"
  }

  # run pip with $PIP_OPTS and the given arguments
  # (uses 'uv pip' when the uv build system is configured)
  function _pip() {
    if [[ "$PYTHON_BUILD_SYSTEM" =~ ^uv ]]
    then
      maybe_install_uv
      # shellcheck disable=SC2086
      uv pip ${PIP_OPTS} "$@"
    else
      # shellcheck disable=SC2086
      _run pip ${PIP_OPTS} "$@"
    fi
  }

  # build the project distribution packages using the configured build system
  # (poetry build / uv build / PEP 517 'python -m build')
  function py_package() {
    if [[ "$PYTHON_BUILD_SYSTEM" =~ ^poetry ]]
    then
      maybe_install_poetry
      poetry build
    elif [[ "$PYTHON_BUILD_SYSTEM" =~ ^uv ]]
    then
      maybe_install_uv
      uv build
    else
      # shellcheck disable=SC2086
      pip install ${PIP_OPTS} build
      python -m build
    fi
  }

  # Configure Git authentication for pushing the release commit and tag.
  # Exports $git_auth_url, built either from $GIT_USERNAME/$GIT_PASSWORD (https)
  # or from $GIT_PRIVATE_KEY (ssh, loaded into an ssh-agent).
  # Aborts the job when no credentials are provided.
  function configure_scm_auth() {
    git_base_url=$(echo "$CI_REPOSITORY_URL" | cut -d\@ -f2)
    if [[ -n "${GIT_USERNAME}" ]] && [[ -n "${GIT_PASSWORD}" ]]; then
      log_info "--- using https protocol with SCM credentials from env (\$GIT_USERNAME and \$GIT_PASSWORD)..."
      export git_auth_url="https://${GIT_USERNAME}:${GIT_PASSWORD}@${git_base_url}"
    elif [[ -n "${GIT_PRIVATE_KEY}" ]]; then
      log_info "--- using ssh protocol with SSH key from env (\$GIT_PRIVATE_KEY)..."
      mkdir -m 700 "${HOME}/.ssh"
      ssh-keyscan -H "${CI_SERVER_HOST}" >> ~/.ssh/known_hosts
      eval "$(ssh-agent -s)"
      # Handle file variable
      if [[ -f "${GIT_PRIVATE_KEY}" ]]; then
        # strip possible carriage returns (Windows-edited key)
        tr -d '\r' < "${GIT_PRIVATE_KEY}" | ssh-add -
      else
        echo "${GIT_PRIVATE_KEY}" | tr -d '\r' | ssh-add -
      fi
      export git_auth_url="git@${git_base_url/\//:}"
    else
      log_error "--- Please specify either \$GIT_USERNAME and \$GIT_PASSWORD or \$GIT_PRIVATE_KEY variables to enable release (see doc)."
      exit 1
    fi
  }

  # Perform a project release:
  # 1. determine current & next version (from semantic-release info when enabled)
  # 2. bump the version in the project files, then Git commit & tag
  #    (poetry / uv+toml-cli / bump-my-version depending on the build system)
  # 3. push the commit and tag (requires configure_scm_auth to have exported $git_auth_url)
  function py_release() {
    # 1: retrieve next release info from semantic-release
    if [ "$SEMREL_INFO_ON" ] && [ "$PYTHON_SEMREL_RELEASE_DISABLED" != "true" ]
    then
      if [ -z "$SEMREL_INFO_NEXT_VERSION" ]
      then
        log_info "[semantic-release] no new version to release: skip"
        exit 0
      else
        py_cur_version="$SEMREL_INFO_LAST_VERSION"
        py_next_version="$SEMREL_INFO_NEXT_VERSION"
        py_release_part="$SEMREL_INFO_NEXT_VERSION_TYPE"
        log_info "[semantic-release] new ($py_release_part) release required \\e[1;94m${py_cur_version}\\e[0m → \\e[1;94m${py_next_version}\\e[0m"
      fi
    fi

    # 2: bump-my-version (+ Git commit & tag)
    if [[ "$PYTHON_BUILD_SYSTEM" =~ ^poetry ]]
    then
      maybe_install_poetry
      if [[ -z "$py_next_version" ]]
      then
        py_cur_version=$(poetry version --short)
        py_next_version="$PYTHON_RELEASE_NEXT"
      fi
      log_info "[poetry] change version \\e[1;94m${py_cur_version}\\e[0m → \\e[1;94m${py_next_version}\\e[0m"
      poetry version ${TRACE+--verbose} "$py_next_version"
      # eval exact next version
      py_next_version=$(poetry version --short)
      # Git commit and tag
      git add pyproject.toml
      # emulate bump-my-version to generate commit message
      py_commit_message=$(python -c "print('$PYTHON_RELEASE_COMMIT_MESSAGE'.format(current_version='$py_cur_version', new_version='$py_next_version'))")
      git commit -m "$py_commit_message"
      git tag "$py_next_version"
    elif [[ "$PYTHON_BUILD_SYSTEM" =~ ^uv ]]
    then
      maybe_install_uv
      if [[ -z "$py_next_version" ]]
      then
        # quick version waiting for uv to manage bump
        # related uv MR https://github.com/astral-sh/uv/pull/7248#issuecomment-2395465334
        mkdir -p -m 777 tbc_tmp
        uvx --from toml-cli toml get --toml-path pyproject.toml project.version > tbc_tmp/version.txt
        py_cur_version=$(cat tbc_tmp/version.txt)

        py_release_part="$PYTHON_RELEASE_NEXT"
        log_info "[bump-my-version] increase \\e[1;94m${py_release_part}\\e[0m (from current \\e[1;94m${py_cur_version}\\e[0m)"
        uvx bump-my-version bump ${TRACE+--verbose} --current-version "$py_cur_version" "$py_release_part" tbc_tmp/version.txt
        py_next_version=$(cat tbc_tmp/version.txt)
        rm -fr tbc_tmp/version.txt
      fi

      log_info "[uv] change version \\e[1;94m${py_cur_version}\\e[0m → \\e[1;94m${py_next_version}\\e[0m"
      uvx --from toml-cli toml set --toml-path pyproject.toml project.version "$py_next_version"

      # Git commit and tag
      git add pyproject.toml
      # emulate bump-my-version to generate commit message
      py_commit_message=$(python -c "print('$PYTHON_RELEASE_COMMIT_MESSAGE'.format(current_version='$py_cur_version', new_version='$py_next_version'))")
      git commit -m "$py_commit_message"
      git tag --force "$py_next_version"
    else
      # Setuptools / bump-my-version
      # shellcheck disable=SC2086
      pip install ${PIP_OPTS} bump-my-version
      if [[ "$py_next_version" ]]
      then
        # explicit release version (semantic-release)
        log_info "[bumpversion] change version \\e[1;94m${py_cur_version}\\e[0m → \\e[1;94m${py_next_version}\\e[0m"
        # fail fast when neither a current version nor any version file exists
        # (bump-my-version would have nothing to update)
        if [[ ! "$py_cur_version" && ! -f ".bumpversion.cfg" && ! -f ".bumpversion.toml" && ! -f "pyproject.toml" && ! -f "setup.cfg" ]]
        then
          log_error "Current version not defined and not version file found, set initial version at least in .bumpversion.toml or pyproject.toml"
          exit 1
        fi
        bump-my-version bump ${TRACE+--verbose} --current-version "${py_cur_version:-${PYTHON_RELEASE_START_VERSION:-0.0.0}}" --new-version "$py_next_version" --commit ${PYTHON_RELEASE_COMMIT_MESSAGE:+--message "$PYTHON_RELEASE_COMMIT_MESSAGE"} --tag --tag-name "{new_version}" "$py_release_part"
      elif [[ -f ".bumpversion.cfg" ]]
      then
        # current version shall be set in .bumpversion.cfg
        py_release_part="$PYTHON_RELEASE_NEXT"
        log_info "[bump-my-version bump] increase \\e[1;94m${py_release_part}\\e[0m"
        bump-my-version bump ${TRACE+--verbose} --commit ${PYTHON_RELEASE_COMMIT_MESSAGE:+--message "$PYTHON_RELEASE_COMMIT_MESSAGE"} --tag --tag-name "{new_version}" "$py_release_part"
      elif [[ -f "setup.py" ]]
      then
        # retrieve current version from setup.py
        py_cur_version=$(python setup.py --version)
        py_release_part="$PYTHON_RELEASE_NEXT"
        log_info "[bump-my-version] increase \\e[1;94m${py_release_part}\\e[0m (from current \\e[1;94m${py_cur_version}\\e[0m)"
        bump-my-version bump ${TRACE+--verbose} --current-version "$py_cur_version" --commit ${PYTHON_RELEASE_COMMIT_MESSAGE:+--message "$PYTHON_RELEASE_COMMIT_MESSAGE"} --tag --tag-name "{new_version}" "$py_release_part" setup.py
      else
        log_error "--- setup.py or .bumpversion.cfg file required to retrieve current version: cannot perform release"
        exit 1
      fi
    fi

    # 3: Git commit, tag and push
    log_info "--- git push commit and tag..."
    git push "$git_auth_url" "$CI_COMMIT_REF_NAME"
    git push "$git_auth_url" --tags
  }

  # Builds (unless packages were already built by the py-package job) and publishes
  # the Python package(s) to $PYTHON_REPOSITORY_URL, using the tool matching the
  # detected build system: poetry, uv, or build+twine (default).
  # Credentials: $PYTHON_REPOSITORY_USERNAME / $PYTHON_REPOSITORY_PASSWORD.
  # Fix: removed trailing whitespace on a blank line (it was part of the YAML
  # block-scalar value; yamllint trailing-spaces).
  function py_publish() {
    if [[ "$PYTHON_BUILD_SYSTEM" =~ ^poetry ]]
    then
      maybe_install_poetry

      # when PYTHON_PACKAGE_ENABLED is "true", dist/ comes from the py-package
      # job artifacts: skip rebuilding
      if [[ "$PYTHON_PACKAGE_ENABLED" != "true" ]]
      then
        log_info "--- build packages (poetry)..."
        poetry build ${TRACE+--verbose}
      fi

      log_info "--- publish packages (poetry) to $PYTHON_REPOSITORY_URL with user $PYTHON_REPOSITORY_USERNAME..."
      poetry config repositories.user_defined "$PYTHON_REPOSITORY_URL"
      poetry publish ${TRACE+--verbose} --username "$PYTHON_REPOSITORY_USERNAME" --password "$PYTHON_REPOSITORY_PASSWORD" --repository user_defined
    elif [[ "$PYTHON_BUILD_SYSTEM" =~ ^uv ]]
    then
      maybe_install_uv

      if [[ "$PYTHON_PACKAGE_ENABLED" != "true" ]]
      then
        log_info "--- build packages (uv)..."
        uv build ${TRACE+--verbose}
      fi

      log_info "--- publish packages (uv) to $PYTHON_REPOSITORY_URL with user $PYTHON_REPOSITORY_USERNAME..."
      uv publish ${TRACE+--verbose} --username "$PYTHON_REPOSITORY_USERNAME" --password "$PYTHON_REPOSITORY_PASSWORD" --publish-url "$PYTHON_REPOSITORY_URL"
    else
      # setuptools / requirements-file projects: PEP 517 'build' + twine
      # shellcheck disable=SC2086
      pip install ${PIP_OPTS} build twine

      if [[ "$PYTHON_PACKAGE_ENABLED" != "true" ]]
      then
        log_info "--- build packages (build)..."
        rm -rf dist
        python -m build
      fi

      log_info "--- publish packages (twine) to $PYTHON_REPOSITORY_URL with user $PYTHON_REPOSITORY_USERNAME..."
      twine upload ${TRACE+--verbose} --username "$PYTHON_REPOSITORY_USERNAME" --password "$PYTHON_REPOSITORY_PASSWORD" --repository-url "$PYTHON_REPOSITORY_URL" dist/*
    fi
  }

  # Prints the latest release tag of a GitHub repository.
  # $1: repository in "owner/name" form (e.g. "aquasecurity/trivy")
  # Resolves the tag from the HTTP redirect of .../releases/latest without
  # following it; requires either curl or python3 in the image.
  function github_get_latest_version() {
    if command -v curl &> /dev/null
    then
      # HEAD request; print the last path segment of the 'location' header
      curl -sSf -I "https://github.com/$1/releases/latest" | awk -F '/' -v RS='\r\n' '/location:/ {print $NF}'
    elif command -v python3 &> /dev/null
    then
      # same trick in python3: a no-redirect opener (HTTPErrorProcessor subclass
      # that returns responses untouched) issues a HEAD request, then the
      # Location header's last path segment is the tag
      python3 -c "import urllib.request;url='https://github.com/$1/releases/latest';opener=urllib.request.build_opener(type('NoRedirection', (urllib.request.HTTPErrorProcessor,), {'http_response': lambda self, req, resp: resp, 'https_response': lambda self, req, resp: resp})());req=urllib.request.Request(url, method='HEAD');print(opener.open(req).headers.get('Location').split('/')[-1])"
    else
      fail "curl or python3 required"
    fi
  }

  # script-block initialization — both helpers are defined in an earlier part of
  # this script (outside this view); NOTE(review): presumably unscope_variables
  # resolves scoped CI variables and eval_all_secrets decodes secret variables —
  # confirm against the upstream template
  unscope_variables
  eval_all_secrets

  # ENDSCRIPT

###############################################################################################
#                                      stages definition                                      #
###############################################################################################
# pipeline stage order; jobs in this template populate build, test and publish —
# the remaining stages are presumably placeholders shared with companion
# templates (deploy, infra, ...) — TODO confirm
stages:
  - build
  - test
  - package-build
  - package-test
  - infra
  - deploy
  - acceptance
  - publish
  - infra-prod
  - production

###############################################################################################
#                                      Generic python jobs                                    #
###############################################################################################
# base template for all Python jobs: runtime image, tracking service, local
# tool caches and common before_script (scripts, CA certs, chdir to project
# dir, build-system detection, reports dir)
.python-base:
  image: $PYTHON_IMAGE
  services:
    - name: "$TBC_TRACKING_IMAGE"
      command: ["--service", "python", "7.7.0"]
  variables:
    # set local cache dir; most Python tools honour XDG specs
    XDG_CACHE_HOME: "$CI_PROJECT_DIR/.cache"
    PIP_CACHE_DIR: "$CI_PROJECT_DIR/.cache/pip"
    POETRY_CACHE_DIR: "$CI_PROJECT_DIR/.cache/poetry"
    PIPENV_CACHE_DIR: "$CI_PROJECT_DIR/.cache/pipenv"
    UV_CACHE_DIR: "$CI_PROJECT_DIR/.cache/uv"
    POETRY_VIRTUALENVS_IN_PROJECT: "false"
  cache:
    key: "$CI_COMMIT_REF_SLUG-python"
    when: always
    paths:
      - .cache
  before_script:
    - !reference [.python-scripts]
    - install_ca_certs "${CUSTOM_CA_CERTS:-$DEFAULT_CA_CERTS}"
    # quoted so a project dir containing spaces doesn't break word-splitting
    - cd "${PYTHON_PROJECT_DIR}"
    - guess_build_system
    # world-writable so the non-root tool processes can write reports
    - mkdir -p -m 777 reports

# base template for unit-test jobs: extracts the coverage badge value from the
# console output and exposes JUnit + Cobertura reports to GitLab
.python-test:
  extends: .python-base
  stage: build
  # matches the "TOTAL ... NN%" line printed by coverage/pytest-cov
  coverage: /^TOTAL.+?(\d+(?:\.\d+)?\%)$/
  artifacts:
    name: "$CI_JOB_NAME artifacts from $CI_PROJECT_NAME on $CI_COMMIT_REF_SLUG"
    expire_in: 1 day
    when: always
    reports:
      junit:
        - "$PYTHON_PROJECT_DIR/reports/TEST-*.xml"
      coverage_report:
        coverage_format: cobertura
        path: "$PYTHON_PROJECT_DIR/reports/py-coverage.cobertura.xml"
    paths:
      - "$PYTHON_PROJECT_DIR/reports/TEST-*.xml"
      - "$PYTHON_PROJECT_DIR/reports/py-coverage.*"

###############################################################################################
#                                      build stage                                             #
###############################################################################################
# build Python packages as artifacts
# builds the Python package(s) once, exposed as dist/ artifacts reused by the
# publish jobs (see py_publish: skips rebuild when PYTHON_PACKAGE_ENABLED)
py-package:
  extends: .python-base
  stage: build
  script:
    - py_package
  artifacts:
    paths:
      - $PYTHON_PROJECT_DIR/dist/*
    expire_in: 1 day
  rules:
    - if: '$PYTHON_PACKAGE_ENABLED == "true"'

# Pylint analysis; emits a GitLab Code Climate report plus a parseable text report
py-lint:
  extends: .python-base
  stage: build
  script:
    - install_requirements
    - _pip install pylint_gitlab # codeclimate reports
    # run pylint and generate reports all at once
    # (scans PYLINT_FILES if set, else every *.py outside the cache dir)
    - _run pylint --output-format=colorized,pylint_gitlab.GitlabCodeClimateReporter:reports/py-lint.codeclimate.json,parseable:reports/py-lint.parseable.txt ${PYLINT_ARGS} ${PYLINT_FILES:-$(find -type f -name "*.py" -not -path "./.cache/*")}
  artifacts:
    name: "$CI_JOB_NAME artifacts from $CI_PROJECT_NAME on $CI_COMMIT_REF_SLUG"
    expire_in: 1 day
    when: always
    reports:
      codequality: $PYTHON_PROJECT_DIR/reports/py-lint.codeclimate.json
    paths:
      - "$PYTHON_PROJECT_DIR/reports/py-lint.*"
  rules:
    # exclude if $PYLINT_ENABLED not set
    - if: '$PYLINT_ENABLED != "true"'
      when: never
    - !reference [.test-policy, rules]

# fallback syntax check (compileall) when no unit-test framework is enabled
py-compile:
  extends: .python-base
  stage: build
  script:
    - install_requirements
    - _python -m compileall $PYTHON_COMPILE_ARGS
  rules:
    # skip when one of unit test framework is enabled
    - if: '$UNITTEST_ENABLED == "true" || $PYTEST_ENABLED == "true" || $NOSETESTS_ENABLED == "true"'
      when: never
    - !reference [.test-policy, rules]

# Black formatting check (check-only: fails without rewriting files)
py-black:
  extends: .python-base
  stage: build
  script:
    - install_requirements
    - _pip install black
    - _run black . --check --extend-exclude '(\/\.cache\/|\/\.venv\/)'
  rules:
    # exclude if $PYTHON_BLACK_ENABLED not set
    - if: '$PYTHON_BLACK_ENABLED != "true"'
      when: never
    - !reference [.test-policy, rules]

# isort import-order check (check-only: fails without rewriting files)
py-isort:
  extends: .python-base
  stage: build
  script:
    - install_requirements
    - _pip install isort
    - _run isort . --check-only --extend-skip .cache --extend-skip .venv
  rules:
    # exclude if $PYTHON_ISORT_ENABLED not set
    - if: '$PYTHON_ISORT_ENABLED != "true"'
      when: never
    - !reference [.test-policy, rules]

# Ruff lint; emits a native JSON report for SonarQube (when configured) and a
# GitLab Code Climate report, with a grouped console fallback on failure
py-ruff:
  extends: .python-base
  stage: build
  script:
    - |
      if [[ ${BANDIT_ENABLED} == "true" || ${PYLINT_ENABLED} == "true" || ${PYTHON_ISORT_ENABLED} == "true" ]]; then
        log_warn "Ruff can replace isort, Bandit, Pylint"
      fi
    # Ruff is self dependent tool (written in Rust), it can be installed without project dependencies (_pip and _run don't look required here)
    - pip install ${PIP_OPTS} ruff
    # JSON output (for SonarQube); --exit-zero so reporting never fails the job here
    - |
      if [[ "$SONAR_HOST_URL" ]]
      then
        ruff check . ${RUFF_ARGS} --extend-exclude .venv,.cache --exit-zero --output-format json --output-file reports/py-ruff.native.json
      fi
    # then GitLab and grouped/console formats
    # (second invocation only runs when the first fails, to show findings in the log)
    - ruff check . ${RUFF_ARGS} --extend-exclude .venv,.cache --output-format gitlab --output-file reports/py-ruff.gitlab.json || ruff check . ${RUFF_ARGS} --extend-exclude .venv,.cache --output-format grouped
  artifacts:
    name: "$CI_JOB_NAME artifacts from $CI_PROJECT_NAME on $CI_COMMIT_REF_SLUG"
    expire_in: 1 day
    when: always
    reports:
      codequality: $PYTHON_PROJECT_DIR/reports/py-ruff.gitlab.json
    paths:
      - "$PYTHON_PROJECT_DIR/reports/py-ruff.*"
  rules:
    # exclude if $RUFF_ENABLED not set
    - if: '$RUFF_ENABLED != "true"'
      when: never
    - !reference [.test-policy, rules]

# Ruff formatting check (check-only: fails without rewriting files)
py-ruff-format:
  extends: .python-base
  stage: build
  script:
    - |
      if [[ ${PYTHON_BLACK_ENABLED} == "true" ]]; then
        log_warn "Ruff can replace Black"
      fi
    # Ruff is self dependent tool (written in Rust), it can be installed without project dependencies (_pip and _run don't look required here)
    - pip install ${PIP_OPTS} ruff
    - ruff format --check . --exclude .venv,.cache
  rules:
    # exclude if $RUFF_FORMAT_ENABLED not set
    - if: '$RUFF_FORMAT_ENABLED != "true"'
      when: never
    - !reference [.test-policy, rules]

# mypy static type check; console output is converted to a Code Climate report
py-mypy:
  extends: .python-base
  stage: build
  variables:
    MYPY_CACHE_DIR: "$CI_PROJECT_DIR/.cache/mypy"
  script:
    - install_requirements
    - _pip install mypy mypy-to-codeclimate
    # '|| true' so the job verdict comes from mypy-to-codeclimate below,
    # after the console report has been captured
    - _run mypy ${MYPY_ARGS} ${MYPY_FILES:-$(find -type f -name "*.py" -not -path "./.cache/*" -not -path "./.venv/*")} | tee reports/py-mypy.console.txt || true
    # mypy-to-codeclimate will fail if any error was found
    - _run mypy-to-codeclimate reports/py-mypy.console.txt reports/py-mypy.codeclimate.json
  artifacts:
    name: "$CI_JOB_NAME artifacts from $CI_PROJECT_NAME on $CI_COMMIT_REF_SLUG"
    expire_in: 1 day
    when: always
    reports:
      codequality: $PYTHON_PROJECT_DIR/reports/py-mypy.codeclimate.json
    paths:
      - "$PYTHON_PROJECT_DIR/reports/py-mypy.*"
  rules:
    # exclude if $MYPY_ENABLED not set
    - if: '$MYPY_ENABLED != "true"'
      when: never
    - !reference [.test-policy, rules]

###############################################################################################
#                                      test stage                                             #
###############################################################################################
# unittest runner with coverage + JUnit XML reports (via xmlrunner)
py-unittest:
  extends: .python-test
  script:
    - install_requirements
    # code coverage
    - _pip install coverage
    # JUnit XML report
    - _pip install unittest-xml-reporting
    - _run coverage run -m xmlrunner discover -o "reports/" $UNITTEST_ARGS
    - _run coverage report -m
    - _run coverage xml -o "reports/py-coverage.cobertura.xml"
  rules:
    # skip if $UNITTEST_ENABLED not set
    - if: '$UNITTEST_ENABLED != "true"'
      when: never
    - !reference [.test-policy, rules]

# pytest runner with coverage (term output feeds the coverage badge regex)
py-pytest:
  extends: .python-test
  script:
    - install_requirements
    - _pip install pytest pytest-cov coverage
    - _python -m pytest --junit-xml=reports/TEST-pytests.xml --cov --cov-report term  --cov-report xml:reports/py-coverage.cobertura.xml ${PYTEST_ARGS}
  rules:
    # skip if $PYTEST_ENABLED not set
    - if: '$PYTEST_ENABLED != "true"'
      when: never
    - !reference [.test-policy, rules]

# nose runner with coverage + xunit reports (nose expected in project deps)
py-nosetests:
  extends: .python-test
  script:
    - install_requirements
    - _run nosetests --with-xunit --xunit-file=reports/TEST-nosetests.xml --with-coverage --cover-erase --cover-xml --cover-xml-file=reports/py-coverage.cobertura.xml ${NOSETESTS_ARGS}
  rules:
    # skip if $NOSETESTS_ENABLED not set
    - if: '$NOSETESTS_ENABLED != "true"'
      when: never
    - !reference [.test-policy, rules]

# Bandit (SAST)
# Bandit SAST scan; optional CSV (SonarQube) and JSON (DefectDojo) reports are
# produced with --exit-zero, then a final plain run sets the job verdict
py-bandit:
  extends: .python-base
  stage: test
  # force no dependencies
  dependencies: []
  script:
    - install_requirements
    - _pip install bandit
    # CSV (for SonarQube)
    - |
      if [[ "$SONAR_HOST_URL" ]]
      then
        _run bandit ${TRACE+--verbose} --exit-zero --exclude ./.cache,./.venv --format csv --output reports/py-bandit.bandit.csv ${BANDIT_ARGS}
      fi
    # JSON (for DefectDojo)
    - |
      if [[ "$DEFECTDOJO_BANDIT_REPORTS" ]]
      then
        _run bandit ${TRACE+--verbose} --exit-zero --exclude ./.cache,./.venv --format json --output reports/py-bandit.bandit.json ${BANDIT_ARGS}
      fi
    - _run bandit ${TRACE+--verbose} --exclude ./.cache,./.venv ${BANDIT_ARGS}
  artifacts:
    when: always
    name: "$CI_JOB_NAME artifacts from $CI_PROJECT_NAME on $CI_COMMIT_REF_SLUG"
    expire_in: 1 day
    access: developer
    paths:
      - "$PYTHON_PROJECT_DIR/reports/py-bandit.*"
  rules:
    # exclude if $BANDIT_ENABLED not set
    - if: '$BANDIT_ENABLED != "true"'
      when: never
    - !reference [.test-policy, rules]

# Trivy (dependency check)
# Trivy dependency scan: downloads (and caches) the Trivy binary, produces a
# pinned dependency listing per build system, then scans ./reports/
py-trivy:
  extends: .python-base
  stage: test
  # force no dependencies
  dependencies: []
  script:
    - |
      if [[ -z "$PYTHON_TRIVY_DIST_URL" ]]
      then
        log_info "Trivy version unset: retrieve latest version..."
        trivy_version=$(github_get_latest_version aquasecurity/trivy)
        PYTHON_TRIVY_DIST_URL="https://github.com/aquasecurity/trivy/releases/download/${trivy_version}/trivy_${trivy_version:1}_Linux-64bit.tar.gz"
        log_info "... use latest Trivy version: \\e[32m$PYTHON_TRIVY_DIST_URL\\e[0m"
      fi
      # cache the binary under a hash of its download URL
      python_trivy="$XDG_CACHE_HOME/trivy-$(echo "$PYTHON_TRIVY_DIST_URL" | md5sum | cut -d" " -f1)"
      if [[ -f $python_trivy ]]
      then
        log_info "Trivy found in cache (\\e[32m$PYTHON_TRIVY_DIST_URL\\e[0m): reuse"
      else
        log_info "Trivy not found in cache (\\e[32m$PYTHON_TRIVY_DIST_URL\\e[0m): download"
        python3 -c 'import urllib.request;urllib.request.urlretrieve("'$PYTHON_TRIVY_DIST_URL'","trivy.tar.gz")'
        tar zxf trivy.tar.gz trivy
        mkdir -p $XDG_CACHE_HOME
        mv ./trivy $python_trivy
      fi
    - |
      # stage a pinned dependency file in ./reports for Trivy to scan
      case "$PYTHON_BUILD_SYSTEM" in
        poetry*|pipenv*)
          log_info "$PYTHON_BUILD_SYSTEM build system (\\e[32muse lock file\\e[0m)"
          cp poetry.lock Pipfile.lock ./reports 2>/dev/null || true
          ;;
        uv*)
          log_info "$PYTHON_BUILD_SYSTEM build system used (\\e[32mmust generate pinned requirements.txt from uv.lock\\e[0m)"
          maybe_install_uv
          uv export > ./reports/requirements.txt
          ;;
        *)
          log_info "$PYTHON_BUILD_SYSTEM build system used (\\e[32mmust generate pinned requirements.txt\\e[0m)"
          install_requirements
          _pip freeze | tee ./reports/requirements.txt
          ;;
      esac
      # warn when the committed requirements.txt diverges from the frozen one
      if [[ -f "./requirements.txt" ]]
      then
        sort -u ./requirements.txt | grep -v "^[  ]*$" > ./requirements.txt.sorted
        sort -u ./reports/requirements.txt | grep -v "^[  ]*$" > ./requirements.txt.generated.sorted
        if [[ $(diff ./requirements.txt.sorted ./requirements.txt.generated.sorted) ]]
        then
          log_warn "The ./requirements.txt file does not match the ./reports/requirements.txt file generated via pip freeze. Make sure to include all dependencies with pinned versions in ./requirements.txt and re-commit the file."
        fi
      fi

      # Generate the native JSON report that can later be converted to other formats
      # NOTE(review): when the scan passes, exit_code stays unset and the final
      # 'exit $exit_code' expands to bare 'exit' (status of the last command) —
      # effectively 0 here; confirm intent
      $python_trivy fs ${PYTHON_TRIVY_ARGS} --format json --list-all-pkgs --output reports/py-trivy.trivy.json --exit-code 1 ./reports/ > ./reports/trivy.log 2>&1 || exit_code=$?
      cat ./reports/trivy.log
      if [ $(grep -ic "Number of language-specific files[^0-9]*0$" ./reports/trivy.log) -eq 1 ]; then
        log_error "Could not find a file listing all dependencies with their versions."
        exit 1
      fi
      rm ./reports/trivy.log
      # console output
      $python_trivy convert --format table reports/py-trivy.trivy.json
      exit $exit_code
  artifacts:
    name: "$CI_JOB_NAME artifacts from $CI_PROJECT_NAME on $CI_COMMIT_REF_SLUG"
    expire_in: 1 day
    access: developer
    when: always
    paths:
      - "$PYTHON_PROJECT_DIR/reports/py-trivy.*"
      - "$PYTHON_PROJECT_DIR/reports/requirements.txt"
  rules:
    # exclude if $PYTHON_TRIVY_DISABLED is set
    - if: '$PYTHON_TRIVY_DISABLED == "true"'
      when: never
    - !reference [.test-policy, rules]

# SBOM generation with Syft (CycloneDX JSON), downloaded and cached like Trivy
py-sbom:
  extends: .python-base
  stage: test
  # force no dependency
  dependencies: []
  needs: []
  script:
    - |
      # ensure a pinned dependency listing exists for Syft to pick up
      case "$PYTHON_BUILD_SYSTEM" in
        poetry*|pipenv*)
          log_info "$PYTHON_BUILD_SYSTEM build system (\\e[32muse lock file\\e[0m)"
          ;;
        uv*)
          log_info "$PYTHON_BUILD_SYSTEM build system used (\\e[32mmust generate pinned requirements.txt from uv.lock\\e[0m)"
          maybe_install_uv
          uv export > ./reports/requirements.txt
          ;;
        *)
          log_info "$PYTHON_BUILD_SYSTEM build system used (\\e[32mmust generate pinned requirements.txt\\e[0m)"
          install_requirements
          # NOTE(review): written to $PYTHON_REQS_FILE (project dir) here, while
          # py-trivy writes ./reports/requirements.txt — confirm this is intended
          _pip freeze > "${PYTHON_REQS_FILE}"
          ;;
      esac
    - |
      if [[ -z "$PYTHON_SBOM_SYFT_URL" ]]
      then
        log_info "Syft version unset: retrieve latest version..."
        syft_version=$(github_get_latest_version anchore/syft)
        PYTHON_SBOM_SYFT_URL="https://github.com/anchore/syft/releases/download/${syft_version}/syft_${syft_version:1}_linux_amd64.tar.gz"
        log_info "... use latest Syft version: \\e[32m$PYTHON_SBOM_SYFT_URL\\e[0m"
      fi
      # cache the binary under a hash of its download URL
      python_sbom_syft="$PIP_CACHE_DIR/syft-$(echo "$PYTHON_SBOM_SYFT_URL" | md5sum | cut -d" " -f1)"
      if [ ! -f $python_sbom_syft ]; then
        python3 -c 'import urllib.request;urllib.request.urlretrieve("'$PYTHON_SBOM_SYFT_URL'","syft.tar.gz")'
        tar zxf syft.tar.gz syft
        mkdir -p $PIP_CACHE_DIR
        mv ./syft $python_sbom_syft
      fi
    - $python_sbom_syft dir:. --source-name $PYTHON_SBOM_NAME $PYTHON_SBOM_OPTS -o cyclonedx-json > reports/py-sbom.cyclonedx.json
    - chmod a+r reports/py-sbom.cyclonedx.json
  artifacts:
    name: "Python SBOM from $CI_PROJECT_NAME on $CI_COMMIT_REF_SLUG"
    expire_in: 1 week
    when: always
    paths:
      - "$PYTHON_PROJECT_DIR/reports/py-sbom.cyclonedx.json"
    reports:
      cyclonedx:
        - "$PYTHON_PROJECT_DIR/reports/py-sbom.cyclonedx.json"
  rules:
    # exclude if disabled
    - if: '$PYTHON_SBOM_DISABLED == "true"'
      when: never
    - !reference [.test-policy, rules]

# (manual from master branch): triggers a release (tag creation)
# bumps the project version, commits and pushes a release tag (see py_release);
# manual from production/integration branches, auto when auto-release enabled
py-release:
  extends: .python-base
  stage: publish
  before_script:
    - !reference [.python-base, before_script]
    # install git and OpenSSH
    - maybe_install_packages git openssh-client
  script:
    - git config --global user.email "$GITLAB_USER_EMAIL"
    - git config --global user.name "$GITLAB_USER_LOGIN"
    # detached HEAD in CI: re-create the local branch before committing
    - git checkout -B $CI_COMMIT_REF_NAME
    - configure_scm_auth
    - py_release
  artifacts:
    paths:
      - $PYTHON_PROJECT_DIR/dist/*
  rules:
    # exclude if $PYTHON_RELEASE_ENABLED not set
    - if: '$PYTHON_RELEASE_ENABLED != "true"'
      when: never
    # on production branch: auto if $PYTHON_AUTO_RELEASE_ENABLED set and implicitly $PYTHON_RELEASE_ENABLED set
    - if: '$PYTHON_AUTO_RELEASE_ENABLED == "true" && $CI_COMMIT_REF_NAME =~ $PROD_REF'
    # on production or integration branch: manual, non blocking
    - if: '$CI_COMMIT_REF_NAME =~ $PROD_REF || $CI_COMMIT_REF_NAME =~ $INTEG_REF'
      when: manual
      allow_failure: true

# (auto from release tag): publishes the Python package(s) to a PyPi registry
# publishes the package(s) to the configured PyPI-compatible registry
# (see py_publish); runs automatically on release tags
py-publish:
  extends: .python-base
  stage: publish
  before_script:
    - !reference [.python-base, before_script]
    # install curl (to decode @url@ variables)
    - maybe_install_packages curl
  script:
    - py_publish
  artifacts:
    paths:
      - $PYTHON_PROJECT_DIR/dist/*
  rules:
    # exclude if $PYTHON_RELEASE_ENABLED not set
    - if: '$PYTHON_RELEASE_ENABLED != "true" && $PYTHON_PUBLISH_ENABLED != "true"'
      when: never
    # on tag with release pattern: auto
    - if: '$CI_COMMIT_TAG =~ $RELEASE_REF'