# ========================================================================================= # Copyright (C) 2024 Pierre Smeyers and contributors # # This program is free software; you can redistribute it and/or modify it under the terms # of the GNU Lesser General Public License as published by the Free Software Foundation; # either version 3 of the License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; # without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. # See the GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public License along with this # program; if not, write to the Free Software Foundation, Inc., 51 Franklin Street, Fifth # Floor, Boston, MA 02110-1301, USA. # ========================================================================================= spec: inputs: image: description: The Docker image used to run Docker Compose CLI commands - **set the version required by your Docker Compose cluster** default: registry.hub.docker.com/library/docker:latest cmd: description: "The docker compose or stack command (empty means _auto_)" default: '' options: - '' - docker compose - docker-compose - docker stack base-app-name: description: Base application name default: $CI_PROJECT_NAME environment-url: description: |- The default environments url _(only define for static environment URLs declaration)_ _supports late variable expansion (ex: `https://%{environment_name}.dcmp.acme.com`)_ default: '' scripts-dir: description: Directory where Compose files, dotenv files and hook scripts are located default: . config-opts: description: "[`compose config` options](https://docs.docker.com/reference/cli/docker/compose/config/#options)" default: '--quiet' stack-config-opts: description: "[`stack config` options](https://docs.docker.com/reference/cli/docker/stack/config/)" default: '' stack-config-silent: description: Silences standard output of `stack config` command default: "true" type: boolean config-disabled: description: Disable Compose Config type: boolean default: false up-opts: description: "[`compose up` options](https://docs.docker.com/reference/cli/docker/compose/up/#options) (only when using Docker Compose)" default: "--no-build --remove-orphans --wait --wait-timeout 180" down-opts: description: "[`compose down` options](https://docs.docker.com/reference/cli/docker/compose/down/#options) (only when using Docker Compose)" default: "--volumes --remove-orphans --rmi all" stack-deploy-opts: description: "[`stack deploy` options](https://docs.docker.com/reference/cli/docker/stack/deploy/) (only when using Docker Stack)" default: "--prune" ssh-known-hosts: description: SSH `known_hosts` (file or text variable) default: '' review-docker-host: description: "Docker Host for `review` env (ex: `ssh://docker@docker-host-for-review:2375`)" default: '' review-app-name: description: The application name for `review` env (only define to override default) default: '' review-autostop-duration: description: The amount of time before GitLab will automatically stop `review` environments default: 4 hours review-environment-url: description: The `review` environments url _(only define for static environment URLs declaration and if different from default)_ default: '' integ-docker-host: description: "Docker Host for `integration` env (ex: `ssh://docker@docker-host-for-integ:2375`)" default: '' integ-app-name: description: The application name for `integration` env (only define to override default) default: '' integ-environment-url: description: The `integration` environment url _(only define for static environment URLs declaration and if different from default)_ default: '' staging-docker-host: description: "Docker Host for `staging` env (ex: `ssh://docker@docker-host-for-staging:2375`)" default: '' staging-app-name: description: The application name for `staging` env (only define to override default) default: '' staging-environment-url: description: The `staging` environment url _(only define for static environment URLs declaration and if different from default)_ default: '' prod-docker-host: description: "Docker Host for `production` env (ex: `ssh://docker@docker-host-for-prod:2375`)" default: '' prod-app-name: description: The application name for `production` env (only define to override default) default: '' prod-environment-url: description: The `production` environment url _(only define for static environment URLs declaration and if different from default)_ default: '' prod-deploy-strategy: description: Defines the deployment to `production` strategy. options: - manual - auto default: manual --- # default workflow rules: Merge Request pipelines workflow: rules: # prevent MR pipeline originating from production or integration branch(es) - if: '$CI_MERGE_REQUEST_SOURCE_BRANCH_NAME =~ $PROD_REF || $CI_MERGE_REQUEST_SOURCE_BRANCH_NAME =~ $INTEG_REF' when: never # on non-prod, non-integration branches: prefer MR pipeline over branch pipeline - if: '$CI_COMMIT_BRANCH && $CI_OPEN_MERGE_REQUESTS && $CI_COMMIT_REF_NAME !~ $PROD_REF && $CI_COMMIT_REF_NAME !~ $INTEG_REF' when: never - if: '$CI_COMMIT_MESSAGE =~ "/\[(ci skip|skip ci) on ([^],]*,)*tag(,[^],]*)*\]/" && $CI_COMMIT_TAG' when: never - if: '$CI_COMMIT_MESSAGE =~ "/\[(ci skip|skip ci) on ([^],]*,)*branch(,[^],]*)*\]/" && $CI_COMMIT_BRANCH' when: never - if: '$CI_COMMIT_MESSAGE =~ "/\[(ci skip|skip ci) on ([^],]*,)*mr(,[^],]*)*\]/" && $CI_MERGE_REQUEST_ID' when: never - if: '$CI_COMMIT_MESSAGE =~ "/\[(ci skip|skip ci) on ([^],]*,)*default(,[^],]*)*\]/" && $CI_COMMIT_REF_NAME =~ $CI_DEFAULT_BRANCH' when: never - if: '$CI_COMMIT_MESSAGE =~ "/\[(ci skip|skip ci) on ([^],]*,)*prod(,[^],]*)*\]/" && $CI_COMMIT_REF_NAME =~ $PROD_REF' when: never - if: '$CI_COMMIT_MESSAGE =~ "/\[(ci skip|skip ci) on ([^],]*,)*integ(,[^],]*)*\]/" && $CI_COMMIT_REF_NAME =~ $INTEG_REF' when: never - if: '$CI_COMMIT_MESSAGE =~ "/\[(ci skip|skip ci) on ([^],]*,)*dev(,[^],]*)*\]/" && $CI_COMMIT_REF_NAME !~ $PROD_REF && $CI_COMMIT_REF_NAME !~ $INTEG_REF' when: never - when: always # test job prototype: implement adaptive pipeline rules .test-policy: rules: # on tag: auto & failing - if: $CI_COMMIT_TAG # on ADAPTIVE_PIPELINE_DISABLED: auto & failing - if: '$ADAPTIVE_PIPELINE_DISABLED == "true"' # on production or integration branch(es): auto & failing - if: '$CI_COMMIT_REF_NAME =~ $PROD_REF || $CI_COMMIT_REF_NAME =~ $INTEG_REF' # early stage (dev branch, no MR): manual & non-failing - if: '$CI_MERGE_REQUEST_ID == null && $CI_OPEN_MERGE_REQUESTS == null' when: manual allow_failure: true # Draft MR: auto & non-failing - if: '$CI_MERGE_REQUEST_TITLE =~ /^Draft:.*/' allow_failure: true # else (Ready MR): auto & failing - when: on_success variables: # variabilized tracking image TBC_TRACKING_IMAGE: registry.gitlab.com/to-be-continuous/tools/tracking:master # Default Docker image (use a public image - can be overridden) DCMP_IMAGE: $[[ inputs.image ]] DCMP_CMD: $[[ inputs.cmd ]] DCMP_BASE_APP_NAME: $[[ inputs.base-app-name ]] DCMP_ENVIRONMENT_URL: $[[ inputs.environment-url ]] DCMP_SCRIPTS_DIR: $[[ inputs.scripts-dir ]] DCMP_SSH_KNOWN_HOSTS: $[[ inputs.ssh-known-hosts ]] DCMP_CONFIG_OPTS: $[[ inputs.config-opts ]] DCMP_STACK_CONFIG_OPTS: $[[ inputs.stack-config-opts ]] DCMP_STACK_CONFIG_SILENT: $[[ inputs.stack-config-silent ]] DCMP_CONFIG_DISABLED: $[[ inputs.config-disabled ]] DCMP_UP_OPTS: $[[ inputs.up-opts ]] DCMP_DOWN_OPTS: $[[ inputs.down-opts ]] DCMP_STACK_DEPLOY_OPTS: $[[ inputs.stack-deploy-opts ]] DCMP_REVIEW_DOCKER_HOST: $[[ inputs.review-docker-host ]] DCMP_REVIEW_APP_NAME: $[[ inputs.review-app-name ]] DCMP_REVIEW_ENVIRONMENT_URL: $[[ inputs.review-environment-url ]] DCMP_REVIEW_AUTOSTOP_DURATION: $[[ inputs.review-autostop-duration ]] DCMP_INTEG_DOCKER_HOST: $[[ inputs.integ-docker-host ]] DCMP_INTEG_APP_NAME: $[[ inputs.integ-app-name ]] DCMP_INTEG_ENVIRONMENT_URL: $[[ inputs.integ-environment-url ]] DCMP_STAGING_DOCKER_HOST: $[[ inputs.staging-docker-host ]] DCMP_STAGING_APP_NAME: $[[ inputs.staging-app-name ]] DCMP_STAGING_ENVIRONMENT_URL: $[[ inputs.staging-environment-url ]] DCMP_PROD_DOCKER_HOST: $[[ inputs.prod-docker-host ]] DCMP_PROD_APP_NAME: $[[ inputs.prod-app-name ]] DCMP_PROD_ENVIRONMENT_URL: $[[ inputs.prod-environment-url ]] DCMP_PROD_DEPLOY_STRATEGY: $[[ inputs.prod-deploy-strategy ]] # default production ref name (pattern) PROD_REF: '/^(master|main)$/' # default integration ref name (pattern) INTEG_REF: '/^develop$/' stages: - build - test - package-build - package-test - infra - deploy - acceptance - publish - infra-prod - production .compose-scripts: &compose-scripts | # BEGSCRIPT set -e function log_info() { >&2 echo -e "[\\e[1;94mINFO\\e[0m] $*" } function log_warn() { >&2 echo -e "[\\e[1;93mWARN\\e[0m] $*" } function log_error() { >&2 echo -e "[\\e[1;91mERROR\\e[0m] $*" } function fail() { log_error "$*" exit 1 } function assert_defined() { if [[ -z "$1" ]] then log_error "$2" exit 1 fi } function install_ca_certs() { certs=$1 if [[ -z "$certs" ]] then return fi # List of typical bundles bundles="/etc/ssl/certs/ca-certificates.crt" # Debian/Ubuntu/Gentoo etc. bundles="${bundles} /etc/ssl/cert.pem" # Alpine Linux bundles="${bundles} /etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem" # CentOS/RHEL 7 bundles="${bundles} /etc/pki/tls/certs/ca-bundle.crt" # Fedora/RHEL 6 bundles="${bundles} /etc/ssl/ca-bundle.pem" # OpenSUSE bundles="${bundles} /etc/pki/tls/cacert.pem" # OpenELEC # Try to find the right bundle to update it with custom CA certificates for bundle in ${bundles} do # import if bundle exists if [[ -f "${bundle}" ]] then # Import certificates in bundle echo "${certs}" | tr -d '\r' >> "${bundle}" log_info "Custom CA certificates imported in \\e[33;1m${bundle}\\e[0m" ca_imported=1 break fi done if [[ -z "$ca_imported" ]] then log_warn "Could not import custom CA certificates !" fi } function unscope_variables() { _scoped_vars=$(env | awk -F '=' "/^scoped__[a-zA-Z0-9_]+=/ {print \$1}" | sort) if [[ -z "$_scoped_vars" ]]; then return; fi log_info "Processing scoped variables..." for _scoped_var in $_scoped_vars do _fields=${_scoped_var//__/:} _condition=$(echo "$_fields" | cut -d: -f3) case "$_condition" in if) _not="";; ifnot) _not=1;; *) log_warn "... unrecognized condition \\e[1;91m$_condition\\e[0m in \\e[33;1m${_scoped_var}\\e[0m" continue ;; esac _target_var=$(echo "$_fields" | cut -d: -f2) _cond_var=$(echo "$_fields" | cut -d: -f4) _cond_val=$(eval echo "\$${_cond_var}") _test_op=$(echo "$_fields" | cut -d: -f5) case "$_test_op" in defined) if [[ -z "$_not" ]] && [[ -z "$_cond_val" ]]; then continue; elif [[ "$_not" ]] && [[ "$_cond_val" ]]; then continue; fi ;; equals|startswith|endswith|contains|in|equals_ic|startswith_ic|endswith_ic|contains_ic|in_ic) # comparison operator # sluggify actual value _cond_val=$(echo "$_cond_val" | tr '[:punct:]' '_') # retrieve comparison value _cmp_val_prefix="scoped__${_target_var}__${_condition}__${_cond_var}__${_test_op}__" _cmp_val=${_scoped_var#"$_cmp_val_prefix"} # manage 'ignore case' if [[ "$_test_op" == *_ic ]] then # lowercase everything _cond_val=$(echo "$_cond_val" | tr '[:upper:]' '[:lower:]') _cmp_val=$(echo "$_cmp_val" | tr '[:upper:]' '[:lower:]') fi case "$_test_op" in equals*) if [[ -z "$_not" ]] && [[ "$_cond_val" != "$_cmp_val" ]]; then continue; elif [[ "$_not" ]] && [[ "$_cond_val" == "$_cmp_val" ]]; then continue; fi ;; startswith*) if [[ -z "$_not" ]] && [[ "$_cond_val" != "$_cmp_val"* ]]; then continue; elif [[ "$_not" ]] && [[ "$_cond_val" == "$_cmp_val"* ]]; then continue; fi ;; endswith*) if [[ -z "$_not" ]] && [[ "$_cond_val" != *"$_cmp_val" ]]; then continue; elif [[ "$_not" ]] && [[ "$_cond_val" == *"$_cmp_val" ]]; then continue; fi ;; contains*) if [[ -z "$_not" ]] && [[ "$_cond_val" != *"$_cmp_val"* ]]; then continue; elif [[ "$_not" ]] && [[ "$_cond_val" == *"$_cmp_val"* ]]; then continue; fi ;; in*) if [[ -z "$_not" ]] && [[ "__${_cmp_val}__" != *"__${_cond_val}__"* ]]; then continue; elif [[ "$_not" ]] && [[ "__${_cmp_val}__" == *"__${_cond_val}__"* ]]; then continue; fi ;; esac ;; *) log_warn "... unrecognized test operator \\e[1;91m${_test_op}\\e[0m in \\e[33;1m${_scoped_var}\\e[0m" continue ;; esac # matches _val=$(eval echo "\$${_target_var}") log_info "... apply \\e[32m${_target_var}\\e[0m from \\e[32m\$${_scoped_var}\\e[0m${_val:+ (\\e[33;1moverwrite\\e[0m)}" _val=$(eval echo "\$${_scoped_var}") export "${_target_var}"="${_val}" done log_info "... done" } # evaluate and export a secret # - $1: secret variable name function eval_secret() { name=$1 value=$(eval echo "\$${name}") case "$value" in @b64@*) decoded=$(mktemp) errors=$(mktemp) if echo "$value" | cut -c6- | base64 -d > "${decoded}" 2> "${errors}" then # shellcheck disable=SC2086 export ${name}="$(cat ${decoded})" log_info "Successfully decoded base64 secret \\e[33;1m${name}\\e[0m" else fail "Failed decoding base64 secret \\e[33;1m${name}\\e[0m:\\n$(sed 's/^/... /g' "${errors}")" fi ;; @hex@*) decoded=$(mktemp) errors=$(mktemp) if echo "$value" | cut -c6- | sed 's/\([0-9A-F]\{2\}\)/\\\\x\1/gI' | xargs printf > "${decoded}" 2> "${errors}" then # shellcheck disable=SC2086 export ${name}="$(cat ${decoded})" log_info "Successfully decoded hexadecimal secret \\e[33;1m${name}\\e[0m" else fail "Failed decoding hexadecimal secret \\e[33;1m${name}\\e[0m:\\n$(sed 's/^/... /g' "${errors}")" fi ;; @url@*) url=$(echo "$value" | cut -c6-) if command -v curl > /dev/null then decoded=$(mktemp) errors=$(mktemp) if curl -s -S -f --connect-timeout 5 -o "${decoded}" "$url" 2> "${errors}" then # shellcheck disable=SC2086 export ${name}="$(cat ${decoded})" log_info "Successfully curl'd secret \\e[33;1m${name}\\e[0m" else log_warn "Failed getting secret \\e[33;1m${name}\\e[0m:\\n$(sed 's/^/... /g' "${errors}")" fi elif command -v wget > /dev/null then decoded=$(mktemp) errors=$(mktemp) if wget -T 5 -O "${decoded}" "$url" 2> "${errors}" then # shellcheck disable=SC2086 export ${name}="$(cat ${decoded})" log_info "Successfully wget'd secret \\e[33;1m${name}\\e[0m" else log_warn "Failed getting secret \\e[33;1m${name}\\e[0m:\\n$(sed 's/^/... /g' "${errors}")" fi else fail "Couldn't get secret \\e[33;1m${name}\\e[0m: no http client found" fi ;; esac } function eval_all_secrets() { encoded_vars=$(env | grep -v '^scoped__' | awk -F '=' '/^[a-zA-Z0-9_]*=@(b64|hex|url)@/ {print $1}') for var in $encoded_vars do eval_secret "$var" done } function exec_hook() { if [[ ! -x "$1" ]] && ! chmod +x "$1" then log_warn "... could not make \\e[33;1m${1}\\e[0m executable: please do it (chmod +x)" # fallback technique sh "$1" else "$1" fi } # Converts a string to SCREAMING_SNAKE_CASE function to_ssc() { echo "$1" | tr '[:lower:]' '[:upper:]' | tr '[:punct:]' '_' } function awkenvsubst() { # escapes '&' char in variables for gsub awk '{while(match($0,"[$%]{[^}]*}")) {var=substr($0,RSTART+2,RLENGTH-3);val=ENVIRON[var];gsub("&","\\\\&",val);gsub("[$%]{"var"}",val)}}1' } function configure_network() { # maybe install .netrc if [[ -f ".netrc" ]] then log_info "--- \\e[32m.netrc\\e[0m file found: envsubst and install" awkenvsubst < .netrc > ~/.netrc chmod 0600 ~/.netrc fi dh_proto=${DOCKER_HOST%%:*} if [[ "$dh_proto" == "ssh" ]] then # setup SSH log_info "--- SSH Docker Host detected: configure SSH..." # 1: maybe install SSH client if ! command -v ssh-agent > /dev/null then log_info "... install SSH client" apk add --no-cache openssh-client fi # 2: maybe setup known hosts if [[ "$DCMP_SSH_KNOWN_HOSTS" ]] then mkdir -m 700 ~/.ssh if [[ -f "$DCMP_SSH_KNOWN_HOSTS" ]] then log_info "... add SSH known hosts (file)" cp -f "$DCMP_SSH_KNOWN_HOSTS" ~/.ssh/known_hosts else log_info "... add SSH known hosts (text)" echo "$DCMP_SSH_KNOWN_HOSTS" > ~/.ssh/known_hosts fi chmod 644 ~/.ssh/known_hosts fi # start SSH agent eval "$(ssh-agent -s)" # 3: maybe detect and add SSH private key (file or PEM content) ssh_priv_key=${ENV_SSH_PRIVATE_KEY:-$DCMP_SSH_PRIVATE_KEY} if [[ -f "$ssh_priv_key" ]] then log_info "... add SSH private key (file)" tr -d '\r' < "$ssh_priv_key" | ssh-add - elif [[ "$ssh_priv_key" ]] then log_info "... add SSH private key (text)" echo "$ssh_priv_key" | tr -d '\r' | ssh-add - else log_warn "No SSH private key found in configuration" fi fi } function configure_registries_auth() { if [[ -f ".docker/config.json" ]] then log_info "--- \\e[32m.docker/config.json\\e[0m file found: envsubst and install" mkdir -p ~/.docker # special variable supported TBC_CI_REGISTRY_TOKEN=$(echo -n "$CI_REGISTRY_USER:$CI_REGISTRY_PASSWORD" | base64 | tr -d '\n') export TBC_CI_REGISTRY_TOKEN awkenvsubst < .docker/config.json > ~/.docker/config.json else log_info "--- \\e[32m.docker/config.json\\e[0m file not found: looking for TBC built images..." _image_vars=$(env | awk -F '=' "/^[A-Z]+_(SNAPSHOT|RELEASE)_IMAGE=/ {print \$1}" | sort | uniq) if [[ -z "$_image_vars" ]] then log_info "... no TBC image detected: leave unconfigured Docker authentication" else # init Docker config JSON _docker_cfg_json="{\"auths\":{" _registry_hosts="," for _image_var in $_image_vars do _image_url=$(eval echo "\$${_image_var}") _registry_host=$(echo "$_image_url" | cut -d/ -f1) if [[ "$_registry_host" ]] then log_info "... TBC image detected: \\e[33;1m${_image_var}\\e[0m=\\e[33;1m${_image_url}\\e[0m..." if expr "${_registry_hosts}" : ".*,${_registry_host},.*" >/dev/null then log_info "... host \\e[33;1m${_registry_host}\\e[0m already configured: skip" else log_info "... add host \\e[33;1m${_registry_host}\\e[0m authentication" _prefix=$(echo "$_image_var" | cut -d_ -f1) _kind=$(echo "$_image_var" | cut -d_ -f2) _specific_user=$(eval echo "\$${_prefix}_REGISTRY_\$${_kind}_USER") _default_user=$(eval echo "\$${_prefix}_REGISTRY_USER") _specific_password=$(eval echo "\$${_prefix}_REGISTRY_\$${_kind}_PASSWORD") _default_password=$(eval echo "\$${_prefix}_REGISTRY_PASSWORD") _authent_token=$(echo -n "${_specific_user:-${_default_user:-$CI_REGISTRY_USER}}:${_specific_password:-${_default_password:-$CI_REGISTRY_PASSWORD}}" | base64 | tr -d '\n') if [[ "$_registry_hosts" != "," ]] then _docker_cfg_json="${_docker_cfg_json}," fi _docker_cfg_json="${_docker_cfg_json}\"$_registry_host\":{\"auth\":\"$_authent_token\"}" _registry_hosts="$_registry_hosts$_registry_host," fi fi done # end Docker config JSON _docker_cfg_json="${_docker_cfg_json}}}" mkdir -p ~/.docker echo "$_docker_cfg_json" > ~/.docker/config.json fi fi } function find_first() { for file in "$@"; do if [[ -f "$file" ]]; then echo "$file" break fi done } # initialize context and authentication (SSH) function compose_init() { export environment_type=$ENV_TYPE export environment_name=${ENV_APP_NAME:-${DCMP_BASE_APP_NAME}${ENV_APP_SUFFIX}} # also export environment_name in SCREAMING_SNAKE_CASE format (may be useful with Kubernetes env variables) environment_name_ssc=$(to_ssc "$environment_name") export environment_name_ssc # auto-detect command if [[ -z "$DCMP_CMD" ]] then if command -v docker-compose > /dev/null then log_info "... \\e[33;1mdocker-compose\\e[0m command found: will be used" DCMP_CMD=docker-compose else log_info "... \\e[33;1mdocker-compose\\e[0m command not found: will use \\e[33;1mdocker compose\\e[0m instead" DCMP_CMD="docker compose" fi fi # set COMPOSE_PROJECT_NAME (use env name) if [[ -z "$COMPOSE_PROJECT_NAME" ]] then export COMPOSE_PROJECT_NAME=$environment_name log_info "--- \$COMPOSE_PROJECT_NAME unset: use \\e[33;1m${COMPOSE_PROJECT_NAME}\\e[0m" fi # compose file lookup # see: https://docs.docker.com/compose/compose-application-model/#the-compose-file if [[ -z "$COMPOSE_FILE" ]] then log_info "--- \$COMPOSE_FILE unset: lookup for Docker Compose files..." base_compose_file=$(find_first "$DCMP_SCRIPTS_DIR/compose.yaml" "$DCMP_SCRIPTS_DIR/compose.yml" "$DCMP_SCRIPTS_DIR/docker-compose.yaml" "$DCMP_SCRIPTS_DIR/docker-compose.yml") env_compose_file=$(find_first "$DCMP_SCRIPTS_DIR/compose-${environment_type}.yaml" "$DCMP_SCRIPTS_DIR/compose-${environment_type}.yml" "$DCMP_SCRIPTS_DIR/docker-compose-${environment_type}.yaml" "$DCMP_SCRIPTS_DIR/docker-compose-${environment_type}.yml") if [[ -f "$env_compose_file" ]] then COMPOSE_FILE=$env_compose_file log_info "... env-specific Docker Compose file found: \\e[33;1m${env_compose_file}\\e[0m" file_no_ext="${env_compose_file%.*}" ext="${env_compose_file##*.}" # lookup for env-specific override env_override_file="${file_no_ext}.override.${ext}" if [[ -f "${env_override_file}" ]] then log_info "... env-specific Docker Compose override file found: \\e[33;1m${env_override_file}\\e[0m" COMPOSE_FILE="$COMPOSE_FILE:$env_override_file" fi elif [[ -f "$base_compose_file" ]] then COMPOSE_FILE=$base_compose_file # lookup for base override file_no_ext="${base_compose_file%.*}" ext="${base_compose_file##*.}" log_info "... base Docker Compose file found: \\e[33;1m${base_compose_file}\\e[0m" base_override_file="${file_no_ext}.override.${ext}" if [[ -f "${base_override_file}" ]] then log_info "... base Docker Compose override file found: \\e[33;1m${base_override_file}\\e[0m" COMPOSE_FILE="$COMPOSE_FILE:$base_override_file" fi # lookup for env-specific override env_override_file="${file_no_ext}-${environment_type}.override.${ext}" if [[ -f "${env_override_file}" ]] then log_info "... env-specific Docker Compose override file found: \\e[33;1m${env_override_file}\\e[0m" COMPOSE_FILE="$COMPOSE_FILE:$env_override_file" fi else log_error "... no Docker Compose file found in $DCMP_SCRIPTS_DIR: please refer to the template documentation" exit 1 fi export COMPOSE_FILE fi # dotenv files lookup if [[ -z "$COMPOSE_ENV_FILES" ]] then log_info "--- \$COMPOSE_ENV_FILES unset: lookup for env files..." # latest defined file takes precedence dcmp_envs="" if [[ -f "$DCMP_SCRIPTS_DIR/.env" ]] then dcmp_envs="$dcmp_envs,$DCMP_SCRIPTS_DIR/.env" log_info "... env file found: \\e[33;1m$DCMP_SCRIPTS_DIR/.env\\e[0m" fi if [[ -f "$DCMP_SCRIPTS_DIR/${environment_type}.env" ]] then dcmp_envs="$dcmp_envs,$DCMP_SCRIPTS_DIR/${environment_type}.env" log_info "... env file found: \\e[33;1m$DCMP_SCRIPTS_DIR/${environment_type}.env\\e[0m" fi export COMPOSE_ENV_FILES=${dcmp_envs:1} fi } # application deployment function function compose_up() { environment_url=${ENV_URL:-$DCMP_ENVIRONMENT_URL} # variables expansion in $environment_url environment_url=$(echo "$environment_url" | awkenvsubst) export environment_url # extract hostname from $environment_url hostname=$(echo "$environment_url" | awk -F[/:] '{print $4}') export hostname log_info "--- \\e[32mdeploy\\e[0m" log_info "--- \$environment_type: \\e[33;1m${environment_type}\\e[0m" log_info "--- \$environment_name: \\e[33;1m${environment_name}\\e[0m" log_info "--- \$environment_name_ssc: \\e[33;1m${environment_name_ssc}\\e[0m" log_info "--- \$hostname: \\e[33;1m${hostname}\\e[0m" # unset any upstream deployment env & artifacts rm -f docker-compose.out.env rm -f environment_url.txt # maybe execute pre compose-up script prescript="$DCMP_SCRIPTS_DIR/pre-compose-up.sh" if [[ -f "$prescript" ]]; then log_info "--- \\e[32mpre-compose-up\\e[0m hook (\\e[33;1m${prescript}\\e[0m) found: execute" exec_hook "$prescript" else log_info "--- \\e[32mpre-compose-up\\e[0m hook (\\e[33;1m${prescript}\\e[0m) not found: skip" fi if [[ "$DCMP_CMD" == "docker stack" ]]; then set -a for env_file in ${COMPOSE_ENV_FILES//,/$IFS}; do # shellcheck disable=SC1090 . "$env_file" done set +a compose_file_opts="-c ${COMPOSE_FILE//:/ -c }" # shellcheck disable=SC2086 $DCMP_CMD deploy --detach=false --with-registry-auth $DCMP_STACK_DEPLOY_OPTS $compose_file_opts "$environment_name" else # up (--detach is mandatory and therefore not configurable) # shellcheck disable=SC2086 $DCMP_CMD up --detach $DCMP_UP_OPTS fi # maybe execute post compose-up script postscript="$DCMP_SCRIPTS_DIR/post-compose-up.sh" if [[ -f "$postscript" ]]; then log_info "--- \\e[32mpost-compose-up\\e[0m hook (\\e[33;1m${postscript}\\e[0m) found: execute" exec_hook "$postscript" else log_info "--- \\e[32mpost-compose-up\\e[0m hook (\\e[33;1m${postscript}\\e[0m) not found: skip" fi # persist environment url if [[ -f environment_url.txt ]] then environment_url=$(cat environment_url.txt) export environment_url log_info "--- dynamic environment url found: (\\e[33;1m$environment_url\\e[0m)" else echo "$environment_url" > environment_url.txt fi echo -e "environment_type=$environment_type\\nenvironment_name=$environment_name\\nenvironment_url=$environment_url" >> docker-compose.out.env } # environment cleanup function function compose_down() { log_info "--- \\e[32mcleanup\\e[0m" log_info "--- \$environment_type: \\e[33;1m${environment_type}\\e[0m" log_info "--- \$environment_name: \\e[33;1m${environment_name}\\e[0m" log_info "--- \$environment_name_ssc: \\e[33;1m${environment_name_ssc}\\e[0m" # maybe execute pre compose-down script prescript="$DCMP_SCRIPTS_DIR/pre-compose-down.sh" if [[ -f "$prescript" ]]; then log_info "--- \\e[32mpre-compose-down\\e[0m hook (\\e[33;1m${prescript}\\e[0m) found: execute" exec_hook "$prescript" else log_info "--- \\e[32mpre-compose-down\\e[0m hook (\\e[33;1m${prescript}\\e[0m) not found: skip" fi if [[ "$DCMP_CMD" == "docker stack" ]]; then $DCMP_CMD rm --detach=false "$environment_name" else # down # shellcheck disable=SC2086 $DCMP_CMD down $DCMP_DOWN_OPTS fi # maybe execute post compose-down script postscript="$DCMP_SCRIPTS_DIR/post-compose-down.sh" if [[ -f "$postscript" ]]; then log_info "--- \\e[32mpost-compose-down\\e[0m hook (\\e[33;1m${postscript}\\e[0m) found: execute" exec_hook "$postscript" else log_info "--- \\e[32mpost-compose-down\\e[0m hook (\\e[33;1m${postscript}\\e[0m) not found: skip" fi } unscope_variables eval_all_secrets # ENDSCRIPT # job prototype # defines default Docker image, services, cache policy and init scripts # Required vars for login: # @var ENV_TYPE : environment type # @var ENV_APP_NAME : env-specific application name # @var ENV_APP_SUFFIX: env-specific application suffix # @var DOCKER_HOST : env-specific DOCKER_HOST .compose-base: image: $DCMP_IMAGE services: - name: "$TBC_TRACKING_IMAGE" command: ["--service", "docker-compose", "1.0.2"] before_script: - !reference [.compose-scripts] - install_ca_certs "${CUSTOM_CA_CERTS:-$DEFAULT_CA_CERTS}" - compose_init # base job for online compose operations .compose-online: extends: .compose-base before_script: - !reference [.compose-base, before_script] - configure_network - configure_registries_auth # Deploy job prototype # Can be extended to define a concrete environment # @var ENV_URL : env-specific application url .compose-deploy: extends: .compose-online stage: deploy variables: ENV_APP_SUFFIX: "-$CI_ENVIRONMENT_SLUG" script: - compose_up artifacts: name: "$ENV_TYPE env url for $CI_PROJECT_NAME on $CI_COMMIT_REF_SLUG" # propagate deployed env url in a environment_url.txt file paths: - environment_url.txt reports: # propagate deployed env info in a dotenv artifact dotenv: docker-compose.out.env environment: url: "$environment_url" # can be either static or dynamic # Cleanup job prototype # Can be extended for each deletable environment .compose-cleanup: extends: .compose-online stage: deploy # force no dependencies dependencies: [] variables: ENV_APP_SUFFIX: "-$CI_ENVIRONMENT_SLUG" script: - compose_down environment: action: stop # run compose config job as parallel matrix compose-config: extends: .compose-base stage: package-test script: - | if [[ "$DCMP_CMD" == "docker stack" ]]; then if [[ "$DCMP_STACK_CONFIG_SILENT" == "true" ]]; then $DCMP_CMD config -c ${COMPOSE_FILE//:/ -c } $DCMP_STACK_CONFIG_OPTS > /dev/null else $DCMP_CMD config -c ${COMPOSE_FILE//:/ -c } $DCMP_STACK_CONFIG_OPTS fi else $DCMP_CMD config $DCMP_CONFIG_OPTS fi parallel: matrix: - ENV_TYPE: review ENV_APP_SUFFIX: "-review-slug" ENV_APP_NAME: "$DCMP_REVIEW_APP_NAME" - ENV_TYPE: integration ENV_APP_SUFFIX: "-integration" ENV_APP_NAME: "$DCMP_INTEG_APP_NAME" - ENV_TYPE: staging ENV_APP_SUFFIX: "-staging" ENV_APP_NAME: "$DCMP_STAGINGAPP_NAME" - ENV_TYPE: production ENV_APP_NAME: "$DCMP_PROD_APP_NAME" rules: # exclude tags - if: $CI_COMMIT_TAG when: never # exclude when $DCMP_CONFIG_DISABLED is set - if: '$DCMP_CONFIG_DISABLED == "true"' when: never # review: skip if $DCMP_REVIEW_DOCKER_HOST unset or integration branch or prod branch - if: '$ENV_TYPE == "review" && ($DCMP_REVIEW_DOCKER_HOST == null || $DCMP_REVIEW_DOCKER_HOST == "" || $CI_COMMIT_REF_NAME =~ $INTEG_REF || $CI_COMMIT_REF_NAME =~ $PROD_REF)' when: never # integration: skip if $DCMP_INTEG_DOCKER_HOST unset or prod branch - if: '$ENV_TYPE == "integration" && ($DCMP_INTEG_DOCKER_HOST == null || $DCMP_INTEG_DOCKER_HOST == "" || $CI_COMMIT_REF_NAME =~ $PROD_REF)' when: never # staging: skip if $DCMP_STAGING_DOCKER_HOST unset - if: '$ENV_TYPE == "staging" && ($DCMP_STAGING_DOCKER_HOST == null || $DCMP_STAGING_DOCKER_HOST == "")' when: never # production: skip if $DCMP_PROD_DOCKER_HOST unset - if: '$ENV_TYPE == "production" && ($DCMP_PROD_DOCKER_HOST == null || $DCMP_PROD_DOCKER_HOST == "")' when: never # test policy rules must come last - !reference [.test-policy, rules] # deploy to review env (only on feature branches) # disabled by default, enable this job by setting $DCMP_REVIEW_DOCKER_HOST. compose-review: extends: .compose-deploy variables: DOCKER_HOST: "$DCMP_REVIEW_DOCKER_HOST" ENV_SSH_PRIVATE_KEY: "$DCMP_REVIEW_SSH_PRIVATE_KEY" ENV_TYPE: review ENV_APP_NAME: "$DCMP_REVIEW_APP_NAME" ENV_URL: "$DCMP_REVIEW_ENVIRONMENT_URL" environment: name: review/$CI_COMMIT_REF_NAME on_stop: compose-cleanup-review auto_stop_in: "$DCMP_REVIEW_AUTOSTOP_DURATION" resource_group: review/$CI_COMMIT_REF_NAME rules: # exclude tags - if: $CI_COMMIT_TAG when: never # exclude if $DCMP_REVIEW_DOCKER_HOST not set - if: '$DCMP_REVIEW_DOCKER_HOST == null || $DCMP_REVIEW_DOCKER_HOST == ""' when: never # only on non-production, non-integration branches - if: '$CI_COMMIT_REF_NAME !~ $PROD_REF && $CI_COMMIT_REF_NAME !~ $INTEG_REF' # cleanup review env (automatically triggered once branches are deleted) compose-cleanup-review: extends: .compose-cleanup variables: DOCKER_HOST: "$DCMP_REVIEW_DOCKER_HOST" ENV_SSH_PRIVATE_KEY: "$DCMP_REVIEW_SSH_PRIVATE_KEY" ENV_TYPE: review ENV_APP_NAME: "$DCMP_REVIEW_APP_NAME" environment: name: review/$CI_COMMIT_REF_NAME action: stop # TODO: use resource group resource_group: review/$CI_COMMIT_REF_NAME rules: # exclude tags - if: $CI_COMMIT_TAG when: never # exclude if $DCMP_REVIEW_DOCKER_HOST not set - if: '$DCMP_REVIEW_DOCKER_HOST == null || $DCMP_REVIEW_DOCKER_HOST == ""' when: never # only on non-production, non-integration branches - if: '$CI_COMMIT_REF_NAME !~ $PROD_REF && $CI_COMMIT_REF_NAME !~ $INTEG_REF' when: manual allow_failure: true # deploy to `integration` env (only on develop branch) compose-integration: extends: .compose-deploy variables: DOCKER_HOST: "$DCMP_INTEG_DOCKER_HOST" ENV_SSH_PRIVATE_KEY: "$DCMP_INTEG_SSH_PRIVATE_KEY" ENV_TYPE: integration ENV_APP_NAME: "$DCMP_INTEG_APP_NAME" ENV_URL: "$DCMP_INTEG_ENVIRONMENT_URL" environment: name: integration # TODO: use resource group resource_group: integration rules: # exclude if $DCMP_INTEG_DOCKER_HOST not set - if: '$DCMP_INTEG_DOCKER_HOST == null || $DCMP_INTEG_DOCKER_HOST == ""' when: never # only on integration branch(es) - if: '$CI_COMMIT_REF_NAME =~ $INTEG_REF' # deploy to `staging` env (only on master branch) compose-staging: extends: .compose-deploy variables: DOCKER_HOST: "$DCMP_STAGING_DOCKER_HOST" ENV_SSH_PRIVATE_KEY: "$DCMP_STAGING_SSH_PRIVATE_KEY" ENV_TYPE: staging ENV_APP_NAME: "$DCMP_STAGING_APP_NAME" ENV_URL: "$DCMP_STAGING_ENVIRONMENT_URL" environment: name: staging # TODO: use resource group resource_group: staging rules: # exclude if $DCMP_STAGING_DOCKER_HOST not set - if: '$DCMP_STAGING_DOCKER_HOST == null || $DCMP_STAGING_DOCKER_HOST == ""' when: never # only on production branch(es) - if: '$CI_COMMIT_REF_NAME =~ $PROD_REF' # Deploy to production if on branch master and variable DCMP_PROD_DOCKER_HOST defined and AUTODEPLOY_TO_PROD is set compose-production: extends: .compose-deploy stage: production variables: DOCKER_HOST: "$DCMP_PROD_DOCKER_HOST" ENV_SSH_PRIVATE_KEY: "$DCMP_PROD_SSH_PRIVATE_KEY" ENV_TYPE: production ENV_APP_SUFFIX: "" # no suffix for prod ENV_APP_NAME: "$DCMP_PROD_APP_NAME" ENV_URL: "$DCMP_PROD_ENVIRONMENT_URL" environment: name: production # TODO: use resource group resource_group: production rules: # exclude if $DCMP_PROD_DOCKER_HOST not set - if: '$DCMP_PROD_DOCKER_HOST == null || $DCMP_PROD_DOCKER_HOST == ""' when: never # exclude non-production branch(es) - if: '$CI_COMMIT_REF_NAME !~ $PROD_REF' when: never - if: '$DCMP_PROD_DEPLOY_STRATEGY == "manual"' when: manual - if: '$DCMP_PROD_DEPLOY_STRATEGY == "auto"'