Skip to content

Commit

Permalink
add elasticsearch repository snapshot method on s3
Browse files Browse the repository at this point in the history
  • Loading branch information
rhanka committed Dec 26, 2023
1 parent 237838a commit 981a33b
Show file tree
Hide file tree
Showing 5 changed files with 131 additions and 35 deletions.
15 changes: 9 additions & 6 deletions .github/workflows/full.yml
Original file line number Diff line number Diff line change
Expand Up @@ -73,18 +73,20 @@ jobs:
- name: Run
if: success()
run: |
make clean remote-all GIT_BRANCH="$GIT_BRANCH" \
CHUNK_SIZE=$CHUNK_SIZE ES_THREADS=$ES_THREADS RECIPE_THREADS=$RECIPE_THREADS ES_MEM=$ES_MEM RECIPE_QUEUE=$RECIPE_QUEUE SCW_FLAVOR=$SCW_FLAVOR SCW_VOLUME_SIZE=$SCW_VOLUME_SIZE SCW_VOLUME_TYPE=$SCW_VOLUME_TYPE;\
make clean remote-all \
GIT_BRANCH="$GIT_BRANCH" \
SCW_FLAVOR=${SCW_FLAVOR} SCW_VOLUME_SIZE=${SCW_VOLUME_SIZE} SCW_VOLUME_TYPE=${SCW_VOLUME_TYPE} \
CHUNK_SIZE=${CHUNK_SIZE} ES_THREADS=${ES_THREADS} RECIPE_THREADS=${RECIPE_THREADS} ES_MEM=${ES_MEM} RECIPE_QUEUE=${RECIPE_QUEUE} \
SLACK_TITLE="deces-dataprep - full" SLACK_WEBHOOK="$SLACK_WEBHOOK";
env:
VERBOSE: True
SCW_FLAVOR: PRO2-L
SCW_VOLUME_SIZE: 50000000000
SCW_VOLUME_TYPE: b_ssd
CHUNK_SIZE: 30000
ES_THREADS: 24
RECIPE_THREADS: 24
ES_MEM: 32000m
CHUNK_SIZE: 50000
ES_THREADS: 28
RECIPE_THREADS: 30
ES_MEM: 48000m
RECIPE_QUEUE: 32
GIT_BRANCH: ${{ steps.extract_branch.outputs.branch }}
remote_http_proxy: ${{ secrets.remote_http_proxy }}
Expand All @@ -98,3 +100,4 @@ jobs:
STORAGE_SECRET_KEY: ${{ secrets.STORAGE_SECRET_KEY }}
SSH_AUTH_SOCK: /tmp/ssh_agent.sock
SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK }}
SLACK_TITLE: deces-dataprep - full
11 changes: 10 additions & 1 deletion .github/workflows/pr.yml
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,16 @@ jobs:
# uses: mxschmitt/action-tmate@v2
- name: 🏃 Run small dataset
#if: steps.extract_branch.outputs.branch == 'master' || steps.extract_branch.outputs.branch == 'dev'
run: make all-step0 recipe-run watch-run FILES_TO_PROCESS=deces-2020-m01.txt.gz STORAGE_ACCESS_KEY=$STORAGE_ACCESS_KEY STORAGE_SECRET_KEY=$STORAGE_SECRET_KEY CHUNK_SIZE=5000 ES_THREADS=2 RECIPE_THREADS=2 ES_MEM=4000m
run: |
make all FILES_TO_PROCESS=${FILES_TO_PROCESS} \
REPOSITORY_BUCKET=${REPOSITORY_BUCKET} \
CHUNK_SIZE=${CHUNK_SIZE} ES_THREADS=${ES_THREADS} RECIPE_THREADS=${RECIPE_THREADS} ES_MEM=${ES_MEM}
env:
FILES_TO_PROCESS: deces-2020-m01.txt.gz
REPOSITORY_BUCKET: fichier-des-personnes-decedees-elasticsearch-dev
CHUNK_SIZE: 5000
ES_THREADS: 2
RECIPE_THREADS: 2
ES_MEM: 4000m
STORAGE_ACCESS_KEY: ${{ secrets.STORAGE_ACCESS_KEY }}
STORAGE_SECRET_KEY: ${{ secrets.STORAGE_SECRET_KEY }}
15 changes: 13 additions & 2 deletions .github/workflows/push-dev.yml
Original file line number Diff line number Diff line change
Expand Up @@ -40,10 +40,21 @@ jobs:
- name: Run
if: success()
run: |
make clean remote-all GIT_BRANCH="$GIT_BRANCH" FILES_TO_PROCESS=deces-2020-m[0-1][0-9].txt.gz \
CHUNK_SIZE=50000 ES_THREADS=10 RECIPE_THREADS=16 ES_MEM=24000m RECIPE_QUEUE=16 SCW_FLAVOR=GP1-M;
make clean remote-all \
GIT_BRANCH=${GIT_BRANCH} \
FILES_TO_PROCESS=${FILES_TO_PROCESS} \
REPOSITORY_BUCKET=${REPOSITORY_BUCKET} \
CHUNK_SIZE=${CHUNK_SIZE} ES_THREADS=${ES_THREADS} RECIPE_THREADS=${RECIPE_THREADS} ES_MEM=${ES_MEM} RECIPE_QUEUE=${RECIPE_QUEUE}
env:
VERBOSE: True
FILES_TO_PROCESS: deces-2020-m[0-1][0-9].txt.gz
REPOSITORY_BUCKET: fichier-des-personnes-decedees-elasticsearch-dev
CHUNK_SIZE: 50000
ES_THREADS: 10
RECIPE_THREADS: 16
ES_MEM: 24000m
RECIPE_QUEUE: 16
SCW_FLAVOR: GP1-M
GIT_BRANCH: ${{ steps.extract_branch.outputs.branch }}
remote_http_proxy: ${{ secrets.remote_http_proxy }}
remote_https_proxy: ${{ secrets.remote_https_proxy }}
Expand Down
29 changes: 25 additions & 4 deletions .github/workflows/small.yml
Original file line number Diff line number Diff line change
Expand Up @@ -21,10 +21,31 @@ jobs:
id: extract_branch
# - name: Setup tmate session
# uses: mxschmitt/action-tmate@v2
- name: 🏃 Run small dataset
run:
make clean config all recipe-run watch-run FILES_TO_PROCESS=deces-2020-m01.txt.gz STORAGE_ACCESS_KEY=$STORAGE_ACCESS_KEY STORAGE_SECRET_KEY=$STORAGE_SECRET_KEY CHUNK_SIZE=5000 ES_THREADS=2 RECIPE_THREADS=2 ES_MEM=4000m;
make clean config all recipe-run watch-run FILES_TO_PROCESS=deaths.txt.gz STORAGE_ACCESS_KEY=$STORAGE_ACCESS_KEY STORAGE_SECRET_KEY=$STORAGE_SECRET_KEY CHUNK_SIZE=5000 ES_THREADS=2 RECIPE_THREADS=2 ES_MEM=4000m;
- name: 🏃 Run small dataset 1
run: |
make clean all FILES_TO_PROCESS=${FILES_TO_PROCESS} \
REPOSITORY_BUCKET=${REPOSITORY_BUCKET} \
CHUNK_SIZE=${CHUNK_SIZE} ES_THREADS=${ES_THREADS} RECIPE_THREADS=${RECIPE_THREADS} ES_MEM=${ES_MEM}
env:
FILES_TO_PROCESS: deces-2020-m01.txt.gz
REPOSITORY_BUCKET: fichier-des-personnes-decedees-elasticsearch-dev
CHUNK_SIZE: 5000
ES_THREADS: 2
RECIPE_THREADS: 2
ES_MEM: 4000m
STORAGE_ACCESS_KEY: ${{ secrets.STORAGE_ACCESS_KEY }}
STORAGE_SECRET_KEY: ${{ secrets.STORAGE_SECRET_KEY }}
- name: 🏃 Run small dataset 2
run: |
make clean all FILES_TO_PROCESS=${FILES_TO_PROCESS} \
REPOSITORY_BUCKET=${REPOSITORY_BUCKET} \
CHUNK_SIZE=${CHUNK_SIZE} ES_THREADS=${ES_THREADS} RECIPE_THREADS=${RECIPE_THREADS} ES_MEM=${ES_MEM}
env:
FILES_TO_PROCESS: deaths.txt.gz
REPOSITORY_BUCKET: fichier-des-personnes-decedees-elasticsearch-dev
CHUNK_SIZE: 5000
ES_THREADS: 2
RECIPE_THREADS: 2
ES_MEM: 4000m
STORAGE_ACCESS_KEY: ${{ secrets.STORAGE_ACCESS_KEY }}
STORAGE_SECRET_KEY: ${{ secrets.STORAGE_SECRET_KEY }}
96 changes: 74 additions & 22 deletions Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@ export GIT_BACKEND = backend
export GIT_TOOLS = tools
export MAKEBIN = $(shell which make)
export MAKE = ${MAKEBIN} --no-print-directory -s
export ES_INDEX=deces
export ES_NODES=1
export ES_MEM=1024m
export ES_VERSION = 8.6.1
Expand All @@ -26,10 +27,13 @@ export DATAGOUV_API = https://www.data.gouv.fr/api/1/datasets
export DATAGOUV_DATASET = fichier-des-personnes-decedees
export DATAGOUV_CONNECTOR = s3
export STORAGE_BUCKET=${DATAGOUV_DATASET}
export REPOSITORY_BUCKET=${DATAGOUV_DATASET}-elasticsearch
export DATA_DIR=${PWD}/data
export BACKUP_DIR = ${PWD}/${GIT_BACKEND}/backup
export DATA_TAG=${PWD}/data-tag
export BACKUP_CHECK=${PWD}/backup-check
export BACKUP_METHOD=repository
export BACKUP_CHECK=${PWD}/${BACKUP_METHOD}-check
export REPOSITORY_CHECK=${PWD}/${BACKUP_METHOD}-check
# files to sync:
export FILES_TO_SYNC=fichier-opposition-deces.csv(.gz)?|deces-.*.txt(.gz)?
export FILES_TO_SYNC_FORCE=fichier-opposition-deces.csv(.gz)?
Expand Down Expand Up @@ -82,19 +86,39 @@ ${DATA_TAG}: config

data-tag: ${DATA_TAG}

dataprep-version:
@echo ${DATAPREP_VERSION}

${BACKUP_CHECK}: data-tag
@${MAKE} -s -C ${APP_PATH}/${GIT_BACKEND}/${GIT_TOOLS} get-catalog CATALOG=${BACKUP_CHECK}\
${MAKE} -s -C ${APP_PATH}/${GIT_BACKEND}/${GIT_TOOLS} get-catalog CATALOG=${BACKUP_CHECK}\
DATAGOUV_DATASET=${DATAGOUV_DATASET} STORAGE_BUCKET=${STORAGE_BUCKET}\
FILES_PATTERN=esdata_${DATAPREP_VERSION}_$$(cat ${PWD}/data-tag).tar &&\
if [ -s ${BACKUP_CHECK} ]; then\
echo backup already exist on remote storage;\
if [ -s "${BACKUP_CHECK}" ]; then\
echo classic backup already exists on remote storage;\
else\
echo no previous backup found;\
fi
rm -f "${BACKUP_CHECK}";\
echo no previous classic backup found;\
fi;\

backup-check: ${BACKUP_CHECK}

check-s3: backup-check
repository-config: config
@${MAKE} -C ${APP_PATH}/${GIT_BACKEND} elasticsearch-repository-config\
REPOSITORY_BUCKET=${REPOSITORY_BUCKET} STORAGE_ACCESS_KEY=${STORAGE_ACCESS_KEY} STORAGE_SECRET_KEY=${STORAGE_SECRET_KEY}\
${MAKEOVERRIDES} && touch repository-config

${REPOSITORY_CHECK}: repository-config data-tag
@ES_BACKUP_NAME=esdata_${DATAPREP_VERSION}_$$(cat ${DATA_TAG});\
${MAKE} -C ${APP_PATH}/${GIT_BACKEND} elasticsearch-repository-check\
REPOSITORY_BUCKET=${REPOSITORY_BUCKET} STORAGE_ACCESS_KEY=${STORAGE_ACCESS_KEY} STORAGE_SECRET_KEY=${STORAGE_SECRET_KEY}\
${MAKEOVERRIDES} ES_BACKUP_NAME=$${ES_BACKUP_NAME} \
| egrep -q "^snapshot found"\
&& echo "snapshot found for $${ES_BACKUP_NAME} in elasticsearch repository" && (echo "$${ES_BACKUP_NAME}" > "${REPOSITORY_CHECK}") \
|| (echo "no snapshot found for $${ES_BACKUP_NAME} in elasticsearch repository")

repository-check: ${REPOSITORY_CHECK}

check-s3: ${BACKUP_METHOD}-check
touch check-s3

check-upload:
Expand Down Expand Up @@ -134,10 +158,10 @@ up:
recipe-run: data-tag
@if [ ! -f recipe-run ];then\
${MAKE} -C ${APP_PATH}/${GIT_BACKEND} elasticsearch ES_NODES=${ES_NODES} ES_MEM=${ES_MEM} ${MAKEOVERRIDES};\
echo running recipe on data $$(cat ${DATA_TAG}), dataprep ${DATAPREP_VERSION};\
echo running recipe on data FILES_TO_PROCESS="${FILES_TO_PROCESS}" $$(cat ${DATA_TAG}), dataprep ${DATAPREP_VERSION};\
${MAKE} -C ${APP_PATH}/${GIT_BACKEND} version;\
${MAKE} -C ${APP_PATH}/${GIT_BACKEND} recipe-run \
RECIPE=${RECIPE} RECIPE_THREADS=${RECIPE_THREADS} RECIPE_QUEUE=${RECIPE_QUEUE}\
CHUNK_SIZE=${CHUNK_SIZE} RECIPE=${RECIPE} RECIPE_THREADS=${RECIPE_THREADS} RECIPE_QUEUE=${RECIPE_QUEUE} \
ES_PRELOAD='${ES_PRELOAD}' ES_THREADS=${ES_THREADS} \
STORAGE_BUCKET=${STORAGE_BUCKET} STORAGE_ACCESS_KEY=${STORAGE_ACCESS_KEY} STORAGE_SECRET_KEY=${STORAGE_SECRET_KEY}\
${MAKEOVERRIDES} \
Expand All @@ -148,9 +172,9 @@ recipe-run: data-tag
fi

full-check: datagouv-to-${DATAGOUV_CONNECTOR} check-${DATAGOUV_CONNECTOR}
@if [ -s backup-check ]; then\
@if [ -s ${BACKUP_METHOD}-check -a -z "${NO_CHECK}" ]; then\
echo recipe has already been runned on full and saved on remote storage;\
touch recipe-run watch-run backup backup-push no-remote;\
touch recipe-run watch-run backup backup-push repository-push no-remote;\
fi

full: full-check recipe-run
Expand All @@ -176,12 +200,22 @@ watch-run:
((egrep -i 'end : run|Ooops' $$LOG_FILE | tail -5) && exit 1) || \
egrep 'end : run.*successfully' $$LOG_FILE

elasticsearch-restore: backup-pull
backup-restore: backup-pull
@if [ ! -f "elasticsearch-restore" ];then\
${MAKE} -C ${APP_PATH}/${GIT_BACKEND} elasticsearch-restore ES_BACKUP_FILE=esdata_${DATAPREP_VERSION}_$$(cat ${DATA_TAG}).tar \
&& (echo esdata_${DATAPREP_VERSION}_$$(cat ${DATA_TAG}).tar > elasticsearch-restore);\
fi

repository-restore: repository-check
@if [ ! -f "elasticsearch-restore" ];then\
${MAKE} -C ${APP_PATH}/${GIT_BACKEND} elasticsearch-repository-restore\
REPOSITORY_BUCKET=${REPOSITORY_BUCKET} STORAGE_ACCESS_KEY=${STORAGE_ACCESS_KEY} STORAGE_SECRET_KEY=${STORAGE_SECRET_KEY}\
ES_INDEX=${ES_INDEX} ES_BACKUP_NAME=esdata_${DATAPREP_VERSION}_$$(cat ${DATA_TAG})\
&& (echo esdata_${DATAPREP_VERSION}_$$(cat ${DATA_TAG}) > elasticsearch-restore);\
fi

elasticsearch-restore: ${BACKUP_METHOD}-restore

backup-dir:
mkdir -p ${BACKUP_DIR}

Expand Down Expand Up @@ -209,6 +243,23 @@ backup-push: data-tag backup
echo pushed $$SIZE to storage ${DATAGOUV_DATASET};\
fi

repository-push: data-tag
@if [ ! -f repository-push ];then\
${MAKE} -C ${APP_PATH}/${GIT_BACKEND} elasticsearch-repository-backup\
REPOSITORY_BUCKET=${REPOSITORY_BUCKET} STORAGE_ACCESS_KEY=${STORAGE_ACCESS_KEY} STORAGE_SECRET_KEY=${STORAGE_SECRET_KEY}\
ES_INDEX=${ES_INDEX} ES_BACKUP_NAME=esdata_${DATAPREP_VERSION}_$$(cat ${DATA_TAG}) && touch repository-push;\
fi

repository-backup-tmp: data-tag
@${MAKE} -C ${APP_PATH}/${GIT_BACKEND} elasticsearch-repository-backup-async\
REPOSITORY_BUCKET=${REPOSITORY_BUCKET} STORAGE_ACCESS_KEY=${STORAGE_ACCESS_KEY} STORAGE_SECRET_KEY=${STORAGE_SECRET_KEY}\
ES_INDEX=${ES_INDEX} ES_BACKUP_NAME=esdata_tmp_${DATAPREP_VERSION}_$$(cat ${DATA_TAG})

repository-cleanse:
@(${MAKE} -C ${APP_PATH}/${GIT_BACKEND} elasticsearch-repository-delete\
REPOSITORY_BUCKET=${REPOSITORY_BUCKET} STORAGE_ACCESS_KEY=${STORAGE_ACCESS_KEY} STORAGE_SECRET_KEY=${STORAGE_SECRET_KEY}\
ES_INDEX=${ES_INDEX} ES_BACKUP_NAME=esdata_tmp_${DATAPREP_VERSION}_$$(cat ${DATA_TAG}) > /dev/null 2>&1) || exit 0

down:
@if [ -f config ]; then\
(${MAKE} -C ${APP_PATH}/${GIT_BACKEND} backend-stop elasticsearch-stop frontend-stop || true);\
Expand All @@ -217,7 +268,7 @@ down:
clean: down
@sudo rm -rf ${GIT_BACKEND} frontend ${DATA_DIR} data-tag config \
recipe-run backup-check datagouv-to-* check-* elasticsearch-restore watch-run full\
backup backup-pull backup-push no-remote
backup backup-pull backup-push repository-push repository-config repository-check no-remote

# launch all locally
# configure
Expand All @@ -227,7 +278,7 @@ all-step0: ${GIT_BACKEND} config
all-step1: full

# second step is backup
all-step2: backup-push
all-step2: ${BACKUP_METHOD}-push

all: all-step0 all-step1 watch-run all-step2
	@echo ended with success !!!
Expand All @@ -237,35 +288,36 @@ all: all-step0 all-step1 watch-run all-step2
remote-config: config data-tag
@${MAKE} -C ${APP_PATH}/${GIT_BACKEND}/${GIT_TOOLS} remote-config\
APP=${APP} APP_VERSION=${DATAPREP_VERSION} CLOUD_TAG=data:$$(cat ${DATA_TAG})-prep:${DATAPREP_VERSION}\
STORAGE_BUCKET=${STORAGE_BUCKET} STORAGE_ACCESS_KEY=${STORAGE_ACCESS_KEY} STORAGE_SECRET_KEY=${STORAGE_SECRET_KEY}\
REPOSITORY_BUCKET=${REPOSITORY_BUCKET} STORAGE_BUCKET=${STORAGE_BUCKET} STORAGE_ACCESS_KEY=${STORAGE_ACCESS_KEY} STORAGE_SECRET_KEY=${STORAGE_SECRET_KEY}\
SCW_FLAVOR=${SCW_FLAVOR} SCW_VOLUME_SIZE=${SCW_VOLUME_SIZE} SCW_VOLUME_TYPE=${SCW_VOLUME_TYPE} \
GIT_BRANCH=${GIT_BRANCH} ${MAKEOVERRIDES}

remote-deploy:
@${MAKE} -C ${APP_PATH}/${GIT_BACKEND}/${GIT_TOOLS} remote-deploy\
APP=${APP} APP_VERSION=${DATAPREP_VERSION} GIT_BRANCH=${GIT_BRANCH} \
STORAGE_BUCKET=${STORAGE_BUCKET} STORAGE_ACCESS_KEY=${STORAGE_ACCESS_KEY} STORAGE_SECRET_KEY=${STORAGE_SECRET_KEY}\
REPOSITORY_BUCKET=${REPOSITORY_BUCKET} STORAGE_BUCKET=${STORAGE_BUCKET} STORAGE_ACCESS_KEY=${STORAGE_ACCESS_KEY} STORAGE_SECRET_KEY=${STORAGE_SECRET_KEY}\
${MAKEOVERRIDES}

remote-step1:
@${MAKE} -C ${APP_PATH}/${GIT_BACKEND}/${GIT_TOOLS} remote-actions\
APP=${APP} APP_VERSION=${DATAPREP_VERSION} GIT_BRANCH=${GIT_BRANCH} \
ACTIONS="all-step1"\
STORAGE_BUCKET=${STORAGE_BUCKET} STORAGE_ACCESS_KEY=${STORAGE_ACCESS_KEY} STORAGE_SECRET_KEY=${STORAGE_SECRET_KEY}\
${MAKEOVERRIDES}
REPOSITORY_BUCKET=${REPOSITORY_BUCKET} STORAGE_BUCKET=${STORAGE_BUCKET} STORAGE_ACCESS_KEY=${STORAGE_ACCESS_KEY} STORAGE_SECRET_KEY=${STORAGE_SECRET_KEY}\
CHUNK_SIZE=${CHUNK_SIZE} ES_THREADS=${ES_THREADS} RECIPE_THREADS=${RECIPE_THREADS} ES_MEM=${ES_MEM} RECIPE_QUEUE=${RECIPE_QUEUE} \
BACKUP_METHOD=${BACKUP_METHOD} ${MAKEOVERRIDES}

remote-watch:
@${MAKE} -C ${APP_PATH}/${GIT_BACKEND}/${GIT_TOOLS} remote-actions\
APP=${APP} APP_VERSION=${DATAPREP_VERSION} GIT_BRANCH=${GIT_BRANCH} \
ACTIONS="watch-run"\
STORAGE_BUCKET=${STORAGE_BUCKET} STORAGE_ACCESS_KEY=${STORAGE_ACCESS_KEY} STORAGE_SECRET_KEY=${STORAGE_SECRET_KEY}\
${MAKEOVERRIDES}

remote-step2:
@${MAKE} -C ${APP_PATH}/${GIT_BACKEND}/${GIT_TOOLS} remote-actions\
APP=${APP} APP_VERSION=${DATAPREP_VERSION} GIT_BRANCH=${GIT_BRANCH} \
ACTIONS="all-step2"\
STORAGE_BUCKET=${STORAGE_BUCKET} STORAGE_ACCESS_KEY=${STORAGE_ACCESS_KEY} STORAGE_SECRET_KEY=${STORAGE_SECRET_KEY}\
${MAKEOVERRIDES}
REPOSITORY_BUCKET=${REPOSITORY_BUCKET} STORAGE_BUCKET=${STORAGE_BUCKET} STORAGE_ACCESS_KEY=${STORAGE_ACCESS_KEY} STORAGE_SECRET_KEY=${STORAGE_SECRET_KEY}\
BACKUP_METHOD=${BACKUP_METHOD} ${MAKEOVERRIDES}

remote-clean:
@${MAKE} -C ${APP_PATH}/${GIT_BACKEND}/${GIT_TOOLS} remote-clean\
Expand All @@ -288,7 +340,7 @@ update-base-image: remote-docker-pull-base
@\
APP_VERSION=$$(cd ${APP_PATH}/${GIT_BACKEND} && make version | awk '{print $$NF}');\
${MAKE} -C ${APP_PATH}/${GIT_BACKEND}/${GIT_TOOLS} remote-cmd REMOTE_CMD="sync"; \
${MAKE} -C ${APP_PATH}/${GIT_BACKEND}/${GIT_TOOLS} remote-cmd REMOTE_CMD="rm -rf ${APP_GROUP}"; \
${MAKE} -C ${APP_PATH}/${GIT_BACKEND}/${GIT_TOOLS} remote-cmd REMOTE_CMD="rm -rf ${APP}"; \
sleep 5;\
${MAKE} -C ${APP_PATH}/${GIT_BACKEND}/${GIT_TOOLS} SCW-instance-snapshot \
GIT_BRANCH=${GIT_BRANCH} APP=${APP} APP_VERSION=$${APP_VERSION} CLOUD_TAG=$${APP_VERSION}\
Expand Down

0 comments on commit 981a33b

Please sign in to comment.