diff --git a/.github/workflows/prepare-vs-benchmark.yml b/.github/workflows/prepare-vs-benchmark.yml index db4a7f7..4e99df3 100644 --- a/.github/workflows/prepare-vs-benchmark.yml +++ b/.github/workflows/prepare-vs-benchmark.yml @@ -6,7 +6,7 @@ on: types: - completed schedule: - - cron: '19 9 * * *' + - cron: '45 9 * * *' workflow_dispatch: inputs: cloud_provider: @@ -74,18 +74,18 @@ jobs: shell: bash run: | echo "current path is: $(pwd)" - sed -i "s/\${AUTOMQ_ENVID}/${STREAMING_PROVIDER}/g" "driver-nats/deploy/provision-nats-aws.tf" - sed -i "s/\${AUTOMQ_ENVID}/${STREAMING_PROVIDER}/g" "driver-pravega/deploy/provision-pravega-aws.tf" - sed -i "s/\${AUTOMQ_ENVID}/${STREAMING_PROVIDER}/g" "driver-${STREAMING_PROVIDER}/deploy/${CLOUD_PROVIDER}/provision-kafka-aws.tf" + sed -i "s/\${AUTOMQ_ENVID}/$STREAMING_PROVIDER/g" "driver-nats/deploy/provision-nats-aws.tf" + sed -i "s/\${AUTOMQ_ENVID}/$STREAMING_PROVIDER/g" "driver-pravega/deploy/provision-pravega-aws.tf" + sed -i "s/\${AUTOMQ_ENVID}/$STREAMING_PROVIDER/g" "driver-$STREAMING_PROVIDER/deploy/$CLOUD_PROVIDER/provision-kafka-aws.tf" - name: Apply Variables and Secrets for Streaming Provider - working-directory: driver-${STREAMING_PROVIDER}/deploy/${CLOUD_PROVIDER} + working-directory: driver-$STREAMING_PROVIDER/deploy/$CLOUD_PROVIDER ## Set AK/SK and terraform s3 backend info shell: bash run: | echo "current path is: $(pwd)" sed -i "s/\${TF_BACKEND_BUCKET}/$TF_BACKEND_BUCKET/g" "provision-kafka-aws.tf" - sed -i "s/\${TF_BACKEND_KEY}/$TF_BACKEND_KEY-${CLOUD_PROVIDER}-${STREAMING_PROVIDER}/g" "provision-kafka-aws.tf" + sed -i "s/\${TF_BACKEND_KEY}/$TF_BACKEND_KEY-$CLOUD_PROVIDER-$STREAMING_PROVIDER/g" "provision-kafka-aws.tf" sed -i "s/\${TF_BACKEND_REGION}/$REGION/g" "provision-kafka-aws.tf" sed -i "s/\${AUTOMQ_ACCESS_KEY}/${{ secrets.AUTOMQ_ACCESS_KEY }}/g" "var.tfvars" sed -i "s/\${AUTOMQ_SECRET_KEY}/${{ secrets.AUTOMQ_SECRET_KEY }}/g" "var.tfvars" @@ -105,10 +105,10 @@ jobs: shell: bash run: | mkdir -p ~/.ssh - 
echo "${{ secrets.SSH_PRIVATE_KEY }}" > ~/.ssh/${STREAMING_PROVIDER}_${CLOUD_PROVIDER} - echo "${{ secrets.SSH_PUBLIC_KEY }}" > ~/.ssh/${STREAMING_PROVIDER}_${CLOUD_PROVIDER}.pub - chmod 600 ~/.ssh/${STREAMING_PROVIDER}_${CLOUD_PROVIDER} - chmod 644 ~/.ssh/${STREAMING_PROVIDER}_${CLOUD_PROVIDER}.pub + echo "${{ secrets.SSH_PRIVATE_KEY }}" > ~/.ssh/${STREAMING_PROVIDER}_$CLOUD_PROVIDER + echo "${{ secrets.SSH_PUBLIC_KEY }}" > ~/.ssh/${STREAMING_PROVIDER}_$CLOUD_PROVIDER.pub + chmod 600 ~/.ssh/${STREAMING_PROVIDER}_$CLOUD_PROVIDER + chmod 644 ~/.ssh/${STREAMING_PROVIDER}_$CLOUD_PROVIDER.pub - name: Install python uses: actions/setup-python@v4 @@ -127,8 +127,8 @@ jobs: shell: bash run: | echo "[INFO] Provider is AutoMQ" - cd driver-${STREAMING_PROVIDER}/deploy/${CLOUD_PROVIDER} - infracost breakdown --path . --usage-file infracost/${CLOUD_PROVIDER}-medium-500m-6t.yml >> /tmp/aws-cost.txt + cd driver-$STREAMING_PROVIDER/deploy/$CLOUD_PROVIDER + infracost breakdown --path . --usage-file infracost/$CLOUD_PROVIDER-medium-500m-6t.yml >> /tmp/aws-cost.txt COST_DETAIL_FILE=/tmp/aws-cost.txt cat $COST_DETAIL_FILE @@ -193,18 +193,18 @@ jobs: shell: bash run: | echo "current path is: $(pwd)" - sed -i "s/\${AUTOMQ_ENVID}/${STREAMING_PROVIDER}/g" "driver-nats/deploy/provision-nats-aws.tf" - sed -i "s/\${AUTOMQ_ENVID}/${STREAMING_PROVIDER}/g" "driver-pravega/deploy/provision-pravega-aws.tf" - sed -i "s/\${AUTOMQ_ENVID}/${STREAMING_PROVIDER}/g" "driver-${STREAMING_PROVIDER}/deploy/${CLOUD_PROVIDER}/provision-kafka-aws.tf" + sed -i "s/\${AUTOMQ_ENVID}/$STREAMING_PROVIDER/g" "driver-nats/deploy/provision-nats-aws.tf" + sed -i "s/\${AUTOMQ_ENVID}/$STREAMING_PROVIDER/g" "driver-pravega/deploy/provision-pravega-aws.tf" + sed -i "s/\${AUTOMQ_ENVID}/$STREAMING_PROVIDER/g" "driver-$STREAMING_PROVIDER/deploy/$CLOUD_PROVIDER/provision-kafka-aws.tf" - name: Apply Variables and Secrets for Streaming Provider - working-directory: driver-${STREAMING_PROVIDER}/deploy/${CLOUD_PROVIDER} +
working-directory: driver-$STREAMING_PROVIDER/deploy/$CLOUD_PROVIDER ## Set AK/SK and terraform s3 backend info shell: bash run: | echo "current path is: $(pwd)" sed -i "s/\${TF_BACKEND_BUCKET}/$TF_BACKEND_BUCKET/g" "provision-kafka-aws.tf" - sed -i "s/\${TF_BACKEND_KEY}/$TF_BACKEND_KEY-${CLOUD_PROVIDER}-${STREAMING_PROVIDER}/g" "provision-kafka-aws.tf" + sed -i "s/\${TF_BACKEND_KEY}/$TF_BACKEND_KEY-$CLOUD_PROVIDER-$STREAMING_PROVIDER/g" "provision-kafka-aws.tf" sed -i "s/\${TF_BACKEND_REGION}/$REGION/g" "provision-kafka-aws.tf" sed -i "s/\${AUTOMQ_ACCESS_KEY}/${{ secrets.AUTOMQ_ACCESS_KEY }}/g" "var.tfvars" sed -i "s/\${AUTOMQ_SECRET_KEY}/${{ secrets.AUTOMQ_SECRET_KEY }}/g" "var.tfvars" @@ -225,10 +225,10 @@ jobs: shell: bash run: | mkdir -p ~/.ssh - echo "${{ secrets.SSH_PRIVATE_KEY }}" > ~/.ssh/${STREAMING_PROVIDER}_${CLOUD_PROVIDER} - echo "${{ secrets.SSH_PUBLIC_KEY }}" > ~/.ssh/${STREAMING_PROVIDER}_${CLOUD_PROVIDER}.pub - chmod 600 ~/.ssh/${STREAMING_PROVIDER}_${CLOUD_PROVIDER} - chmod 644 ~/.ssh/${STREAMING_PROVIDER}_${CLOUD_PROVIDER}.pub + echo "${{ secrets.SSH_PRIVATE_KEY }}" > ~/.ssh/${STREAMING_PROVIDER}_$CLOUD_PROVIDER + echo "${{ secrets.SSH_PUBLIC_KEY }}" > ~/.ssh/${STREAMING_PROVIDER}_$CLOUD_PROVIDER.pub + chmod 600 ~/.ssh/${STREAMING_PROVIDER}_$CLOUD_PROVIDER + chmod 644 ~/.ssh/${STREAMING_PROVIDER}_$CLOUD_PROVIDER.pub - name: Install python uses: actions/setup-python@v4 @@ -247,8 +247,8 @@ jobs: shell: bash run: | echo "[INFO] Provider is AutoMQ" - cd driver-${STREAMING_PROVIDER}/deploy/${CLOUD_PROVIDER} - infracost breakdown --path .
--usage-file infracost/$CLOUD_PROVIDER-medium-500m-6t.yml >> /tmp/aws-cost.txt COST_DETAIL_FILE=/tmp/aws-cost.txt cat $COST_DETAIL_FILE diff --git a/.github/workflows/prepare-vs-install.yml b/.github/workflows/prepare-vs-install.yml index 159fdd1..be5a5c5 100644 --- a/.github/workflows/prepare-vs-install.yml +++ b/.github/workflows/prepare-vs-install.yml @@ -2,7 +2,7 @@ name: Prepare Streaming Cluster [Install] on: schedule: - - cron: '19 9 * * *' + - cron: '45 9 * * *' workflow_dispatch: inputs: cloud_provider: @@ -69,18 +69,18 @@ jobs: shell: bash run: | echo "current path is: $(pwd)" - sed -i "s/\${AUTOMQ_ENVID}/${STREAMING_PROVIDER}/g" "driver-nats/deploy/provision-nats-aws.tf" - sed -i "s/\${AUTOMQ_ENVID}/${STREAMING_PROVIDER}/g" "driver-pravega/deploy/provision-pravega-aws.tf" - sed -i "s/\${AUTOMQ_ENVID}/${STREAMING_PROVIDER}/g" "driver-${STREAMING_PROVIDER}/deploy/${CLOUD_PROVIDER}/provision-kafka-aws.tf" + sed -i "s/\${AUTOMQ_ENVID}/$STREAMING_PROVIDER/g" "driver-nats/deploy/provision-nats-aws.tf" + sed -i "s/\${AUTOMQ_ENVID}/$STREAMING_PROVIDER/g" "driver-pravega/deploy/provision-pravega-aws.tf" + sed -i "s/\${AUTOMQ_ENVID}/$STREAMING_PROVIDER/g" "driver-$STREAMING_PROVIDER/deploy/$CLOUD_PROVIDER/provision-kafka-aws.tf" - name: Apply Variables and Secrets for Streaming Provider - working-directory: driver-${STREAMING_PROVIDER}/deploy/${CLOUD_PROVIDER} + working-directory: driver-$STREAMING_PROVIDER/deploy/$CLOUD_PROVIDER ## Set AK/SK and terraform s3 backend info shell: bash run: | echo "current path is: $(pwd)" sed -i "s/\${TF_BACKEND_BUCKET}/$TF_BACKEND_BUCKET/g" "provision-kafka-aws.tf" - sed -i "s/\${TF_BACKEND_KEY}/$TF_BACKEND_KEY-${CLOUD_PROVIDER}-${STREAMING_PROVIDER}/g" "provision-kafka-aws.tf" + sed -i "s/\${TF_BACKEND_KEY}/$TF_BACKEND_KEY-$CLOUD_PROVIDER-$STREAMING_PROVIDER/g" "provision-kafka-aws.tf" sed -i "s/\${TF_BACKEND_REGION}/$REGION/g" "provision-kafka-aws.tf" sed -i "s/\${AUTOMQ_ACCESS_KEY}/${{ secrets.AUTOMQ_ACCESS_KEY }}/g" 
"var.tfvars" sed -i "s/\${AUTOMQ_SECRET_KEY}/${{ secrets.AUTOMQ_SECRET_KEY }}/g" "var.tfvars" @@ -100,10 +100,10 @@ jobs: shell: bash run: | mkdir -p ~/.ssh - echo "${{ secrets.SSH_PRIVATE_KEY }}" > ~/.ssh/${STREAMING_PROVIDER}_${CLOUD_PROVIDER} - echo "${{ secrets.SSH_PUBLIC_KEY }}" > ~/.ssh/${STREAMING_PROVIDER}_${CLOUD_PROVIDER}.pub - chmod 600 ~/.ssh/${STREAMING_PROVIDER}_${CLOUD_PROVIDER} - chmod 644 ~/.ssh/${STREAMING_PROVIDER}_${CLOUD_PROVIDER}.pub + echo "${{ secrets.SSH_PRIVATE_KEY }}" > ~/.ssh/${STREAMING_PROVIDER}_$CLOUD_PROVIDER + echo "${{ secrets.SSH_PUBLIC_KEY }}" > ~/.ssh/${STREAMING_PROVIDER}_$CLOUD_PROVIDER.pub + chmod 600 ~/.ssh/${STREAMING_PROVIDER}_$CLOUD_PROVIDER + chmod 644 ~/.ssh/${STREAMING_PROVIDER}_$CLOUD_PROVIDER.pub - name: Install python uses: actions/setup-python@v4 @@ -122,8 +122,8 @@ jobs: shell: bash run: | echo "[INFO] Provider is AutoMQ" - cd driver-${STREAMING_PROVIDER}/deploy/${CLOUD_PROVIDER} - infracost breakdown --path .
--usage-file infracost/$CLOUD_PROVIDER-medium-500m-6t.yml >> /tmp/aws-cost.txt COST_DETAIL_FILE=/tmp/aws-cost.txt cat $COST_DETAIL_FILE @@ -188,18 +188,18 @@ jobs: shell: bash run: | echo "current path is: $(pwd)" - sed -i "s/\${AUTOMQ_ENVID}/${STREAMING_PROVIDER}/g" "driver-nats/deploy/provision-nats-aws.tf" - sed -i "s/\${AUTOMQ_ENVID}/${STREAMING_PROVIDER}/g" "driver-pravega/deploy/provision-pravega-aws.tf" - sed -i "s/\${AUTOMQ_ENVID}/${STREAMING_PROVIDER}/g" "driver-${STREAMING_PROVIDER}/deploy/${CLOUD_PROVIDER}/provision-kafka-aws.tf" + sed -i "s/\${AUTOMQ_ENVID}/$STREAMING_PROVIDER/g" "driver-nats/deploy/provision-nats-aws.tf" + sed -i "s/\${AUTOMQ_ENVID}/$STREAMING_PROVIDER/g" "driver-pravega/deploy/provision-pravega-aws.tf" + sed -i "s/\${AUTOMQ_ENVID}/$STREAMING_PROVIDER/g" "driver-$STREAMING_PROVIDER/deploy/$CLOUD_PROVIDER/provision-kafka-aws.tf" - name: Apply Variables and Secrets for Streaming Provider - working-directory: driver-${STREAMING_PROVIDER}/deploy/${CLOUD_PROVIDER} + working-directory: driver-$STREAMING_PROVIDER/deploy/$CLOUD_PROVIDER ## Set AK/SK and terraform s3 backend info shell: bash run: | echo "current path is: $(pwd)" sed -i "s/\${TF_BACKEND_BUCKET}/$TF_BACKEND_BUCKET/g" "provision-kafka-aws.tf" - sed -i "s/\${TF_BACKEND_KEY}/$TF_BACKEND_KEY-${CLOUD_PROVIDER}-${STREAMING_PROVIDER}/g" "provision-kafka-aws.tf" + sed -i "s/\${TF_BACKEND_KEY}/$TF_BACKEND_KEY-$CLOUD_PROVIDER-$STREAMING_PROVIDER/g" "provision-kafka-aws.tf" sed -i "s/\${TF_BACKEND_REGION}/$REGION/g" "provision-kafka-aws.tf" sed -i "s/\${AUTOMQ_ACCESS_KEY}/${{ secrets.AUTOMQ_ACCESS_KEY }}/g" "var.tfvars" sed -i "s/\${AUTOMQ_SECRET_KEY}/${{ secrets.AUTOMQ_SECRET_KEY }}/g" "var.tfvars" @@ -220,10 +220,10 @@ jobs: shell: bash run: | mkdir -p ~/.ssh - echo "${{ secrets.SSH_PRIVATE_KEY }}" > ~/.ssh/${STREAMING_PROVIDER}_${CLOUD_PROVIDER} - echo "${{ secrets.SSH_PUBLIC_KEY }}" > ~/.ssh/${STREAMING_PROVIDER}_${CLOUD_PROVIDER}.pub - chmod 600 
~/.ssh/${STREAMING_PROVIDER}_${CLOUD_PROVIDER} - chmod 644 ~/.ssh/${STREAMING_PROVIDER}_${CLOUD_PROVIDER}.pub + echo "${{ secrets.SSH_PRIVATE_KEY }}" > ~/.ssh/${STREAMING_PROVIDER}_$CLOUD_PROVIDER + echo "${{ secrets.SSH_PUBLIC_KEY }}" > ~/.ssh/${STREAMING_PROVIDER}_$CLOUD_PROVIDER.pub + chmod 600 ~/.ssh/${STREAMING_PROVIDER}_$CLOUD_PROVIDER + chmod 644 ~/.ssh/${STREAMING_PROVIDER}_$CLOUD_PROVIDER.pub - name: Install python uses: actions/setup-python@v4 @@ -242,8 +242,8 @@ jobs: shell: bash run: | echo "[INFO] Provider is AutoMQ" - cd driver-${STREAMING_PROVIDER}/deploy/${CLOUD_PROVIDER} - infracost breakdown --path . --usage-file infracost/${CLOUD_PROVIDER}-medium-500m-6t.yml >> /tmp/aws-cost.txt + cd driver-$STREAMING_PROVIDER/deploy/$CLOUD_PROVIDER + infracost breakdown --path . --usage-file infracost/$CLOUD_PROVIDER-medium-500m-6t.yml >> /tmp/aws-cost.txt COST_DETAIL_FILE=/tmp/aws-cost.txt cat $COST_DETAIL_FILE diff --git a/.github/workflows/prepare-vs-uninstall.yml b/.github/workflows/prepare-vs-uninstall.yml index aaada11..6af6d03 100644 --- a/.github/workflows/prepare-vs-uninstall.yml +++ b/.github/workflows/prepare-vs-uninstall.yml @@ -6,7 +6,7 @@ on: types: - completed schedule: - - cron: '19 9 * * *' + - cron: '45 9 * * *' workflow_dispatch: inputs: cloud_provider: @@ -74,18 +74,18 @@ jobs: shell: bash run: | echo "current path is: $(pwd)" - sed -i "s/\${AUTOMQ_ENVID}/${STREAMING_PROVIDER}/g" "driver-nats/deploy/provision-nats-aws.tf" - sed -i "s/\${AUTOMQ_ENVID}/${STREAMING_PROVIDER}/g" "driver-pravega/deploy/provision-pravega-aws.tf" - sed -i "s/\${AUTOMQ_ENVID}/${STREAMING_PROVIDER}/g" + sed -i "s/\${AUTOMQ_ENVID}/$STREAMING_PROVIDER/g" "driver-nats/deploy/provision-nats-aws.tf" + sed -i "s/\${AUTOMQ_ENVID}/$STREAMING_PROVIDER/g" "driver-pravega/deploy/provision-pravega-aws.tf" + sed -i "s/\${AUTOMQ_ENVID}/$STREAMING_PROVIDER/g"
"driver-$STREAMING_PROVIDER/deploy/$CLOUD_PROVIDER/provision-kafka-aws.tf" - name: Apply Variables and Secrets for Streaming Provider - working-directory: driver-${STREAMING_PROVIDER}/deploy/${CLOUD_PROVIDER} + working-directory: driver-$STREAMING_PROVIDER/deploy/$CLOUD_PROVIDER ## Set AK/SK and terraform s3 backend info shell: bash run: | echo "current path is: $(pwd)" sed -i "s/\${TF_BACKEND_BUCKET}/$TF_BACKEND_BUCKET/g" "provision-kafka-aws.tf" - sed -i "s/\${TF_BACKEND_KEY}/$TF_BACKEND_KEY-${CLOUD_PROVIDER}-${STREAMING_PROVIDER}/g" "provision-kafka-aws.tf" + sed -i "s/\${TF_BACKEND_KEY}/$TF_BACKEND_KEY-$CLOUD_PROVIDER-$STREAMING_PROVIDER/g" "provision-kafka-aws.tf" sed -i "s/\${TF_BACKEND_REGION}/$REGION/g" "provision-kafka-aws.tf" sed -i "s/\${AUTOMQ_ACCESS_KEY}/${{ secrets.AUTOMQ_ACCESS_KEY }}/g" "var.tfvars" sed -i "s/\${AUTOMQ_SECRET_KEY}/${{ secrets.AUTOMQ_SECRET_KEY }}/g" "var.tfvars" @@ -105,10 +105,10 @@ jobs: shell: bash run: | mkdir -p ~/.ssh - echo "${{ secrets.SSH_PRIVATE_KEY }}" > ~/.ssh/${STREAMING_PROVIDER}_${CLOUD_PROVIDER} - echo "${{ secrets.SSH_PUBLIC_KEY }}" > ~/.ssh/${STREAMING_PROVIDER}_${CLOUD_PROVIDER}.pub - chmod 600 ~/.ssh/${STREAMING_PROVIDER}_${CLOUD_PROVIDER} - chmod 644 ~/.ssh/${STREAMING_PROVIDER}_${CLOUD_PROVIDER}.pub + echo "${{ secrets.SSH_PRIVATE_KEY }}" > ~/.ssh/${STREAMING_PROVIDER}_$CLOUD_PROVIDER + echo "${{ secrets.SSH_PUBLIC_KEY }}" > ~/.ssh/${STREAMING_PROVIDER}_$CLOUD_PROVIDER.pub + chmod 600 ~/.ssh/${STREAMING_PROVIDER}_$CLOUD_PROVIDER + chmod 644 ~/.ssh/${STREAMING_PROVIDER}_$CLOUD_PROVIDER.pub - name: Install python uses: actions/setup-python@v4 @@ -127,8 +127,8 @@ jobs: shell: bash run: | echo "[INFO] Provider is AutoMQ" - cd driver-${STREAMING_PROVIDER}/deploy/${CLOUD_PROVIDER} - infracost breakdown --path . --usage-file infracost/${CLOUD_PROVIDER}-medium-500m-6t.yml >> /tmp/aws-cost.txt + cd driver-$STREAMING_PROVIDER/deploy/$CLOUD_PROVIDER + infracost breakdown --path .
--usage-file infracost/$CLOUD_PROVIDER-medium-500m-6t.yml >> /tmp/aws-cost.txt COST_DETAIL_FILE=/tmp/aws-cost.txt cat $COST_DETAIL_FILE @@ -193,18 +193,18 @@ jobs: shell: bash run: | echo "current path is: $(pwd)" - sed -i "s/\${AUTOMQ_ENVID}/${STREAMING_PROVIDER}/g" "driver-nats/deploy/provision-nats-aws.tf" - sed -i "s/\${AUTOMQ_ENVID}/${STREAMING_PROVIDER}/g" "driver-pravega/deploy/provision-pravega-aws.tf" - sed -i "s/\${AUTOMQ_ENVID}/${STREAMING_PROVIDER}/g" "driver-${STREAMING_PROVIDER}/deploy/${CLOUD_PROVIDER}/provision-kafka-aws.tf" + sed -i "s/\${AUTOMQ_ENVID}/$STREAMING_PROVIDER/g" "driver-nats/deploy/provision-nats-aws.tf" + sed -i "s/\${AUTOMQ_ENVID}/$STREAMING_PROVIDER/g" "driver-pravega/deploy/provision-pravega-aws.tf" + sed -i "s/\${AUTOMQ_ENVID}/$STREAMING_PROVIDER/g" "driver-$STREAMING_PROVIDER/deploy/$CLOUD_PROVIDER/provision-kafka-aws.tf" - name: Apply Variables and Secrets for Streaming Provider - working-directory: driver-${STREAMING_PROVIDER}/deploy/${CLOUD_PROVIDER} + working-directory: driver-$STREAMING_PROVIDER/deploy/$CLOUD_PROVIDER ## Set AK/SK and terraform s3 backend info shell: bash run: | echo "current path is: $(pwd)" sed -i "s/\${TF_BACKEND_BUCKET}/$TF_BACKEND_BUCKET/g" "provision-kafka-aws.tf" - sed -i "s/\${TF_BACKEND_KEY}/$TF_BACKEND_KEY-${CLOUD_PROVIDER}-${STREAMING_PROVIDER}/g" "provision-kafka-aws.tf" + sed -i "s/\${TF_BACKEND_KEY}/$TF_BACKEND_KEY-$CLOUD_PROVIDER-$STREAMING_PROVIDER/g" "provision-kafka-aws.tf" sed -i "s/\${TF_BACKEND_REGION}/$REGION/g" "provision-kafka-aws.tf" sed -i "s/\${AUTOMQ_ACCESS_KEY}/${{ secrets.AUTOMQ_ACCESS_KEY }}/g" "var.tfvars" sed -i "s/\${AUTOMQ_SECRET_KEY}/${{ secrets.AUTOMQ_SECRET_KEY }}/g" "var.tfvars" @@ -225,10 +225,10 @@ jobs: shell: bash run: | mkdir -p ~/.ssh - echo "${{ secrets.SSH_PRIVATE_KEY }}" > ~/.ssh/${STREAMING_PROVIDER}_${CLOUD_PROVIDER} - echo "${{ secrets.SSH_PUBLIC_KEY }}" > ~/.ssh/${STREAMING_PROVIDER}_${CLOUD_PROVIDER}.pub - chmod 600 
~/.ssh/${STREAMING_PROVIDER}_${CLOUD_PROVIDER} - chmod 644 ~/.ssh/${STREAMING_PROVIDER}_${CLOUD_PROVIDER}.pub + echo "${{ secrets.SSH_PRIVATE_KEY }}" > ~/.ssh/${STREAMING_PROVIDER}_$CLOUD_PROVIDER + echo "${{ secrets.SSH_PUBLIC_KEY }}" > ~/.ssh/${STREAMING_PROVIDER}_$CLOUD_PROVIDER.pub + chmod 600 ~/.ssh/${STREAMING_PROVIDER}_$CLOUD_PROVIDER + chmod 644 ~/.ssh/${STREAMING_PROVIDER}_$CLOUD_PROVIDER.pub - name: Install python uses: actions/setup-python@v4 @@ -247,8 +247,8 @@ jobs: shell: bash run: | echo "[INFO] Provider is AutoMQ" - cd driver-${STREAMING_PROVIDER}/deploy/${CLOUD_PROVIDER} - infracost breakdown --path . --usage-file infracost/${CLOUD_PROVIDER}-medium-500m-6t.yml >> /tmp/aws-cost.txt + cd driver-$STREAMING_PROVIDER/deploy/$CLOUD_PROVIDER + infracost breakdown --path . --usage-file infracost/$CLOUD_PROVIDER-medium-500m-6t.yml >> /tmp/aws-cost.txt COST_DETAIL_FILE=/tmp/aws-cost.txt cat $COST_DETAIL_FILE