fix: fix parameter passing and adjust schedule time for debugging
Signed-off-by: Kami Wan <[email protected]>
KaimingWan authored and Kami Wan committed Jun 3, 2024
1 parent b987604 commit a1455b8
Showing 3 changed files with 72 additions and 104 deletions.
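All three workflows get the same fix. On a schedule: trigger the workflow_dispatch inputs are never populated, so every ${{ inputs.* }} expression evaluates to an empty string and the provisioning scripts received blank parameters; the step-level with: blocks removed below were likewise not a supported way to feed values into run: scripts, since with: supplies inputs to uses: actions only. The values are instead hoisted into job-level env: entries with || fallbacks, which the scripts read as plain shell variables. A minimal sketch of the pattern, with an illustrative job name:

    jobs:
      example:
        runs-on: ubuntu-latest
        env:
          # Empty on scheduled runs, so fall back to the default.
          CLOUD_PROVIDER: ${{ inputs.cloud_provider || 'aws-cn' }}
        steps:
          - name: Read the value as an ordinary shell variable
            shell: bash
            run: |
              echo "provider: ${CLOUD_PROVIDER}"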
46 changes: 20 additions & 26 deletions .github/workflows/prepare-vs-benchmark.yml
@@ -6,7 +6,7 @@ on:
     types:
       - completed
   schedule:
-    - cron: '18 8 * * *'
+    - cron: '48 8 * * *'
   workflow_dispatch:
     inputs:
       cloud_provider:
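For reference, GitHub Actions schedules use standard five-field cron evaluated in UTC, so '48 8 * * *' means minute 48, hour 8, every day: the job now fires daily at 08:48 UTC instead of 08:18, lining up a fresh upcoming trigger while this fix is debugged.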
@@ -36,6 +36,10 @@ jobs:
     environment: ${{ inputs.cloud_provider }}
     name: Prepare AutoMQ Environment
     runs-on: ubuntu-latest
+    env:
+      STREAMING_PROVIDER: ${{ inputs.compared_streaming_provider || 'automq' }}
+      CLOUD_PROVIDER: ${{ inputs.cloud_provider || 'aws-cn' }}
+      REGION: ${{ inputs.region || 'cn-northwest-1' }}
     if: ${{ github.event.workflow_run.conclusion == 'success' }}
     outputs:
       benchmark_result_json_automq: ${{ steps.shared-automq.outputs.benchmark_result_json_automq }}
environment: ${{ inputs.cloud_provider }}
name: Prepare Kafka Environment
runs-on: ubuntu-latest
env:
STREAMING_PROVIDER: ${{ inputs.compared_streaming_provider || 'kafka' }}
CLOUD_PROVIDER: ${{ inputs.cloud_provider || 'aws-cn' }}
REGION: ${{ inputs.region || 'cn-northwest-1' }}
outputs:
benchmark_result_json_kafka: ${{ steps.shared-kafka.outputs.benchmark_result_json_kafka }}
extracted_data_kafka: ${{ steps.shared-kafka.outputs.extracted_data_kafka }}
@@ -199,15 +207,12 @@ jobs:
         shell: bash
         run: |
           echo "current path is: $(pwd)"
-          sed -i "s/\${AUTOMQ_ENVID}/${{ inputs.streaming_provider }}/g" "driver-nats/deploy/provision-nats-aws.tf"
-          sed -i "s/\${AUTOMQ_ENVID}/${{ inputs.streaming_provider }}/g" "driver-pravega/deploy/provision-pravega-aws.tf"
-          sed -i "s/\${AUTOMQ_ENVID}/${{ inputs.streaming_provider }}/g" "driver-${{ inputs.streaming_provider }}/deploy/${{ inputs.cloud_provider }}/provision-kafka-aws.tf"
-        with:
-          streaming_provider: kafka
-          cloud_provider: ${{ inputs.cloud_provider || 'aws-cn' }}
+          sed -i "s/\${AUTOMQ_ENVID}/${STREAMING_PROVIDER}/g" "driver-nats/deploy/provision-nats-aws.tf"
+          sed -i "s/\${AUTOMQ_ENVID}/${STREAMING_PROVIDER}/g" "driver-pravega/deploy/provision-pravega-aws.tf"
+          sed -i "s/\${AUTOMQ_ENVID}/${STREAMING_PROVIDER}/g" "driver-${STREAMING_PROVIDER}/deploy/${CLOUD_PROVIDER}/provision-kafka-aws.tf"
       - name: Apply Variables and Secrets for Streaming Provider
-        working-directory: driver-${{ inputs.streaming_provider }}/deploy/${{ inputs.cloud_provider }}
+        working-directory: driver-${STREAMING_PROVIDER}/deploy/${CLOUD_PROVIDER}
         ## Set AK/SK and terraform s3 backend info
         shell: bash
         run: |
@@ -220,12 +225,8 @@ jobs:
         env:
           ## sed match only support ENV rather than expression like ${{ secrets.TF_BACKEND_BUCKET }}
           TF_BACKEND_BUCKET: ${{ secrets.TF_BACKEND_BUCKET }}
-          TF_BACKEND_KEY: ${{ secrets.TF_BACKEND_KEY }}-${{ inputs.cloud_provider }}-${{ inputs.streaming_provider }}
+          TF_BACKEND_KEY: ${{ secrets.TF_BACKEND_KEY }}-${CLOUD_PROVIDER}-${STREAMING_PROVIDER}
           REGION: ${{ inputs.region }}
-        with:
-          streaming_provider: kafka
-          region: ${{ inputs.region || 'cn-northwest-1' }}
-          cloud_provider: ${{ inputs.cloud_provider || 'aws-cn' }}

       - name: Configure AWS credentials
         uses: aws-actions/configure-aws-credentials@v4
@@ -238,13 +239,10 @@ jobs:
         shell: bash
         run: |
           mkdir -p ~/.ssh
-          echo "${{ secrets.SSH_PRIVATE_KEY }}" > ~/.ssh/${{ inputs.streaming_provider }}_${{ inputs.cloud_provider }}
-          echo "${{ secrets.SSH_PUBLIC_KEY }}" > ~/.ssh/${{ inputs.streaming_provider }}_${{ inputs.cloud_provider }}.pub
-          chmod 600 ~/.ssh/${{ inputs.streaming_provider }}_${{ inputs.cloud_provider }}
-          chmod 644 ~/.ssh/${{ inputs.streaming_provider }}_${{ inputs.cloud_provider }}.pub
-        with:
-          streaming_provider: kafka
-          cloud_provider: ${{ inputs.cloud_provider || 'aws-cn' }}
+          echo "${{ secrets.SSH_PRIVATE_KEY }}" > ~/.ssh/${STREAMING_PROVIDER}_${CLOUD_PROVIDER}
+          echo "${{ secrets.SSH_PUBLIC_KEY }}" > ~/.ssh/${STREAMING_PROVIDER}_${CLOUD_PROVIDER}.pub
+          chmod 600 ~/.ssh/${STREAMING_PROVIDER}_${CLOUD_PROVIDER}
+          chmod 644 ~/.ssh/${STREAMING_PROVIDER}_${CLOUD_PROVIDER}.pub
       - name: Install python
         uses: actions/setup-python@v4
@@ -263,15 +261,11 @@ jobs:
         shell: bash
         run: |
           echo "[INFO] Provider is AutoMQ"
-          cd driver-${{ inputs.streaming_provider }}/deploy/${{ inputs.cloud_provider }}
-          infracost breakdown --path . --usage-file infracost/${{ inputs.cloud_provider }}-medium-500m-6t.yml >> /tmp/aws-cost.txt
+          cd driver-${STREAMING_PROVIDER}/deploy/${CLOUD_PROVIDER}
+          infracost breakdown --path . --usage-file infracost/${CLOUD_PROVIDER}-medium-500m-6t.yml >> /tmp/aws-cost.txt
           COST_DETAIL_FILE=/tmp/aws-cost.txt
           cat $COST_DETAIL_FILE
-        with:
-          streaming_provider: kafka
-          region: ${{ inputs.region || 'cn-northwest-1' }}
-          cloud_provider: ${{ inputs.cloud_provider || 'aws-cn' }}
       - name: Execute Shared Steps
         id: shared-kafka
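Inside the run: scripts the substitutions now use plain shell variables (${STREAMING_PROVIDER}, ${CLOUD_PROVIDER}) instead of inline ${{ inputs.* }} expressions, so the same script body works whether the job was dispatched manually or started by cron. A self-contained sketch of the substitution step, assuming the job-level env: block shown in this diff:

    - name: Substitute environment ID into Terraform templates
      shell: bash
      run: |
        # STREAMING_PROVIDER and CLOUD_PROVIDER come from the job-level env: block.
        # The backslash in \${AUTOMQ_ENVID} stops the shell from expanding the
        # placeholder, so sed receives it literally and rewrites it in the file.
        sed -i "s/\${AUTOMQ_ENVID}/${STREAMING_PROVIDER}/g" \
          "driver-${STREAMING_PROVIDER}/deploy/${CLOUD_PROVIDER}/provision-kafka-aws.tf"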
84 changes: 32 additions & 52 deletions .github/workflows/prepare-vs-install.yml
@@ -2,7 +2,7 @@ name: Prepare Streaming Cluster [Install]

 on:
   schedule:
-    - cron: '18 8 * * *'
+    - cron: '48 8 * * *'
   workflow_dispatch:
     inputs:
       cloud_provider:
@@ -29,9 +29,13 @@ on:
 jobs:

   prepare_automq_env:
-    environment: ${{ inputs.cloud_provider }}
+    environment: ${{ inputs.cloud_provider || 'aws-cn' }}
     name: Prepare AutoMQ Environment
     runs-on: ubuntu-latest
+    env:
+      STREAMING_PROVIDER: ${{ inputs.compared_streaming_provider || 'automq' }}
+      CLOUD_PROVIDER: ${{ inputs.cloud_provider || 'aws-cn' }}
+      REGION: ${{ inputs.region || 'cn-northwest-1' }}
     outputs:
       benchmark_result_json_automq: ${{ steps.shared-automq.outputs.benchmark_result_json_automq }}
       extracted_data_automq: ${{ steps.shared-automq.outputs.extracted_data_automq }}
@@ -65,15 +69,12 @@ jobs:
         shell: bash
         run: |
           echo "current path is: $(pwd)"
-          sed -i "s/\${AUTOMQ_ENVID}/${{ inputs.streaming_provider }}/g" "driver-nats/deploy/provision-nats-aws.tf"
-          sed -i "s/\${AUTOMQ_ENVID}/${{ inputs.streaming_provider }}/g" "driver-pravega/deploy/provision-pravega-aws.tf"
-          sed -i "s/\${AUTOMQ_ENVID}/${{ inputs.streaming_provider }}/g" "driver-${{ inputs.streaming_provider }}/deploy/${{ inputs.cloud_provider }}/provision-kafka-aws.tf"
-        with:
-          streaming_provider: automq
-          cloud_provider: ${{ inputs.cloud_provider || 'aws-cn' }}
+          sed -i "s/\${AUTOMQ_ENVID}/${STREAMING_PROVIDER}/g" "driver-nats/deploy/provision-nats-aws.tf"
+          sed -i "s/\${AUTOMQ_ENVID}/${STREAMING_PROVIDER}/g" "driver-pravega/deploy/provision-pravega-aws.tf"
+          sed -i "s/\${AUTOMQ_ENVID}/${STREAMING_PROVIDER}/g" "driver-${STREAMING_PROVIDER}/deploy/${CLOUD_PROVIDER}/provision-kafka-aws.tf"
       - name: Apply Variables and Secrets for Streaming Provider
-        working-directory: driver-${{ inputs.streaming_provider }}/deploy/${{ inputs.cloud_provider }}
+        working-directory: driver-${STREAMING_PROVIDER}/deploy/${CLOUD_PROVIDER}
         ## Set AK/SK and terraform s3 backend info
         shell: bash
         run: |
@@ -86,12 +87,7 @@ jobs:
         env:
           ## sed match only support ENV rather than expression like ${{ secrets.TF_BACKEND_BUCKET }}
           TF_BACKEND_BUCKET: ${{ secrets.TF_BACKEND_BUCKET }}
-          TF_BACKEND_KEY: ${{ secrets.TF_BACKEND_KEY }}-${{ inputs.cloud_provider }}-${{ inputs.streaming_provider }}
-          REGION: ${{ inputs.region }}
-        with:
-          streaming_provider: automq
-          region: ${{ inputs.region || 'cn-northwest-1' }}
-          cloud_provider: ${{ inputs.cloud_provider || 'aws-cn' }}
+          TF_BACKEND_KEY: ${{ secrets.TF_BACKEND_KEY }}-${CLOUD_PROVIDER}-${STREAMING_PROVIDER}

       - name: Configure AWS credentials
         uses: aws-actions/configure-aws-credentials@v4
@@ -104,13 +100,10 @@ jobs:
         shell: bash
         run: |
           mkdir -p ~/.ssh
-          echo "${{ secrets.SSH_PRIVATE_KEY }}" > ~/.ssh/${{ inputs.streaming_provider }}_${{ inputs.cloud_provider }}
-          echo "${{ secrets.SSH_PUBLIC_KEY }}" > ~/.ssh/${{ inputs.streaming_provider }}_${{ inputs.cloud_provider }}.pub
-          chmod 600 ~/.ssh/${{ inputs.streaming_provider }}_${{ inputs.cloud_provider }}
-          chmod 644 ~/.ssh/${{ inputs.streaming_provider }}_${{ inputs.cloud_provider }}.pub
-        with:
-          streaming_provider: automq
-          cloud_provider: ${{ inputs.cloud_provider || 'aws-cn' }}
+          echo "${{ secrets.SSH_PRIVATE_KEY }}" > ~/.ssh/${STREAMING_PROVIDER}_${CLOUD_PROVIDER}
+          echo "${{ secrets.SSH_PUBLIC_KEY }}" > ~/.ssh/${STREAMING_PROVIDER}_${CLOUD_PROVIDER}.pub
+          chmod 600 ~/.ssh/${STREAMING_PROVIDER}_${CLOUD_PROVIDER}
+          chmod 644 ~/.ssh/${STREAMING_PROVIDER}_${CLOUD_PROVIDER}.pub
       - name: Install python
         uses: actions/setup-python@v4
@@ -129,14 +122,11 @@ jobs:
         shell: bash
         run: |
           echo "[INFO] Provider is AutoMQ"
-          cd driver-${{ inputs.streaming_provider }}/deploy/${{ inputs.cloud_provider }}
-          infracost breakdown --path . --usage-file infracost/${{ inputs.cloud_provider }}-medium-500m-6t.yml >> /tmp/aws-cost.txt
+          cd driver-${STREAMING_PROVIDER}/deploy/${CLOUD_PROVIDER}
+          infracost breakdown --path . --usage-file infracost/${CLOUD_PROVIDER}-medium-500m-6t.yml >> /tmp/aws-cost.txt
           COST_DETAIL_FILE=/tmp/aws-cost.txt
           cat $COST_DETAIL_FILE
-        with:
-          streaming_provider: automq
-          cloud_provider: ${{ inputs.cloud_provider || 'aws-cn' }}
       - name: Execute Shared Steps
         id: shared-automq
@@ -160,6 +150,10 @@ jobs:
     environment: ${{ inputs.cloud_provider }}
     name: Prepare Kafka Environment
     runs-on: ubuntu-latest
+    env:
+      STREAMING_PROVIDER: ${{ inputs.compared_streaming_provider || 'kafka' }}
+      CLOUD_PROVIDER: ${{ inputs.cloud_provider || 'aws-cn' }}
+      REGION: ${{ inputs.region || 'cn-northwest-1' }}
     outputs:
       benchmark_result_json_kafka: ${{ steps.shared-kafka.outputs.benchmark_result_json_kafka }}
       extracted_data_kafka: ${{ steps.shared-kafka.outputs.extracted_data_kafka }}
@@ -194,15 +188,12 @@ jobs:
         shell: bash
         run: |
           echo "current path is: $(pwd)"
-          sed -i "s/\${AUTOMQ_ENVID}/${{ inputs.streaming_provider }}/g" "driver-nats/deploy/provision-nats-aws.tf"
-          sed -i "s/\${AUTOMQ_ENVID}/${{ inputs.streaming_provider }}/g" "driver-pravega/deploy/provision-pravega-aws.tf"
-          sed -i "s/\${AUTOMQ_ENVID}/${{ inputs.streaming_provider }}/g" "driver-${{ inputs.streaming_provider }}/deploy/${{ inputs.cloud_provider }}/provision-kafka-aws.tf"
-        with:
-          streaming_provider: kafka
-          cloud_provider: ${{ inputs.cloud_provider || 'aws-cn' }}
+          sed -i "s/\${AUTOMQ_ENVID}/${STREAMING_PROVIDER}/g" "driver-nats/deploy/provision-nats-aws.tf"
+          sed -i "s/\${AUTOMQ_ENVID}/${STREAMING_PROVIDER}/g" "driver-pravega/deploy/provision-pravega-aws.tf"
+          sed -i "s/\${AUTOMQ_ENVID}/${STREAMING_PROVIDER}/g" "driver-${STREAMING_PROVIDER}/deploy/${CLOUD_PROVIDER}/provision-kafka-aws.tf"
       - name: Apply Variables and Secrets for Streaming Provider
-        working-directory: driver-${{ inputs.streaming_provider }}/deploy/${{ inputs.cloud_provider }}
+        working-directory: driver-${STREAMING_PROVIDER}/deploy/${CLOUD_PROVIDER}
         ## Set AK/SK and terraform s3 backend info
         shell: bash
         run: |
@@ -215,12 +206,8 @@ jobs:
         env:
           ## sed match only support ENV rather than expression like ${{ secrets.TF_BACKEND_BUCKET }}
           TF_BACKEND_BUCKET: ${{ secrets.TF_BACKEND_BUCKET }}
-          TF_BACKEND_KEY: ${{ secrets.TF_BACKEND_KEY }}-${{ inputs.cloud_provider }}-${{ inputs.streaming_provider }}
+          TF_BACKEND_KEY: ${{ secrets.TF_BACKEND_KEY }}-${CLOUD_PROVIDER}-${STREAMING_PROVIDER}
           REGION: ${{ inputs.region }}
-        with:
-          streaming_provider: kafka
-          region: ${{ inputs.region || 'cn-northwest-1' }}
-          cloud_provider: ${{ inputs.cloud_provider || 'aws-cn' }}

       - name: Configure AWS credentials
         uses: aws-actions/configure-aws-credentials@v4
@@ -233,13 +220,10 @@ jobs:
         shell: bash
         run: |
           mkdir -p ~/.ssh
-          echo "${{ secrets.SSH_PRIVATE_KEY }}" > ~/.ssh/${{ inputs.streaming_provider }}_${{ inputs.cloud_provider }}
-          echo "${{ secrets.SSH_PUBLIC_KEY }}" > ~/.ssh/${{ inputs.streaming_provider }}_${{ inputs.cloud_provider }}.pub
-          chmod 600 ~/.ssh/${{ inputs.streaming_provider }}_${{ inputs.cloud_provider }}
-          chmod 644 ~/.ssh/${{ inputs.streaming_provider }}_${{ inputs.cloud_provider }}.pub
-        with:
-          streaming_provider: kafka
-          cloud_provider: ${{ inputs.cloud_provider || 'aws-cn' }}
+          echo "${{ secrets.SSH_PRIVATE_KEY }}" > ~/.ssh/${STREAMING_PROVIDER}_${CLOUD_PROVIDER}
+          echo "${{ secrets.SSH_PUBLIC_KEY }}" > ~/.ssh/${STREAMING_PROVIDER}_${CLOUD_PROVIDER}.pub
+          chmod 600 ~/.ssh/${STREAMING_PROVIDER}_${CLOUD_PROVIDER}
+          chmod 644 ~/.ssh/${STREAMING_PROVIDER}_${CLOUD_PROVIDER}.pub
       - name: Install python
         uses: actions/setup-python@v4
@@ -258,15 +242,11 @@ jobs:
         shell: bash
         run: |
           echo "[INFO] Provider is AutoMQ"
-          cd driver-${{ inputs.streaming_provider }}/deploy/${{ inputs.cloud_provider }}
-          infracost breakdown --path . --usage-file infracost/${{ inputs.cloud_provider }}-medium-500m-6t.yml >> /tmp/aws-cost.txt
+          cd driver-${STREAMING_PROVIDER}/deploy/${CLOUD_PROVIDER}
+          infracost breakdown --path . --usage-file infracost/${CLOUD_PROVIDER}-medium-500m-6t.yml >> /tmp/aws-cost.txt
           COST_DETAIL_FILE=/tmp/aws-cost.txt
           cat $COST_DETAIL_FILE
-        with:
-          streaming_provider: kafka
-          region: ${{ inputs.region || 'cn-northwest-1' }}
-          cloud_provider: ${{ inputs.cloud_provider || 'aws-cn' }}
       - name: Execute Shared Steps
         id: shared-kafka
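The recurring in-file comment ('sed match only support ENV rather than expression') explains the step-level env: blocks: sed can substitute only real environment variables, not GitHub expressions, so the secrets are exported before the script runs, and the state key now embeds cloud and provider so the AutoMQ and Kafka jobs keep separate Terraform state. A hedged sketch of how such a step could templatize an S3 backend; the backend.tf placeholder names and the TF_BACKEND_KEY_PREFIX variable are illustrative assumptions, not taken from the repository:

    - name: Apply Variables and Secrets for Streaming Provider
      shell: bash
      env:
        TF_BACKEND_BUCKET: ${{ secrets.TF_BACKEND_BUCKET }}
        TF_BACKEND_KEY_PREFIX: ${{ secrets.TF_BACKEND_KEY }}
      run: |
        # Compose a per-cloud, per-provider state key so concurrent jobs
        # do not collide on the same Terraform state object.
        TF_BACKEND_KEY="${TF_BACKEND_KEY_PREFIX}-${CLOUD_PROVIDER}-${STREAMING_PROVIDER}"
        # Hypothetical placeholders in the backend config; the real template differs.
        sed -i "s|BUCKET_PLACEHOLDER|${TF_BACKEND_BUCKET}|g" backend.tf
        sed -i "s|KEY_PLACEHOLDER|${TF_BACKEND_KEY}|g" backend.tf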
46 changes: 20 additions & 26 deletions .github/workflows/prepare-vs-uninstall.yml
@@ -6,7 +6,7 @@ on:
     types:
       - completed
   schedule:
-    - cron: '18 8 * * *'
+    - cron: '48 8 * * *'
   workflow_dispatch:
     inputs:
       cloud_provider:
@@ -36,6 +36,10 @@ jobs:
     environment: ${{ inputs.cloud_provider }}
     name: Prepare AutoMQ Environment
     runs-on: ubuntu-latest
+    env:
+      STREAMING_PROVIDER: ${{ inputs.compared_streaming_provider || 'automq' }}
+      CLOUD_PROVIDER: ${{ inputs.cloud_provider || 'aws-cn' }}
+      REGION: ${{ inputs.region || 'cn-northwest-1' }}
     if: ${{ github.event.workflow_run.conclusion == 'success' }}
     outputs:
       benchmark_result_json_automq: ${{ steps.shared-automq.outputs.benchmark_result_json_automq }}
@@ -165,6 +169,10 @@ jobs:
     environment: ${{ inputs.cloud_provider }}
     name: Prepare Kafka Environment
     runs-on: ubuntu-latest
+    env:
+      STREAMING_PROVIDER: ${{ inputs.compared_streaming_provider || 'kafka' }}
+      CLOUD_PROVIDER: ${{ inputs.cloud_provider || 'aws-cn' }}
+      REGION: ${{ inputs.region || 'cn-northwest-1' }}
     outputs:
       benchmark_result_json_kafka: ${{ steps.shared-kafka.outputs.benchmark_result_json_kafka }}
       extracted_data_kafka: ${{ steps.shared-kafka.outputs.extracted_data_kafka }}
@@ -199,15 +207,12 @@ jobs:
         shell: bash
         run: |
           echo "current path is: $(pwd)"
-          sed -i "s/\${AUTOMQ_ENVID}/${{ inputs.streaming_provider }}/g" "driver-nats/deploy/provision-nats-aws.tf"
-          sed -i "s/\${AUTOMQ_ENVID}/${{ inputs.streaming_provider }}/g" "driver-pravega/deploy/provision-pravega-aws.tf"
-          sed -i "s/\${AUTOMQ_ENVID}/${{ inputs.streaming_provider }}/g" "driver-${{ inputs.streaming_provider }}/deploy/${{ inputs.cloud_provider }}/provision-kafka-aws.tf"
-        with:
-          streaming_provider: kafka
-          cloud_provider: ${{ inputs.cloud_provider || 'aws-cn' }}
+          sed -i "s/\${AUTOMQ_ENVID}/${STREAMING_PROVIDER}/g" "driver-nats/deploy/provision-nats-aws.tf"
+          sed -i "s/\${AUTOMQ_ENVID}/${STREAMING_PROVIDER}/g" "driver-pravega/deploy/provision-pravega-aws.tf"
+          sed -i "s/\${AUTOMQ_ENVID}/${STREAMING_PROVIDER}/g" "driver-${STREAMING_PROVIDER}/deploy/${CLOUD_PROVIDER}/provision-kafka-aws.tf"
       - name: Apply Variables and Secrets for Streaming Provider
-        working-directory: driver-${{ inputs.streaming_provider }}/deploy/${{ inputs.cloud_provider }}
+        working-directory: driver-${STREAMING_PROVIDER}/deploy/${CLOUD_PROVIDER}
         ## Set AK/SK and terraform s3 backend info
         shell: bash
         run: |
@@ -220,12 +225,8 @@ jobs:
         env:
           ## sed match only support ENV rather than expression like ${{ secrets.TF_BACKEND_BUCKET }}
           TF_BACKEND_BUCKET: ${{ secrets.TF_BACKEND_BUCKET }}
-          TF_BACKEND_KEY: ${{ secrets.TF_BACKEND_KEY }}-${{ inputs.cloud_provider }}-${{ inputs.streaming_provider }}
+          TF_BACKEND_KEY: ${{ secrets.TF_BACKEND_KEY }}-${CLOUD_PROVIDER}-${STREAMING_PROVIDER}
           REGION: ${{ inputs.region }}
-        with:
-          streaming_provider: kafka
-          region: ${{ inputs.region || 'cn-northwest-1' }}
-          cloud_provider: ${{ inputs.cloud_provider || 'aws-cn' }}

       - name: Configure AWS credentials
         uses: aws-actions/configure-aws-credentials@v4
@@ -238,13 +239,10 @@ jobs:
         shell: bash
         run: |
           mkdir -p ~/.ssh
-          echo "${{ secrets.SSH_PRIVATE_KEY }}" > ~/.ssh/${{ inputs.streaming_provider }}_${{ inputs.cloud_provider }}
-          echo "${{ secrets.SSH_PUBLIC_KEY }}" > ~/.ssh/${{ inputs.streaming_provider }}_${{ inputs.cloud_provider }}.pub
-          chmod 600 ~/.ssh/${{ inputs.streaming_provider }}_${{ inputs.cloud_provider }}
-          chmod 644 ~/.ssh/${{ inputs.streaming_provider }}_${{ inputs.cloud_provider }}.pub
-        with:
-          streaming_provider: kafka
-          cloud_provider: ${{ inputs.cloud_provider || 'aws-cn' }}
+          echo "${{ secrets.SSH_PRIVATE_KEY }}" > ~/.ssh/${STREAMING_PROVIDER}_${CLOUD_PROVIDER}
+          echo "${{ secrets.SSH_PUBLIC_KEY }}" > ~/.ssh/${STREAMING_PROVIDER}_${CLOUD_PROVIDER}.pub
+          chmod 600 ~/.ssh/${STREAMING_PROVIDER}_${CLOUD_PROVIDER}
+          chmod 644 ~/.ssh/${STREAMING_PROVIDER}_${CLOUD_PROVIDER}.pub
       - name: Install python
         uses: actions/setup-python@v4
@@ -263,15 +261,11 @@ jobs:
         shell: bash
         run: |
           echo "[INFO] Provider is AutoMQ"
-          cd driver-${{ inputs.streaming_provider }}/deploy/${{ inputs.cloud_provider }}
-          infracost breakdown --path . --usage-file infracost/${{ inputs.cloud_provider }}-medium-500m-6t.yml >> /tmp/aws-cost.txt
+          cd driver-${STREAMING_PROVIDER}/deploy/${CLOUD_PROVIDER}
+          infracost breakdown --path . --usage-file infracost/${CLOUD_PROVIDER}-medium-500m-6t.yml >> /tmp/aws-cost.txt
           COST_DETAIL_FILE=/tmp/aws-cost.txt
           cat $COST_DETAIL_FILE
-        with:
-          streaming_provider: kafka
-          region: ${{ inputs.region || 'cn-northwest-1' }}
-          cloud_provider: ${{ inputs.cloud_provider || 'aws-cn' }}
       - name: Execute Shared Steps
         id: shared-kafka
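A note on sequencing: prepare-vs-benchmark.yml and prepare-vs-uninstall.yml also listen for a workflow_run completion (the types: - completed lines above) and gate the first job on if: ${{ github.event.workflow_run.conclusion == 'success' }}, so each stage starts only after an upstream workflow finishes successfully, while every workflow keeps its own cron entry. A minimal sketch of that trigger, with an illustrative upstream workflow name:

    on:
      workflow_run:
        workflows: ["Prepare Streaming Cluster [Install]"]
        types:
          - completed

    jobs:
      next_stage:
        runs-on: ubuntu-latest
        # Run only when the upstream workflow succeeded.
        if: ${{ github.event.workflow_run.conclusion == 'success' }}
        steps:
          - run: echo "upstream succeeded"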
