---
# Based on: https://learn.microsoft.com/en-us/azure/databricks/dev-tools/bundles/ci-cd
# This workflow validates, deploys, and runs the specified bundle
# within a pre-production target named "dev".
name: "dev deployment"

# Ensure that only a single run of this workflow executes at a time.
# NOTE: `concurrency` takes a string group name (or a mapping with
# `group:` / `cancel-in-progress:`), not a bare number.
concurrency: dev-deployment

# Trigger this workflow whenever a commit is pushed to the repo's
# main branch.
on:
  push:
    branches:
      - main
jobs:
  build-and-ut:
    name: "Build and Run Unit Tests"
    runs-on: ubuntu-latest
    steps:
      # Check out this repo, so that this workflow can access it.
      - uses: actions/checkout@v3
      # Download the Databricks CLI.
      # See https://github.com/databricks/setup-cli
      - uses: databricks/setup-cli@main
      # Install poetry: https://python-poetry.org/docs/#installing-with-the-official-installer
      - run: curl -sSL https://install.python-poetry.org | python3 -
      # Install test dependencies
      - run: poetry install
      # NOTE(review): `databricks bundle build` is not a documented bundle
      # subcommand (validate/deploy/run/destroy are) — confirm this is the
      # intended command, e.g. `databricks bundle validate`.
      - run: databricks bundle build
      # Run the unit tests inside the poetry-managed virtualenv.
      - run: |
          source $(poetry env info --path)/bin/activate
          pytest tests/unit --cov
integration-test: | ||
name: "Run Integration Tests" | ||
runs-on: ubuntu-latest | ||
needs: | ||
- build-and-ut | ||
steps: | ||
# Check out this repo, so that this workflow can access it. | ||
- uses: actions/checkout@v3 | ||
# Download the Databricks CLI. | ||
# See https://github.com/databricks/setup-cli | ||
- uses: databricks/setup-cli@main | ||
# Install poetry: https://python-poetry.org/docs/#installing-with-the-official-installer | ||
- run: curl -sSL https://install.python-poetry.org | python3 - | ||
# Install test dependencies | ||
- run: poetry install | ||
# Setup databricks profile | ||
- run: | | ||
Check failure on line 64 in .github/workflows/dev.yaml
|
||
echo "[DEFAULT]" >> ~/.databrickscfg | ||
echo "host = ${{ DATABRICKS_TEST_HOST }}" >> ~/.databrickscfg | ||
echo "client_id = ${{ secrets.DATABRICKS_CLIENT_ID }}" >> ~/.databrickscfg | ||
echo "client_secret = ${{ secrets.DATABRICKS_CLIENT_SECRET }}" >> ~/.databrickscfg | ||
echo "cluster_id = ${{ secrets.DATABRICKS_TEST_CLUSTER_ID }}" >> ~/.databrickscfg | ||
- run: cat ~/.databrickscfg | ||
- run: | | ||
source $(poetry env info --path)/bin/activate | ||
pytest tests/integration --cov | ||
e2e: | ||
name: "Run End to End Tests" | ||
runs-on: ubuntu-latest | ||
needs: | ||
- integration-test | ||
steps: | ||
# Check out this repo, so that this workflow can access it. | ||
- uses: actions/checkout@v3 | ||
# Download the Databricks CLI. | ||
# See https://github.com/databricks/setup-cli | ||
- uses: databricks/setup-cli@main | ||
# Install poetry: https://python-poetry.org/docs/#installing-with-the-official-installer | ||
- run: curl -sSL https://install.python-poetry.org | python3 - | ||
# Install test dependencies | ||
- run: poetry install | ||
- run: databricks bundle deploy | ||
- run: | | ||
source $(poetry env info --path)/bin/activate | ||
pytest tests/e2e --cov | ||
env: | ||
DATABRICKS_TEST_CLUSTER_ID: ${{ secrets.DATABRICKS_TEST_CLUSTER_ID }} | ||
# Used by the "pipeline_update" job to deploy the bundle. | ||
# Bundle validation is automatically performed as part of this deployment. | ||
# If validation fails, this workflow fails. | ||
deploy: | ||
name: "Deploy Jobs" | ||
runs-on: ubuntu-latest | ||
needs: | ||
- e2e | ||
steps: | ||
# Check out this repo, so that this workflow can access it. | ||
- uses: actions/checkout@v3 | ||
# Download the Databricks CLI. | ||
# See https://github.com/databricks/setup-cli | ||
- uses: databricks/setup-cli@main | ||
# Install poetry: https://python-poetry.org/docs/#installing-with-the-official-installer | ||
- run: curl -sSL https://install.python-poetry.org | python3 - | ||
# Deploy the bundle to the "dev" target as defined | ||
# in the bundle's settings file. | ||
- run: databricks bundle deploy | ||
working-directory: . | ||
env: | ||
DATABRICKS_CLIENT_ID: ${{ secrets.DATABRICKS_CLIENT_ID }} | ||
DATABRICKS_CLIENT_SECRET: ${{ secrets.DATABRICKS_CLIENT_SECRET }} | ||
DATABRICKS_BUNDLE_ENV: dev | ||
# Validate, deploy, and then run the bundle. | ||
pipeline_update: | ||
name: "Run Jobs" | ||
runs-on: ubuntu-latest | ||
# Run the "deploy" job first. | ||
needs: | ||
- deploy | ||
steps: | ||
# Check out this repo, so that this workflow can access it. | ||
- uses: actions/checkout@v3 | ||
# Use the downloaded Databricks CLI. | ||
- uses: databricks/setup-cli@main | ||
# Install poetry: https://python-poetry.org/docs/#installing-with-the-official-installer | ||
- run: curl -sSL https://install.python-poetry.org | python3 - | ||
# Run the Databricks workflow named "my-job" as defined in the | ||
# bundle that was just deployed. | ||
- run: databricks bundle run marcin_project_job --refresh-all | ||
working-directory: . | ||
env: | ||
DATABRICKS_CLIENT_ID: ${{ secrets.DATABRICKS_CLIENT_ID }} | ||
DATABRICKS_CLIENT_SECRET: ${{ secrets.DATABRICKS_CLIENT_SECRET }} | ||
DATABRICKS_BUNDLE_ENV: dev |