updated project to use poetry #10

Workflow file for this run

# Based on: https://learn.microsoft.com/en-us/azure/databricks/dev-tools/bundles/ci-cd
# This workflow validates, deploys, and runs the specified bundle
# within a pre-production target named "dev".
name: "dev deployment"
# Ensure that only a single job or workflow using the same concurrency group
# runs at a time.
concurrency: 1
# Trigger this workflow whenever a commit is pushed to the repo's
# main branch.
on:
  push:
    branches:
      - main
jobs:
  # Used by the "pipeline_update" job to deploy the bundle.
  # Bundle validation is automatically performed as part of this deployment.
  # If validation fails, this workflow fails.
  deploy:
    name: "Deploy bundle"
    runs-on: ubuntu-latest
    steps:
      # Check out this repo, so that this workflow can access it.
      - uses: actions/checkout@v3
      # Download the Databricks CLI.
      # See https://github.com/databricks/setup-cli
      - uses: databricks/setup-cli@main
      # Install poetry: https://python-poetry.org/docs/#installing-with-the-official-installer
      - run: curl -sSL https://install.python-poetry.org | python3 -
      # Deploy the bundle to the "dev" target as defined
      # in the bundle's settings file.
      - run: databricks bundle deploy
        working-directory: .
        env:
          DATABRICKS_CLIENT_ID: ${{ secrets.DATABRICKS_CLIENT_ID }}
          DATABRICKS_CLIENT_SECRET: ${{ secrets.DATABRICKS_CLIENT_SECRET }}
          DATABRICKS_BUNDLE_ENV: dev
  # Validate, deploy, and then run the bundle.
  pipeline_update:
    name: "Run pipeline"
    runs-on: ubuntu-latest
    # Run the "deploy" job first.
    needs:
      - deploy
    steps:
      # Check out this repo, so that this workflow can access it.
      - uses: actions/checkout@v3
      # Download the Databricks CLI.
      # See https://github.com/databricks/setup-cli
      - uses: databricks/setup-cli@main
      # Install poetry: https://python-poetry.org/docs/#installing-with-the-official-installer
      - run: curl -sSL https://install.python-poetry.org | python3 -
      # Run the Databricks job named "marcin_project_job" as defined in the
      # bundle that was just deployed.
      - run: databricks bundle run marcin_project_job --refresh-all
        working-directory: .
        env:
          DATABRICKS_CLIENT_ID: ${{ secrets.DATABRICKS_CLIENT_ID }}
          DATABRICKS_CLIENT_SECRET: ${{ secrets.DATABRICKS_CLIENT_SECRET }}
          DATABRICKS_BUNDLE_ENV: dev
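
For context, both `databricks bundle deploy` and `databricks bundle run` resolve their targets and resources from the bundle's settings file (databricks.yml) in the working directory. That file is not part of this workflow run, so the sketch below is only an illustration of what it might contain for the "dev" target and the "marcin_project_job" job; the bundle name, artifact settings, workspace host, and the `poetry build` artifact step are assumptions suggested by this PR's switch to Poetry, not confirmed configuration.

# Illustrative sketch of the bundle settings file (databricks.yml) this
# workflow deploys; everything other than the "dev" target name and the
# "marcin_project_job" resource key is a placeholder assumption.
bundle:
  name: marcin_project

artifacts:
  default:
    type: whl
    build: poetry build
    path: .

resources:
  jobs:
    marcin_project_job:
      name: marcin_project_job
      # task and cluster definitions omitted for brevity

targets:
  dev:
    mode: development
    default: true
    workspace:
      host: https://<your-workspace-url>

The DATABRICKS_CLIENT_ID and DATABRICKS_CLIENT_SECRET repository secrets referenced in both jobs are the OAuth client ID and secret of a Databricks service principal; the Databricks CLI reads them from the environment for machine-to-machine authentication against the workspace defined in the bundle's target.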