# dev.yaml — GitHub Actions workflow (source recovered from a scraped page view)
---
# Based on: https://learn.microsoft.com/en-us/azure/databricks/dev-tools/bundles/ci-cd
# This workflow validates, deploys, and runs the specified bundle
# within a pre-production target named "dev".
name: "dev deployment"

# Ensure that only a single run of this workflow executes at a time.
# NOTE(review): `concurrency` takes a string group name or a mapping with
# `group`/`cancel-in-progress` — the original `concurrency: 1` (an integer)
# is not a valid value.
concurrency:
  group: dev-deployment
  cancel-in-progress: false

# Trigger this workflow whenever a commit is pushed to the repo's
# main branch.
on:
  push:
    branches:
      - main

jobs:
  build-and-ut:
    name: "Build and Run Unit Tests"
    runs-on: ubuntu-latest
    steps:
      # Check out this repo, so that this workflow can access it.
      - uses: actions/checkout@v3
      # Download the Databricks CLI.
      # See https://github.com/databricks/setup-cli
      - uses: databricks/setup-cli@main
      # Install poetry: https://python-poetry.org/docs/#installing-with-the-official-installer
      - run: curl -sSL https://install.python-poetry.org | python3 -
      # Install test dependencies
      - run: poetry install
      # NOTE(review): `bundle build` is not among the documented Databricks CLI
      # bundle subcommands (validate/deploy/run/...) — confirm this is intended
      # or provided by a CLI extension.
      - run: databricks bundle build
      # Run unit tests inside the poetry-managed virtualenv.
      - run: |
          source $(poetry env info --path)/bin/activate
          pytest tests/unit --cov

  integration-test:
    name: "Run Integration Tests"
    runs-on: ubuntu-latest
    needs:
      - build-and-ut
    steps:
      # Check out this repo, so that this workflow can access it.
      - uses: actions/checkout@v3
      # Download the Databricks CLI.
      # See https://github.com/databricks/setup-cli
      - uses: databricks/setup-cli@main
      # Install poetry: https://python-poetry.org/docs/#installing-with-the-official-installer
      - run: curl -sSL https://install.python-poetry.org | python3 -
      # Install test dependencies
      - run: poetry install
      # Set up a Databricks CLI profile from repository secrets.
      # NOTE(review): the original wrote `${{ DATABRICKS_TEST_HOST }}` with no
      # `secrets.` context — an invalid expression; fixed to match the
      # surrounding lines.
      - run: |
          echo "[DEFAULT]" >> ~/.databrickscfg
          echo "host = ${{ secrets.DATABRICKS_TEST_HOST }}" >> ~/.databrickscfg
          echo "client_id = ${{ secrets.DATABRICKS_CLIENT_ID }}" >> ~/.databrickscfg
          echo "client_secret = ${{ secrets.DATABRICKS_CLIENT_SECRET }}" >> ~/.databrickscfg
          echo "cluster_id = ${{ secrets.DATABRICKS_TEST_CLUSTER_ID }}" >> ~/.databrickscfg
      # (Removed the original `cat ~/.databrickscfg` debug step: it echoed a
      # file containing client_secret into the build log. GitHub's log masking
      # is best-effort and is not a substitute for not printing secrets.)
      - run: |
          source $(poetry env info --path)/bin/activate
          pytest tests/integration --cov

  e2e:
    name: "Run End to End Tests"
    runs-on: ubuntu-latest
    needs:
      - integration-test
    steps:
      # Check out this repo, so that this workflow can access it.
      - uses: actions/checkout@v3
      # Download the Databricks CLI.
      # See https://github.com/databricks/setup-cli
      - uses: databricks/setup-cli@main
      # Install poetry: https://python-poetry.org/docs/#installing-with-the-official-installer
      - run: curl -sSL https://install.python-poetry.org | python3 -
      # Install test dependencies
      - run: poetry install
      - run: databricks bundle deploy
      - run: |
          source $(poetry env info --path)/bin/activate
          pytest tests/e2e --cov
        env:
          DATABRICKS_TEST_CLUSTER_ID: ${{ secrets.DATABRICKS_TEST_CLUSTER_ID }}

  # Used by the "pipeline_update" job to deploy the bundle.
  # Bundle validation is automatically performed as part of this deployment.
  # If validation fails, this workflow fails.
  deploy:
    name: "Deploy Jobs"
    runs-on: ubuntu-latest
    needs:
      - e2e
    steps:
      # Check out this repo, so that this workflow can access it.
      - uses: actions/checkout@v3
      # Download the Databricks CLI.
      # See https://github.com/databricks/setup-cli
      - uses: databricks/setup-cli@main
      # Install poetry: https://python-poetry.org/docs/#installing-with-the-official-installer
      - run: curl -sSL https://install.python-poetry.org | python3 -
      # Deploy the bundle to the "dev" target as defined
      # in the bundle's settings file.
      - run: databricks bundle deploy
        working-directory: .
        env:
          DATABRICKS_CLIENT_ID: ${{ secrets.DATABRICKS_CLIENT_ID }}
          DATABRICKS_CLIENT_SECRET: ${{ secrets.DATABRICKS_CLIENT_SECRET }}
          DATABRICKS_BUNDLE_ENV: dev

  # Validate, deploy, and then run the bundle.
  pipeline_update:
    name: "Run Jobs"
    runs-on: ubuntu-latest
    # Run the "deploy" job first.
    needs:
      - deploy
    steps:
      # Check out this repo, so that this workflow can access it.
      - uses: actions/checkout@v3
      # Use the downloaded Databricks CLI.
      - uses: databricks/setup-cli@main
      # Install poetry: https://python-poetry.org/docs/#installing-with-the-official-installer
      - run: curl -sSL https://install.python-poetry.org | python3 -
      # Run the Databricks workflow named "marcin_project_job" as defined in
      # the bundle that was just deployed.
      - run: databricks bundle run marcin_project_job --refresh-all
        working-directory: .
        env:
          DATABRICKS_CLIENT_ID: ${{ secrets.DATABRICKS_CLIENT_ID }}
          DATABRICKS_CLIENT_SECRET: ${{ secrets.DATABRICKS_CLIENT_SECRET }}
          DATABRICKS_BUNDLE_ENV: dev