Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Migrate path translation tests to acceptance tests #2122

Merged
merged 8 commits into from
Jan 17, 2025
Merged
Show file tree
Hide file tree
Changes from 3 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
bundle:
-  name: path_translation_nominal
+  name: fallback

include:
- "resources/*.yml"
Expand Down
125 changes: 125 additions & 0 deletions acceptance/bundle/paths/fallback/output.development.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,125 @@
{
"jobs": {
"my_job": {
"deployment": {
"kind": "BUNDLE",
"metadata_file_path": "/Workspace/Users/[email protected]/.bundle/fallback/development/state/metadata.json"
},
"edit_mode": "UI_LOCKED",
"format": "MULTI_TASK",
"job_clusters": [
{
"job_cluster_key": "default",
"new_cluster": {
"spark_version": "15.4.x-scala2.12"
}
}
],
"name": "placeholder",
"permissions": [],
"queue": {
"enabled": true
},
"tags": {},
"tasks": [
{
"job_cluster_key": "default",
"notebook_task": {
"notebook_path": "/Workspace/Users/[email protected]/.bundle/fallback/development/files/src/notebook"
},
"task_key": "notebook_example"
},
{
"job_cluster_key": "default",
"spark_python_task": {
"python_file": "/Workspace/Users/[email protected]/.bundle/fallback/development/files/src/file.py"
},
"task_key": "spark_python_example"
},
{
"dbt_task": {
"commands": [
"dbt run",
"dbt run"
],
"project_directory": "/Workspace/Users/[email protected]/.bundle/fallback/development/files/src/dbt_project"
},
"job_cluster_key": "default",
"task_key": "dbt_example"
},
{
"job_cluster_key": "default",
"sql_task": {
"file": {
"path": "/Workspace/Users/[email protected]/.bundle/fallback/development/files/src/sql.sql"
},
"warehouse_id": "cafef00d"
},
"task_key": "sql_example"
},
{
"job_cluster_key": "default",
"libraries": [
{
"whl": "dist/wheel1.whl"
},
{
"whl": "dist/wheel2.whl"
}
],
"python_wheel_task": {
"package_name": "my_package"
},
"task_key": "python_wheel_example"
},
{
"job_cluster_key": "default",
"libraries": [
{
"jar": "target/jar1.jar"
},
{
"jar": "target/jar2.jar"
}
],
"spark_jar_task": {
"main_class_name": "com.example.Main"
},
"task_key": "spark_jar_example"
}
]
}
},
"pipelines": {
"my_pipeline": {
"deployment": {
"kind": "BUNDLE",
"metadata_file_path": "/Workspace/Users/[email protected]/.bundle/fallback/development/state/metadata.json"
},
"libraries": [
{
"file": {
"path": "/Workspace/Users/[email protected]/.bundle/fallback/development/files/src/file1.py"
}
},
{
"notebook": {
"path": "/Workspace/Users/[email protected]/.bundle/fallback/development/files/src/notebook1"
}
},
{
"file": {
"path": "/Workspace/Users/[email protected]/.bundle/fallback/development/files/src/file2.py"
}
},
{
"notebook": {
"path": "/Workspace/Users/[email protected]/.bundle/fallback/development/files/src/notebook2"
}
}
],
"name": "placeholder",
"permissions": []
}
}
}
76 changes: 76 additions & 0 deletions acceptance/bundle/paths/fallback/output.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,76 @@

>>> $CLI bundle validate -t development -o json

Exit code: 0

>>> $CLI bundle validate -t error
Error: notebook this value is overridden not found. Local notebook references are expected
to contain one of the following file extensions: [.py, .r, .scala, .sql, .ipynb]

Name: fallback
Target: error
Workspace:
User: [email protected]
Path: /Workspace/Users/[email protected]/.bundle/fallback/error

Found 1 error

Exit code: 1

>>> jq -e
.jobs.my_job.tasks[0].task_key == "notebook_example" and
(.jobs.my_job.tasks[0].notebook_task.notebook_path | endswith("/src/notebook"))
output.development.json
true
pietern marked this conversation as resolved.
Show resolved Hide resolved

>>> jq -e
.jobs.my_job.tasks[1].task_key == "spark_python_example" and
(.jobs.my_job.tasks[1].spark_python_task.python_file | endswith("/src/file.py"))
output.development.json
true

>>> jq -e
.jobs.my_job.tasks[2].task_key == "dbt_example" and
(.jobs.my_job.tasks[2].dbt_task.project_directory | endswith("/src/dbt_project"))
output.development.json
true

>>> jq -e
.jobs.my_job.tasks[3].task_key == "sql_example" and
(.jobs.my_job.tasks[3].sql_task.file.path | endswith("/src/sql.sql"))
output.development.json
true

>>> jq -e
.jobs.my_job.tasks[4].task_key == "python_wheel_example" and
(.jobs.my_job.tasks[4].libraries[0].whl == "dist/wheel1.whl") and
(.jobs.my_job.tasks[4].libraries[1].whl == "dist/wheel2.whl")
output.development.json
true

>>> jq -e
.jobs.my_job.tasks[5].task_key == "spark_jar_example" and
(.jobs.my_job.tasks[5].libraries[0].jar == "target/jar1.jar") and
(.jobs.my_job.tasks[5].libraries[1].jar == "target/jar2.jar")
output.development.json
true

>>> jq -e
(.pipelines.my_pipeline.libraries[0].file.path | endswith("/src/file1.py"))
output.development.json
true

>>> jq -e
(.pipelines.my_pipeline.libraries[1].notebook.path | endswith("/src/notebook1"))
output.development.json
true

>>> jq -e
(.pipelines.my_pipeline.libraries[2].file.path | endswith("/src/file2.py"))
output.development.json
true

>>> jq -e
(.pipelines.my_pipeline.libraries[3].notebook.path | endswith("/src/notebook2"))
output.development.json
true
Original file line number Diff line number Diff line change
Expand Up @@ -4,33 +4,45 @@ resources:
name: "placeholder"
tasks:
- task_key: notebook_example
job_cluster_key: default
notebook_task:
notebook_path: "this value is overridden"

- task_key: spark_python_example
job_cluster_key: default
spark_python_task:
python_file: "this value is overridden"

- task_key: dbt_example
job_cluster_key: default
dbt_task:
project_directory: "this value is overridden"
commands:
- "dbt run"

- task_key: sql_example
job_cluster_key: default
sql_task:
file:
path: "this value is overridden"
warehouse_id: cafef00d

- task_key: python_wheel_example
job_cluster_key: default
python_wheel_task:
package_name: my_package
libraries:
- whl: ../dist/wheel1.whl

- task_key: spark_jar_example
job_cluster_key: default
spark_jar_task:
main_class_name: com.example.Main
libraries:
- jar: ../target/jar1.jar

# Include a job cluster for completeness
job_clusters:
- job_cluster_key: default
new_cluster:
spark_version: 15.4.x-scala2.12
54 changes: 54 additions & 0 deletions acceptance/bundle/paths/fallback/script
Original file line number Diff line number Diff line change
@@ -0,0 +1,54 @@
# Acceptance test script for bundle path translation (fallback mode).
# `trace` echoes each command into the captured transcript (compared against
# output.txt); `errcode` appears to record a non-zero exit code instead of
# aborting the script — TODO confirm against the acceptance-test harness.

# Resolve the "development" target and keep only the resources subtree for the
# jq assertions below; then validate the "error" target, which is expected to
# fail (local notebook reference without a recognized file extension).
errcode trace $CLI bundle validate -t development -o json | jq '.resources' > output.development.json
errcode trace $CLI bundle validate -t error

# Job assertions

# Task 0: local notebook path is rewritten to a workspace path under src/.
trace jq -e '
.jobs.my_job.tasks[0].task_key == "notebook_example" and
(.jobs.my_job.tasks[0].notebook_task.notebook_path | endswith("/src/notebook"))
' output.development.json

# Task 1: local Python file path is rewritten to a workspace path under src/.
trace jq -e '
.jobs.my_job.tasks[1].task_key == "spark_python_example" and
(.jobs.my_job.tasks[1].spark_python_task.python_file | endswith("/src/file.py"))
' output.development.json

# Task 2: dbt project directory is rewritten to a workspace path under src/.
trace jq -e '
.jobs.my_job.tasks[2].task_key == "dbt_example" and
(.jobs.my_job.tasks[2].dbt_task.project_directory | endswith("/src/dbt_project"))
' output.development.json

# Task 3: SQL file path is rewritten to a workspace path under src/.
trace jq -e '
.jobs.my_job.tasks[3].task_key == "sql_example" and
(.jobs.my_job.tasks[3].sql_task.file.path | endswith("/src/sql.sql"))
' output.development.json

# Task 4: wheel library paths stay relative (not translated to workspace paths).
trace jq -e '
.jobs.my_job.tasks[4].task_key == "python_wheel_example" and
(.jobs.my_job.tasks[4].libraries[0].whl == "dist/wheel1.whl") and
(.jobs.my_job.tasks[4].libraries[1].whl == "dist/wheel2.whl")
' output.development.json

# Task 5: jar library paths stay relative (not translated to workspace paths).
trace jq -e '
.jobs.my_job.tasks[5].task_key == "spark_jar_example" and
(.jobs.my_job.tasks[5].libraries[0].jar == "target/jar1.jar") and
(.jobs.my_job.tasks[5].libraries[1].jar == "target/jar2.jar")
' output.development.json

# Pipeline assertions

# Pipeline library file/notebook paths are rewritten to workspace paths,
# preserving the declaration order (file1, notebook1, file2, notebook2).
trace jq -e '
(.pipelines.my_pipeline.libraries[0].file.path | endswith("/src/file1.py"))
' output.development.json

trace jq -e '
(.pipelines.my_pipeline.libraries[1].notebook.path | endswith("/src/notebook1"))
' output.development.json

trace jq -e '
(.pipelines.my_pipeline.libraries[2].file.path | endswith("/src/file2.py"))
' output.development.json

trace jq -e '
(.pipelines.my_pipeline.libraries[3].notebook.path | endswith("/src/notebook2"))
' output.development.json
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
bundle:
-  name: path_translation_fallback
+  name: nominal

include:
- "resources/*.yml"
Expand Down
Loading
Loading