From f202596a6fa85451dee82d3ebe98840f8cd05c23 Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Thu, 21 Mar 2024 11:37:05 +0100 Subject: [PATCH 01/17] Move bundle tests into bundle/tests (#1299) ## Changes These tests were located in `bundle/tests/bundle` which meant they were unable to reuse the helper functions defined in the `bundle/tests` package. There is no need for these tests to live outside the package. ## Tests Existing tests pass. --- bundle/tests/bundle/loader.go | 26 ------------------ .../pipeline_glob_paths/databricks.yml | 0 .../dlt/nyc_taxi_loader.py | 0 .../{bundle => }/pipeline_glob_paths_test.go | 2 +- .../python_wheel/.gitignore | 0 .../python_wheel/bundle.yml | 0 .../python_wheel/my_test_code/setup.py | 0 .../python_wheel/my_test_code/src/__init__.py | 0 .../python_wheel/my_test_code/src/__main__.py | 0 .../python_wheel_dbfs_lib/bundle.yml | 0 .../python_wheel_no_artifact/.gitignore | 0 .../python_wheel_no_artifact/bundle.yml | 0 .../my_test_code/__init__.py | 0 .../my_test_code/__main__.py | 0 .../python_wheel_no_artifact/setup.py | 0 .../.gitignore | 0 .../bundle.yml | 0 .../my_test_code-0.0.1-py3-none-any.whl | Bin .../wheel_test.go => python_wheel_test.go} | 22 +++++++-------- 19 files changed, 12 insertions(+), 38 deletions(-) delete mode 100644 bundle/tests/bundle/loader.go rename bundle/tests/{bundle => }/pipeline_glob_paths/databricks.yml (100%) rename bundle/tests/{bundle => }/pipeline_glob_paths/dlt/nyc_taxi_loader.py (100%) rename bundle/tests/{bundle => }/pipeline_glob_paths_test.go (98%) rename bundle/tests/{bundle => python_wheel}/python_wheel/.gitignore (100%) rename bundle/tests/{bundle => python_wheel}/python_wheel/bundle.yml (100%) rename bundle/tests/{bundle => python_wheel}/python_wheel/my_test_code/setup.py (100%) rename bundle/tests/{bundle => python_wheel}/python_wheel/my_test_code/src/__init__.py (100%) rename bundle/tests/{bundle => python_wheel}/python_wheel/my_test_code/src/__main__.py (100%) rename bundle/tests/{bundle => python_wheel}/python_wheel_dbfs_lib/bundle.yml (100%) rename bundle/tests/{bundle => python_wheel}/python_wheel_no_artifact/.gitignore (100%) rename bundle/tests/{bundle => python_wheel}/python_wheel_no_artifact/bundle.yml (100%) rename bundle/tests/{bundle => python_wheel}/python_wheel_no_artifact/my_test_code/__init__.py (100%) rename bundle/tests/{bundle => python_wheel}/python_wheel_no_artifact/my_test_code/__main__.py (100%) rename bundle/tests/{bundle => python_wheel}/python_wheel_no_artifact/setup.py (100%) rename bundle/tests/{bundle => python_wheel}/python_wheel_no_artifact_no_setup/.gitignore (100%) rename bundle/tests/{bundle => python_wheel}/python_wheel_no_artifact_no_setup/bundle.yml (100%) rename bundle/tests/{bundle => python_wheel}/python_wheel_no_artifact_no_setup/package/my_test_code-0.0.1-py3-none-any.whl (100%) rename bundle/tests/{bundle/wheel_test.go => python_wheel_test.go} (70%) diff --git a/bundle/tests/bundle/loader.go b/bundle/tests/bundle/loader.go deleted file mode 100644 index 52744ca786..0000000000 --- a/bundle/tests/bundle/loader.go +++ /dev/null @@ -1,26 +0,0 @@ -package bundle - -import ( - "context" - "testing" - - "github.com/databricks/cli/bundle" - "github.com/databricks/cli/bundle/config/mutator" - "github.com/stretchr/testify/require" -) - -func loadTarget(t *testing.T, path, env string) *bundle.Bundle { - ctx := context.Background() - b, err := bundle.Load(ctx, path) - require.NoError(t, err) - err = bundle.Apply(ctx, b, bundle.Seq(mutator.DefaultMutatorsForTarget(env)...)) - 
require.NoError(t, err) - err = bundle.Apply(ctx, b, bundle.Seq( - mutator.RewriteSyncPaths(), - mutator.MergeJobClusters(), - mutator.MergeJobTasks(), - mutator.MergePipelineClusters(), - )) - require.NoError(t, err) - return b -} diff --git a/bundle/tests/bundle/pipeline_glob_paths/databricks.yml b/bundle/tests/pipeline_glob_paths/databricks.yml similarity index 100% rename from bundle/tests/bundle/pipeline_glob_paths/databricks.yml rename to bundle/tests/pipeline_glob_paths/databricks.yml diff --git a/bundle/tests/bundle/pipeline_glob_paths/dlt/nyc_taxi_loader.py b/bundle/tests/pipeline_glob_paths/dlt/nyc_taxi_loader.py similarity index 100% rename from bundle/tests/bundle/pipeline_glob_paths/dlt/nyc_taxi_loader.py rename to bundle/tests/pipeline_glob_paths/dlt/nyc_taxi_loader.py diff --git a/bundle/tests/bundle/pipeline_glob_paths_test.go b/bundle/tests/pipeline_glob_paths_test.go similarity index 98% rename from bundle/tests/bundle/pipeline_glob_paths_test.go rename to bundle/tests/pipeline_glob_paths_test.go index ed78c9668a..85a1379263 100644 --- a/bundle/tests/bundle/pipeline_glob_paths_test.go +++ b/bundle/tests/pipeline_glob_paths_test.go @@ -1,4 +1,4 @@ -package bundle +package config_tests import ( "context" diff --git a/bundle/tests/bundle/python_wheel/.gitignore b/bundle/tests/python_wheel/python_wheel/.gitignore similarity index 100% rename from bundle/tests/bundle/python_wheel/.gitignore rename to bundle/tests/python_wheel/python_wheel/.gitignore diff --git a/bundle/tests/bundle/python_wheel/bundle.yml b/bundle/tests/python_wheel/python_wheel/bundle.yml similarity index 100% rename from bundle/tests/bundle/python_wheel/bundle.yml rename to bundle/tests/python_wheel/python_wheel/bundle.yml diff --git a/bundle/tests/bundle/python_wheel/my_test_code/setup.py b/bundle/tests/python_wheel/python_wheel/my_test_code/setup.py similarity index 100% rename from bundle/tests/bundle/python_wheel/my_test_code/setup.py rename to bundle/tests/python_wheel/python_wheel/my_test_code/setup.py diff --git a/bundle/tests/bundle/python_wheel/my_test_code/src/__init__.py b/bundle/tests/python_wheel/python_wheel/my_test_code/src/__init__.py similarity index 100% rename from bundle/tests/bundle/python_wheel/my_test_code/src/__init__.py rename to bundle/tests/python_wheel/python_wheel/my_test_code/src/__init__.py diff --git a/bundle/tests/bundle/python_wheel/my_test_code/src/__main__.py b/bundle/tests/python_wheel/python_wheel/my_test_code/src/__main__.py similarity index 100% rename from bundle/tests/bundle/python_wheel/my_test_code/src/__main__.py rename to bundle/tests/python_wheel/python_wheel/my_test_code/src/__main__.py diff --git a/bundle/tests/bundle/python_wheel_dbfs_lib/bundle.yml b/bundle/tests/python_wheel/python_wheel_dbfs_lib/bundle.yml similarity index 100% rename from bundle/tests/bundle/python_wheel_dbfs_lib/bundle.yml rename to bundle/tests/python_wheel/python_wheel_dbfs_lib/bundle.yml diff --git a/bundle/tests/bundle/python_wheel_no_artifact/.gitignore b/bundle/tests/python_wheel/python_wheel_no_artifact/.gitignore similarity index 100% rename from bundle/tests/bundle/python_wheel_no_artifact/.gitignore rename to bundle/tests/python_wheel/python_wheel_no_artifact/.gitignore diff --git a/bundle/tests/bundle/python_wheel_no_artifact/bundle.yml b/bundle/tests/python_wheel/python_wheel_no_artifact/bundle.yml similarity index 100% rename from bundle/tests/bundle/python_wheel_no_artifact/bundle.yml rename to bundle/tests/python_wheel/python_wheel_no_artifact/bundle.yml diff --git 
a/bundle/tests/bundle/python_wheel_no_artifact/my_test_code/__init__.py b/bundle/tests/python_wheel/python_wheel_no_artifact/my_test_code/__init__.py similarity index 100% rename from bundle/tests/bundle/python_wheel_no_artifact/my_test_code/__init__.py rename to bundle/tests/python_wheel/python_wheel_no_artifact/my_test_code/__init__.py diff --git a/bundle/tests/bundle/python_wheel_no_artifact/my_test_code/__main__.py b/bundle/tests/python_wheel/python_wheel_no_artifact/my_test_code/__main__.py similarity index 100% rename from bundle/tests/bundle/python_wheel_no_artifact/my_test_code/__main__.py rename to bundle/tests/python_wheel/python_wheel_no_artifact/my_test_code/__main__.py diff --git a/bundle/tests/bundle/python_wheel_no_artifact/setup.py b/bundle/tests/python_wheel/python_wheel_no_artifact/setup.py similarity index 100% rename from bundle/tests/bundle/python_wheel_no_artifact/setup.py rename to bundle/tests/python_wheel/python_wheel_no_artifact/setup.py diff --git a/bundle/tests/bundle/python_wheel_no_artifact_no_setup/.gitignore b/bundle/tests/python_wheel/python_wheel_no_artifact_no_setup/.gitignore similarity index 100% rename from bundle/tests/bundle/python_wheel_no_artifact_no_setup/.gitignore rename to bundle/tests/python_wheel/python_wheel_no_artifact_no_setup/.gitignore diff --git a/bundle/tests/bundle/python_wheel_no_artifact_no_setup/bundle.yml b/bundle/tests/python_wheel/python_wheel_no_artifact_no_setup/bundle.yml similarity index 100% rename from bundle/tests/bundle/python_wheel_no_artifact_no_setup/bundle.yml rename to bundle/tests/python_wheel/python_wheel_no_artifact_no_setup/bundle.yml diff --git a/bundle/tests/bundle/python_wheel_no_artifact_no_setup/package/my_test_code-0.0.1-py3-none-any.whl b/bundle/tests/python_wheel/python_wheel_no_artifact_no_setup/package/my_test_code-0.0.1-py3-none-any.whl similarity index 100% rename from bundle/tests/bundle/python_wheel_no_artifact_no_setup/package/my_test_code-0.0.1-py3-none-any.whl rename to bundle/tests/python_wheel/python_wheel_no_artifact_no_setup/package/my_test_code-0.0.1-py3-none-any.whl diff --git a/bundle/tests/bundle/wheel_test.go b/bundle/tests/python_wheel_test.go similarity index 70% rename from bundle/tests/bundle/wheel_test.go rename to bundle/tests/python_wheel_test.go index 5171241f42..8351e96ae4 100644 --- a/bundle/tests/bundle/wheel_test.go +++ b/bundle/tests/python_wheel_test.go @@ -1,4 +1,4 @@ -package bundle +package config_tests import ( "context" @@ -11,16 +11,16 @@ import ( "github.com/stretchr/testify/require" ) -func TestBundlePythonWheelBuild(t *testing.T) { +func TestPythonWheelBuild(t *testing.T) { ctx := context.Background() - b, err := bundle.Load(ctx, "./python_wheel") + b, err := bundle.Load(ctx, "./python_wheel/python_wheel") require.NoError(t, err) m := phases.Build() err = bundle.Apply(ctx, b, m) require.NoError(t, err) - matches, err := filepath.Glob("python_wheel/my_test_code/dist/my_test_code-*.whl") + matches, err := filepath.Glob("./python_wheel/python_wheel/my_test_code/dist/my_test_code-*.whl") require.NoError(t, err) require.Equal(t, 1, len(matches)) @@ -29,16 +29,16 @@ func TestBundlePythonWheelBuild(t *testing.T) { require.NoError(t, err) } -func TestBundlePythonWheelBuildAutoDetect(t *testing.T) { +func TestPythonWheelBuildAutoDetect(t *testing.T) { ctx := context.Background() - b, err := bundle.Load(ctx, "./python_wheel_no_artifact") + b, err := bundle.Load(ctx, "./python_wheel/python_wheel_no_artifact") require.NoError(t, err) m := phases.Build() err = 
bundle.Apply(ctx, b, m) require.NoError(t, err) - matches, err := filepath.Glob("python_wheel/my_test_code/dist/my_test_code-*.whl") + matches, err := filepath.Glob("./python_wheel/python_wheel_no_artifact/dist/my_test_code-*.whl") require.NoError(t, err) require.Equal(t, 1, len(matches)) @@ -47,9 +47,9 @@ func TestBundlePythonWheelBuildAutoDetect(t *testing.T) { require.NoError(t, err) } -func TestBundlePythonWheelWithDBFSLib(t *testing.T) { +func TestPythonWheelWithDBFSLib(t *testing.T) { ctx := context.Background() - b, err := bundle.Load(ctx, "./python_wheel_dbfs_lib") + b, err := bundle.Load(ctx, "./python_wheel/python_wheel_dbfs_lib") require.NoError(t, err) m := phases.Build() @@ -61,9 +61,9 @@ func TestBundlePythonWheelWithDBFSLib(t *testing.T) { require.NoError(t, err) } -func TestBundlePythonWheelBuildNoBuildJustUpload(t *testing.T) { +func TestPythonWheelBuildNoBuildJustUpload(t *testing.T) { ctx := context.Background() - b, err := bundle.Load(ctx, "./python_wheel_no_artifact_no_setup") + b, err := bundle.Load(ctx, "./python_wheel/python_wheel_no_artifact_no_setup") require.NoError(t, err) m := phases.Build() From fd8dbff63104c0830d89f372eaf01ebcd48aa343 Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Fri, 22 Mar 2024 14:15:54 +0100 Subject: [PATCH 02/17] Update Go SDK to v0.36.0 (#1304) ## Changes SDK release: https://github.com/databricks/databricks-sdk-go/releases/tag/v0.36.0 No notable differences other than a few type name changes. ## Tests Tests pass. --- .codegen/_openapi_sha | 2 +- bundle/schema/docs/bundle_descriptions.json | 28 +++++++++---------- .../csp-enablement-account.go | 4 +-- .../esm-enablement-account.go | 4 +-- .../personal-compute/personal-compute.go | 8 +++--- .../automatic-cluster-update.go | 4 +-- .../csp-enablement/csp-enablement.go | 4 +-- .../default-namespace/default-namespace.go | 8 +++--- .../esm-enablement/esm-enablement.go | 4 +-- .../ip-access-lists/ip-access-lists.go | 4 +-- cmd/workspace/lakeview/lakeview.go | 12 ++++---- .../restrict-workspace-admins.go | 8 +++--- go.mod | 2 +- go.sum | 4 +-- 14 files changed, 48 insertions(+), 48 deletions(-) diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha index 499e0da40a..f26f23179e 100644 --- a/.codegen/_openapi_sha +++ b/.codegen/_openapi_sha @@ -1 +1 @@ -3821dc51952c5cf1c276dd84967da011b191e64a \ No newline at end of file +93763b0d7ae908520c229c786fff28b8fd623261 \ No newline at end of file diff --git a/bundle/schema/docs/bundle_descriptions.json b/bundle/schema/docs/bundle_descriptions.json index 53b9be5325..c6b45a3eb9 100644 --- a/bundle/schema/docs/bundle_descriptions.json +++ b/bundle/schema/docs/bundle_descriptions.json @@ -193,7 +193,7 @@ "description": "An optional continuous property for this job. The continuous property will ensure that there is always one run executing. Only one of `schedule` and `continuous` can be used.", "properties": { "pause_status": { - "description": "Indicate whether this schedule is paused or not." + "description": "Whether this trigger is paused or not." } } }, @@ -725,7 +725,7 @@ "description": "An optional periodic schedule for this job. The default behavior is that the job only runs when triggered by clicking “Run Now” in the Jobs UI or sending an API request to `runNow`.", "properties": { "pause_status": { - "description": "Indicate whether this schedule is paused or not." + "description": "Whether this trigger is paused or not." 
}, "quartz_cron_expression": { "description": "A Cron expression using Quartz syntax that describes the schedule for a job.\nSee [Cron Trigger](http://www.quartz-scheduler.org/documentation/quartz-2.3.0/tutorials/crontrigger.html)\nfor details. This field is required.\"\n" @@ -785,7 +785,7 @@ "description": "Optional schema to write to. This parameter is only used when a warehouse_id is also provided. If not provided, the `default` schema is used." }, "source": { - "description": "Optional location type of the Python file. When set to `WORKSPACE` or not specified, the file will be retrieved\nfrom the local \u003cDatabricks\u003e workspace or cloud location (if the `python_file` has a URI format). When set to `GIT`,\nthe Python file will be retrieved from a Git repository defined in `git_source`.\n\n* `WORKSPACE`: The Python file is located in a \u003cDatabricks\u003e workspace or at a cloud filesystem URI.\n* `GIT`: The Python file is located in a remote Git repository.\n" + "description": "Optional location type of the project directory. When set to `WORKSPACE`, the project will be retrieved\nfrom the local \u003cDatabricks\u003e workspace. When set to `GIT`, the project will be retrieved from a Git repository\ndefined in `git_source`. If the value is empty, the task will use `GIT` if `git_source` is defined and `WORKSPACE` otherwise.\n\n* `WORKSPACE`: Project is located in \u003cDatabricks\u003e workspace.\n* `GIT`: Project is located in cloud Git provider.\n" }, "warehouse_id": { "description": "ID of the SQL warehouse to connect to. If provided, we automatically generate and provide the profile and connection details to dbt. It can be overridden on a per-command basis by using the `--profiles-dir` command line argument." @@ -1269,7 +1269,7 @@ "description": "The path of the notebook to be run in the Databricks workspace or remote repository.\nFor notebooks stored in the Databricks workspace, the path must be absolute and begin with a slash.\nFor notebooks stored in a remote repository, the path must be relative. This field is required.\n" }, "source": { - "description": "Optional location type of the Python file. When set to `WORKSPACE` or not specified, the file will be retrieved\nfrom the local \u003cDatabricks\u003e workspace or cloud location (if the `python_file` has a URI format). When set to `GIT`,\nthe Python file will be retrieved from a Git repository defined in `git_source`.\n\n* `WORKSPACE`: The Python file is located in a \u003cDatabricks\u003e workspace or at a cloud filesystem URI.\n* `GIT`: The Python file is located in a remote Git repository.\n" + "description": "Optional location type of the project directory. When set to `WORKSPACE`, the project will be retrieved\nfrom the local \u003cDatabricks\u003e workspace. When set to `GIT`, the project will be retrieved from a Git repository\ndefined in `git_source`. If the value is empty, the task will use `GIT` if `git_source` is defined and `WORKSPACE` otherwise.\n\n* `WORKSPACE`: Project is located in \u003cDatabricks\u003e workspace.\n* `GIT`: Project is located in cloud Git provider.\n" } } }, @@ -1371,7 +1371,7 @@ "description": "The Python file to be executed. Cloud file URIs (such as dbfs:/, s3:/, adls:/, gcs:/) and workspace paths are supported. For python files stored in the Databricks workspace, the path must be absolute and begin with `/`. For files stored in a remote repository, the path must be relative. This field is required." }, "source": { - "description": "Optional location type of the Python file. 
When set to `WORKSPACE` or not specified, the file will be retrieved\nfrom the local \u003cDatabricks\u003e workspace or cloud location (if the `python_file` has a URI format). When set to `GIT`,\nthe Python file will be retrieved from a Git repository defined in `git_source`.\n\n* `WORKSPACE`: The Python file is located in a \u003cDatabricks\u003e workspace or at a cloud filesystem URI.\n* `GIT`: The Python file is located in a remote Git repository.\n" + "description": "Optional location type of the project directory. When set to `WORKSPACE`, the project will be retrieved\nfrom the local \u003cDatabricks\u003e workspace. When set to `GIT`, the project will be retrieved from a Git repository\ndefined in `git_source`. If the value is empty, the task will use `GIT` if `git_source` is defined and `WORKSPACE` otherwise.\n\n* `WORKSPACE`: Project is located in \u003cDatabricks\u003e workspace.\n* `GIT`: Project is located in cloud Git provider.\n" } } }, @@ -1449,7 +1449,7 @@ "description": "Path of the SQL file. Must be relative if the source is a remote Git repository and absolute for workspace paths." }, "source": { - "description": "Optional location type of the Python file. When set to `WORKSPACE` or not specified, the file will be retrieved\nfrom the local \u003cDatabricks\u003e workspace or cloud location (if the `python_file` has a URI format). When set to `GIT`,\nthe Python file will be retrieved from a Git repository defined in `git_source`.\n\n* `WORKSPACE`: The Python file is located in a \u003cDatabricks\u003e workspace or at a cloud filesystem URI.\n* `GIT`: The Python file is located in a remote Git repository.\n" + "description": "Optional location type of the project directory. When set to `WORKSPACE`, the project will be retrieved\nfrom the local \u003cDatabricks\u003e workspace. When set to `GIT`, the project will be retrieved from a Git repository\ndefined in `git_source`. If the value is empty, the task will use `GIT` if `git_source` is defined and `WORKSPACE` otherwise.\n\n* `WORKSPACE`: Project is located in \u003cDatabricks\u003e workspace.\n* `GIT`: Project is located in cloud Git provider.\n" } } }, @@ -1551,7 +1551,7 @@ } }, "pause_status": { - "description": "Indicate whether this schedule is paused or not." + "description": "Whether this trigger is paused or not." }, "table": { "description": "Table trigger settings.", @@ -2726,7 +2726,7 @@ "description": "An optional continuous property for this job. The continuous property will ensure that there is always one run executing. Only one of `schedule` and `continuous` can be used.", "properties": { "pause_status": { - "description": "Indicate whether this schedule is paused or not." + "description": "Whether this trigger is paused or not." } } }, @@ -3258,7 +3258,7 @@ "description": "An optional periodic schedule for this job. The default behavior is that the job only runs when triggered by clicking “Run Now” in the Jobs UI or sending an API request to `runNow`.", "properties": { "pause_status": { - "description": "Indicate whether this schedule is paused or not." + "description": "Whether this trigger is paused or not." }, "quartz_cron_expression": { "description": "A Cron expression using Quartz syntax that describes the schedule for a job.\nSee [Cron Trigger](http://www.quartz-scheduler.org/documentation/quartz-2.3.0/tutorials/crontrigger.html)\nfor details. This field is required.\"\n" @@ -3318,7 +3318,7 @@ "description": "Optional schema to write to. 
This parameter is only used when a warehouse_id is also provided. If not provided, the `default` schema is used." }, "source": { - "description": "Optional location type of the Python file. When set to `WORKSPACE` or not specified, the file will be retrieved\nfrom the local \u003cDatabricks\u003e workspace or cloud location (if the `python_file` has a URI format). When set to `GIT`,\nthe Python file will be retrieved from a Git repository defined in `git_source`.\n\n* `WORKSPACE`: The Python file is located in a \u003cDatabricks\u003e workspace or at a cloud filesystem URI.\n* `GIT`: The Python file is located in a remote Git repository.\n" + "description": "Optional location type of the project directory. When set to `WORKSPACE`, the project will be retrieved\nfrom the local \u003cDatabricks\u003e workspace. When set to `GIT`, the project will be retrieved from a Git repository\ndefined in `git_source`. If the value is empty, the task will use `GIT` if `git_source` is defined and `WORKSPACE` otherwise.\n\n* `WORKSPACE`: Project is located in \u003cDatabricks\u003e workspace.\n* `GIT`: Project is located in cloud Git provider.\n" }, "warehouse_id": { "description": "ID of the SQL warehouse to connect to. If provided, we automatically generate and provide the profile and connection details to dbt. It can be overridden on a per-command basis by using the `--profiles-dir` command line argument." @@ -3802,7 +3802,7 @@ "description": "The path of the notebook to be run in the Databricks workspace or remote repository.\nFor notebooks stored in the Databricks workspace, the path must be absolute and begin with a slash.\nFor notebooks stored in a remote repository, the path must be relative. This field is required.\n" }, "source": { - "description": "Optional location type of the Python file. When set to `WORKSPACE` or not specified, the file will be retrieved\nfrom the local \u003cDatabricks\u003e workspace or cloud location (if the `python_file` has a URI format). When set to `GIT`,\nthe Python file will be retrieved from a Git repository defined in `git_source`.\n\n* `WORKSPACE`: The Python file is located in a \u003cDatabricks\u003e workspace or at a cloud filesystem URI.\n* `GIT`: The Python file is located in a remote Git repository.\n" + "description": "Optional location type of the project directory. When set to `WORKSPACE`, the project will be retrieved\nfrom the local \u003cDatabricks\u003e workspace. When set to `GIT`, the project will be retrieved from a Git repository\ndefined in `git_source`. If the value is empty, the task will use `GIT` if `git_source` is defined and `WORKSPACE` otherwise.\n\n* `WORKSPACE`: Project is located in \u003cDatabricks\u003e workspace.\n* `GIT`: Project is located in cloud Git provider.\n" } } }, @@ -3904,7 +3904,7 @@ "description": "The Python file to be executed. Cloud file URIs (such as dbfs:/, s3:/, adls:/, gcs:/) and workspace paths are supported. For python files stored in the Databricks workspace, the path must be absolute and begin with `/`. For files stored in a remote repository, the path must be relative. This field is required." }, "source": { - "description": "Optional location type of the Python file. When set to `WORKSPACE` or not specified, the file will be retrieved\nfrom the local \u003cDatabricks\u003e workspace or cloud location (if the `python_file` has a URI format). 
When set to `GIT`,\nthe Python file will be retrieved from a Git repository defined in `git_source`.\n\n* `WORKSPACE`: The Python file is located in a \u003cDatabricks\u003e workspace or at a cloud filesystem URI.\n* `GIT`: The Python file is located in a remote Git repository.\n" + "description": "Optional location type of the project directory. When set to `WORKSPACE`, the project will be retrieved\nfrom the local \u003cDatabricks\u003e workspace. When set to `GIT`, the project will be retrieved from a Git repository\ndefined in `git_source`. If the value is empty, the task will use `GIT` if `git_source` is defined and `WORKSPACE` otherwise.\n\n* `WORKSPACE`: Project is located in \u003cDatabricks\u003e workspace.\n* `GIT`: Project is located in cloud Git provider.\n" } } }, @@ -3982,7 +3982,7 @@ "description": "Path of the SQL file. Must be relative if the source is a remote Git repository and absolute for workspace paths." }, "source": { - "description": "Optional location type of the Python file. When set to `WORKSPACE` or not specified, the file will be retrieved\nfrom the local \u003cDatabricks\u003e workspace or cloud location (if the `python_file` has a URI format). When set to `GIT`,\nthe Python file will be retrieved from a Git repository defined in `git_source`.\n\n* `WORKSPACE`: The Python file is located in a \u003cDatabricks\u003e workspace or at a cloud filesystem URI.\n* `GIT`: The Python file is located in a remote Git repository.\n" + "description": "Optional location type of the project directory. When set to `WORKSPACE`, the project will be retrieved\nfrom the local \u003cDatabricks\u003e workspace. When set to `GIT`, the project will be retrieved from a Git repository\ndefined in `git_source`. If the value is empty, the task will use `GIT` if `git_source` is defined and `WORKSPACE` otherwise.\n\n* `WORKSPACE`: Project is located in \u003cDatabricks\u003e workspace.\n* `GIT`: Project is located in cloud Git provider.\n" } } }, @@ -4084,7 +4084,7 @@ } }, "pause_status": { - "description": "Indicate whether this schedule is paused or not." + "description": "Whether this trigger is paused or not." }, "table": { "description": "Table trigger settings.", diff --git a/cmd/account/csp-enablement-account/csp-enablement-account.go b/cmd/account/csp-enablement-account/csp-enablement-account.go index 5c7b9b926b..79819003b6 100755 --- a/cmd/account/csp-enablement-account/csp-enablement-account.go +++ b/cmd/account/csp-enablement-account/csp-enablement-account.go @@ -47,13 +47,13 @@ func New() *cobra.Command { // Functions can be added from the `init()` function in manually curated files in this directory. var getOverrides []func( *cobra.Command, - *settings.GetCspEnablementAccountRequest, + *settings.GetCspEnablementAccountSettingRequest, ) func newGet() *cobra.Command { cmd := &cobra.Command{} - var getReq settings.GetCspEnablementAccountRequest + var getReq settings.GetCspEnablementAccountSettingRequest // TODO: short flags diff --git a/cmd/account/esm-enablement-account/esm-enablement-account.go b/cmd/account/esm-enablement-account/esm-enablement-account.go index 0c936c4ded..dd407e2e54 100755 --- a/cmd/account/esm-enablement-account/esm-enablement-account.go +++ b/cmd/account/esm-enablement-account/esm-enablement-account.go @@ -45,13 +45,13 @@ func New() *cobra.Command { // Functions can be added from the `init()` function in manually curated files in this directory. 
var getOverrides []func( *cobra.Command, - *settings.GetEsmEnablementAccountRequest, + *settings.GetEsmEnablementAccountSettingRequest, ) func newGet() *cobra.Command { cmd := &cobra.Command{} - var getReq settings.GetEsmEnablementAccountRequest + var getReq settings.GetEsmEnablementAccountSettingRequest // TODO: short flags diff --git a/cmd/account/personal-compute/personal-compute.go b/cmd/account/personal-compute/personal-compute.go index 7a2a045254..2a14b0b33b 100755 --- a/cmd/account/personal-compute/personal-compute.go +++ b/cmd/account/personal-compute/personal-compute.go @@ -53,13 +53,13 @@ func New() *cobra.Command { // Functions can be added from the `init()` function in manually curated files in this directory. var deleteOverrides []func( *cobra.Command, - *settings.DeletePersonalComputeRequest, + *settings.DeletePersonalComputeSettingRequest, ) func newDelete() *cobra.Command { cmd := &cobra.Command{} - var deleteReq settings.DeletePersonalComputeRequest + var deleteReq settings.DeletePersonalComputeSettingRequest // TODO: short flags @@ -108,13 +108,13 @@ func newDelete() *cobra.Command { // Functions can be added from the `init()` function in manually curated files in this directory. var getOverrides []func( *cobra.Command, - *settings.GetPersonalComputeRequest, + *settings.GetPersonalComputeSettingRequest, ) func newGet() *cobra.Command { cmd := &cobra.Command{} - var getReq settings.GetPersonalComputeRequest + var getReq settings.GetPersonalComputeSettingRequest // TODO: short flags diff --git a/cmd/workspace/automatic-cluster-update/automatic-cluster-update.go b/cmd/workspace/automatic-cluster-update/automatic-cluster-update.go index 4e198eb461..2385195bb1 100755 --- a/cmd/workspace/automatic-cluster-update/automatic-cluster-update.go +++ b/cmd/workspace/automatic-cluster-update/automatic-cluster-update.go @@ -42,13 +42,13 @@ func New() *cobra.Command { // Functions can be added from the `init()` function in manually curated files in this directory. var getOverrides []func( *cobra.Command, - *settings.GetAutomaticClusterUpdateRequest, + *settings.GetAutomaticClusterUpdateSettingRequest, ) func newGet() *cobra.Command { cmd := &cobra.Command{} - var getReq settings.GetAutomaticClusterUpdateRequest + var getReq settings.GetAutomaticClusterUpdateSettingRequest // TODO: short flags diff --git a/cmd/workspace/csp-enablement/csp-enablement.go b/cmd/workspace/csp-enablement/csp-enablement.go index 623a7e541f..3125915648 100755 --- a/cmd/workspace/csp-enablement/csp-enablement.go +++ b/cmd/workspace/csp-enablement/csp-enablement.go @@ -45,13 +45,13 @@ func New() *cobra.Command { // Functions can be added from the `init()` function in manually curated files in this directory. var getOverrides []func( *cobra.Command, - *settings.GetCspEnablementRequest, + *settings.GetCspEnablementSettingRequest, ) func newGet() *cobra.Command { cmd := &cobra.Command{} - var getReq settings.GetCspEnablementRequest + var getReq settings.GetCspEnablementSettingRequest // TODO: short flags diff --git a/cmd/workspace/default-namespace/default-namespace.go b/cmd/workspace/default-namespace/default-namespace.go index 89c11d7cd0..b15907bec4 100755 --- a/cmd/workspace/default-namespace/default-namespace.go +++ b/cmd/workspace/default-namespace/default-namespace.go @@ -53,13 +53,13 @@ func New() *cobra.Command { // Functions can be added from the `init()` function in manually curated files in this directory. 
var deleteOverrides []func( *cobra.Command, - *settings.DeleteDefaultNamespaceRequest, + *settings.DeleteDefaultNamespaceSettingRequest, ) func newDelete() *cobra.Command { cmd := &cobra.Command{} - var deleteReq settings.DeleteDefaultNamespaceRequest + var deleteReq settings.DeleteDefaultNamespaceSettingRequest // TODO: short flags @@ -112,13 +112,13 @@ func newDelete() *cobra.Command { // Functions can be added from the `init()` function in manually curated files in this directory. var getOverrides []func( *cobra.Command, - *settings.GetDefaultNamespaceRequest, + *settings.GetDefaultNamespaceSettingRequest, ) func newGet() *cobra.Command { cmd := &cobra.Command{} - var getReq settings.GetDefaultNamespaceRequest + var getReq settings.GetDefaultNamespaceSettingRequest // TODO: short flags diff --git a/cmd/workspace/esm-enablement/esm-enablement.go b/cmd/workspace/esm-enablement/esm-enablement.go index be0eed2f8f..a65fe2f76d 100755 --- a/cmd/workspace/esm-enablement/esm-enablement.go +++ b/cmd/workspace/esm-enablement/esm-enablement.go @@ -47,13 +47,13 @@ func New() *cobra.Command { // Functions can be added from the `init()` function in manually curated files in this directory. var getOverrides []func( *cobra.Command, - *settings.GetEsmEnablementRequest, + *settings.GetEsmEnablementSettingRequest, ) func newGet() *cobra.Command { cmd := &cobra.Command{} - var getReq settings.GetEsmEnablementRequest + var getReq settings.GetEsmEnablementSettingRequest // TODO: short flags diff --git a/cmd/workspace/ip-access-lists/ip-access-lists.go b/cmd/workspace/ip-access-lists/ip-access-lists.go index ec5958b5be..ec8be99f66 100755 --- a/cmd/workspace/ip-access-lists/ip-access-lists.go +++ b/cmd/workspace/ip-access-lists/ip-access-lists.go @@ -243,13 +243,13 @@ func newDelete() *cobra.Command { // Functions can be added from the `init()` function in manually curated files in this directory. var getOverrides []func( *cobra.Command, - *settings.GetIpAccessListRequest, + *settings.GetIpAccessList, ) func newGet() *cobra.Command { cmd := &cobra.Command{} - var getReq settings.GetIpAccessListRequest + var getReq settings.GetIpAccessList // TODO: short flags diff --git a/cmd/workspace/lakeview/lakeview.go b/cmd/workspace/lakeview/lakeview.go index 4fc7404a65..8481a6a8c3 100755 --- a/cmd/workspace/lakeview/lakeview.go +++ b/cmd/workspace/lakeview/lakeview.go @@ -130,13 +130,13 @@ func newCreate() *cobra.Command { // Functions can be added from the `init()` function in manually curated files in this directory. var getOverrides []func( *cobra.Command, - *dashboards.GetLakeviewRequest, + *dashboards.GetDashboardRequest, ) func newGet() *cobra.Command { cmd := &cobra.Command{} - var getReq dashboards.GetLakeviewRequest + var getReq dashboards.GetDashboardRequest // TODO: short flags @@ -188,13 +188,13 @@ func newGet() *cobra.Command { // Functions can be added from the `init()` function in manually curated files in this directory. var getPublishedOverrides []func( *cobra.Command, - *dashboards.GetPublishedRequest, + *dashboards.GetPublishedDashboardRequest, ) func newGetPublished() *cobra.Command { cmd := &cobra.Command{} - var getPublishedReq dashboards.GetPublishedRequest + var getPublishedReq dashboards.GetPublishedDashboardRequest // TODO: short flags @@ -315,13 +315,13 @@ func newPublish() *cobra.Command { // Functions can be added from the `init()` function in manually curated files in this directory. 
var trashOverrides []func( *cobra.Command, - *dashboards.TrashRequest, + *dashboards.TrashDashboardRequest, ) func newTrash() *cobra.Command { cmd := &cobra.Command{} - var trashReq dashboards.TrashRequest + var trashReq dashboards.TrashDashboardRequest // TODO: short flags diff --git a/cmd/workspace/restrict-workspace-admins/restrict-workspace-admins.go b/cmd/workspace/restrict-workspace-admins/restrict-workspace-admins.go index e0ca8030f3..5e9f59d2ce 100755 --- a/cmd/workspace/restrict-workspace-admins/restrict-workspace-admins.go +++ b/cmd/workspace/restrict-workspace-admins/restrict-workspace-admins.go @@ -53,13 +53,13 @@ func New() *cobra.Command { // Functions can be added from the `init()` function in manually curated files in this directory. var deleteOverrides []func( *cobra.Command, - *settings.DeleteRestrictWorkspaceAdminRequest, + *settings.DeleteRestrictWorkspaceAdminsSettingRequest, ) func newDelete() *cobra.Command { cmd := &cobra.Command{} - var deleteReq settings.DeleteRestrictWorkspaceAdminRequest + var deleteReq settings.DeleteRestrictWorkspaceAdminsSettingRequest // TODO: short flags @@ -112,13 +112,13 @@ func newDelete() *cobra.Command { // Functions can be added from the `init()` function in manually curated files in this directory. var getOverrides []func( *cobra.Command, - *settings.GetRestrictWorkspaceAdminRequest, + *settings.GetRestrictWorkspaceAdminsSettingRequest, ) func newGet() *cobra.Command { cmd := &cobra.Command{} - var getReq settings.GetRestrictWorkspaceAdminRequest + var getReq settings.GetRestrictWorkspaceAdminsSettingRequest // TODO: short flags diff --git a/go.mod b/go.mod index 4e904fb294..d9e6c24f08 100644 --- a/go.mod +++ b/go.mod @@ -4,7 +4,7 @@ go 1.21 require ( github.com/briandowns/spinner v1.23.0 // Apache 2.0 - github.com/databricks/databricks-sdk-go v0.35.0 // Apache 2.0 + github.com/databricks/databricks-sdk-go v0.36.0 // Apache 2.0 github.com/fatih/color v1.16.0 // MIT github.com/ghodss/yaml v1.0.0 // MIT + NOTICE github.com/google/uuid v1.6.0 // BSD-3-Clause diff --git a/go.sum b/go.sum index 15685fd88b..a4a6eb40b8 100644 --- a/go.sum +++ b/go.sum @@ -28,8 +28,8 @@ github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGX github.com/cpuguy83/go-md2man/v2 v2.0.3/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= github.com/cyphar/filepath-securejoin v0.2.4 h1:Ugdm7cg7i6ZK6x3xDF1oEu1nfkyfH53EtKeQYTC3kyg= github.com/cyphar/filepath-securejoin v0.2.4/go.mod h1:aPGpWjXOXUn2NCNjFvBE6aRxGGx79pTxQpKOJNYHHl4= -github.com/databricks/databricks-sdk-go v0.35.0 h1:Z5dflnYEqCreYtuDkwsCPadvRP/aucikI34+gzrvTYQ= -github.com/databricks/databricks-sdk-go v0.35.0/go.mod h1:Yjy1gREDLK65g4axpVbVNKYAHYE2Sqzj0AB9QWHCBVM= +github.com/databricks/databricks-sdk-go v0.36.0 h1:QOO9VxBh6JmzzPpCHh0h1f4Ijk+Y3mqBtNN1nzp2Nq8= +github.com/databricks/databricks-sdk-go v0.36.0/go.mod h1:Yjy1gREDLK65g4axpVbVNKYAHYE2Sqzj0AB9QWHCBVM= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= From 1b879d44e120d7a27d0fc9ed5420cb216f68dcc1 Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Mon, 25 Mar 2024 10:17:52 +0100 Subject: [PATCH 03/17] Upgrade Terraform provider to 1.38.0 (#1308) ## Changes Update to the latest release. No schema changes. ## Tests Unit tests pass. Integration to be done as part of the release PR. 
---
 bundle/internal/tf/codegen/schema/version.go | 2 +-
 bundle/internal/tf/schema/root.go | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/bundle/internal/tf/codegen/schema/version.go b/bundle/internal/tf/codegen/schema/version.go
index a41b622571..363ad4e8ab 100644
--- a/bundle/internal/tf/codegen/schema/version.go
+++ b/bundle/internal/tf/codegen/schema/version.go
@@ -1,3 +1,3 @@
 package schema

-const ProviderVersion = "1.37.0"
+const ProviderVersion = "1.38.0"
diff --git a/bundle/internal/tf/schema/root.go b/bundle/internal/tf/schema/root.go
index f0253c2853..118e2857d3 100644
--- a/bundle/internal/tf/schema/root.go
+++ b/bundle/internal/tf/schema/root.go
@@ -25,7 +25,7 @@ func NewRoot() *Root {
 			"required_providers": map[string]interface{}{
 				"databricks": map[string]interface{}{
 					"source": "databricks/databricks",
-					"version": "1.37.0",
+					"version": "1.38.0",
 				},
 			},
 		},

From 1efebabbf9dbf9650edb7838afffea64306d6c0e Mon Sep 17 00:00:00 2001
From: Pieter Noordhuis
Date: Mon, 25 Mar 2024 11:43:16 +0100
Subject: [PATCH 04/17] Release v0.216.0 (#1309)

CLI:
 * Propagate correct `User-Agent` for CLI during OAuth flow ([#1264](https://github.com/databricks/cli/pull/1264)).
 * Add usage string when command fails with incorrect arguments ([#1276](https://github.com/databricks/cli/pull/1276)).

Bundles:
 * Include `dyn.Path` as argument to the visit callback function ([#1260](https://github.com/databricks/cli/pull/1260)).
 * Inline logic to set a value in `dyn.SetByPath` ([#1261](https://github.com/databricks/cli/pull/1261)).
 * Add assertions for the `dyn.Path` argument to the visit callback ([#1265](https://github.com/databricks/cli/pull/1265)).
 * Add `dyn.MapByPattern` to map a function to values with matching paths ([#1266](https://github.com/databricks/cli/pull/1266)).
 * Filter current user from resource permissions ([#1262](https://github.com/databricks/cli/pull/1262)).
 * Retain location annotation when expanding globs for pipeline libraries ([#1274](https://github.com/databricks/cli/pull/1274)).
 * Added deployment state for bundles ([#1267](https://github.com/databricks/cli/pull/1267)).
 * Do CheckRunningResource only after terraform.Write ([#1292](https://github.com/databricks/cli/pull/1292)).
 * Rewrite relative paths using `dyn.Location` of the underlying value ([#1273](https://github.com/databricks/cli/pull/1273)).
 * Push deployment state right after files upload ([#1293](https://github.com/databricks/cli/pull/1293)).
 * Make `Append` function to `dyn.Path` return independent slice ([#1295](https://github.com/databricks/cli/pull/1295)).
 * Move bundle tests into bundle/tests ([#1299](https://github.com/databricks/cli/pull/1299)).
 * Upgrade Terraform provider to 1.38.0 ([#1308](https://github.com/databricks/cli/pull/1308)).

Internal:
 * Add integration test for mlops-stacks initialization ([#1155](https://github.com/databricks/cli/pull/1155)).
 * Update actions/setup-python to v5 ([#1290](https://github.com/databricks/cli/pull/1290)).
 * Update codecov/codecov-action to v4 ([#1291](https://github.com/databricks/cli/pull/1291)).

API Changes:
 * Changed `databricks catalogs list` command.
 * Changed `databricks online-tables create` command.
 * Changed `databricks lakeview publish` command.
 * Added `databricks lakeview create` command.
 * Added `databricks lakeview get` command.
 * Added `databricks lakeview get-published` command.
 * Added `databricks lakeview trash` command.
 * Added `databricks lakeview update` command.
* Moved settings related commands to `databricks settings` and `databricks account settings`. OpenAPI commit 93763b0d7ae908520c229c786fff28b8fd623261 (2024-03-20) Dependency updates: * Bump golang.org/x/oauth2 from 0.17.0 to 0.18.0 ([#1270](https://github.com/databricks/cli/pull/1270)). * Bump golang.org/x/mod from 0.15.0 to 0.16.0 ([#1271](https://github.com/databricks/cli/pull/1271)). * Update Go SDK to v0.35.0 ([#1300](https://github.com/databricks/cli/pull/1300)). * Update Go SDK to v0.36.0 ([#1304](https://github.com/databricks/cli/pull/1304)). --- CHANGELOG.md | 45 +++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 45 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 51c601150c..52d7590f9e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,50 @@ # Version changelog +## 0.216.0 + +CLI: + * Propagate correct `User-Agent` for CLI during OAuth flow ([#1264](https://github.com/databricks/cli/pull/1264)). + * Add usage string when command fails with incorrect arguments ([#1276](https://github.com/databricks/cli/pull/1276)). + +Bundles: + * Include `dyn.Path` as argument to the visit callback function ([#1260](https://github.com/databricks/cli/pull/1260)). + * Inline logic to set a value in `dyn.SetByPath` ([#1261](https://github.com/databricks/cli/pull/1261)). + * Add assertions for the `dyn.Path` argument to the visit callback ([#1265](https://github.com/databricks/cli/pull/1265)). + * Add `dyn.MapByPattern` to map a function to values with matching paths ([#1266](https://github.com/databricks/cli/pull/1266)). + * Filter current user from resource permissions ([#1262](https://github.com/databricks/cli/pull/1262)). + * Retain location annotation when expanding globs for pipeline libraries ([#1274](https://github.com/databricks/cli/pull/1274)). + * Added deployment state for bundles ([#1267](https://github.com/databricks/cli/pull/1267)). + * Do CheckRunningResource only after terraform.Write ([#1292](https://github.com/databricks/cli/pull/1292)). + * Rewrite relative paths using `dyn.Location` of the underlying value ([#1273](https://github.com/databricks/cli/pull/1273)). + * Push deployment state right after files upload ([#1293](https://github.com/databricks/cli/pull/1293)). + * Make `Append` function to `dyn.Path` return independent slice ([#1295](https://github.com/databricks/cli/pull/1295)). + * Move bundle tests into bundle/tests ([#1299](https://github.com/databricks/cli/pull/1299)). + * Upgrade Terraform provider to 1.38.0 ([#1308](https://github.com/databricks/cli/pull/1308)). + +Internal: + * Add integration test for mlops-stacks initialization ([#1155](https://github.com/databricks/cli/pull/1155)). + * Update actions/setup-python to v5 ([#1290](https://github.com/databricks/cli/pull/1290)). + * Update codecov/codecov-action to v4 ([#1291](https://github.com/databricks/cli/pull/1291)). + +API Changes: + * Changed `databricks catalogs list` command. + * Changed `databricks online-tables create` command. + * Changed `databricks lakeview publish` command. + * Added `databricks lakeview create` command. + * Added `databricks lakeview get` command. + * Added `databricks lakeview get-published` command. + * Added `databricks lakeview trash` command. + * Added `databricks lakeview update` command. + * Moved settings related commands to `databricks settings` and `databricks account settings`. 
+ +OpenAPI commit 93763b0d7ae908520c229c786fff28b8fd623261 (2024-03-20) + +Dependency updates: + * Bump golang.org/x/oauth2 from 0.17.0 to 0.18.0 ([#1270](https://github.com/databricks/cli/pull/1270)). + * Bump golang.org/x/mod from 0.15.0 to 0.16.0 ([#1271](https://github.com/databricks/cli/pull/1271)). + * Update Go SDK to v0.35.0 ([#1300](https://github.com/databricks/cli/pull/1300)). + * Update Go SDK to v0.36.0 ([#1304](https://github.com/databricks/cli/pull/1304)). + ## 0.215.0 CLI: From 26094f01a0e06dd6b7f00710ee7a3623f9c09a38 Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Mon, 25 Mar 2024 12:01:09 +0100 Subject: [PATCH 05/17] Define `dyn.Mapping` to represent maps (#1301) ## Changes Before this change maps were stored as a regular Go map with string keys. This didn't let us capture metadata (location information) for map keys. To address this, this change replaces the use of the regular Go map with a dedicated type for a dynamic map. This type stores the `dyn.Value` for both the key and the value. It uses a map to still allow O(1) lookups and redirects those into a slice. ## Tests * All existing unit tests pass (some with minor modifications due to interface change). * Equality assertions with `assert.Equal` no longer worked because the new `dyn.Mapping` persists the order in which keys are set and is therefore susceptible to map ordering issues. To fix this, I added a `dynassert` package that forwards all assertions to `testify/assert` but intercepts equality for `dyn.Value` arguments. --- libs/dyn/convert/end_to_end_test.go | 2 +- libs/dyn/convert/from_typed.go | 33 +++- libs/dyn/convert/from_typed_test.go | 2 +- libs/dyn/convert/normalize.go | 35 ++-- libs/dyn/convert/normalize_test.go | 2 +- libs/dyn/convert/struct_info_test.go | 2 +- libs/dyn/convert/to_typed.go | 19 ++- libs/dyn/convert/to_typed_test.go | 2 +- libs/dyn/dynassert/assert.go | 113 +++++++++++++ libs/dyn/dynassert/assert_test.go | 45 ++++++ libs/dyn/dynvar/lookup_test.go | 2 +- libs/dyn/dynvar/ref_test.go | 2 +- libs/dyn/dynvar/resolve_test.go | 2 +- libs/dyn/kind.go | 2 +- libs/dyn/kind_test.go | 2 +- libs/dyn/location_test.go | 2 +- libs/dyn/mapping.go | 148 +++++++++++++++++ libs/dyn/mapping_test.go | 204 ++++++++++++++++++++++++ libs/dyn/merge/elements_by_key_test.go | 2 +- libs/dyn/merge/merge.go | 18 +-- libs/dyn/merge/merge_test.go | 2 +- libs/dyn/path_string_test.go | 2 +- libs/dyn/path_test.go | 2 +- libs/dyn/pattern.go | 13 +- libs/dyn/pattern_test.go | 2 +- libs/dyn/value.go | 24 +-- libs/dyn/value_test.go | 12 +- libs/dyn/value_underlying.go | 10 +- libs/dyn/value_underlying_test.go | 2 +- libs/dyn/visit.go | 7 +- libs/dyn/visit_get_test.go | 2 +- libs/dyn/visit_map.go | 11 +- libs/dyn/visit_map_test.go | 2 +- libs/dyn/visit_set.go | 5 +- libs/dyn/visit_set_test.go | 2 +- libs/dyn/walk.go | 10 +- libs/dyn/walk_test.go | 2 +- libs/dyn/yamlloader/loader.go | 17 +- libs/dyn/yamlloader/yaml_anchor_test.go | 2 +- libs/dyn/yamlloader/yaml_error_test.go | 2 +- libs/dyn/yamlloader/yaml_mix_test.go | 2 +- libs/dyn/yamlloader/yaml_test.go | 2 +- libs/dyn/yamlsaver/order_test.go | 2 +- libs/dyn/yamlsaver/saver.go | 19 +-- libs/dyn/yamlsaver/saver_test.go | 2 +- libs/dyn/yamlsaver/utils.go | 4 +- libs/dyn/yamlsaver/utils_test.go | 6 +- 47 files changed, 680 insertions(+), 127 deletions(-) create mode 100644 libs/dyn/dynassert/assert.go create mode 100644 libs/dyn/dynassert/assert_test.go create mode 100644 libs/dyn/mapping.go create mode 100644 libs/dyn/mapping_test.go diff --git 
a/libs/dyn/convert/end_to_end_test.go b/libs/dyn/convert/end_to_end_test.go index 7c048136ee..33902bea85 100644 --- a/libs/dyn/convert/end_to_end_test.go +++ b/libs/dyn/convert/end_to_end_test.go @@ -4,7 +4,7 @@ import ( "testing" "github.com/databricks/cli/libs/dyn" - "github.com/stretchr/testify/assert" + assert "github.com/databricks/cli/libs/dyn/dynassert" "github.com/stretchr/testify/require" ) diff --git a/libs/dyn/convert/from_typed.go b/libs/dyn/convert/from_typed.go index 4778edb960..c344d12dff 100644 --- a/libs/dyn/convert/from_typed.go +++ b/libs/dyn/convert/from_typed.go @@ -71,17 +71,28 @@ func fromTypedStruct(src reflect.Value, ref dyn.Value) (dyn.Value, error) { return dyn.InvalidValue, fmt.Errorf("unhandled type: %s", ref.Kind()) } - out := make(map[string]dyn.Value) + refm, _ := ref.AsMap() + out := dyn.NewMapping() info := getStructInfo(src.Type()) for k, v := range info.FieldValues(src) { + pair, ok := refm.GetPairByString(k) + refk := pair.Key + refv := pair.Value + + // Use nil reference if there is no reference for this key + if !ok { + refk = dyn.V(k) + refv = dyn.NilValue + } + // Convert the field taking into account the reference value (may be equal to config.NilValue). - nv, err := fromTyped(v.Interface(), ref.Get(k)) + nv, err := fromTyped(v.Interface(), refv) if err != nil { return dyn.InvalidValue, err } if nv != dyn.NilValue { - out[k] = nv + out.Set(refk, nv) } } @@ -101,21 +112,31 @@ func fromTypedMap(src reflect.Value, ref dyn.Value) (dyn.Value, error) { return dyn.NilValue, nil } - out := make(map[string]dyn.Value) + refm, _ := ref.AsMap() + out := dyn.NewMapping() iter := src.MapRange() for iter.Next() { k := iter.Key().String() v := iter.Value() + pair, ok := refm.GetPairByString(k) + refk := pair.Key + refv := pair.Value + + // Use nil reference if there is no reference for this key + if !ok { + refk = dyn.V(k) + refv = dyn.NilValue + } // Convert entry taking into account the reference value (may be equal to dyn.NilValue). - nv, err := fromTyped(v.Interface(), ref.Get(k), includeZeroValues) + nv, err := fromTyped(v.Interface(), refv, includeZeroValues) if err != nil { return dyn.InvalidValue, err } // Every entry is represented, even if it is a nil. // Otherwise, a map with zero-valued structs would yield a nil as well. 
- out[k] = nv + out.Set(refk, nv) } return dyn.NewValue(out, ref.Location()), nil diff --git a/libs/dyn/convert/from_typed_test.go b/libs/dyn/convert/from_typed_test.go index f7e97fc7e7..f75470f420 100644 --- a/libs/dyn/convert/from_typed_test.go +++ b/libs/dyn/convert/from_typed_test.go @@ -4,7 +4,7 @@ import ( "testing" "github.com/databricks/cli/libs/dyn" - "github.com/stretchr/testify/assert" + assert "github.com/databricks/cli/libs/dyn/dynassert" "github.com/stretchr/testify/require" ) diff --git a/libs/dyn/convert/normalize.go b/libs/dyn/convert/normalize.go index d6539be952..f18b27fd24 100644 --- a/libs/dyn/convert/normalize.go +++ b/libs/dyn/convert/normalize.go @@ -74,30 +74,32 @@ func (n normalizeOptions) normalizeStruct(typ reflect.Type, src dyn.Value, seen switch src.Kind() { case dyn.KindMap: - out := make(map[string]dyn.Value) + out := dyn.NewMapping() info := getStructInfo(typ) - for k, v := range src.MustMap() { - index, ok := info.Fields[k] + for _, pair := range src.MustMap().Pairs() { + pk := pair.Key + pv := pair.Value + index, ok := info.Fields[pk.MustString()] if !ok { diags = diags.Append(diag.Diagnostic{ Severity: diag.Warning, - Summary: fmt.Sprintf("unknown field: %s", k), - Location: src.Location(), + Summary: fmt.Sprintf("unknown field: %s", pk.MustString()), + Location: pk.Location(), }) continue } // Normalize the value according to the field type. - v, err := n.normalizeType(typ.FieldByIndex(index).Type, v, seen) + nv, err := n.normalizeType(typ.FieldByIndex(index).Type, pv, seen) if err != nil { diags = diags.Extend(err) // Skip the element if it cannot be normalized. - if !v.IsValid() { + if !nv.IsValid() { continue } } - out[k] = v + out.Set(pk, nv) } // Return the normalized value if missing fields are not included. @@ -107,7 +109,7 @@ func (n normalizeOptions) normalizeStruct(typ reflect.Type, src dyn.Value, seen // Populate missing fields with their zero values. for k, index := range info.Fields { - if _, ok := out[k]; ok { + if _, ok := out.GetByString(k); ok { continue } @@ -143,7 +145,7 @@ func (n normalizeOptions) normalizeStruct(typ reflect.Type, src dyn.Value, seen continue } if v.IsValid() { - out[k] = v + out.Set(dyn.V(k), v) } } @@ -160,19 +162,22 @@ func (n normalizeOptions) normalizeMap(typ reflect.Type, src dyn.Value, seen []r switch src.Kind() { case dyn.KindMap: - out := make(map[string]dyn.Value) - for k, v := range src.MustMap() { + out := dyn.NewMapping() + for _, pair := range src.MustMap().Pairs() { + pk := pair.Key + pv := pair.Value + // Normalize the value according to the map element type. - v, err := n.normalizeType(typ.Elem(), v, seen) + nv, err := n.normalizeType(typ.Elem(), pv, seen) if err != nil { diags = diags.Extend(err) // Skip the element if it cannot be normalized. 
- if !v.IsValid() { + if !nv.IsValid() { continue } } - out[k] = v + out.Set(pk, nv) } return dyn.NewValue(out, src.Location()), diags diff --git a/libs/dyn/convert/normalize_test.go b/libs/dyn/convert/normalize_test.go index a2a6038e43..78c487d3f3 100644 --- a/libs/dyn/convert/normalize_test.go +++ b/libs/dyn/convert/normalize_test.go @@ -5,7 +5,7 @@ import ( "github.com/databricks/cli/libs/diag" "github.com/databricks/cli/libs/dyn" - "github.com/stretchr/testify/assert" + assert "github.com/databricks/cli/libs/dyn/dynassert" ) func TestNormalizeStruct(t *testing.T) { diff --git a/libs/dyn/convert/struct_info_test.go b/libs/dyn/convert/struct_info_test.go index 08be3c47ef..20348ff601 100644 --- a/libs/dyn/convert/struct_info_test.go +++ b/libs/dyn/convert/struct_info_test.go @@ -5,7 +5,7 @@ import ( "testing" "github.com/databricks/cli/libs/dyn" - "github.com/stretchr/testify/assert" + assert "github.com/databricks/cli/libs/dyn/dynassert" ) func TestStructInfoPlain(t *testing.T) { diff --git a/libs/dyn/convert/to_typed.go b/libs/dyn/convert/to_typed.go index 8b3cf3bb8a..f10853a2e4 100644 --- a/libs/dyn/convert/to_typed.go +++ b/libs/dyn/convert/to_typed.go @@ -59,8 +59,11 @@ func toTypedStruct(dst reflect.Value, src dyn.Value) error { dst.SetZero() info := getStructInfo(dst.Type()) - for k, v := range src.MustMap() { - index, ok := info.Fields[k] + for _, pair := range src.MustMap().Pairs() { + pk := pair.Key + pv := pair.Value + + index, ok := info.Fields[pk.MustString()] if !ok { // Ignore unknown fields. // A warning will be printed later. See PR #904. @@ -82,7 +85,7 @@ func toTypedStruct(dst reflect.Value, src dyn.Value) error { f = f.Field(x) } - err := ToTyped(f.Addr().Interface(), v) + err := ToTyped(f.Addr().Interface(), pv) if err != nil { return err } @@ -112,12 +115,14 @@ func toTypedMap(dst reflect.Value, src dyn.Value) error { m := src.MustMap() // Always overwrite. - dst.Set(reflect.MakeMapWithSize(dst.Type(), len(m))) - for k, v := range m { - kv := reflect.ValueOf(k) + dst.Set(reflect.MakeMapWithSize(dst.Type(), m.Len())) + for _, pair := range m.Pairs() { + pk := pair.Key + pv := pair.Value + kv := reflect.ValueOf(pk.MustString()) kt := dst.Type().Key() vv := reflect.New(dst.Type().Elem()) - err := ToTyped(vv.Interface(), v) + err := ToTyped(vv.Interface(), pv) if err != nil { return err } diff --git a/libs/dyn/convert/to_typed_test.go b/libs/dyn/convert/to_typed_test.go index a3c340e81c..56d98a3cf4 100644 --- a/libs/dyn/convert/to_typed_test.go +++ b/libs/dyn/convert/to_typed_test.go @@ -4,7 +4,7 @@ import ( "testing" "github.com/databricks/cli/libs/dyn" - "github.com/stretchr/testify/assert" + assert "github.com/databricks/cli/libs/dyn/dynassert" "github.com/stretchr/testify/require" ) diff --git a/libs/dyn/dynassert/assert.go b/libs/dyn/dynassert/assert.go new file mode 100644 index 0000000000..dc6676ca27 --- /dev/null +++ b/libs/dyn/dynassert/assert.go @@ -0,0 +1,113 @@ +package dynassert + +import ( + "github.com/databricks/cli/libs/dyn" + "github.com/stretchr/testify/assert" +) + +func Equal(t assert.TestingT, expected interface{}, actual interface{}, msgAndArgs ...interface{}) bool { + ev, eok := expected.(dyn.Value) + av, aok := actual.(dyn.Value) + if eok && aok && ev.IsValid() && av.IsValid() { + if !assert.Equal(t, ev.AsAny(), av.AsAny(), msgAndArgs...) { + return false + } + + // The values are equal on contents. Now compare the locations. + if !assert.Equal(t, ev.Location(), av.Location(), msgAndArgs...) 
{ + return false + } + + // Walk ev and av and compare the locations of each element. + _, err := dyn.Walk(ev, func(p dyn.Path, evv dyn.Value) (dyn.Value, error) { + avv, err := dyn.GetByPath(av, p) + if assert.NoError(t, err, "unable to get value from actual value at path %v", p.String()) { + assert.Equal(t, evv.Location(), avv.Location()) + } + return evv, nil + }) + return assert.NoError(t, err) + } + + return assert.Equal(t, expected, actual, msgAndArgs...) +} + +func EqualValues(t assert.TestingT, expected, actual interface{}, msgAndArgs ...interface{}) bool { + return assert.EqualValues(t, expected, actual, msgAndArgs...) +} + +func NotEqual(t assert.TestingT, expected interface{}, actual interface{}, msgAndArgs ...interface{}) bool { + return assert.NotEqual(t, expected, actual, msgAndArgs...) +} + +func Len(t assert.TestingT, object interface{}, length int, msgAndArgs ...interface{}) bool { + return assert.Len(t, object, length, msgAndArgs...) +} + +func Empty(t assert.TestingT, object interface{}, msgAndArgs ...interface{}) bool { + return assert.Empty(t, object, msgAndArgs...) +} + +func Nil(t assert.TestingT, object interface{}, msgAndArgs ...interface{}) bool { + return assert.Nil(t, object, msgAndArgs...) +} + +func NotNil(t assert.TestingT, object interface{}, msgAndArgs ...interface{}) bool { + return assert.NotNil(t, object, msgAndArgs...) +} + +func NoError(t assert.TestingT, err error, msgAndArgs ...interface{}) bool { + return assert.NoError(t, err, msgAndArgs...) +} + +func Error(t assert.TestingT, err error, msgAndArgs ...interface{}) bool { + return assert.Error(t, err, msgAndArgs...) +} + +func EqualError(t assert.TestingT, theError error, errString string, msgAndArgs ...interface{}) bool { + return assert.EqualError(t, theError, errString, msgAndArgs...) +} + +func ErrorContains(t assert.TestingT, theError error, contains string, msgAndArgs ...interface{}) bool { + return assert.ErrorContains(t, theError, contains, msgAndArgs...) +} + +func ErrorIs(t assert.TestingT, theError, target error, msgAndArgs ...interface{}) bool { + return assert.ErrorIs(t, theError, target, msgAndArgs...) +} + +func True(t assert.TestingT, value bool, msgAndArgs ...interface{}) bool { + return assert.True(t, value, msgAndArgs...) +} + +func False(t assert.TestingT, value bool, msgAndArgs ...interface{}) bool { + return assert.False(t, value, msgAndArgs...) +} + +func Contains(t assert.TestingT, list interface{}, element interface{}, msgAndArgs ...interface{}) bool { + return assert.Contains(t, list, element, msgAndArgs...) +} + +func NotContains(t assert.TestingT, list interface{}, element interface{}, msgAndArgs ...interface{}) bool { + return assert.NotContains(t, list, element, msgAndArgs...) +} + +func ElementsMatch(t assert.TestingT, listA, listB interface{}, msgAndArgs ...interface{}) bool { + return assert.ElementsMatch(t, listA, listB, msgAndArgs...) +} + +func Panics(t assert.TestingT, f func(), msgAndArgs ...interface{}) bool { + return assert.Panics(t, f, msgAndArgs...) +} + +func PanicsWithValue(t assert.TestingT, expected interface{}, f func(), msgAndArgs ...interface{}) bool { + return assert.PanicsWithValue(t, expected, f, msgAndArgs...) +} + +func PanicsWithError(t assert.TestingT, errString string, f func(), msgAndArgs ...interface{}) bool { + return assert.PanicsWithError(t, errString, f, msgAndArgs...) +} + +func NotPanics(t assert.TestingT, f func(), msgAndArgs ...interface{}) bool { + return assert.NotPanics(t, f, msgAndArgs...) 
+} diff --git a/libs/dyn/dynassert/assert_test.go b/libs/dyn/dynassert/assert_test.go new file mode 100644 index 0000000000..43258bd205 --- /dev/null +++ b/libs/dyn/dynassert/assert_test.go @@ -0,0 +1,45 @@ +package dynassert + +import ( + "go/parser" + "go/token" + "io/fs" + "os" + "path/filepath" + "strings" + "testing" + + "github.com/stretchr/testify/require" +) + +func TestThatThisTestPackageIsUsed(t *testing.T) { + var base = ".." + var files []string + err := fs.WalkDir(os.DirFS(base), ".", func(path string, d fs.DirEntry, err error) error { + if d.IsDir() { + // Filter this directory. + if filepath.Base(path) == "dynassert" { + return fs.SkipDir + } + } + if ok, _ := filepath.Match("*_test.go", d.Name()); ok { + files = append(files, filepath.Join(base, path)) + } + return nil + }) + require.NoError(t, err) + + // Confirm that none of the test files under `libs/dyn` import the + // `testify/assert` package and instead import this package for asserts. + fset := token.NewFileSet() + for _, file := range files { + f, err := parser.ParseFile(fset, file, nil, parser.ParseComments) + require.NoError(t, err) + + for _, imp := range f.Imports { + if strings.Contains(imp.Path.Value, `github.com/stretchr/testify/assert`) { + t.Errorf("File %s should not import github.com/stretchr/testify/assert", file) + } + } + } +} diff --git a/libs/dyn/dynvar/lookup_test.go b/libs/dyn/dynvar/lookup_test.go index 2341d72084..b78115ee8f 100644 --- a/libs/dyn/dynvar/lookup_test.go +++ b/libs/dyn/dynvar/lookup_test.go @@ -4,8 +4,8 @@ import ( "testing" "github.com/databricks/cli/libs/dyn" + assert "github.com/databricks/cli/libs/dyn/dynassert" "github.com/databricks/cli/libs/dyn/dynvar" - "github.com/stretchr/testify/assert" ) func TestDefaultLookup(t *testing.T) { diff --git a/libs/dyn/dynvar/ref_test.go b/libs/dyn/dynvar/ref_test.go index 0922373687..aff3643e02 100644 --- a/libs/dyn/dynvar/ref_test.go +++ b/libs/dyn/dynvar/ref_test.go @@ -4,7 +4,7 @@ import ( "testing" "github.com/databricks/cli/libs/dyn" - "github.com/stretchr/testify/assert" + assert "github.com/databricks/cli/libs/dyn/dynassert" "github.com/stretchr/testify/require" ) diff --git a/libs/dyn/dynvar/resolve_test.go b/libs/dyn/dynvar/resolve_test.go index 304ed9391c..bbecbb7760 100644 --- a/libs/dyn/dynvar/resolve_test.go +++ b/libs/dyn/dynvar/resolve_test.go @@ -4,8 +4,8 @@ import ( "testing" "github.com/databricks/cli/libs/dyn" + assert "github.com/databricks/cli/libs/dyn/dynassert" "github.com/databricks/cli/libs/dyn/dynvar" - "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) diff --git a/libs/dyn/kind.go b/libs/dyn/kind.go index 8f51c25c66..9d507fbc52 100644 --- a/libs/dyn/kind.go +++ b/libs/dyn/kind.go @@ -22,7 +22,7 @@ const ( func kindOf(v any) Kind { switch v.(type) { - case map[string]Value: + case Mapping: return KindMap case []Value: return KindSequence diff --git a/libs/dyn/kind_test.go b/libs/dyn/kind_test.go index 84c90713fb..9889d31e11 100644 --- a/libs/dyn/kind_test.go +++ b/libs/dyn/kind_test.go @@ -4,7 +4,7 @@ import ( "testing" "github.com/databricks/cli/libs/dyn" - "github.com/stretchr/testify/assert" + assert "github.com/databricks/cli/libs/dyn/dynassert" ) func TestKindZeroValue(t *testing.T) { diff --git a/libs/dyn/location_test.go b/libs/dyn/location_test.go index 6d856410b6..e11f7cb56b 100644 --- a/libs/dyn/location_test.go +++ b/libs/dyn/location_test.go @@ -4,7 +4,7 @@ import ( "testing" "github.com/databricks/cli/libs/dyn" - "github.com/stretchr/testify/assert" + assert 
"github.com/databricks/cli/libs/dyn/dynassert" ) func TestLocation(t *testing.T) { diff --git a/libs/dyn/mapping.go b/libs/dyn/mapping.go new file mode 100644 index 0000000000..668f57ecc4 --- /dev/null +++ b/libs/dyn/mapping.go @@ -0,0 +1,148 @@ +package dyn + +import ( + "fmt" + "maps" + "slices" +) + +// Pair represents a single key-value pair in a Mapping. +type Pair struct { + Key Value + Value Value +} + +// Mapping represents a key-value map of dynamic values. +// It exists because plain Go maps cannot use dynamic values for keys. +// We need to use dynamic values for keys because it lets us associate metadata +// with keys (i.e. their definition location). Keys must be strings. +type Mapping struct { + pairs []Pair + index map[string]int +} + +// NewMapping creates a new empty Mapping. +func NewMapping() Mapping { + return Mapping{ + pairs: make([]Pair, 0), + index: make(map[string]int), + } +} + +// newMappingWithSize creates a new Mapping preallocated to the specified size. +func newMappingWithSize(size int) Mapping { + return Mapping{ + pairs: make([]Pair, 0, size), + index: make(map[string]int, size), + } +} + +// newMappingFromGoMap creates a new Mapping from a Go map of string keys and dynamic values. +func newMappingFromGoMap(vin map[string]Value) Mapping { + m := newMappingWithSize(len(vin)) + for k, v := range vin { + m.Set(V(k), v) + } + return m +} + +// Pairs returns all the key-value pairs in the Mapping. +func (m Mapping) Pairs() []Pair { + return m.pairs +} + +// Len returns the number of key-value pairs in the Mapping. +func (m Mapping) Len() int { + return len(m.pairs) +} + +// GetPair returns the key-value pair with the specified key. +// It also returns a boolean indicating whether the pair was found. +func (m Mapping) GetPair(key Value) (Pair, bool) { + skey, ok := key.AsString() + if !ok { + return Pair{}, false + } + return m.GetPairByString(skey) +} + +// GetPairByString returns the key-value pair with the specified string key. +// It also returns a boolean indicating whether the pair was found. +func (m Mapping) GetPairByString(skey string) (Pair, bool) { + if i, ok := m.index[skey]; ok { + return m.pairs[i], true + } + return Pair{}, false +} + +// Get returns the value associated with the specified key. +// It also returns a boolean indicating whether the value was found. +func (m Mapping) Get(key Value) (Value, bool) { + p, ok := m.GetPair(key) + return p.Value, ok +} + +// GetByString returns the value associated with the specified string key. +// It also returns a boolean indicating whether the value was found. +func (m *Mapping) GetByString(skey string) (Value, bool) { + p, ok := m.GetPairByString(skey) + return p.Value, ok +} + +// Set sets the value for the given key in the mapping. +// If the key already exists, the value is updated. +// If the key does not exist, a new key-value pair is added. +// The key must be a string, otherwise an error is returned. +func (m *Mapping) Set(key Value, value Value) error { + skey, ok := key.AsString() + if !ok { + return fmt.Errorf("key must be a string, got %s", key.Kind()) + } + + // If the key already exists, update the value. + if i, ok := m.index[skey]; ok { + m.pairs[i].Value = value + return nil + } + + // Otherwise, add a new pair. + m.pairs = append(m.pairs, Pair{key, value}) + if m.index == nil { + m.index = make(map[string]int) + } + m.index[skey] = len(m.pairs) - 1 + return nil +} + +// Keys returns all the keys in the Mapping. 
+func (m Mapping) Keys() []Value { + keys := make([]Value, 0, len(m.pairs)) + for _, p := range m.pairs { + keys = append(keys, p.Key) + } + return keys +} + +// Values returns all the values in the Mapping. +func (m Mapping) Values() []Value { + values := make([]Value, 0, len(m.pairs)) + for _, p := range m.pairs { + values = append(values, p.Value) + } + return values +} + +// Clone creates a shallow copy of the Mapping. +func (m Mapping) Clone() Mapping { + return Mapping{ + pairs: slices.Clone(m.pairs), + index: maps.Clone(m.index), + } +} + +// Merge merges the key-value pairs from another Mapping into the current Mapping. +func (m *Mapping) Merge(n Mapping) { + for _, p := range n.pairs { + m.Set(p.Key, p.Value) + } +} diff --git a/libs/dyn/mapping_test.go b/libs/dyn/mapping_test.go new file mode 100644 index 0000000000..43b24b0c5a --- /dev/null +++ b/libs/dyn/mapping_test.go @@ -0,0 +1,204 @@ +package dyn_test + +import ( + "fmt" + "testing" + + "github.com/databricks/cli/libs/dyn" + assert "github.com/databricks/cli/libs/dyn/dynassert" + "github.com/stretchr/testify/require" +) + +func TestNewMapping(t *testing.T) { + m := dyn.NewMapping() + assert.Equal(t, 0, m.Len()) +} + +func TestMappingZeroValue(t *testing.T) { + var m dyn.Mapping + assert.Equal(t, 0, m.Len()) + + value, ok := m.Get(dyn.V("key")) + assert.Equal(t, dyn.InvalidValue, value) + assert.False(t, ok) + assert.Len(t, m.Keys(), 0) + assert.Len(t, m.Values(), 0) +} + +func TestMappingGet(t *testing.T) { + var m dyn.Mapping + err := m.Set(dyn.V("key"), dyn.V("value")) + assert.NoError(t, err) + assert.Equal(t, 1, m.Len()) + + // Call GetPair + p, ok := m.GetPair(dyn.V("key")) + assert.True(t, ok) + assert.Equal(t, dyn.V("key"), p.Key) + assert.Equal(t, dyn.V("value"), p.Value) + + // Modify the value to make sure we're not getting a reference + p.Value = dyn.V("newvalue") + + // Call GetPair with invalid key + p, ok = m.GetPair(dyn.V(1234)) + assert.False(t, ok) + assert.Equal(t, dyn.InvalidValue, p.Key) + assert.Equal(t, dyn.InvalidValue, p.Value) + + // Call GetPair with non-existent key + p, ok = m.GetPair(dyn.V("enoexist")) + assert.False(t, ok) + assert.Equal(t, dyn.InvalidValue, p.Key) + assert.Equal(t, dyn.InvalidValue, p.Value) + + // Call GetPairByString + p, ok = m.GetPairByString("key") + assert.True(t, ok) + assert.Equal(t, dyn.V("key"), p.Key) + assert.Equal(t, dyn.V("value"), p.Value) + + // Modify the value to make sure we're not getting a reference + p.Value = dyn.V("newvalue") + + // Call GetPairByString with with non-existent key + p, ok = m.GetPairByString("enoexist") + assert.False(t, ok) + assert.Equal(t, dyn.InvalidValue, p.Key) + assert.Equal(t, dyn.InvalidValue, p.Value) + + // Call Get + value, ok := m.Get(dyn.V("key")) + assert.True(t, ok) + assert.Equal(t, dyn.V("value"), value) + + // Call Get with invalid key + value, ok = m.Get(dyn.V(1234)) + assert.False(t, ok) + assert.Equal(t, dyn.InvalidValue, value) + + // Call Get with non-existent key + value, ok = m.Get(dyn.V("enoexist")) + assert.False(t, ok) + assert.Equal(t, dyn.InvalidValue, value) + + // Call GetByString + value, ok = m.GetByString("key") + assert.True(t, ok) + assert.Equal(t, dyn.V("value"), value) + + // Call GetByString with non-existent key + value, ok = m.GetByString("enoexist") + assert.False(t, ok) + assert.Equal(t, dyn.InvalidValue, value) +} + +func TestMappingSet(t *testing.T) { + var err error + var m dyn.Mapping + + // Set a value + err = m.Set(dyn.V("key1"), dyn.V("foo")) + assert.NoError(t, err) + assert.Equal(t, 
1, m.Len()) + + // Confirm the value + value, ok := m.GetByString("key1") + assert.True(t, ok) + assert.Equal(t, dyn.V("foo"), value) + + // Set another value + err = m.Set(dyn.V("key2"), dyn.V("bar")) + assert.NoError(t, err) + assert.Equal(t, 2, m.Len()) + + // Confirm the value + value, ok = m.Get(dyn.V("key2")) + assert.True(t, ok) + assert.Equal(t, dyn.V("bar"), value) + + // Overwrite first value + err = m.Set(dyn.V("key1"), dyn.V("qux")) + assert.NoError(t, err) + assert.Equal(t, 2, m.Len()) + + // Confirm the value + value, ok = m.Get(dyn.V("key1")) + assert.True(t, ok) + assert.Equal(t, dyn.V("qux"), value) + + // Try to set non-string key + err = m.Set(dyn.V(1), dyn.V("qux")) + assert.Error(t, err) + assert.Equal(t, 2, m.Len()) +} + +func TestMappingKeysValues(t *testing.T) { + var err error + + // Configure mapping + var m dyn.Mapping + err = m.Set(dyn.V("key1"), dyn.V("foo")) + assert.NoError(t, err) + err = m.Set(dyn.V("key2"), dyn.V("bar")) + assert.NoError(t, err) + + // Confirm keys + keys := m.Keys() + assert.Len(t, keys, 2) + assert.Contains(t, keys, dyn.V("key1")) + assert.Contains(t, keys, dyn.V("key2")) + + // Confirm values + values := m.Values() + assert.Len(t, values, 2) + assert.Contains(t, values, dyn.V("foo")) + assert.Contains(t, values, dyn.V("bar")) +} + +func TestMappingClone(t *testing.T) { + var err error + + // Configure mapping + var m1 dyn.Mapping + err = m1.Set(dyn.V("key1"), dyn.V("foo")) + assert.NoError(t, err) + err = m1.Set(dyn.V("key2"), dyn.V("bar")) + assert.NoError(t, err) + + // Clone mapping + m2 := m1.Clone() + assert.Equal(t, m1.Len(), m2.Len()) + + // Modify original mapping + err = m1.Set(dyn.V("key1"), dyn.V("qux")) + assert.NoError(t, err) + + // Confirm values + value, ok := m1.Get(dyn.V("key1")) + assert.True(t, ok) + assert.Equal(t, dyn.V("qux"), value) + value, ok = m2.Get(dyn.V("key1")) + assert.True(t, ok) + assert.Equal(t, dyn.V("foo"), value) +} + +func TestMappingMerge(t *testing.T) { + var m1 dyn.Mapping + for i := 0; i < 10; i++ { + err := m1.Set(dyn.V(fmt.Sprintf("%d", i)), dyn.V(i)) + require.NoError(t, err) + } + + var m2 dyn.Mapping + for i := 5; i < 15; i++ { + err := m2.Set(dyn.V(fmt.Sprintf("%d", i)), dyn.V(i)) + require.NoError(t, err) + } + + var out dyn.Mapping + out.Merge(m1) + assert.Equal(t, 10, out.Len()) + out.Merge(m2) + assert.Equal(t, 15, out.Len()) +} diff --git a/libs/dyn/merge/elements_by_key_test.go b/libs/dyn/merge/elements_by_key_test.go index c61f834e5f..ef316cc666 100644 --- a/libs/dyn/merge/elements_by_key_test.go +++ b/libs/dyn/merge/elements_by_key_test.go @@ -5,7 +5,7 @@ import ( "testing" "github.com/databricks/cli/libs/dyn" - "github.com/stretchr/testify/assert" + assert "github.com/databricks/cli/libs/dyn/dynassert" "github.com/stretchr/testify/require" ) diff --git a/libs/dyn/merge/merge.go b/libs/dyn/merge/merge.go index 1cadbea608..69ccf516ae 100644 --- a/libs/dyn/merge/merge.go +++ b/libs/dyn/merge/merge.go @@ -51,27 +51,27 @@ func merge(a, b dyn.Value) (dyn.Value, error) { } func mergeMap(a, b dyn.Value) (dyn.Value, error) { - out := make(map[string]dyn.Value) + out := dyn.NewMapping() am := a.MustMap() bm := b.MustMap() // Add the values from a into the output map. - for k, v := range am { - out[k] = v - } + out.Merge(am) // Merge the values from b into the output map. - for k, v := range bm { - if _, ok := out[k]; ok { + for _, pair := range bm.Pairs() { + pk := pair.Key + pv := pair.Value + if ov, ok := out.Get(pk); ok { // If the key already exists, merge the values. 
- merged, err := merge(out[k], v) + merged, err := merge(ov, pv) if err != nil { return dyn.NilValue, err } - out[k] = merged + out.Set(pk, merged) } else { // Otherwise, just set the value. - out[k] = v + out.Set(pk, pv) } } diff --git a/libs/dyn/merge/merge_test.go b/libs/dyn/merge/merge_test.go index c4928e3536..eaaaab16f4 100644 --- a/libs/dyn/merge/merge_test.go +++ b/libs/dyn/merge/merge_test.go @@ -4,7 +4,7 @@ import ( "testing" "github.com/databricks/cli/libs/dyn" - "github.com/stretchr/testify/assert" + assert "github.com/databricks/cli/libs/dyn/dynassert" ) func TestMergeMaps(t *testing.T) { diff --git a/libs/dyn/path_string_test.go b/libs/dyn/path_string_test.go index 9af394c6f1..0d64bf1107 100644 --- a/libs/dyn/path_string_test.go +++ b/libs/dyn/path_string_test.go @@ -5,7 +5,7 @@ import ( "testing" . "github.com/databricks/cli/libs/dyn" - "github.com/stretchr/testify/assert" + assert "github.com/databricks/cli/libs/dyn/dynassert" ) func TestNewPathFromString(t *testing.T) { diff --git a/libs/dyn/path_test.go b/libs/dyn/path_test.go index 1152a060ad..44df2050b0 100644 --- a/libs/dyn/path_test.go +++ b/libs/dyn/path_test.go @@ -4,7 +4,7 @@ import ( "testing" "github.com/databricks/cli/libs/dyn" - "github.com/stretchr/testify/assert" + assert "github.com/databricks/cli/libs/dyn/dynassert" ) func TestPathAppend(t *testing.T) { diff --git a/libs/dyn/pattern.go b/libs/dyn/pattern.go index 960a50d5b8..a265dad081 100644 --- a/libs/dyn/pattern.go +++ b/libs/dyn/pattern.go @@ -2,7 +2,6 @@ package dyn import ( "fmt" - "maps" "slices" ) @@ -55,10 +54,13 @@ func (c anyKeyComponent) visit(v Value, prefix Path, suffix Pattern, opts visitO return InvalidValue, fmt.Errorf("expected a map at %q, found %s", prefix, v.Kind()) } - m = maps.Clone(m) - for key, value := range m { + m = m.Clone() + for _, pair := range m.Pairs() { + pk := pair.Key + pv := pair.Value + var err error - nv, err := visit(value, append(prefix, Key(key)), suffix, opts) + nv, err := visit(pv, append(prefix, Key(pk.MustString())), suffix, opts) if err != nil { // Leave the value intact if the suffix pattern didn't match any value. if IsNoSuchKeyError(err) || IsIndexOutOfBoundsError(err) { @@ -66,7 +68,8 @@ func (c anyKeyComponent) visit(v Value, prefix Path, suffix Pattern, opts visitO } return InvalidValue, err } - m[key] = nv + + m.Set(pk, nv) } return NewValue(m, v.Location()), nil diff --git a/libs/dyn/pattern_test.go b/libs/dyn/pattern_test.go index 372fe74678..1b54953efe 100644 --- a/libs/dyn/pattern_test.go +++ b/libs/dyn/pattern_test.go @@ -4,7 +4,7 @@ import ( "testing" "github.com/databricks/cli/libs/dyn" - "github.com/stretchr/testify/assert" + assert "github.com/databricks/cli/libs/dyn/dynassert" ) func TestNewPattern(t *testing.T) { diff --git a/libs/dyn/value.go b/libs/dyn/value.go index ecf21abbe8..2e8f1b9aff 100644 --- a/libs/dyn/value.go +++ b/libs/dyn/value.go @@ -27,14 +27,16 @@ var NilValue = Value{ // V constructs a new Value with the given value. func V(v any) Value { - return Value{ - v: v, - k: kindOf(v), - } + return NewValue(v, Location{}) } // NewValue constructs a new Value with the given value and location. 
func NewValue(v any, loc Location) Value { + switch vin := v.(type) { + case map[string]Value: + v = newMappingFromGoMap(vin) + } + return Value{ v: v, k: kindOf(v), @@ -72,12 +74,14 @@ func (v Value) AsAny() any { case KindInvalid: panic("invoked AsAny on invalid value") case KindMap: - vv := v.v.(map[string]Value) - m := make(map[string]any, len(vv)) - for k, v := range vv { - m[k] = v.AsAny() + m := v.v.(Mapping) + out := make(map[string]any, m.Len()) + for _, pair := range m.pairs { + pk := pair.Key + pv := pair.Value + out[pk.MustString()] = pv.AsAny() } - return m + return out case KindSequence: vv := v.v.([]Value) a := make([]any, len(vv)) @@ -109,7 +113,7 @@ func (v Value) Get(key string) Value { return NilValue } - vv, ok := m[key] + vv, ok := m.GetByString(key) if !ok { return NilValue } diff --git a/libs/dyn/value_test.go b/libs/dyn/value_test.go index 7c9a9d990e..bbdc2c96ba 100644 --- a/libs/dyn/value_test.go +++ b/libs/dyn/value_test.go @@ -4,7 +4,7 @@ import ( "testing" "github.com/databricks/cli/libs/dyn" - "github.com/stretchr/testify/assert" + assert "github.com/databricks/cli/libs/dyn/dynassert" ) func TestInvalidValue(t *testing.T) { @@ -22,14 +22,12 @@ func TestValueIsAnchor(t *testing.T) { func TestValueAsMap(t *testing.T) { var zeroValue dyn.Value - m, ok := zeroValue.AsMap() + _, ok := zeroValue.AsMap() assert.False(t, ok) - assert.Nil(t, m) var intValue = dyn.NewValue(1, dyn.Location{}) - m, ok = intValue.AsMap() + _, ok = intValue.AsMap() assert.False(t, ok) - assert.Nil(t, m) var mapValue = dyn.NewValue( map[string]dyn.Value{ @@ -37,9 +35,9 @@ func TestValueAsMap(t *testing.T) { }, dyn.Location{File: "file", Line: 1, Column: 2}, ) - m, ok = mapValue.AsMap() + m, ok := mapValue.AsMap() assert.True(t, ok) - assert.Len(t, m, 1) + assert.Equal(t, 1, m.Len()) } func TestValueIsValid(t *testing.T) { diff --git a/libs/dyn/value_underlying.go b/libs/dyn/value_underlying.go index c8c5037900..2f0f26a1f6 100644 --- a/libs/dyn/value_underlying.go +++ b/libs/dyn/value_underlying.go @@ -5,16 +5,16 @@ import ( "time" ) -// AsMap returns the underlying map if this value is a map, +// AsMap returns the underlying mapping if this value is a map, // the zero value and false otherwise. -func (v Value) AsMap() (map[string]Value, bool) { - vv, ok := v.v.(map[string]Value) +func (v Value) AsMap() (Mapping, bool) { + vv, ok := v.v.(Mapping) return vv, ok } -// MustMap returns the underlying map if this value is a map, +// MustMap returns the underlying mapping if this value is a map, // panics otherwise. -func (v Value) MustMap() map[string]Value { +func (v Value) MustMap() Mapping { vv, ok := v.AsMap() if !ok || v.k != KindMap { panic(fmt.Sprintf("expected kind %s, got %s", KindMap, v.k)) diff --git a/libs/dyn/value_underlying_test.go b/libs/dyn/value_underlying_test.go index 17cb959418..9878cfaf9d 100644 --- a/libs/dyn/value_underlying_test.go +++ b/libs/dyn/value_underlying_test.go @@ -5,7 +5,7 @@ import ( "time" "github.com/databricks/cli/libs/dyn" - "github.com/stretchr/testify/assert" + assert "github.com/databricks/cli/libs/dyn/dynassert" ) func TestValueUnderlyingMap(t *testing.T) { diff --git a/libs/dyn/visit.go b/libs/dyn/visit.go index 376dcc22db..3fe3561943 100644 --- a/libs/dyn/visit.go +++ b/libs/dyn/visit.go @@ -3,7 +3,6 @@ package dyn import ( "errors" "fmt" - "maps" "slices" ) @@ -77,7 +76,7 @@ func (component pathComponent) visit(v Value, prefix Path, suffix Pattern, opts } // Lookup current value in the map. 
- ev, ok := m[component.key] + ev, ok := m.GetByString(component.key) if !ok { return InvalidValue, noSuchKeyError{path} } @@ -94,8 +93,8 @@ func (component pathComponent) visit(v Value, prefix Path, suffix Pattern, opts } // Return an updated map value. - m = maps.Clone(m) - m[component.key] = nv + m = m.Clone() + m.Set(V(component.key), nv) return Value{ v: m, k: KindMap, diff --git a/libs/dyn/visit_get_test.go b/libs/dyn/visit_get_test.go index 22dce0858b..adc307794c 100644 --- a/libs/dyn/visit_get_test.go +++ b/libs/dyn/visit_get_test.go @@ -4,7 +4,7 @@ import ( "testing" "github.com/databricks/cli/libs/dyn" - "github.com/stretchr/testify/assert" + assert "github.com/databricks/cli/libs/dyn/dynassert" ) func TestGetWithEmptyPath(t *testing.T) { diff --git a/libs/dyn/visit_map.go b/libs/dyn/visit_map.go index 18fc668ede..f5cfea3114 100644 --- a/libs/dyn/visit_map.go +++ b/libs/dyn/visit_map.go @@ -2,7 +2,6 @@ package dyn import ( "fmt" - "maps" "slices" ) @@ -15,13 +14,15 @@ func Foreach(fn MapFunc) MapFunc { return func(p Path, v Value) (Value, error) { switch v.Kind() { case KindMap: - m := maps.Clone(v.MustMap()) - for key, value := range m { - var err error - m[key], err = fn(append(p, Key(key)), value) + m := v.MustMap().Clone() + for _, pair := range m.Pairs() { + pk := pair.Key + pv := pair.Value + nv, err := fn(append(p, Key(pk.MustString())), pv) if err != nil { return InvalidValue, err } + m.Set(pk, nv) } return NewValue(m, v.Location()), nil case KindSequence: diff --git a/libs/dyn/visit_map_test.go b/libs/dyn/visit_map_test.go index f87f0a40d4..df6bad4965 100644 --- a/libs/dyn/visit_map_test.go +++ b/libs/dyn/visit_map_test.go @@ -5,7 +5,7 @@ import ( "testing" "github.com/databricks/cli/libs/dyn" - "github.com/stretchr/testify/assert" + assert "github.com/databricks/cli/libs/dyn/dynassert" "github.com/stretchr/testify/require" ) diff --git a/libs/dyn/visit_set.go b/libs/dyn/visit_set.go index edcd9bb735..b086fb8a91 100644 --- a/libs/dyn/visit_set.go +++ b/libs/dyn/visit_set.go @@ -2,7 +2,6 @@ package dyn import ( "fmt" - "maps" "slices" ) @@ -41,8 +40,8 @@ func SetByPath(v Value, p Path, nv Value) (Value, error) { } // Return an updated map value. 
- m = maps.Clone(m) - m[component.key] = nv + m = m.Clone() + m.Set(V(component.key), nv) return Value{ v: m, k: KindMap, diff --git a/libs/dyn/visit_set_test.go b/libs/dyn/visit_set_test.go index b384715875..df58941e17 100644 --- a/libs/dyn/visit_set_test.go +++ b/libs/dyn/visit_set_test.go @@ -4,7 +4,7 @@ import ( "testing" "github.com/databricks/cli/libs/dyn" - "github.com/stretchr/testify/assert" + assert "github.com/databricks/cli/libs/dyn/dynassert" ) func TestSetWithEmptyPath(t *testing.T) { diff --git a/libs/dyn/walk.go b/libs/dyn/walk.go index 26ddfc11d9..97b99b061e 100644 --- a/libs/dyn/walk.go +++ b/libs/dyn/walk.go @@ -34,16 +34,18 @@ func walk(v Value, p Path, fn func(p Path, v Value) (Value, error)) (Value, erro switch v.Kind() { case KindMap: m := v.MustMap() - out := make(map[string]Value, len(m)) - for k := range m { - nv, err := walk(m[k], append(p, Key(k)), fn) + out := newMappingWithSize(m.Len()) + for _, pair := range m.Pairs() { + pk := pair.Key + pv := pair.Value + nv, err := walk(pv, append(p, Key(pk.MustString())), fn) if err == ErrDrop { continue } if err != nil { return NilValue, err } - out[k] = nv + out.Set(pk, nv) } v.v = out case KindSequence: diff --git a/libs/dyn/walk_test.go b/libs/dyn/walk_test.go index 1b94ad9027..d62b9a4db8 100644 --- a/libs/dyn/walk_test.go +++ b/libs/dyn/walk_test.go @@ -5,7 +5,7 @@ import ( "testing" . "github.com/databricks/cli/libs/dyn" - "github.com/stretchr/testify/assert" + assert "github.com/databricks/cli/libs/dyn/dynassert" "github.com/stretchr/testify/require" ) diff --git a/libs/dyn/yamlloader/loader.go b/libs/dyn/yamlloader/loader.go index 899e1d7b8a..908793d584 100644 --- a/libs/dyn/yamlloader/loader.go +++ b/libs/dyn/yamlloader/loader.go @@ -92,7 +92,7 @@ func (d *loader) loadSequence(node *yaml.Node, loc dyn.Location) (dyn.Value, err func (d *loader) loadMapping(node *yaml.Node, loc dyn.Location) (dyn.Value, error) { var merge *yaml.Node - acc := make(map[string]dyn.Value) + acc := dyn.NewMapping() for i := 0; i < len(node.Content); i += 2 { key := node.Content[i] val := node.Content[i+1] @@ -116,12 +116,17 @@ func (d *loader) loadMapping(node *yaml.Node, loc dyn.Location) (dyn.Value, erro return dyn.NilValue, errorf(loc, "invalid key tag: %v", st) } + k, err := d.load(key) + if err != nil { + return dyn.NilValue, err + } + v, err := d.load(val) if err != nil { return dyn.NilValue, err } - acc[key.Value] = v + acc.Set(k, v) } if merge == nil { @@ -146,7 +151,7 @@ func (d *loader) loadMapping(node *yaml.Node, loc dyn.Location) (dyn.Value, erro // Build a sequence of values to merge. // The entries that we already accumulated have precedence. - var seq []map[string]dyn.Value + var seq []dyn.Mapping for _, n := range mnodes { v, err := d.load(n) if err != nil { @@ -161,11 +166,9 @@ func (d *loader) loadMapping(node *yaml.Node, loc dyn.Location) (dyn.Value, erro // Append the accumulated entries to the sequence. 
seq = append(seq, acc) - out := make(map[string]dyn.Value) + out := dyn.NewMapping() for _, m := range seq { - for k, v := range m { - out[k] = v - } + out.Merge(m) } return dyn.NewValue(out, loc), nil diff --git a/libs/dyn/yamlloader/yaml_anchor_test.go b/libs/dyn/yamlloader/yaml_anchor_test.go index 05beb5401d..29ce69f0ac 100644 --- a/libs/dyn/yamlloader/yaml_anchor_test.go +++ b/libs/dyn/yamlloader/yaml_anchor_test.go @@ -4,7 +4,7 @@ import ( "testing" "github.com/databricks/cli/libs/dyn" - "github.com/stretchr/testify/assert" + assert "github.com/databricks/cli/libs/dyn/dynassert" ) func TestYAMLAnchor01(t *testing.T) { diff --git a/libs/dyn/yamlloader/yaml_error_test.go b/libs/dyn/yamlloader/yaml_error_test.go index 11c444ad36..0ae424341e 100644 --- a/libs/dyn/yamlloader/yaml_error_test.go +++ b/libs/dyn/yamlloader/yaml_error_test.go @@ -5,8 +5,8 @@ import ( "os" "testing" + assert "github.com/databricks/cli/libs/dyn/dynassert" "github.com/databricks/cli/libs/dyn/yamlloader" - "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" "gopkg.in/yaml.v3" ) diff --git a/libs/dyn/yamlloader/yaml_mix_test.go b/libs/dyn/yamlloader/yaml_mix_test.go index 307b93dbf3..55ded6bafc 100644 --- a/libs/dyn/yamlloader/yaml_mix_test.go +++ b/libs/dyn/yamlloader/yaml_mix_test.go @@ -4,7 +4,7 @@ import ( "testing" "github.com/databricks/cli/libs/dyn" - "github.com/stretchr/testify/assert" + assert "github.com/databricks/cli/libs/dyn/dynassert" ) func TestYAMLMix01(t *testing.T) { diff --git a/libs/dyn/yamlloader/yaml_test.go b/libs/dyn/yamlloader/yaml_test.go index 14269feeef..9bb0377dd7 100644 --- a/libs/dyn/yamlloader/yaml_test.go +++ b/libs/dyn/yamlloader/yaml_test.go @@ -6,8 +6,8 @@ import ( "testing" "github.com/databricks/cli/libs/dyn" + assert "github.com/databricks/cli/libs/dyn/dynassert" "github.com/databricks/cli/libs/dyn/yamlloader" - "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" "gopkg.in/yaml.v3" ) diff --git a/libs/dyn/yamlsaver/order_test.go b/libs/dyn/yamlsaver/order_test.go index ed2877f6c8..ee9dc4752f 100644 --- a/libs/dyn/yamlsaver/order_test.go +++ b/libs/dyn/yamlsaver/order_test.go @@ -3,7 +3,7 @@ package yamlsaver import ( "testing" - "github.com/stretchr/testify/assert" + assert "github.com/databricks/cli/libs/dyn/dynassert" ) func TestOrderReturnsIncreasingIndex(t *testing.T) { diff --git a/libs/dyn/yamlsaver/saver.go b/libs/dyn/yamlsaver/saver.go index 84483a12f0..fe4cfb8549 100644 --- a/libs/dyn/yamlsaver/saver.go +++ b/libs/dyn/yamlsaver/saver.go @@ -9,7 +9,6 @@ import ( "strconv" "github.com/databricks/cli/libs/dyn" - "golang.org/x/exp/maps" "gopkg.in/yaml.v3" ) @@ -75,25 +74,27 @@ func (s *saver) toYamlNodeWithStyle(v dyn.Value, style yaml.Style) (*yaml.Node, switch v.Kind() { case dyn.KindMap: m, _ := v.AsMap() - keys := maps.Keys(m) + // We're using location lines to define the order of keys in YAML. 
// The location is set when we convert API response struct to config.Value representation // See convert.convertMap for details - sort.SliceStable(keys, func(i, j int) bool { - return m[keys[i]].Location().Line < m[keys[j]].Location().Line + pairs := m.Pairs() + sort.SliceStable(pairs, func(i, j int) bool { + return pairs[i].Value.Location().Line < pairs[j].Value.Location().Line }) content := make([]*yaml.Node, 0) - for _, k := range keys { - item := m[k] - node := yaml.Node{Kind: yaml.ScalarNode, Value: k, Style: style} + for _, pair := range pairs { + pk := pair.Key + pv := pair.Value + node := yaml.Node{Kind: yaml.ScalarNode, Value: pk.MustString(), Style: style} var nestedNodeStyle yaml.Style - if customStyle, ok := s.hasStyle(k); ok { + if customStyle, ok := s.hasStyle(pk.MustString()); ok { nestedNodeStyle = customStyle } else { nestedNodeStyle = style } - c, err := s.toYamlNodeWithStyle(item, nestedNodeStyle) + c, err := s.toYamlNodeWithStyle(pv, nestedNodeStyle) if err != nil { return nil, err } diff --git a/libs/dyn/yamlsaver/saver_test.go b/libs/dyn/yamlsaver/saver_test.go index ec44a42987..bdf1891cdd 100644 --- a/libs/dyn/yamlsaver/saver_test.go +++ b/libs/dyn/yamlsaver/saver_test.go @@ -5,7 +5,7 @@ import ( "time" "github.com/databricks/cli/libs/dyn" - "github.com/stretchr/testify/assert" + assert "github.com/databricks/cli/libs/dyn/dynassert" "gopkg.in/yaml.v3" ) diff --git a/libs/dyn/yamlsaver/utils.go b/libs/dyn/yamlsaver/utils.go index 0fb4064b54..6149491d60 100644 --- a/libs/dyn/yamlsaver/utils.go +++ b/libs/dyn/yamlsaver/utils.go @@ -26,7 +26,9 @@ func ConvertToMapValue(strct any, order *Order, skipFields []string, dst map[str } func skipAndOrder(mv dyn.Value, order *Order, skipFields []string, dst map[string]dyn.Value) (dyn.Value, error) { - for k, v := range mv.MustMap() { + for _, pair := range mv.MustMap().Pairs() { + k := pair.Key.MustString() + v := pair.Value if v.Kind() == dyn.KindNil { continue } diff --git a/libs/dyn/yamlsaver/utils_test.go b/libs/dyn/yamlsaver/utils_test.go index 32c9143bea..04b4c404fb 100644 --- a/libs/dyn/yamlsaver/utils_test.go +++ b/libs/dyn/yamlsaver/utils_test.go @@ -4,7 +4,7 @@ import ( "testing" "github.com/databricks/cli/libs/dyn" - "github.com/stretchr/testify/assert" + assert "github.com/databricks/cli/libs/dyn/dynassert" ) func TestConvertToMapValueWithOrder(t *testing.T) { @@ -32,7 +32,7 @@ func TestConvertToMapValueWithOrder(t *testing.T) { result, err := ConvertToMapValue(v, NewOrder([]string{"list", "name", "map"}), []string{"format"}, map[string]dyn.Value{}) assert.NoError(t, err) - assert.Equal(t, map[string]dyn.Value{ + assert.Equal(t, dyn.V(map[string]dyn.Value{ "list": dyn.NewValue([]dyn.Value{ dyn.V("a"), dyn.V("b"), @@ -44,5 +44,5 @@ func TestConvertToMapValueWithOrder(t *testing.T) { "key2": dyn.V("value2"), }, dyn.Location{Line: -1}), "long_name_field": dyn.NewValue("long name goes here", dyn.Location{Line: 1}), - }, result.MustMap()) + }), result) } From 9cf3dbe686302708f8d1afa4fc1f3cd89e2c49e3 Mon Sep 17 00:00:00 2001 From: Andrew Nester Date: Mon, 25 Mar 2024 12:32:45 +0100 Subject: [PATCH 06/17] Use UserName field to identify if service principal is used (#1310) ## Changes Use UserName field to identify if service principal is used ## Tests Integration test passed --- internal/init_test.go | 2 +- libs/auth/service_principal.go | 8 ++++---- libs/template/helpers.go | 2 +- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/internal/init_test.go b/internal/init_test.go index bed1119f8d..c3cb0127e3 100644 --- 
a/internal/init_test.go +++ b/internal/init_test.go @@ -138,7 +138,7 @@ func TestAccBundleInitHelpers(t *testing.T) { }, { funcName: "{{is_service_principal}}", - expected: strconv.FormatBool(auth.IsServicePrincipal(me.Id)), + expected: strconv.FormatBool(auth.IsServicePrincipal(me.UserName)), }, { funcName: "{{smallest_node_type}}", diff --git a/libs/auth/service_principal.go b/libs/auth/service_principal.go index cb488d16e5..5f1854e3aa 100644 --- a/libs/auth/service_principal.go +++ b/libs/auth/service_principal.go @@ -4,12 +4,12 @@ import ( "github.com/google/uuid" ) -// Determines whether a given user id is a service principal. -// This function uses a heuristic: if the user id is a UUID, then we assume +// Determines whether a given user name is a service principal. +// This function uses a heuristic: if the user name is a UUID, then we assume // it's a service principal. Unfortunately, the service principal listing API is too // slow for our purposes. And the "users" and "service principals get" APIs // only allow access by workspace admins. -func IsServicePrincipal(userId string) bool { - _, err := uuid.Parse(userId) +func IsServicePrincipal(userName string) bool { + _, err := uuid.Parse(userName) return err == nil } diff --git a/libs/template/helpers.go b/libs/template/helpers.go index 56710dfbd6..d15a801d69 100644 --- a/libs/template/helpers.go +++ b/libs/template/helpers.go @@ -140,7 +140,7 @@ func loadHelpers(ctx context.Context) template.FuncMap { return false, err } } - result := auth.IsServicePrincipal(cachedUser.Id) + result := auth.IsServicePrincipal(cachedUser.UserName) cachedIsServicePrincipal = &result return result, nil }, From ed194668dbc58a0b1fa64a3609c3dbdf498b0c06 Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Mon, 25 Mar 2024 15:18:47 +0100 Subject: [PATCH 07/17] Return `diag.Diagnostics` from mutators (#1305) ## Changes This diagnostics type allows us to capture multiple warnings as well as errors in the return value. This is a preparation for returning additional warnings from mutators in case we detect non-fatal problems. * All return statements that previously returned an error now return `diag.FromErr` * All return statements that previously returned `fmt.Errorf` now return `diag.Errorf` * All `err != nil` checks now use `diags.HasError()` or `diags.Error()` ## Tests * Existing tests pass. * I confirmed no call site under `./bundle` or `./cmd/bundle` uses `errors.Is` on the return value from mutators. This is relevant because we cannot wrap errors with `%w` when calling `diag.Errorf` (like `fmt.Errorf`; context in https://github.com/golang/go/issues/47641). 
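## Example

As a minimal sketch (not part of the original patch), this is what the migration looks like for a single mutator. `exampleMutator` and `validateName` are hypothetical names used only for illustration; the `diag.Diagnostics` signature and the `diag.Errorf` / `diag.FromErr` helpers mirror the hunks below.

```go
// Illustrative only: exampleMutator and validateName are hypothetical.
package example

import (
	"context"

	"github.com/databricks/cli/bundle"
	"github.com/databricks/cli/libs/diag"
)

type exampleMutator struct{}

func (m *exampleMutator) Name() string { return "ExampleMutator" }

// Before this change the signature was:
//   func (m *exampleMutator) Apply(ctx context.Context, b *bundle.Bundle) error
func (m *exampleMutator) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
	if b.Config.Bundle.Name == "" {
		// Previously: return fmt.Errorf("bundle name not defined")
		return diag.Errorf("bundle name not defined")
	}
	if err := validateName(b.Config.Bundle.Name); err != nil {
		// Previously: return err
		return diag.FromErr(err)
	}
	return nil
}

func validateName(name string) error { return nil }
```

Call sites change accordingly: instead of checking `err != nil`, they capture `diags := bundle.Apply(ctx, b, ...)` and inspect `diags.HasError()` or `diags.Error()`, as the updated tests below do.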
--- bundle/artifacts/all.go | 5 +- bundle/artifacts/artifacts.go | 19 ++++---- bundle/artifacts/autodetect.go | 3 +- bundle/artifacts/build.go | 7 +-- bundle/artifacts/infer.go | 5 +- bundle/artifacts/upload.go | 17 +++---- bundle/artifacts/upload_test.go | 11 +++-- bundle/artifacts/whl/autodetect.go | 5 +- bundle/artifacts/whl/build.go | 9 ++-- bundle/artifacts/whl/from_libraries.go | 3 +- bundle/artifacts/whl/infer.go | 5 +- bundle/config/mutator/default_target.go | 3 +- bundle/config/mutator/default_target_test.go | 10 ++-- .../config/mutator/default_workspace_paths.go | 6 +-- .../mutator/default_workspace_paths_test.go | 8 ++-- .../config/mutator/default_workspace_root.go | 7 +-- .../mutator/default_workspace_root_test.go | 5 +- bundle/config/mutator/environments_compat.go | 7 ++- .../mutator/environments_compat_test.go | 13 ++--- .../mutator/expand_pipeline_glob_paths.go | 7 ++- .../expand_pipeline_glob_paths_test.go | 4 +- .../config/mutator/expand_workspace_root.go | 7 +-- .../mutator/expand_workspace_root_test.go | 16 +++---- bundle/config/mutator/if.go | 3 +- bundle/config/mutator/initialize_variables.go | 3 +- .../mutator/initialize_variables_test.go | 8 ++-- .../mutator/initialize_workspace_client.go | 5 +- bundle/config/mutator/load_git_details.go | 9 ++-- bundle/config/mutator/merge_job_clusters.go | 7 ++- .../config/mutator/merge_job_clusters_test.go | 8 ++-- bundle/config/mutator/merge_job_tasks.go | 7 ++- bundle/config/mutator/merge_job_tasks_test.go | 8 ++-- .../config/mutator/merge_pipeline_clusters.go | 7 ++- .../mutator/merge_pipeline_clusters_test.go | 16 +++---- bundle/config/mutator/noop.go | 3 +- bundle/config/mutator/override_compute.go | 6 +-- .../config/mutator/override_compute_test.go | 20 ++++---- .../config/mutator/populate_current_user.go | 5 +- bundle/config/mutator/process_include.go | 9 ++-- bundle/config/mutator/process_include_test.go | 4 +- .../config/mutator/process_root_includes.go | 14 +++--- .../mutator/process_root_includes_test.go | 38 +++++++-------- bundle/config/mutator/process_target_mode.go | 24 +++++----- .../mutator/process_target_mode_test.go | 44 ++++++++--------- .../mutator/resolve_resource_references.go | 5 +- .../resolve_resource_references_test.go | 16 +++---- .../mutator/resolve_variable_references.go | 7 ++- .../resolve_variable_references_test.go | 37 +++++++------- bundle/config/mutator/rewrite_sync_paths.go | 7 ++- .../config/mutator/rewrite_sync_paths_test.go | 16 +++---- bundle/config/mutator/run_as.go | 3 +- .../config/mutator/select_default_target.go | 10 ++-- .../mutator/select_default_target_test.go | 24 +++++----- bundle/config/mutator/select_target.go | 9 ++-- bundle/config/mutator/select_target_test.go | 8 ++-- bundle/config/mutator/set_variables.go | 21 ++++---- bundle/config/mutator/set_variables_test.go | 24 +++++----- bundle/config/mutator/trampoline.go | 5 +- bundle/config/mutator/trampoline_test.go | 4 +- bundle/config/mutator/translate_paths.go | 7 ++- bundle/config/mutator/translate_paths_test.go | 48 +++++++++---------- bundle/config/mutator/validate_git_details.go | 6 +-- .../mutator/validate_git_details_test.go | 14 +++--- bundle/deferred.go | 15 +++--- bundle/deferred_test.go | 32 ++++++++----- bundle/deploy/check_running_resources.go | 11 +++-- bundle/deploy/files/delete.go | 11 +++-- bundle/deploy/files/upload.go | 7 +-- bundle/deploy/lock/acquire.go | 10 ++-- bundle/deploy/lock/release.go | 12 ++--- bundle/deploy/metadata/annotate_jobs.go | 3 +- bundle/deploy/metadata/annotate_jobs_test.go | 9 ++-- 
bundle/deploy/metadata/compute.go | 6 +-- bundle/deploy/metadata/compute_test.go | 4 +- bundle/deploy/metadata/upload.go | 9 ++-- bundle/deploy/state_pull.go | 23 ++++----- bundle/deploy/state_pull_test.go | 14 +++--- bundle/deploy/state_push.go | 11 +++-- bundle/deploy/state_push_test.go | 4 +- bundle/deploy/state_update.go | 19 ++++---- bundle/deploy/state_update_test.go | 12 ++--- bundle/deploy/terraform/apply.go | 10 ++-- bundle/deploy/terraform/destroy.go | 15 +++--- bundle/deploy/terraform/import.go | 25 +++++----- bundle/deploy/terraform/init.go | 19 ++++---- bundle/deploy/terraform/init_test.go | 4 +- bundle/deploy/terraform/interpolate.go | 7 ++- bundle/deploy/terraform/interpolate_test.go | 8 ++-- bundle/deploy/terraform/load.go | 13 ++--- bundle/deploy/terraform/load_test.go | 4 +- bundle/deploy/terraform/plan.go | 11 +++-- bundle/deploy/terraform/state_pull.go | 13 ++--- bundle/deploy/terraform/state_pull_test.go | 25 +++++----- bundle/deploy/terraform/state_push.go | 11 +++-- bundle/deploy/terraform/state_push_test.go | 4 +- bundle/deploy/terraform/unbind.go | 9 ++-- bundle/deploy/terraform/write.go | 11 +++-- bundle/libraries/match.go | 8 ++-- bundle/log_string.go | 3 +- bundle/mutator.go | 25 ++++++---- bundle/mutator_test.go | 7 +-- bundle/permissions/filter.go | 7 ++- bundle/permissions/filter_test.go | 12 ++--- bundle/permissions/mutator.go | 5 +- bundle/permissions/mutator_test.go | 8 ++-- bundle/permissions/workspace_root.go | 5 +- bundle/permissions/workspace_root_test.go | 4 +- bundle/phases/phase.go | 3 +- bundle/python/conditional_transform_test.go | 8 ++-- bundle/python/transform_test.go | 4 +- bundle/python/warning.go | 6 +-- bundle/python/warning_test.go | 8 ++-- bundle/scripts/scripts.go | 9 ++-- bundle/scripts/scripts_test.go | 4 +- bundle/seq.go | 17 ++++--- bundle/seq_test.go | 20 ++++---- bundle/tests/bundle_permissions_test.go | 10 ++-- bundle/tests/conflicting_resource_ids_test.go | 8 ++-- bundle/tests/git_test.go | 4 +- bundle/tests/include_test.go | 6 +-- bundle/tests/interpolation_test.go | 9 ++-- bundle/tests/loader.go | 12 ++--- bundle/tests/path_translation_test.go | 16 +++---- bundle/tests/pipeline_glob_paths_test.go | 8 ++-- bundle/tests/python_wheel_test.go | 32 ++++++------- .../tests/relative_path_with_includes_test.go | 4 +- bundle/tests/run_as_test.go | 13 ++--- bundle/tests/variables_test.go | 36 +++++++------- cmd/bundle/deploy.go | 9 +++- cmd/bundle/deployment/bind.go | 7 +-- cmd/bundle/deployment/unbind.go | 9 +++- cmd/bundle/destroy.go | 9 +++- cmd/bundle/run.go | 4 +- cmd/bundle/summary.go | 12 ++--- cmd/bundle/sync.go | 4 +- cmd/bundle/utils/utils.go | 7 ++- cmd/bundle/validate.go | 4 +- cmd/root/bundle.go | 13 ++--- internal/bundle/artifacts_test.go | 4 +- libs/diag/diagnostic.go | 23 +++++++++ libs/template/renderer_test.go | 11 +++-- 141 files changed, 841 insertions(+), 698 deletions(-) diff --git a/bundle/artifacts/all.go b/bundle/artifacts/all.go index 1a1661e5fa..305193e2eb 100644 --- a/bundle/artifacts/all.go +++ b/bundle/artifacts/all.go @@ -7,6 +7,7 @@ import ( "slices" "github.com/databricks/cli/bundle" + "github.com/databricks/cli/libs/diag" "golang.org/x/exp/maps" ) @@ -21,7 +22,7 @@ func (m *all) Name() string { return fmt.Sprintf("artifacts.%sAll", m.name) } -func (m *all) Apply(ctx context.Context, b *bundle.Bundle) error { +func (m *all) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { var out []bundle.Mutator // Iterate with stable ordering. 
@@ -31,7 +32,7 @@ func (m *all) Apply(ctx context.Context, b *bundle.Bundle) error { for _, name := range keys { m, err := m.fn(name) if err != nil { - return err + return diag.FromErr(err) } if m != nil { out = append(out, m) diff --git a/bundle/artifacts/artifacts.go b/bundle/artifacts/artifacts.go index ce2e165b7f..b7a22d09dc 100644 --- a/bundle/artifacts/artifacts.go +++ b/bundle/artifacts/artifacts.go @@ -14,6 +14,7 @@ import ( "github.com/databricks/cli/bundle/config" "github.com/databricks/cli/bundle/libraries" "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/cli/libs/diag" "github.com/databricks/cli/libs/filer" "github.com/databricks/cli/libs/log" ) @@ -57,17 +58,17 @@ func (m *basicBuild) Name() string { return fmt.Sprintf("artifacts.Build(%s)", m.name) } -func (m *basicBuild) Apply(ctx context.Context, b *bundle.Bundle) error { +func (m *basicBuild) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { artifact, ok := b.Config.Artifacts[m.name] if !ok { - return fmt.Errorf("artifact doesn't exist: %s", m.name) + return diag.Errorf("artifact doesn't exist: %s", m.name) } cmdio.LogString(ctx, fmt.Sprintf("Building %s...", m.name)) out, err := artifact.Build(ctx) if err != nil { - return fmt.Errorf("build for %s failed, error: %w, output: %s", m.name, err, out) + return diag.Errorf("build for %s failed, error: %v, output: %s", m.name, err, out) } log.Infof(ctx, "Build succeeded") @@ -87,29 +88,29 @@ func (m *basicUpload) Name() string { return fmt.Sprintf("artifacts.Upload(%s)", m.name) } -func (m *basicUpload) Apply(ctx context.Context, b *bundle.Bundle) error { +func (m *basicUpload) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { artifact, ok := b.Config.Artifacts[m.name] if !ok { - return fmt.Errorf("artifact doesn't exist: %s", m.name) + return diag.Errorf("artifact doesn't exist: %s", m.name) } if len(artifact.Files) == 0 { - return fmt.Errorf("artifact source is not configured: %s", m.name) + return diag.Errorf("artifact source is not configured: %s", m.name) } uploadPath, err := getUploadBasePath(b) if err != nil { - return err + return diag.FromErr(err) } client, err := filer.NewWorkspaceFilesClient(b.WorkspaceClient(), uploadPath) if err != nil { - return err + return diag.FromErr(err) } err = uploadArtifact(ctx, b, artifact, uploadPath, client) if err != nil { - return fmt.Errorf("upload for %s failed, error: %w", m.name, err) + return diag.Errorf("upload for %s failed, error: %v", m.name, err) } return nil diff --git a/bundle/artifacts/autodetect.go b/bundle/artifacts/autodetect.go index 6e80ef0b63..0e94edd820 100644 --- a/bundle/artifacts/autodetect.go +++ b/bundle/artifacts/autodetect.go @@ -5,6 +5,7 @@ import ( "github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle/artifacts/whl" + "github.com/databricks/cli/libs/diag" "github.com/databricks/cli/libs/log" ) @@ -19,7 +20,7 @@ func (m *autodetect) Name() string { return "artifacts.DetectPackages" } -func (m *autodetect) Apply(ctx context.Context, b *bundle.Bundle) error { +func (m *autodetect) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { // If artifacts section explicitly defined, do not try to auto detect packages if b.Config.Artifacts != nil { log.Debugf(ctx, "artifacts block is defined, skipping auto-detecting") diff --git a/bundle/artifacts/build.go b/bundle/artifacts/build.go index a78958e60b..f3ee097c28 100644 --- a/bundle/artifacts/build.go +++ b/bundle/artifacts/build.go @@ -6,6 +6,7 @@ import ( "path/filepath" 
"github.com/databricks/cli/bundle" + "github.com/databricks/cli/libs/diag" ) func BuildAll() bundle.Mutator { @@ -27,10 +28,10 @@ func (m *build) Name() string { return fmt.Sprintf("artifacts.Build(%s)", m.name) } -func (m *build) Apply(ctx context.Context, b *bundle.Bundle) error { +func (m *build) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { artifact, ok := b.Config.Artifacts[m.name] if !ok { - return fmt.Errorf("artifact doesn't exist: %s", m.name) + return diag.Errorf("artifact doesn't exist: %s", m.name) } // Skip building if build command is not specified or infered @@ -38,7 +39,7 @@ func (m *build) Apply(ctx context.Context, b *bundle.Bundle) error { // If no build command was specified or infered and there is no // artifact output files specified, artifact is misconfigured if len(artifact.Files) == 0 { - return fmt.Errorf("misconfigured artifact: please specify 'build' or 'files' property") + return diag.Errorf("misconfigured artifact: please specify 'build' or 'files' property") } return nil } diff --git a/bundle/artifacts/infer.go b/bundle/artifacts/infer.go index ade5def516..abc5091070 100644 --- a/bundle/artifacts/infer.go +++ b/bundle/artifacts/infer.go @@ -7,6 +7,7 @@ import ( "github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle/artifacts/whl" "github.com/databricks/cli/bundle/config" + "github.com/databricks/cli/libs/diag" ) var inferMutators map[config.ArtifactType]mutatorFactory = map[config.ArtifactType]mutatorFactory{ @@ -41,10 +42,10 @@ func (m *infer) Name() string { return fmt.Sprintf("artifacts.Infer(%s)", m.name) } -func (m *infer) Apply(ctx context.Context, b *bundle.Bundle) error { +func (m *infer) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { artifact, ok := b.Config.Artifacts[m.name] if !ok { - return fmt.Errorf("artifact doesn't exist: %s", m.name) + return diag.Errorf("artifact doesn't exist: %s", m.name) } // only try to infer command if it's not already defined diff --git a/bundle/artifacts/upload.go b/bundle/artifacts/upload.go index 61e6520866..e2c2fc1c94 100644 --- a/bundle/artifacts/upload.go +++ b/bundle/artifacts/upload.go @@ -7,6 +7,7 @@ import ( "github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle/config" + "github.com/databricks/cli/libs/diag" "github.com/databricks/databricks-sdk-go/service/workspace" ) @@ -33,14 +34,14 @@ func (m *upload) Name() string { return fmt.Sprintf("artifacts.Upload(%s)", m.name) } -func (m *upload) Apply(ctx context.Context, b *bundle.Bundle) error { +func (m *upload) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { artifact, ok := b.Config.Artifacts[m.name] if !ok { - return fmt.Errorf("artifact doesn't exist: %s", m.name) + return diag.Errorf("artifact doesn't exist: %s", m.name) } if len(artifact.Files) == 0 { - return fmt.Errorf("artifact source is not configured: %s", m.name) + return diag.Errorf("artifact source is not configured: %s", m.name) } // Check if source paths are absolute, if not, make them absolute @@ -57,11 +58,11 @@ func (m *upload) Apply(ctx context.Context, b *bundle.Bundle) error { for _, f := range artifact.Files { matches, err := filepath.Glob(f.Source) if err != nil { - return fmt.Errorf("unable to find files for %s: %w", f.Source, err) + return diag.Errorf("unable to find files for %s: %v", f.Source, err) } if len(matches) == 0 { - return fmt.Errorf("no files found for %s", f.Source) + return diag.Errorf("no files found for %s", f.Source) } for _, match := range matches { @@ -81,10 +82,10 @@ func (m *cleanUp) 
Name() string { return "artifacts.CleanUp" } -func (m *cleanUp) Apply(ctx context.Context, b *bundle.Bundle) error { +func (m *cleanUp) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { uploadPath, err := getUploadBasePath(b) if err != nil { - return err + return diag.FromErr(err) } b.WorkspaceClient().Workspace.Delete(ctx, workspace.Delete{ @@ -94,7 +95,7 @@ func (m *cleanUp) Apply(ctx context.Context, b *bundle.Bundle) error { err = b.WorkspaceClient().Workspace.MkdirsByPath(ctx, uploadPath) if err != nil { - return fmt.Errorf("unable to create directory for %s: %w", uploadPath, err) + return diag.Errorf("unable to create directory for %s: %v", uploadPath, err) } return nil diff --git a/bundle/artifacts/upload_test.go b/bundle/artifacts/upload_test.go index 6dea1c1457..ec71100958 100644 --- a/bundle/artifacts/upload_test.go +++ b/bundle/artifacts/upload_test.go @@ -9,13 +9,14 @@ import ( "github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle/config" "github.com/databricks/cli/bundle/internal/bundletest" + "github.com/databricks/cli/libs/diag" "github.com/databricks/cli/libs/testfile" "github.com/stretchr/testify/require" ) type noop struct{} -func (n *noop) Apply(context.Context, *bundle.Bundle) error { +func (n *noop) Apply(context.Context, *bundle.Bundle) diag.Diagnostics { return nil } @@ -57,8 +58,8 @@ func TestExpandGlobFilesSource(t *testing.T) { return &noop{} } - err = bundle.Apply(context.Background(), b, u) - require.NoError(t, err) + diags := bundle.Apply(context.Background(), b, u) + require.NoError(t, diags.Error()) require.Equal(t, 2, len(b.Config.Artifacts["test"].Files)) require.Equal(t, filepath.Join(rootPath, "test", "myjar1.jar"), b.Config.Artifacts["test"].Files[0].Source) @@ -93,6 +94,6 @@ func TestExpandGlobFilesSourceWithNoMatches(t *testing.T) { return &noop{} } - err = bundle.Apply(context.Background(), b, u) - require.ErrorContains(t, err, "no files found for") + diags := bundle.Apply(context.Background(), b, u) + require.ErrorContains(t, diags.Error(), "no files found for") } diff --git a/bundle/artifacts/whl/autodetect.go b/bundle/artifacts/whl/autodetect.go index c858a38c0e..d11db83110 100644 --- a/bundle/artifacts/whl/autodetect.go +++ b/bundle/artifacts/whl/autodetect.go @@ -11,6 +11,7 @@ import ( "github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle/config" "github.com/databricks/cli/bundle/libraries" + "github.com/databricks/cli/libs/diag" "github.com/databricks/cli/libs/log" ) @@ -25,7 +26,7 @@ func (m *detectPkg) Name() string { return "artifacts.whl.AutoDetect" } -func (m *detectPkg) Apply(ctx context.Context, b *bundle.Bundle) error { +func (m *detectPkg) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { wheelTasks := libraries.FindAllWheelTasksWithLocalLibraries(b) if len(wheelTasks) == 0 { log.Infof(ctx, "No local wheel tasks in databricks.yml config, skipping auto detect") @@ -50,7 +51,7 @@ func (m *detectPkg) Apply(ctx context.Context, b *bundle.Bundle) error { pkgPath, err := filepath.Abs(b.Config.Path) if err != nil { - return err + return diag.FromErr(err) } b.Config.Artifacts[module] = &config.Artifact{ Path: pkgPath, diff --git a/bundle/artifacts/whl/build.go b/bundle/artifacts/whl/build.go index aeec31a637..992ade297b 100644 --- a/bundle/artifacts/whl/build.go +++ b/bundle/artifacts/whl/build.go @@ -9,6 +9,7 @@ import ( "github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle/config" "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/cli/libs/diag" 
"github.com/databricks/cli/libs/log" "github.com/databricks/cli/libs/python" ) @@ -27,10 +28,10 @@ func (m *build) Name() string { return fmt.Sprintf("artifacts.whl.Build(%s)", m.name) } -func (m *build) Apply(ctx context.Context, b *bundle.Bundle) error { +func (m *build) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { artifact, ok := b.Config.Artifacts[m.name] if !ok { - return fmt.Errorf("artifact doesn't exist: %s", m.name) + return diag.Errorf("artifact doesn't exist: %s", m.name) } cmdio.LogString(ctx, fmt.Sprintf("Building %s...", m.name)) @@ -43,13 +44,13 @@ func (m *build) Apply(ctx context.Context, b *bundle.Bundle) error { out, err := artifact.Build(ctx) if err != nil { - return fmt.Errorf("build failed %s, error: %w, output: %s", m.name, err, out) + return diag.Errorf("build failed %s, error: %v, output: %s", m.name, err, out) } log.Infof(ctx, "Build succeeded") wheels := python.FindFilesWithSuffixInPath(distPath, ".whl") if len(wheels) == 0 { - return fmt.Errorf("cannot find built wheel in %s for package %s", dir, m.name) + return diag.Errorf("cannot find built wheel in %s for package %s", dir, m.name) } for _, wheel := range wheels { artifact.Files = append(artifact.Files, config.ArtifactFile{ diff --git a/bundle/artifacts/whl/from_libraries.go b/bundle/artifacts/whl/from_libraries.go index 9d35f63146..a2045aaf8c 100644 --- a/bundle/artifacts/whl/from_libraries.go +++ b/bundle/artifacts/whl/from_libraries.go @@ -7,6 +7,7 @@ import ( "github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle/config" "github.com/databricks/cli/bundle/libraries" + "github.com/databricks/cli/libs/diag" "github.com/databricks/cli/libs/log" ) @@ -20,7 +21,7 @@ func (m *fromLibraries) Name() string { return "artifacts.whl.DefineArtifactsFromLibraries" } -func (*fromLibraries) Apply(ctx context.Context, b *bundle.Bundle) error { +func (*fromLibraries) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { if len(b.Config.Artifacts) != 0 { log.Debugf(ctx, "Skipping defining artifacts from libraries because artifacts section is explicitly defined") return nil diff --git a/bundle/artifacts/whl/infer.go b/bundle/artifacts/whl/infer.go index dc2b8e233e..dd4ad2956d 100644 --- a/bundle/artifacts/whl/infer.go +++ b/bundle/artifacts/whl/infer.go @@ -5,6 +5,7 @@ import ( "fmt" "github.com/databricks/cli/bundle" + "github.com/databricks/cli/libs/diag" "github.com/databricks/cli/libs/python" ) @@ -12,11 +13,11 @@ type infer struct { name string } -func (m *infer) Apply(ctx context.Context, b *bundle.Bundle) error { +func (m *infer) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { artifact := b.Config.Artifacts[m.name] py, err := python.DetectExecutable(ctx) if err != nil { - return err + return diag.FromErr(err) } // Note: using --build-number (build tag) flag does not help with re-installing diff --git a/bundle/config/mutator/default_target.go b/bundle/config/mutator/default_target.go index d5318a3e26..73d99002a0 100644 --- a/bundle/config/mutator/default_target.go +++ b/bundle/config/mutator/default_target.go @@ -6,6 +6,7 @@ import ( "github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle/config" + "github.com/databricks/cli/libs/diag" ) type defineDefaultTarget struct { @@ -24,7 +25,7 @@ func (m *defineDefaultTarget) Name() string { return fmt.Sprintf("DefineDefaultTarget(%s)", m.name) } -func (m *defineDefaultTarget) Apply(_ context.Context, b *bundle.Bundle) error { +func (m *defineDefaultTarget) Apply(_ context.Context, b *bundle.Bundle) 
diag.Diagnostics { // Nothing to do if the configuration has at least 1 target. if len(b.Config.Targets) > 0 { return nil diff --git a/bundle/config/mutator/default_target_test.go b/bundle/config/mutator/default_target_test.go index 61a5a01384..d60b14aad8 100644 --- a/bundle/config/mutator/default_target_test.go +++ b/bundle/config/mutator/default_target_test.go @@ -13,8 +13,9 @@ import ( func TestDefaultTarget(t *testing.T) { b := &bundle.Bundle{} - err := bundle.Apply(context.Background(), b, mutator.DefineDefaultTarget()) - require.NoError(t, err) + diags := bundle.Apply(context.Background(), b, mutator.DefineDefaultTarget()) + require.NoError(t, diags.Error()) + env, ok := b.Config.Targets["default"] assert.True(t, ok) assert.Equal(t, &config.Target{}, env) @@ -28,8 +29,9 @@ func TestDefaultTargetAlreadySpecified(t *testing.T) { }, }, } - err := bundle.Apply(context.Background(), b, mutator.DefineDefaultTarget()) - require.NoError(t, err) + diags := bundle.Apply(context.Background(), b, mutator.DefineDefaultTarget()) + require.NoError(t, diags.Error()) + _, ok := b.Config.Targets["default"] assert.False(t, ok) } diff --git a/bundle/config/mutator/default_workspace_paths.go b/bundle/config/mutator/default_workspace_paths.go index 04f2b0dc0f..71e562b51f 100644 --- a/bundle/config/mutator/default_workspace_paths.go +++ b/bundle/config/mutator/default_workspace_paths.go @@ -2,10 +2,10 @@ package mutator import ( "context" - "fmt" "path" "github.com/databricks/cli/bundle" + "github.com/databricks/cli/libs/diag" ) type defineDefaultWorkspacePaths struct{} @@ -19,10 +19,10 @@ func (m *defineDefaultWorkspacePaths) Name() string { return "DefaultWorkspacePaths" } -func (m *defineDefaultWorkspacePaths) Apply(ctx context.Context, b *bundle.Bundle) error { +func (m *defineDefaultWorkspacePaths) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { root := b.Config.Workspace.RootPath if root == "" { - return fmt.Errorf("unable to define default workspace paths: workspace root not defined") + return diag.Errorf("unable to define default workspace paths: workspace root not defined") } if b.Config.Workspace.FilePath == "" { diff --git a/bundle/config/mutator/default_workspace_paths_test.go b/bundle/config/mutator/default_workspace_paths_test.go index 1ad0ca7862..0ba20ea2bd 100644 --- a/bundle/config/mutator/default_workspace_paths_test.go +++ b/bundle/config/mutator/default_workspace_paths_test.go @@ -19,8 +19,8 @@ func TestDefineDefaultWorkspacePaths(t *testing.T) { }, }, } - err := bundle.Apply(context.Background(), b, mutator.DefineDefaultWorkspacePaths()) - require.NoError(t, err) + diags := bundle.Apply(context.Background(), b, mutator.DefineDefaultWorkspacePaths()) + require.NoError(t, diags.Error()) assert.Equal(t, "/files", b.Config.Workspace.FilePath) assert.Equal(t, "/artifacts", b.Config.Workspace.ArtifactPath) assert.Equal(t, "/state", b.Config.Workspace.StatePath) @@ -37,8 +37,8 @@ func TestDefineDefaultWorkspacePathsAlreadySet(t *testing.T) { }, }, } - err := bundle.Apply(context.Background(), b, mutator.DefineDefaultWorkspacePaths()) - require.NoError(t, err) + diags := bundle.Apply(context.Background(), b, mutator.DefineDefaultWorkspacePaths()) + require.NoError(t, diags.Error()) assert.Equal(t, "/foo/bar", b.Config.Workspace.FilePath) assert.Equal(t, "/foo/bar", b.Config.Workspace.ArtifactPath) assert.Equal(t, "/foo/bar", b.Config.Workspace.StatePath) diff --git a/bundle/config/mutator/default_workspace_root.go b/bundle/config/mutator/default_workspace_root.go index 
260a59584b..d7c24a5b55 100644 --- a/bundle/config/mutator/default_workspace_root.go +++ b/bundle/config/mutator/default_workspace_root.go @@ -5,6 +5,7 @@ import ( "fmt" "github.com/databricks/cli/bundle" + "github.com/databricks/cli/libs/diag" ) type defineDefaultWorkspaceRoot struct{} @@ -18,17 +19,17 @@ func (m *defineDefaultWorkspaceRoot) Name() string { return "DefineDefaultWorkspaceRoot" } -func (m *defineDefaultWorkspaceRoot) Apply(ctx context.Context, b *bundle.Bundle) error { +func (m *defineDefaultWorkspaceRoot) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { if b.Config.Workspace.RootPath != "" { return nil } if b.Config.Bundle.Name == "" { - return fmt.Errorf("unable to define default workspace root: bundle name not defined") + return diag.Errorf("unable to define default workspace root: bundle name not defined") } if b.Config.Bundle.Target == "" { - return fmt.Errorf("unable to define default workspace root: bundle target not selected") + return diag.Errorf("unable to define default workspace root: bundle target not selected") } b.Config.Workspace.RootPath = fmt.Sprintf( diff --git a/bundle/config/mutator/default_workspace_root_test.go b/bundle/config/mutator/default_workspace_root_test.go index 9dd549a390..b05520f62d 100644 --- a/bundle/config/mutator/default_workspace_root_test.go +++ b/bundle/config/mutator/default_workspace_root_test.go @@ -20,7 +20,8 @@ func TestDefaultWorkspaceRoot(t *testing.T) { }, }, } - err := bundle.Apply(context.Background(), b, mutator.DefineDefaultWorkspaceRoot()) - require.NoError(t, err) + diags := bundle.Apply(context.Background(), b, mutator.DefineDefaultWorkspaceRoot()) + require.NoError(t, diags.Error()) + assert.Equal(t, "~/.bundle/name/environment", b.Config.Workspace.RootPath) } diff --git a/bundle/config/mutator/environments_compat.go b/bundle/config/mutator/environments_compat.go index 0eb996b14c..cbedcaefd5 100644 --- a/bundle/config/mutator/environments_compat.go +++ b/bundle/config/mutator/environments_compat.go @@ -5,6 +5,7 @@ import ( "fmt" "github.com/databricks/cli/bundle" + "github.com/databricks/cli/libs/diag" "github.com/databricks/cli/libs/dyn" ) @@ -18,7 +19,7 @@ func (m *environmentsToTargets) Name() string { return "EnvironmentsToTargets" } -func (m *environmentsToTargets) Apply(ctx context.Context, b *bundle.Bundle) error { +func (m *environmentsToTargets) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { // Short circuit if the "environments" key is not set. // This is the common case. if b.Config.Environments == nil { @@ -26,7 +27,7 @@ func (m *environmentsToTargets) Apply(ctx context.Context, b *bundle.Bundle) err } // The "environments" key is set; validate and rewrite it to "targets". 
- return b.Config.Mutate(func(v dyn.Value) (dyn.Value, error) { + err := b.Config.Mutate(func(v dyn.Value) (dyn.Value, error) { environments := v.Get("environments") targets := v.Get("targets") @@ -60,4 +61,6 @@ func (m *environmentsToTargets) Apply(ctx context.Context, b *bundle.Bundle) err return v, nil }) + + return diag.FromErr(err) } diff --git a/bundle/config/mutator/environments_compat_test.go b/bundle/config/mutator/environments_compat_test.go index f7045b3df2..8a21298479 100644 --- a/bundle/config/mutator/environments_compat_test.go +++ b/bundle/config/mutator/environments_compat_test.go @@ -8,6 +8,7 @@ import ( "github.com/databricks/cli/bundle/config" "github.com/databricks/cli/bundle/config/mutator" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" ) func TestEnvironmentsToTargetsWithBothDefined(t *testing.T) { @@ -26,8 +27,8 @@ func TestEnvironmentsToTargetsWithBothDefined(t *testing.T) { }, } - err := bundle.Apply(context.Background(), b, mutator.EnvironmentsToTargets()) - assert.ErrorContains(t, err, `both 'environments' and 'targets' are specified;`) + diags := bundle.Apply(context.Background(), b, mutator.EnvironmentsToTargets()) + assert.ErrorContains(t, diags.Error(), `both 'environments' and 'targets' are specified;`) } func TestEnvironmentsToTargetsWithEnvironmentsDefined(t *testing.T) { @@ -41,8 +42,8 @@ func TestEnvironmentsToTargetsWithEnvironmentsDefined(t *testing.T) { }, } - err := bundle.Apply(context.Background(), b, mutator.EnvironmentsToTargets()) - assert.NoError(t, err) + diags := bundle.Apply(context.Background(), b, mutator.EnvironmentsToTargets()) + require.NoError(t, diags.Error()) assert.Len(t, b.Config.Environments, 0) assert.Len(t, b.Config.Targets, 1) } @@ -58,8 +59,8 @@ func TestEnvironmentsToTargetsWithTargetsDefined(t *testing.T) { }, } - err := bundle.Apply(context.Background(), b, mutator.EnvironmentsToTargets()) - assert.NoError(t, err) + diags := bundle.Apply(context.Background(), b, mutator.EnvironmentsToTargets()) + require.NoError(t, diags.Error()) assert.Len(t, b.Config.Environments, 0) assert.Len(t, b.Config.Targets, 1) } diff --git a/bundle/config/mutator/expand_pipeline_glob_paths.go b/bundle/config/mutator/expand_pipeline_glob_paths.go index 843bc12718..268d8fa48b 100644 --- a/bundle/config/mutator/expand_pipeline_glob_paths.go +++ b/bundle/config/mutator/expand_pipeline_glob_paths.go @@ -7,6 +7,7 @@ import ( "github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle/libraries" + "github.com/databricks/cli/libs/diag" "github.com/databricks/cli/libs/dyn" ) @@ -92,8 +93,8 @@ func (m *expandPipelineGlobPaths) expandSequence(p dyn.Path, v dyn.Value) (dyn.V return dyn.NewValue(vs, v.Location()), nil } -func (m *expandPipelineGlobPaths) Apply(_ context.Context, b *bundle.Bundle) error { - return b.Config.Mutate(func(v dyn.Value) (dyn.Value, error) { +func (m *expandPipelineGlobPaths) Apply(_ context.Context, b *bundle.Bundle) diag.Diagnostics { + err := b.Config.Mutate(func(v dyn.Value) (dyn.Value, error) { p := dyn.NewPattern( dyn.Key("resources"), dyn.Key("pipelines"), @@ -104,6 +105,8 @@ func (m *expandPipelineGlobPaths) Apply(_ context.Context, b *bundle.Bundle) err // Visit each pipeline's "libraries" field and expand any glob patterns. 
return dyn.MapByPattern(v, p, m.expandSequence) }) + + return diag.FromErr(err) } func (*expandPipelineGlobPaths) Name() string { diff --git a/bundle/config/mutator/expand_pipeline_glob_paths_test.go b/bundle/config/mutator/expand_pipeline_glob_paths_test.go index 828eac3dec..db80be028a 100644 --- a/bundle/config/mutator/expand_pipeline_glob_paths_test.go +++ b/bundle/config/mutator/expand_pipeline_glob_paths_test.go @@ -109,8 +109,8 @@ func TestExpandGlobPathsInPipelines(t *testing.T) { bundletest.SetLocation(b, "resources.pipelines.pipeline.libraries[3]", filepath.Join(dir, "relative", "resource.yml")) m := ExpandPipelineGlobPaths() - err := bundle.Apply(context.Background(), b, m) - require.NoError(t, err) + diags := bundle.Apply(context.Background(), b, m) + require.NoError(t, diags.Error()) libraries := b.Config.Resources.Pipelines["pipeline"].Libraries require.Len(t, libraries, 13) diff --git a/bundle/config/mutator/expand_workspace_root.go b/bundle/config/mutator/expand_workspace_root.go index 59f19ccc44..8954abd464 100644 --- a/bundle/config/mutator/expand_workspace_root.go +++ b/bundle/config/mutator/expand_workspace_root.go @@ -7,6 +7,7 @@ import ( "strings" "github.com/databricks/cli/bundle" + "github.com/databricks/cli/libs/diag" ) type expandWorkspaceRoot struct{} @@ -20,15 +21,15 @@ func (m *expandWorkspaceRoot) Name() string { return "ExpandWorkspaceRoot" } -func (m *expandWorkspaceRoot) Apply(ctx context.Context, b *bundle.Bundle) error { +func (m *expandWorkspaceRoot) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { root := b.Config.Workspace.RootPath if root == "" { - return fmt.Errorf("unable to expand workspace root: workspace root not defined") + return diag.Errorf("unable to expand workspace root: workspace root not defined") } currentUser := b.Config.Workspace.CurrentUser if currentUser == nil || currentUser.UserName == "" { - return fmt.Errorf("unable to expand workspace root: current user not set") + return diag.Errorf("unable to expand workspace root: current user not set") } if strings.HasPrefix(root, "~/") { diff --git a/bundle/config/mutator/expand_workspace_root_test.go b/bundle/config/mutator/expand_workspace_root_test.go index 17ee065097..e6260dbd8f 100644 --- a/bundle/config/mutator/expand_workspace_root_test.go +++ b/bundle/config/mutator/expand_workspace_root_test.go @@ -25,8 +25,8 @@ func TestExpandWorkspaceRoot(t *testing.T) { }, }, } - err := bundle.Apply(context.Background(), b, mutator.ExpandWorkspaceRoot()) - require.NoError(t, err) + diags := bundle.Apply(context.Background(), b, mutator.ExpandWorkspaceRoot()) + require.NoError(t, diags.Error()) assert.Equal(t, "/Users/jane@doe.com/foo", b.Config.Workspace.RootPath) } @@ -43,8 +43,8 @@ func TestExpandWorkspaceRootDoesNothing(t *testing.T) { }, }, } - err := bundle.Apply(context.Background(), b, mutator.ExpandWorkspaceRoot()) - require.NoError(t, err) + diags := bundle.Apply(context.Background(), b, mutator.ExpandWorkspaceRoot()) + require.NoError(t, diags.Error()) assert.Equal(t, "/Users/charly@doe.com/foo", b.Config.Workspace.RootPath) } @@ -60,8 +60,8 @@ func TestExpandWorkspaceRootWithoutRoot(t *testing.T) { }, }, } - err := bundle.Apply(context.Background(), b, mutator.ExpandWorkspaceRoot()) - require.Error(t, err) + diags := bundle.Apply(context.Background(), b, mutator.ExpandWorkspaceRoot()) + require.True(t, diags.HasError()) } func TestExpandWorkspaceRootWithoutCurrentUser(t *testing.T) { @@ -72,6 +72,6 @@ func TestExpandWorkspaceRootWithoutCurrentUser(t *testing.T) { }, }, } 
- err := bundle.Apply(context.Background(), b, mutator.ExpandWorkspaceRoot()) - require.Error(t, err) + diags := bundle.Apply(context.Background(), b, mutator.ExpandWorkspaceRoot()) + require.True(t, diags.HasError()) } diff --git a/bundle/config/mutator/if.go b/bundle/config/mutator/if.go index 462d8f004c..1b7856b3c3 100644 --- a/bundle/config/mutator/if.go +++ b/bundle/config/mutator/if.go @@ -4,6 +4,7 @@ import ( "context" "github.com/databricks/cli/bundle" + "github.com/databricks/cli/libs/diag" ) type ifMutator struct { @@ -22,7 +23,7 @@ func If( } } -func (m *ifMutator) Apply(ctx context.Context, b *bundle.Bundle) error { +func (m *ifMutator) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { if m.condition(b) { return bundle.Apply(ctx, b, m.onTrueMutator) } else { diff --git a/bundle/config/mutator/initialize_variables.go b/bundle/config/mutator/initialize_variables.go index 8e50b4d041..e72cdde310 100644 --- a/bundle/config/mutator/initialize_variables.go +++ b/bundle/config/mutator/initialize_variables.go @@ -5,6 +5,7 @@ import ( "github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle/config/variable" + "github.com/databricks/cli/libs/diag" ) type initializeVariables struct{} @@ -18,7 +19,7 @@ func (m *initializeVariables) Name() string { return "InitializeVariables" } -func (m *initializeVariables) Apply(ctx context.Context, b *bundle.Bundle) error { +func (m *initializeVariables) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { vars := b.Config.Variables for k, v := range vars { if v == nil { diff --git a/bundle/config/mutator/initialize_variables_test.go b/bundle/config/mutator/initialize_variables_test.go index 46445591a0..3ca4384fa2 100644 --- a/bundle/config/mutator/initialize_variables_test.go +++ b/bundle/config/mutator/initialize_variables_test.go @@ -23,8 +23,8 @@ func TestInitializeVariables(t *testing.T) { }, }, } - err := bundle.Apply(context.Background(), b, mutator.InitializeVariables()) - require.NoError(t, err) + diags := bundle.Apply(context.Background(), b, mutator.InitializeVariables()) + require.NoError(t, diags.Error()) assert.NotNil(t, b.Config.Variables["foo"]) assert.NotNil(t, b.Config.Variables["bar"]) assert.Equal(t, "This is a description", b.Config.Variables["bar"].Description) @@ -36,7 +36,7 @@ func TestInitializeVariablesWithoutVariables(t *testing.T) { Variables: nil, }, } - err := bundle.Apply(context.Background(), b, mutator.InitializeVariables()) - require.NoError(t, err) + diags := bundle.Apply(context.Background(), b, mutator.InitializeVariables()) + require.NoError(t, diags.Error()) assert.Nil(t, b.Config.Variables) } diff --git a/bundle/config/mutator/initialize_workspace_client.go b/bundle/config/mutator/initialize_workspace_client.go index afc38d4d55..5c905f40c2 100644 --- a/bundle/config/mutator/initialize_workspace_client.go +++ b/bundle/config/mutator/initialize_workspace_client.go @@ -4,6 +4,7 @@ import ( "context" "github.com/databricks/cli/bundle" + "github.com/databricks/cli/libs/diag" ) type initializeWorkspaceClient struct{} @@ -19,7 +20,7 @@ func (m *initializeWorkspaceClient) Name() string { // Apply initializes the workspace client for the bundle. We do this here so // downstream calls to b.WorkspaceClient() do not panic if there's an error in the // auth configuration. 
-func (m *initializeWorkspaceClient) Apply(_ context.Context, b *bundle.Bundle) error { +func (m *initializeWorkspaceClient) Apply(_ context.Context, b *bundle.Bundle) diag.Diagnostics { _, err := b.InitializeWorkspaceClient() - return err + return diag.FromErr(err) } diff --git a/bundle/config/mutator/load_git_details.go b/bundle/config/mutator/load_git_details.go index 3a50d683ef..6ff9aad622 100644 --- a/bundle/config/mutator/load_git_details.go +++ b/bundle/config/mutator/load_git_details.go @@ -5,6 +5,7 @@ import ( "path/filepath" "github.com/databricks/cli/bundle" + "github.com/databricks/cli/libs/diag" "github.com/databricks/cli/libs/git" "github.com/databricks/cli/libs/log" ) @@ -19,11 +20,11 @@ func (m *loadGitDetails) Name() string { return "LoadGitDetails" } -func (m *loadGitDetails) Apply(ctx context.Context, b *bundle.Bundle) error { +func (m *loadGitDetails) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { // Load relevant git repository repo, err := git.NewRepository(b.Config.Path) if err != nil { - return err + return diag.FromErr(err) } // Read branch name of current checkout @@ -57,12 +58,12 @@ func (m *loadGitDetails) Apply(ctx context.Context, b *bundle.Bundle) error { // Compute relative path of the bundle root from the Git repo root. absBundlePath, err := filepath.Abs(b.Config.Path) if err != nil { - return err + return diag.FromErr(err) } // repo.Root() returns the absolute path of the repo relBundlePath, err := filepath.Rel(repo.Root(), absBundlePath) if err != nil { - return err + return diag.FromErr(err) } b.Config.Bundle.Git.BundleRootPath = filepath.ToSlash(relBundlePath) return nil diff --git a/bundle/config/mutator/merge_job_clusters.go b/bundle/config/mutator/merge_job_clusters.go index 9c99cfaadc..20f4efe851 100644 --- a/bundle/config/mutator/merge_job_clusters.go +++ b/bundle/config/mutator/merge_job_clusters.go @@ -4,6 +4,7 @@ import ( "context" "github.com/databricks/cli/bundle" + "github.com/databricks/cli/libs/diag" "github.com/databricks/cli/libs/dyn" "github.com/databricks/cli/libs/dyn/merge" ) @@ -29,8 +30,8 @@ func (m *mergeJobClusters) jobClusterKey(v dyn.Value) string { } } -func (m *mergeJobClusters) Apply(ctx context.Context, b *bundle.Bundle) error { - return b.Config.Mutate(func(v dyn.Value) (dyn.Value, error) { +func (m *mergeJobClusters) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { + err := b.Config.Mutate(func(v dyn.Value) (dyn.Value, error) { if v == dyn.NilValue { return v, nil } @@ -39,4 +40,6 @@ func (m *mergeJobClusters) Apply(ctx context.Context, b *bundle.Bundle) error { return dyn.Map(job, "job_clusters", merge.ElementsByKey("job_cluster_key", m.jobClusterKey)) })) }) + + return diag.FromErr(err) } diff --git a/bundle/config/mutator/merge_job_clusters_test.go b/bundle/config/mutator/merge_job_clusters_test.go index a32b70281f..3ddb2b63a7 100644 --- a/bundle/config/mutator/merge_job_clusters_test.go +++ b/bundle/config/mutator/merge_job_clusters_test.go @@ -50,8 +50,8 @@ func TestMergeJobClusters(t *testing.T) { }, } - err := bundle.Apply(context.Background(), b, mutator.MergeJobClusters()) - assert.NoError(t, err) + diags := bundle.Apply(context.Background(), b, mutator.MergeJobClusters()) + assert.NoError(t, diags.Error()) j := b.Config.Resources.Jobs["foo"] @@ -99,7 +99,7 @@ func TestMergeJobClustersWithNilKey(t *testing.T) { }, } - err := bundle.Apply(context.Background(), b, mutator.MergeJobClusters()) - assert.NoError(t, err) + diags := bundle.Apply(context.Background(), b, 
mutator.MergeJobClusters()) + assert.NoError(t, diags.Error()) assert.Len(t, b.Config.Resources.Jobs["foo"].JobClusters, 1) } diff --git a/bundle/config/mutator/merge_job_tasks.go b/bundle/config/mutator/merge_job_tasks.go index 91aee3a031..68c05383c3 100644 --- a/bundle/config/mutator/merge_job_tasks.go +++ b/bundle/config/mutator/merge_job_tasks.go @@ -4,6 +4,7 @@ import ( "context" "github.com/databricks/cli/bundle" + "github.com/databricks/cli/libs/diag" "github.com/databricks/cli/libs/dyn" "github.com/databricks/cli/libs/dyn/merge" ) @@ -29,8 +30,8 @@ func (m *mergeJobTasks) taskKeyString(v dyn.Value) string { } } -func (m *mergeJobTasks) Apply(ctx context.Context, b *bundle.Bundle) error { - return b.Config.Mutate(func(v dyn.Value) (dyn.Value, error) { +func (m *mergeJobTasks) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { + err := b.Config.Mutate(func(v dyn.Value) (dyn.Value, error) { if v == dyn.NilValue { return v, nil } @@ -39,4 +40,6 @@ func (m *mergeJobTasks) Apply(ctx context.Context, b *bundle.Bundle) error { return dyn.Map(job, "tasks", merge.ElementsByKey("task_key", m.taskKeyString)) })) }) + + return diag.FromErr(err) } diff --git a/bundle/config/mutator/merge_job_tasks_test.go b/bundle/config/mutator/merge_job_tasks_test.go index b3fb357e0b..a9dae1e10b 100644 --- a/bundle/config/mutator/merge_job_tasks_test.go +++ b/bundle/config/mutator/merge_job_tasks_test.go @@ -58,8 +58,8 @@ func TestMergeJobTasks(t *testing.T) { }, } - err := bundle.Apply(context.Background(), b, mutator.MergeJobTasks()) - assert.NoError(t, err) + diags := bundle.Apply(context.Background(), b, mutator.MergeJobTasks()) + assert.NoError(t, diags.Error()) j := b.Config.Resources.Jobs["foo"] @@ -111,7 +111,7 @@ func TestMergeJobTasksWithNilKey(t *testing.T) { }, } - err := bundle.Apply(context.Background(), b, mutator.MergeJobTasks()) - assert.NoError(t, err) + diags := bundle.Apply(context.Background(), b, mutator.MergeJobTasks()) + assert.NoError(t, diags.Error()) assert.Len(t, b.Config.Resources.Jobs["foo"].Tasks, 1) } diff --git a/bundle/config/mutator/merge_pipeline_clusters.go b/bundle/config/mutator/merge_pipeline_clusters.go index 552d997b9f..0b1cf89836 100644 --- a/bundle/config/mutator/merge_pipeline_clusters.go +++ b/bundle/config/mutator/merge_pipeline_clusters.go @@ -5,6 +5,7 @@ import ( "strings" "github.com/databricks/cli/bundle" + "github.com/databricks/cli/libs/diag" "github.com/databricks/cli/libs/dyn" "github.com/databricks/cli/libs/dyn/merge" ) @@ -32,8 +33,8 @@ func (m *mergePipelineClusters) clusterLabel(v dyn.Value) string { } } -func (m *mergePipelineClusters) Apply(ctx context.Context, b *bundle.Bundle) error { - return b.Config.Mutate(func(v dyn.Value) (dyn.Value, error) { +func (m *mergePipelineClusters) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { + err := b.Config.Mutate(func(v dyn.Value) (dyn.Value, error) { if v == dyn.NilValue { return v, nil } @@ -42,4 +43,6 @@ func (m *mergePipelineClusters) Apply(ctx context.Context, b *bundle.Bundle) err return dyn.Map(pipeline, "clusters", merge.ElementsByKey("label", m.clusterLabel)) })) }) + + return diag.FromErr(err) } diff --git a/bundle/config/mutator/merge_pipeline_clusters_test.go b/bundle/config/mutator/merge_pipeline_clusters_test.go index fb54a67d24..f117d93991 100644 --- a/bundle/config/mutator/merge_pipeline_clusters_test.go +++ b/bundle/config/mutator/merge_pipeline_clusters_test.go @@ -42,8 +42,8 @@ func TestMergePipelineClusters(t *testing.T) { }, } - err := 
bundle.Apply(context.Background(), b, mutator.MergePipelineClusters()) - assert.NoError(t, err) + diags := bundle.Apply(context.Background(), b, mutator.MergePipelineClusters()) + assert.NoError(t, diags.Error()) p := b.Config.Resources.Pipelines["foo"] @@ -86,8 +86,8 @@ func TestMergePipelineClustersCaseInsensitive(t *testing.T) { }, } - err := bundle.Apply(context.Background(), b, mutator.MergePipelineClusters()) - assert.NoError(t, err) + diags := bundle.Apply(context.Background(), b, mutator.MergePipelineClusters()) + assert.NoError(t, diags.Error()) p := b.Config.Resources.Pipelines["foo"] assert.Len(t, p.Clusters, 1) @@ -107,8 +107,8 @@ func TestMergePipelineClustersNilPipelines(t *testing.T) { }, } - err := bundle.Apply(context.Background(), b, mutator.MergePipelineClusters()) - assert.NoError(t, err) + diags := bundle.Apply(context.Background(), b, mutator.MergePipelineClusters()) + assert.NoError(t, diags.Error()) } func TestMergePipelineClustersEmptyPipelines(t *testing.T) { @@ -120,6 +120,6 @@ func TestMergePipelineClustersEmptyPipelines(t *testing.T) { }, } - err := bundle.Apply(context.Background(), b, mutator.MergePipelineClusters()) - assert.NoError(t, err) + diags := bundle.Apply(context.Background(), b, mutator.MergePipelineClusters()) + assert.NoError(t, diags.Error()) } diff --git a/bundle/config/mutator/noop.go b/bundle/config/mutator/noop.go index 91c16385bf..f27c940e38 100644 --- a/bundle/config/mutator/noop.go +++ b/bundle/config/mutator/noop.go @@ -4,11 +4,12 @@ import ( "context" "github.com/databricks/cli/bundle" + "github.com/databricks/cli/libs/diag" ) type noop struct{} -func (*noop) Apply(context.Context, *bundle.Bundle) error { +func (*noop) Apply(context.Context, *bundle.Bundle) diag.Diagnostics { return nil } diff --git a/bundle/config/mutator/override_compute.go b/bundle/config/mutator/override_compute.go index 21d9501357..6b5c89be10 100644 --- a/bundle/config/mutator/override_compute.go +++ b/bundle/config/mutator/override_compute.go @@ -2,11 +2,11 @@ package mutator import ( "context" - "fmt" "github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle/config" "github.com/databricks/cli/bundle/config/resources" + "github.com/databricks/cli/libs/diag" "github.com/databricks/cli/libs/env" ) @@ -32,10 +32,10 @@ func overrideJobCompute(j *resources.Job, compute string) { } } -func (m *overrideCompute) Apply(ctx context.Context, b *bundle.Bundle) error { +func (m *overrideCompute) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { if b.Config.Bundle.Mode != config.Development { if b.Config.Bundle.ComputeID != "" { - return fmt.Errorf("cannot override compute for an target that does not use 'mode: development'") + return diag.Errorf("cannot override compute for an target that does not use 'mode: development'") } return nil } diff --git a/bundle/config/mutator/override_compute_test.go b/bundle/config/mutator/override_compute_test.go index 7cc500c608..e5087167dd 100644 --- a/bundle/config/mutator/override_compute_test.go +++ b/bundle/config/mutator/override_compute_test.go @@ -49,8 +49,8 @@ func TestOverrideDevelopment(t *testing.T) { } m := mutator.OverrideCompute() - err := bundle.Apply(context.Background(), b, m) - require.NoError(t, err) + diags := bundle.Apply(context.Background(), b, m) + require.NoError(t, diags.Error()) assert.Nil(t, b.Config.Resources.Jobs["job1"].Tasks[0].NewCluster) assert.Equal(t, "newClusterID", b.Config.Resources.Jobs["job1"].Tasks[0].ExistingClusterId) assert.Equal(t, "newClusterID", 
b.Config.Resources.Jobs["job1"].Tasks[1].ExistingClusterId) @@ -85,8 +85,8 @@ func TestOverrideDevelopmentEnv(t *testing.T) { } m := mutator.OverrideCompute() - err := bundle.Apply(context.Background(), b, m) - require.NoError(t, err) + diags := bundle.Apply(context.Background(), b, m) + require.NoError(t, diags.Error()) assert.Equal(t, "cluster2", b.Config.Resources.Jobs["job1"].Tasks[1].ExistingClusterId) } @@ -110,8 +110,8 @@ func TestOverridePipelineTask(t *testing.T) { } m := mutator.OverrideCompute() - err := bundle.Apply(context.Background(), b, m) - require.NoError(t, err) + diags := bundle.Apply(context.Background(), b, m) + require.NoError(t, diags.Error()) assert.Empty(t, b.Config.Resources.Jobs["job1"].Tasks[0].ExistingClusterId) } @@ -140,8 +140,8 @@ func TestOverrideProduction(t *testing.T) { } m := mutator.OverrideCompute() - err := bundle.Apply(context.Background(), b, m) - require.Error(t, err) + diags := bundle.Apply(context.Background(), b, m) + require.True(t, diags.HasError()) } func TestOverrideProductionEnv(t *testing.T) { @@ -167,6 +167,6 @@ func TestOverrideProductionEnv(t *testing.T) { } m := mutator.OverrideCompute() - err := bundle.Apply(context.Background(), b, m) - require.NoError(t, err) + diags := bundle.Apply(context.Background(), b, m) + require.NoError(t, diags.Error()) } diff --git a/bundle/config/mutator/populate_current_user.go b/bundle/config/mutator/populate_current_user.go index a604cb9025..b5e0bd4374 100644 --- a/bundle/config/mutator/populate_current_user.go +++ b/bundle/config/mutator/populate_current_user.go @@ -6,6 +6,7 @@ import ( "github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle/config" "github.com/databricks/cli/libs/auth" + "github.com/databricks/cli/libs/diag" "github.com/databricks/cli/libs/tags" ) @@ -20,7 +21,7 @@ func (m *populateCurrentUser) Name() string { return "PopulateCurrentUser" } -func (m *populateCurrentUser) Apply(ctx context.Context, b *bundle.Bundle) error { +func (m *populateCurrentUser) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { if b.Config.Workspace.CurrentUser != nil { return nil } @@ -28,7 +29,7 @@ func (m *populateCurrentUser) Apply(ctx context.Context, b *bundle.Bundle) error w := b.WorkspaceClient() me, err := w.CurrentUser.Me(ctx) if err != nil { - return err + return diag.FromErr(err) } b.Config.Workspace.CurrentUser = &config.User{ diff --git a/bundle/config/mutator/process_include.go b/bundle/config/mutator/process_include.go index 350c3c49c9..23acdf12a0 100644 --- a/bundle/config/mutator/process_include.go +++ b/bundle/config/mutator/process_include.go @@ -6,6 +6,7 @@ import ( "github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle/config" + "github.com/databricks/cli/libs/diag" ) type processInclude struct { @@ -25,10 +26,12 @@ func (m *processInclude) Name() string { return fmt.Sprintf("ProcessInclude(%s)", m.relPath) } -func (m *processInclude) Apply(_ context.Context, b *bundle.Bundle) error { +func (m *processInclude) Apply(_ context.Context, b *bundle.Bundle) diag.Diagnostics { this, err := config.Load(m.fullPath) if err != nil { - return err + return diag.FromErr(err) } - return b.Config.Merge(this) + // TODO: Return actual warnings. 
+ err = b.Config.Merge(this) + return diag.FromErr(err) } diff --git a/bundle/config/mutator/process_include_test.go b/bundle/config/mutator/process_include_test.go index 7ca5d19811..0e5351b634 100644 --- a/bundle/config/mutator/process_include_test.go +++ b/bundle/config/mutator/process_include_test.go @@ -32,7 +32,7 @@ func TestProcessInclude(t *testing.T) { f.Close() assert.Equal(t, "foo", b.Config.Workspace.Host) - err = bundle.Apply(context.Background(), b, mutator.ProcessInclude(fullPath, relPath)) - require.NoError(t, err) + diags := bundle.Apply(context.Background(), b, mutator.ProcessInclude(fullPath, relPath)) + require.NoError(t, diags.Error()) assert.Equal(t, "bar", b.Config.Workspace.Host) } diff --git a/bundle/config/mutator/process_root_includes.go b/bundle/config/mutator/process_root_includes.go index 5a5ab1b19c..dbf99f2dc6 100644 --- a/bundle/config/mutator/process_root_includes.go +++ b/bundle/config/mutator/process_root_includes.go @@ -2,7 +2,6 @@ package mutator import ( "context" - "fmt" "os" "path/filepath" "slices" @@ -11,6 +10,7 @@ import ( "github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle/config" "github.com/databricks/cli/bundle/env" + "github.com/databricks/cli/libs/diag" ) // Get extra include paths from environment variable @@ -34,7 +34,7 @@ func (m *processRootIncludes) Name() string { return "ProcessRootIncludes" } -func (m *processRootIncludes) Apply(ctx context.Context, b *bundle.Bundle) error { +func (m *processRootIncludes) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { var out []bundle.Mutator // Map with files we've already seen to avoid loading them twice. @@ -53,7 +53,7 @@ func (m *processRootIncludes) Apply(ctx context.Context, b *bundle.Bundle) error if filepath.IsAbs(extraIncludePath) { rel, err := filepath.Rel(b.Config.Path, extraIncludePath) if err != nil { - return fmt.Errorf("unable to include file '%s': %w", extraIncludePath, err) + return diag.Errorf("unable to include file '%s': %v", extraIncludePath, err) } extraIncludePath = rel } @@ -66,19 +66,19 @@ func (m *processRootIncludes) Apply(ctx context.Context, b *bundle.Bundle) error for _, entry := range b.Config.Include { // Include paths must be relative. if filepath.IsAbs(entry) { - return fmt.Errorf("%s: includes must be relative paths", entry) + return diag.Errorf("%s: includes must be relative paths", entry) } // Anchor includes to the bundle root path. matches, err := filepath.Glob(filepath.Join(b.Config.Path, entry)) if err != nil { - return err + return diag.FromErr(err) } // If the entry is not a glob pattern and no matches found, // return an error because the file defined is not found if len(matches) == 0 && !strings.ContainsAny(entry, "*?[") { - return fmt.Errorf("%s defined in 'include' section does not match any files", entry) + return diag.Errorf("%s defined in 'include' section does not match any files", entry) } // Filter matches to ones we haven't seen yet. 
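For orientation, the conversion applied in the hunks above and below follows one pattern: a bare `return err` becomes `return diag.FromErr(err)`, and `fmt.Errorf(..., %w, err)` becomes `diag.Errorf(..., %v, err)`. The following is a minimal, self-contained sketch of that pattern; the mutator name and error message are invented for illustration and are not part of this patch.

// Illustrative sketch only (hypothetical mutator, not part of this patch).
package mutator

import (
	"context"
	"path/filepath"

	"github.com/databricks/cli/bundle"
	"github.com/databricks/cli/libs/diag"
)

type exampleMutator struct{}

func (m *exampleMutator) Name() string { return "ExampleMutator" }

func (m *exampleMutator) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
	// Plain errors are wrapped as-is with diag.FromErr.
	root, err := filepath.Abs(b.Config.Path)
	if err != nil {
		return diag.FromErr(err)
	}

	// Formatted errors use diag.Errorf; note %v rather than %w, since the
	// message becomes part of a diagnostic instead of a wrapped error chain.
	if _, err := filepath.Rel(root, b.Config.Path); err != nil {
		return diag.Errorf("unable to relativize %s: %v", b.Config.Path, err)
	}
	return nil
}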
@@ -86,7 +86,7 @@ func (m *processRootIncludes) Apply(ctx context.Context, b *bundle.Bundle) error for _, match := range matches { rel, err := filepath.Rel(b.Config.Path, match) if err != nil { - return err + return diag.FromErr(err) } if _, ok := seen[rel]; ok { continue diff --git a/bundle/config/mutator/process_root_includes_test.go b/bundle/config/mutator/process_root_includes_test.go index 645eb89a99..7b21945539 100644 --- a/bundle/config/mutator/process_root_includes_test.go +++ b/bundle/config/mutator/process_root_includes_test.go @@ -23,8 +23,8 @@ func TestProcessRootIncludesEmpty(t *testing.T) { Path: ".", }, } - err := bundle.Apply(context.Background(), b, mutator.ProcessRootIncludes()) - require.NoError(t, err) + diags := bundle.Apply(context.Background(), b, mutator.ProcessRootIncludes()) + require.NoError(t, diags.Error()) } func TestProcessRootIncludesAbs(t *testing.T) { @@ -43,9 +43,9 @@ func TestProcessRootIncludesAbs(t *testing.T) { }, }, } - err := bundle.Apply(context.Background(), b, mutator.ProcessRootIncludes()) - require.Error(t, err) - assert.Contains(t, err.Error(), "must be relative paths") + diags := bundle.Apply(context.Background(), b, mutator.ProcessRootIncludes()) + require.True(t, diags.HasError()) + assert.ErrorContains(t, diags.Error(), "must be relative paths") } func TestProcessRootIncludesSingleGlob(t *testing.T) { @@ -62,9 +62,8 @@ func TestProcessRootIncludesSingleGlob(t *testing.T) { testutil.Touch(t, b.Config.Path, "a.yml") testutil.Touch(t, b.Config.Path, "b.yml") - err := bundle.Apply(context.Background(), b, mutator.ProcessRootIncludes()) - require.NoError(t, err) - + diags := bundle.Apply(context.Background(), b, mutator.ProcessRootIncludes()) + require.NoError(t, diags.Error()) assert.Equal(t, []string{"a.yml", "b.yml"}, b.Config.Include) } @@ -82,9 +81,8 @@ func TestProcessRootIncludesMultiGlob(t *testing.T) { testutil.Touch(t, b.Config.Path, "a1.yml") testutil.Touch(t, b.Config.Path, "b1.yml") - err := bundle.Apply(context.Background(), b, mutator.ProcessRootIncludes()) - require.NoError(t, err) - + diags := bundle.Apply(context.Background(), b, mutator.ProcessRootIncludes()) + require.NoError(t, diags.Error()) assert.Equal(t, []string{"a1.yml", "b1.yml"}, b.Config.Include) } @@ -101,8 +99,8 @@ func TestProcessRootIncludesRemoveDups(t *testing.T) { testutil.Touch(t, b.Config.Path, "a.yml") - err := bundle.Apply(context.Background(), b, mutator.ProcessRootIncludes()) - require.NoError(t, err) + diags := bundle.Apply(context.Background(), b, mutator.ProcessRootIncludes()) + require.NoError(t, diags.Error()) assert.Equal(t, []string{"a.yml"}, b.Config.Include) } @@ -115,9 +113,9 @@ func TestProcessRootIncludesNotExists(t *testing.T) { }, }, } - err := bundle.Apply(context.Background(), b, mutator.ProcessRootIncludes()) - require.Error(t, err) - assert.Contains(t, err.Error(), "notexist.yml defined in 'include' section does not match any files") + diags := bundle.Apply(context.Background(), b, mutator.ProcessRootIncludes()) + require.True(t, diags.HasError()) + assert.ErrorContains(t, diags.Error(), "notexist.yml defined in 'include' section does not match any files") } func TestProcessRootIncludesExtrasFromEnvVar(t *testing.T) { @@ -132,8 +130,8 @@ func TestProcessRootIncludesExtrasFromEnvVar(t *testing.T) { }, } - err := bundle.Apply(context.Background(), b, mutator.ProcessRootIncludes()) - require.NoError(t, err) + diags := bundle.Apply(context.Background(), b, mutator.ProcessRootIncludes()) + require.NoError(t, diags.Error()) 
assert.Contains(t, b.Config.Include, testYamlName) } @@ -155,7 +153,7 @@ func TestProcessRootIncludesDedupExtrasFromEnvVar(t *testing.T) { }, } - err := bundle.Apply(context.Background(), b, mutator.ProcessRootIncludes()) - require.NoError(t, err) + diags := bundle.Apply(context.Background(), b, mutator.ProcessRootIncludes()) + require.NoError(t, diags.Error()) assert.Equal(t, []string{testYamlName}, b.Config.Include) } diff --git a/bundle/config/mutator/process_target_mode.go b/bundle/config/mutator/process_target_mode.go index e57509452e..d3de5728c9 100644 --- a/bundle/config/mutator/process_target_mode.go +++ b/bundle/config/mutator/process_target_mode.go @@ -2,13 +2,13 @@ package mutator import ( "context" - "fmt" "path" "strings" "github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle/config" "github.com/databricks/cli/libs/auth" + "github.com/databricks/cli/libs/diag" "github.com/databricks/cli/libs/log" "github.com/databricks/databricks-sdk-go/service/jobs" "github.com/databricks/databricks-sdk-go/service/ml" @@ -29,7 +29,7 @@ func (m *processTargetMode) Name() string { // Mark all resources as being for 'development' purposes, i.e. // changing their their name, adding tags, and (in the future) // marking them as 'hidden' in the UI. -func transformDevelopmentMode(b *bundle.Bundle) error { +func transformDevelopmentMode(b *bundle.Bundle) diag.Diagnostics { r := b.Config.Resources shortName := b.Config.Workspace.CurrentUser.ShortName @@ -100,9 +100,9 @@ func transformDevelopmentMode(b *bundle.Bundle) error { return nil } -func validateDevelopmentMode(b *bundle.Bundle) error { +func validateDevelopmentMode(b *bundle.Bundle) diag.Diagnostics { if path := findNonUserPath(b); path != "" { - return fmt.Errorf("%s must start with '~/' or contain the current username when using 'mode: development'", path) + return diag.Errorf("%s must start with '~/' or contain the current username when using 'mode: development'", path) } return nil } @@ -125,7 +125,7 @@ func findNonUserPath(b *bundle.Bundle) string { return "" } -func validateProductionMode(ctx context.Context, b *bundle.Bundle, isPrincipalUsed bool) error { +func validateProductionMode(ctx context.Context, b *bundle.Bundle, isPrincipalUsed bool) diag.Diagnostics { if b.Config.Bundle.Git.Inferred { env := b.Config.Bundle.Target log.Warnf(ctx, "target with 'mode: production' should specify an explicit 'targets.%s.git' configuration", env) @@ -134,12 +134,12 @@ func validateProductionMode(ctx context.Context, b *bundle.Bundle, isPrincipalUs r := b.Config.Resources for i := range r.Pipelines { if r.Pipelines[i].Development { - return fmt.Errorf("target with 'mode: production' cannot include a pipeline with 'development: true'") + return diag.Errorf("target with 'mode: production' cannot include a pipeline with 'development: true'") } } if !isPrincipalUsed && !isRunAsSet(r) { - return fmt.Errorf("'run_as' must be set for all jobs when using 'mode: production'") + return diag.Errorf("'run_as' must be set for all jobs when using 'mode: production'") } return nil } @@ -156,12 +156,12 @@ func isRunAsSet(r config.Resources) bool { return true } -func (m *processTargetMode) Apply(ctx context.Context, b *bundle.Bundle) error { +func (m *processTargetMode) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { switch b.Config.Bundle.Mode { case config.Development: - err := validateDevelopmentMode(b) - if err != nil { - return err + diags := validateDevelopmentMode(b) + if diags != nil { + return diags } return 
transformDevelopmentMode(b) case config.Production: @@ -170,7 +170,7 @@ func (m *processTargetMode) Apply(ctx context.Context, b *bundle.Bundle) error { case "": // No action default: - return fmt.Errorf("unsupported value '%s' specified for 'mode': must be either 'development' or 'production'", b.Config.Bundle.Mode) + return diag.Errorf("unsupported value '%s' specified for 'mode': must be either 'development' or 'production'", b.Config.Bundle.Mode) } return nil diff --git a/bundle/config/mutator/process_target_mode_test.go b/bundle/config/mutator/process_target_mode_test.go index a5f61284c7..17f8381608 100644 --- a/bundle/config/mutator/process_target_mode_test.go +++ b/bundle/config/mutator/process_target_mode_test.go @@ -110,8 +110,8 @@ func TestProcessTargetModeDevelopment(t *testing.T) { b := mockBundle(config.Development) m := ProcessTargetMode() - err := bundle.Apply(context.Background(), b, m) - require.NoError(t, err) + diags := bundle.Apply(context.Background(), b, m) + require.NoError(t, diags.Error()) // Job 1 assert.Equal(t, "[dev lennart] job1", b.Config.Resources.Jobs["job1"].Name) @@ -154,8 +154,8 @@ func TestProcessTargetModeDevelopmentTagNormalizationForAws(t *testing.T) { }) b.Config.Workspace.CurrentUser.ShortName = "Héllö wörld?!" - err := bundle.Apply(context.Background(), b, ProcessTargetMode()) - require.NoError(t, err) + diags := bundle.Apply(context.Background(), b, ProcessTargetMode()) + require.NoError(t, diags.Error()) // Assert that tag normalization took place. assert.Equal(t, "Hello world__", b.Config.Resources.Jobs["job1"].Tags["dev"]) @@ -168,8 +168,8 @@ func TestProcessTargetModeDevelopmentTagNormalizationForAzure(t *testing.T) { }) b.Config.Workspace.CurrentUser.ShortName = "Héllö wörld?!" - err := bundle.Apply(context.Background(), b, ProcessTargetMode()) - require.NoError(t, err) + diags := bundle.Apply(context.Background(), b, ProcessTargetMode()) + require.NoError(t, diags.Error()) // Assert that tag normalization took place (Azure allows more characters than AWS). assert.Equal(t, "Héllö wörld?!", b.Config.Resources.Jobs["job1"].Tags["dev"]) @@ -182,8 +182,8 @@ func TestProcessTargetModeDevelopmentTagNormalizationForGcp(t *testing.T) { }) b.Config.Workspace.CurrentUser.ShortName = "Héllö wörld?!" - err := bundle.Apply(context.Background(), b, ProcessTargetMode()) - require.NoError(t, err) + diags := bundle.Apply(context.Background(), b, ProcessTargetMode()) + require.NoError(t, diags.Error()) // Assert that tag normalization took place. 
assert.Equal(t, "Hello_world", b.Config.Resources.Jobs["job1"].Tags["dev"]) @@ -193,8 +193,8 @@ func TestProcessTargetModeDefault(t *testing.T) { b := mockBundle("") m := ProcessTargetMode() - err := bundle.Apply(context.Background(), b, m) - require.NoError(t, err) + diags := bundle.Apply(context.Background(), b, m) + require.NoError(t, diags.Error()) assert.Equal(t, "job1", b.Config.Resources.Jobs["job1"].Name) assert.Equal(t, "pipeline1", b.Config.Resources.Pipelines["pipeline1"].Name) assert.False(t, b.Config.Resources.Pipelines["pipeline1"].PipelineSpec.Development) @@ -205,15 +205,15 @@ func TestProcessTargetModeDefault(t *testing.T) { func TestProcessTargetModeProduction(t *testing.T) { b := mockBundle(config.Production) - err := validateProductionMode(context.Background(), b, false) - require.ErrorContains(t, err, "run_as") + diags := validateProductionMode(context.Background(), b, false) + require.ErrorContains(t, diags.Error(), "run_as") b.Config.Workspace.StatePath = "/Shared/.bundle/x/y/state" b.Config.Workspace.ArtifactPath = "/Shared/.bundle/x/y/artifacts" b.Config.Workspace.FilePath = "/Shared/.bundle/x/y/files" - err = validateProductionMode(context.Background(), b, false) - require.ErrorContains(t, err, "production") + diags = validateProductionMode(context.Background(), b, false) + require.ErrorContains(t, diags.Error(), "production") permissions := []resources.Permission{ { @@ -232,8 +232,8 @@ func TestProcessTargetModeProduction(t *testing.T) { b.Config.Resources.Models["model1"].Permissions = permissions b.Config.Resources.ModelServingEndpoints["servingendpoint1"].Permissions = permissions - err = validateProductionMode(context.Background(), b, false) - require.NoError(t, err) + diags = validateProductionMode(context.Background(), b, false) + require.NoError(t, diags.Error()) assert.Equal(t, "job1", b.Config.Resources.Jobs["job1"].Name) assert.Equal(t, "pipeline1", b.Config.Resources.Pipelines["pipeline1"].Name) @@ -246,12 +246,12 @@ func TestProcessTargetModeProductionOkForPrincipal(t *testing.T) { b := mockBundle(config.Production) // Our target has all kinds of problems when not using service principals ... - err := validateProductionMode(context.Background(), b, false) - require.Error(t, err) + diags := validateProductionMode(context.Background(), b, false) + require.Error(t, diags.Error()) // ... 
but we're much less strict when a principal is used - err = validateProductionMode(context.Background(), b, true) - require.NoError(t, err) + diags = validateProductionMode(context.Background(), b, true) + require.NoError(t, diags.Error()) } // Make sure that we have test coverage for all resource types @@ -277,8 +277,8 @@ func TestAllResourcesRenamed(t *testing.T) { b := mockBundle(config.Development) m := ProcessTargetMode() - err := bundle.Apply(context.Background(), b, m) - require.NoError(t, err) + diags := bundle.Apply(context.Background(), b, m) + require.NoError(t, diags.Error()) resources := reflect.ValueOf(b.Config.Resources) for i := 0; i < resources.NumField(); i++ { diff --git a/bundle/config/mutator/resolve_resource_references.go b/bundle/config/mutator/resolve_resource_references.go index 7a7462ab9d..89eaa346c6 100644 --- a/bundle/config/mutator/resolve_resource_references.go +++ b/bundle/config/mutator/resolve_resource_references.go @@ -5,6 +5,7 @@ import ( "fmt" "github.com/databricks/cli/bundle" + "github.com/databricks/cli/libs/diag" "github.com/databricks/cli/libs/log" "golang.org/x/sync/errgroup" ) @@ -15,7 +16,7 @@ func ResolveResourceReferences() bundle.Mutator { return &resolveResourceReferences{} } -func (m *resolveResourceReferences) Apply(ctx context.Context, b *bundle.Bundle) error { +func (m *resolveResourceReferences) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { errs, errCtx := errgroup.WithContext(ctx) for k := range b.Config.Variables { @@ -40,7 +41,7 @@ func (m *resolveResourceReferences) Apply(ctx context.Context, b *bundle.Bundle) }) } - return errs.Wait() + return diag.FromErr(errs.Wait()) } func (*resolveResourceReferences) Name() string { diff --git a/bundle/config/mutator/resolve_resource_references_test.go b/bundle/config/mutator/resolve_resource_references_test.go index 5f5dab3162..16934ff38b 100644 --- a/bundle/config/mutator/resolve_resource_references_test.go +++ b/bundle/config/mutator/resolve_resource_references_test.go @@ -50,8 +50,8 @@ func TestResolveClusterReference(t *testing.T) { ClusterId: "9876-5432-xywz", }, nil) - err := bundle.Apply(context.Background(), b, ResolveResourceReferences()) - require.NoError(t, err) + diags := bundle.Apply(context.Background(), b, ResolveResourceReferences()) + require.NoError(t, diags.Error()) require.Equal(t, "1234-5678-abcd", *b.Config.Variables["my-cluster-id-1"].Value) require.Equal(t, "9876-5432-xywz", *b.Config.Variables["my-cluster-id-2"].Value) } @@ -79,8 +79,8 @@ func TestResolveNonExistentClusterReference(t *testing.T) { clusterApi := m.GetMockClustersAPI() clusterApi.EXPECT().GetByClusterName(mock.Anything, clusterRef).Return(nil, fmt.Errorf("ClusterDetails named '%s' does not exist", clusterRef)) - err := bundle.Apply(context.Background(), b, ResolveResourceReferences()) - require.ErrorContains(t, err, "failed to resolve cluster: Random, err: ClusterDetails named 'Random' does not exist") + diags := bundle.Apply(context.Background(), b, ResolveResourceReferences()) + require.ErrorContains(t, diags.Error(), "failed to resolve cluster: Random, err: ClusterDetails named 'Random' does not exist") } func TestNoLookupIfVariableIsSet(t *testing.T) { @@ -102,8 +102,8 @@ func TestNoLookupIfVariableIsSet(t *testing.T) { b.Config.Variables["my-cluster-id"].Set("random value") - err := bundle.Apply(context.Background(), b, ResolveResourceReferences()) - require.NoError(t, err) + diags := bundle.Apply(context.Background(), b, ResolveResourceReferences()) + require.NoError(t, 
diags.Error()) require.Equal(t, "random value", *b.Config.Variables["my-cluster-id"].Value) } @@ -129,7 +129,7 @@ func TestResolveServicePrincipal(t *testing.T) { ApplicationId: "app-1234", }, nil) - err := bundle.Apply(context.Background(), b, ResolveResourceReferences()) - require.NoError(t, err) + diags := bundle.Apply(context.Background(), b, ResolveResourceReferences()) + require.NoError(t, diags.Error()) require.Equal(t, "app-1234", *b.Config.Variables["my-sp"].Value) } diff --git a/bundle/config/mutator/resolve_variable_references.go b/bundle/config/mutator/resolve_variable_references.go index 1075e83e35..0738c9bcb5 100644 --- a/bundle/config/mutator/resolve_variable_references.go +++ b/bundle/config/mutator/resolve_variable_references.go @@ -4,6 +4,7 @@ import ( "context" "github.com/databricks/cli/bundle" + "github.com/databricks/cli/libs/diag" "github.com/databricks/cli/libs/dyn" "github.com/databricks/cli/libs/dyn/convert" "github.com/databricks/cli/libs/dyn/dynvar" @@ -26,7 +27,7 @@ func (m *resolveVariableReferences) Validate(ctx context.Context, b *bundle.Bund return nil } -func (m *resolveVariableReferences) Apply(ctx context.Context, b *bundle.Bundle) error { +func (m *resolveVariableReferences) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { prefixes := make([]dyn.Path, len(m.prefixes)) for i, prefix := range m.prefixes { prefixes[i] = dyn.MustPathFromString(prefix) @@ -36,7 +37,7 @@ func (m *resolveVariableReferences) Apply(ctx context.Context, b *bundle.Bundle) // We rewrite it here to make the resolution logic simpler. varPath := dyn.NewPath(dyn.Key("var")) - return b.Config.Mutate(func(root dyn.Value) (dyn.Value, error) { + err := b.Config.Mutate(func(root dyn.Value) (dyn.Value, error) { // Synthesize a copy of the root that has all fields that are present in the type // but not set in the dynamic value set to their corresponding empty value. // This enables users to interpolate variable references to fields that haven't @@ -92,4 +93,6 @@ func (m *resolveVariableReferences) Apply(ctx context.Context, b *bundle.Bundle) } return root, nil }) + + return diag.FromErr(err) } diff --git a/bundle/config/mutator/resolve_variable_references_test.go b/bundle/config/mutator/resolve_variable_references_test.go index 8190c360f4..651ea3d2ce 100644 --- a/bundle/config/mutator/resolve_variable_references_test.go +++ b/bundle/config/mutator/resolve_variable_references_test.go @@ -8,6 +8,7 @@ import ( "github.com/databricks/cli/bundle/config" "github.com/databricks/cli/bundle/config/resources" "github.com/databricks/cli/bundle/config/variable" + "github.com/databricks/cli/libs/diag" "github.com/databricks/cli/libs/dyn" "github.com/databricks/databricks-sdk-go/service/compute" "github.com/databricks/databricks-sdk-go/service/jobs" @@ -29,14 +30,14 @@ func TestResolveVariableReferences(t *testing.T) { } // Apply with an invalid prefix. This should not change the workspace root path. - err := bundle.Apply(context.Background(), b, ResolveVariableReferences("doesntexist")) - require.NoError(t, err) + diags := bundle.Apply(context.Background(), b, ResolveVariableReferences("doesntexist")) + require.NoError(t, diags.Error()) require.Equal(t, "${bundle.name}/bar", b.Config.Workspace.RootPath) require.Equal(t, "${workspace.root_path}/baz", b.Config.Workspace.FilePath) // Apply with a valid prefix. This should change the workspace root path. 
- err = bundle.Apply(context.Background(), b, ResolveVariableReferences("bundle", "workspace")) - require.NoError(t, err) + diags = bundle.Apply(context.Background(), b, ResolveVariableReferences("bundle", "workspace")) + require.NoError(t, diags.Error()) require.Equal(t, "example/bar", b.Config.Workspace.RootPath) require.Equal(t, "example/bar/baz", b.Config.Workspace.FilePath) } @@ -63,8 +64,8 @@ func TestResolveVariableReferencesToBundleVariables(t *testing.T) { } // Apply with a valid prefix. This should change the workspace root path. - err := bundle.Apply(context.Background(), b, ResolveVariableReferences("bundle", "variables")) - require.NoError(t, err) + diags := bundle.Apply(context.Background(), b, ResolveVariableReferences("bundle", "variables")) + require.NoError(t, diags.Error()) require.Equal(t, "example/bar", b.Config.Workspace.RootPath) } @@ -92,15 +93,15 @@ func TestResolveVariableReferencesToEmptyFields(t *testing.T) { } // Apply for the bundle prefix. - err := bundle.Apply(context.Background(), b, ResolveVariableReferences("bundle")) - require.NoError(t, err) + diags := bundle.Apply(context.Background(), b, ResolveVariableReferences("bundle")) + require.NoError(t, diags.Error()) // The job settings should have been interpolated to an empty string. require.Equal(t, "", b.Config.Resources.Jobs["job1"].JobSettings.Tags["git_branch"]) } func TestResolveVariableReferencesForPrimitiveNonStringFields(t *testing.T) { - var err error + var diags diag.Diagnostics b := &bundle.Bundle{ Config: config.Root{ @@ -142,20 +143,21 @@ func TestResolveVariableReferencesForPrimitiveNonStringFields(t *testing.T) { ctx := context.Background() // Initialize the variables. - err = bundle.ApplyFunc(ctx, b, func(ctx context.Context, b *bundle.Bundle) error { - return b.Config.InitializeVariables([]string{ + diags = bundle.ApplyFunc(ctx, b, func(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { + err := b.Config.InitializeVariables([]string{ "no_alert_for_canceled_runs=true", "no_alert_for_skipped_runs=true", "min_workers=1", "max_workers=2", "spot_bid_max_price=0.5", }) + return diag.FromErr(err) }) - require.NoError(t, err) + require.NoError(t, diags.Error()) // Assign the variables to the dynamic configuration. - err = bundle.ApplyFunc(ctx, b, func(ctx context.Context, b *bundle.Bundle) error { - return b.Config.Mutate(func(v dyn.Value) (dyn.Value, error) { + diags = bundle.ApplyFunc(ctx, b, func(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { + err := b.Config.Mutate(func(v dyn.Value) (dyn.Value, error) { var p dyn.Path var err error @@ -180,12 +182,13 @@ func TestResolveVariableReferencesForPrimitiveNonStringFields(t *testing.T) { return v, nil }) + return diag.FromErr(err) }) - require.NoError(t, err) + require.NoError(t, diags.Error()) // Apply for the variable prefix. This should resolve the variables to their values. 
- err = bundle.Apply(context.Background(), b, ResolveVariableReferences("variables")) - require.NoError(t, err) + diags = bundle.Apply(context.Background(), b, ResolveVariableReferences("variables")) + require.NoError(t, diags.Error()) assert.Equal(t, true, b.Config.Resources.Jobs["job1"].JobSettings.NotificationSettings.NoAlertForCanceledRuns) assert.Equal(t, true, b.Config.Resources.Jobs["job1"].JobSettings.NotificationSettings.NoAlertForSkippedRuns) assert.Equal(t, 1, b.Config.Resources.Jobs["job1"].JobSettings.Tasks[0].NewCluster.Autoscale.MinWorkers) diff --git a/bundle/config/mutator/rewrite_sync_paths.go b/bundle/config/mutator/rewrite_sync_paths.go index 5e17b1b5fb..0785c64300 100644 --- a/bundle/config/mutator/rewrite_sync_paths.go +++ b/bundle/config/mutator/rewrite_sync_paths.go @@ -6,6 +6,7 @@ import ( "github.com/databricks/cli/bundle" + "github.com/databricks/cli/libs/diag" "github.com/databricks/cli/libs/dyn" ) @@ -41,8 +42,8 @@ func (m *rewriteSyncPaths) makeRelativeTo(root string) dyn.MapFunc { } } -func (m *rewriteSyncPaths) Apply(ctx context.Context, b *bundle.Bundle) error { - return b.Config.Mutate(func(v dyn.Value) (dyn.Value, error) { +func (m *rewriteSyncPaths) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { + err := b.Config.Mutate(func(v dyn.Value) (dyn.Value, error) { return dyn.Map(v, "sync", func(_ dyn.Path, v dyn.Value) (nv dyn.Value, err error) { v, err = dyn.Map(v, "include", dyn.Foreach(m.makeRelativeTo(b.Config.Path))) if err != nil { @@ -55,4 +56,6 @@ func (m *rewriteSyncPaths) Apply(ctx context.Context, b *bundle.Bundle) error { return v, nil }) }) + + return diag.FromErr(err) } diff --git a/bundle/config/mutator/rewrite_sync_paths_test.go b/bundle/config/mutator/rewrite_sync_paths_test.go index 576333e928..667f811ac9 100644 --- a/bundle/config/mutator/rewrite_sync_paths_test.go +++ b/bundle/config/mutator/rewrite_sync_paths_test.go @@ -34,8 +34,8 @@ func TestRewriteSyncPathsRelative(t *testing.T) { bundletest.SetLocation(b, "sync.exclude[0]", "./a/b/file.yml") bundletest.SetLocation(b, "sync.exclude[1]", "./a/b/c/file.yml") - err := bundle.Apply(context.Background(), b, mutator.RewriteSyncPaths()) - assert.NoError(t, err) + diags := bundle.Apply(context.Background(), b, mutator.RewriteSyncPaths()) + assert.NoError(t, diags.Error()) assert.Equal(t, filepath.Clean("foo"), b.Config.Sync.Include[0]) assert.Equal(t, filepath.Clean("a/bar"), b.Config.Sync.Include[1]) @@ -65,8 +65,8 @@ func TestRewriteSyncPathsAbsolute(t *testing.T) { bundletest.SetLocation(b, "sync.exclude[0]", "/tmp/dir/a/b/file.yml") bundletest.SetLocation(b, "sync.exclude[1]", "/tmp/dir/a/b/c/file.yml") - err := bundle.Apply(context.Background(), b, mutator.RewriteSyncPaths()) - assert.NoError(t, err) + diags := bundle.Apply(context.Background(), b, mutator.RewriteSyncPaths()) + assert.NoError(t, diags.Error()) assert.Equal(t, filepath.Clean("foo"), b.Config.Sync.Include[0]) assert.Equal(t, filepath.Clean("a/bar"), b.Config.Sync.Include[1]) @@ -82,8 +82,8 @@ func TestRewriteSyncPathsErrorPaths(t *testing.T) { }, } - err := bundle.Apply(context.Background(), b, mutator.RewriteSyncPaths()) - assert.NoError(t, err) + diags := bundle.Apply(context.Background(), b, mutator.RewriteSyncPaths()) + assert.NoError(t, diags.Error()) }) t.Run("empty include/exclude blocks", func(t *testing.T) { @@ -97,7 +97,7 @@ func TestRewriteSyncPathsErrorPaths(t *testing.T) { }, } - err := bundle.Apply(context.Background(), b, mutator.RewriteSyncPaths()) - assert.NoError(t, err) + diags := 
bundle.Apply(context.Background(), b, mutator.RewriteSyncPaths()) + assert.NoError(t, diags.Error()) }) } diff --git a/bundle/config/mutator/run_as.go b/bundle/config/mutator/run_as.go index 7d1a491753..243f8ef7d9 100644 --- a/bundle/config/mutator/run_as.go +++ b/bundle/config/mutator/run_as.go @@ -6,6 +6,7 @@ import ( "github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle/config/resources" + "github.com/databricks/cli/libs/diag" "github.com/databricks/databricks-sdk-go/service/jobs" ) @@ -23,7 +24,7 @@ func (m *setRunAs) Name() string { return "SetRunAs" } -func (m *setRunAs) Apply(_ context.Context, b *bundle.Bundle) error { +func (m *setRunAs) Apply(_ context.Context, b *bundle.Bundle) diag.Diagnostics { runAs := b.Config.RunAs if runAs == nil { return nil diff --git a/bundle/config/mutator/select_default_target.go b/bundle/config/mutator/select_default_target.go index be5046f82c..4ac0aae6f8 100644 --- a/bundle/config/mutator/select_default_target.go +++ b/bundle/config/mutator/select_default_target.go @@ -2,10 +2,10 @@ package mutator import ( "context" - "fmt" "strings" "github.com/databricks/cli/bundle" + "github.com/databricks/cli/libs/diag" "golang.org/x/exp/maps" ) @@ -20,9 +20,9 @@ func (m *selectDefaultTarget) Name() string { return "SelectDefaultTarget" } -func (m *selectDefaultTarget) Apply(ctx context.Context, b *bundle.Bundle) error { +func (m *selectDefaultTarget) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { if len(b.Config.Targets) == 0 { - return fmt.Errorf("no targets defined") + return diag.Errorf("no targets defined") } // One target means there's only one default. @@ -41,12 +41,12 @@ func (m *selectDefaultTarget) Apply(ctx context.Context, b *bundle.Bundle) error // It is invalid to have multiple targets with the `default` flag set. if len(defaults) > 1 { - return fmt.Errorf("multiple targets are marked as default (%s)", strings.Join(defaults, ", ")) + return diag.Errorf("multiple targets are marked as default (%s)", strings.Join(defaults, ", ")) } // If no target has the `default` flag set, ask the user to specify one. if len(defaults) == 0 { - return fmt.Errorf("please specify target") + return diag.Errorf("please specify target") } // One default remaining. 
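A minimal sketch of what a custom mutator looks like after this migration, assuming the bundle.Mutator shape and the diag helpers (diag.Errorf, diag.FromErr, diag.Diagnostics) used in the hunks above. The ensureTarget name and its no-op Mutate call are hypothetical and only illustrate how error returns translate into diagnostics; they are not part of this patch.

package mutator

import (
	"context"

	"github.com/databricks/cli/bundle"
	"github.com/databricks/cli/libs/diag"
	"github.com/databricks/cli/libs/dyn"
)

// ensureTarget is a hypothetical example mutator; it exists only to show the
// new Apply signature and is not part of the CLI.
type ensureTarget struct{}

func EnsureTarget() bundle.Mutator {
	return &ensureTarget{}
}

func (m *ensureTarget) Name() string {
	return "EnsureTarget"
}

func (m *ensureTarget) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
	// Validation failures that used to be fmt.Errorf become diag.Errorf.
	if b.Config.Bundle.Target == "" {
		return diag.Errorf("no target selected")
	}

	// Errors returned by existing helpers are wrapped with diag.FromErr; a nil
	// error yields no failing diagnostic, so the success path is unchanged.
	err := b.Config.Mutate(func(v dyn.Value) (dyn.Value, error) {
		return v, nil // no-op here; a real mutator would rewrite v
	})
	return diag.FromErr(err)
}

Callers still go through bundle.Apply, which now returns diag.Diagnostics, so the tests in this patch assert on diags.Error() rather than on a plain error value.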
diff --git a/bundle/config/mutator/select_default_target_test.go b/bundle/config/mutator/select_default_target_test.go index 1c2e451fea..dfea4ff672 100644 --- a/bundle/config/mutator/select_default_target_test.go +++ b/bundle/config/mutator/select_default_target_test.go @@ -16,8 +16,8 @@ func TestSelectDefaultTargetNoTargets(t *testing.T) { Targets: map[string]*config.Target{}, }, } - err := bundle.Apply(context.Background(), b, mutator.SelectDefaultTarget()) - assert.ErrorContains(t, err, "no targets defined") + diags := bundle.Apply(context.Background(), b, mutator.SelectDefaultTarget()) + assert.ErrorContains(t, diags.Error(), "no targets defined") } func TestSelectDefaultTargetSingleTargets(t *testing.T) { @@ -28,8 +28,8 @@ func TestSelectDefaultTargetSingleTargets(t *testing.T) { }, }, } - err := bundle.Apply(context.Background(), b, mutator.SelectDefaultTarget()) - assert.NoError(t, err) + diags := bundle.Apply(context.Background(), b, mutator.SelectDefaultTarget()) + assert.NoError(t, diags.Error()) assert.Equal(t, "foo", b.Config.Bundle.Target) } @@ -43,8 +43,8 @@ func TestSelectDefaultTargetNoDefaults(t *testing.T) { }, }, } - err := bundle.Apply(context.Background(), b, mutator.SelectDefaultTarget()) - assert.ErrorContains(t, err, "please specify target") + diags := bundle.Apply(context.Background(), b, mutator.SelectDefaultTarget()) + assert.ErrorContains(t, diags.Error(), "please specify target") } func TestSelectDefaultTargetNoDefaultsWithNil(t *testing.T) { @@ -56,8 +56,8 @@ func TestSelectDefaultTargetNoDefaultsWithNil(t *testing.T) { }, }, } - err := bundle.Apply(context.Background(), b, mutator.SelectDefaultTarget()) - assert.ErrorContains(t, err, "please specify target") + diags := bundle.Apply(context.Background(), b, mutator.SelectDefaultTarget()) + assert.ErrorContains(t, diags.Error(), "please specify target") } func TestSelectDefaultTargetMultipleDefaults(t *testing.T) { @@ -70,8 +70,8 @@ func TestSelectDefaultTargetMultipleDefaults(t *testing.T) { }, }, } - err := bundle.Apply(context.Background(), b, mutator.SelectDefaultTarget()) - assert.ErrorContains(t, err, "multiple targets are marked as default") + diags := bundle.Apply(context.Background(), b, mutator.SelectDefaultTarget()) + assert.ErrorContains(t, diags.Error(), "multiple targets are marked as default") } func TestSelectDefaultTargetSingleDefault(t *testing.T) { @@ -84,7 +84,7 @@ func TestSelectDefaultTargetSingleDefault(t *testing.T) { }, }, } - err := bundle.Apply(context.Background(), b, mutator.SelectDefaultTarget()) - assert.NoError(t, err) + diags := bundle.Apply(context.Background(), b, mutator.SelectDefaultTarget()) + assert.NoError(t, diags.Error()) assert.Equal(t, "bar", b.Config.Bundle.Target) } diff --git a/bundle/config/mutator/select_target.go b/bundle/config/mutator/select_target.go index 95558f030f..178686b6ed 100644 --- a/bundle/config/mutator/select_target.go +++ b/bundle/config/mutator/select_target.go @@ -6,6 +6,7 @@ import ( "strings" "github.com/databricks/cli/bundle" + "github.com/databricks/cli/libs/diag" "golang.org/x/exp/maps" ) @@ -24,21 +25,21 @@ func (m *selectTarget) Name() string { return fmt.Sprintf("SelectTarget(%s)", m.name) } -func (m *selectTarget) Apply(_ context.Context, b *bundle.Bundle) error { +func (m *selectTarget) Apply(_ context.Context, b *bundle.Bundle) diag.Diagnostics { if b.Config.Targets == nil { - return fmt.Errorf("no targets defined") + return diag.Errorf("no targets defined") } // Get specified target _, ok := b.Config.Targets[m.name] if !ok { - return 
fmt.Errorf("%s: no such target. Available targets: %s", m.name, strings.Join(maps.Keys(b.Config.Targets), ", ")) + return diag.Errorf("%s: no such target. Available targets: %s", m.name, strings.Join(maps.Keys(b.Config.Targets), ", ")) } // Merge specified target into root configuration structure. err := b.Config.MergeTargetOverrides(m.name) if err != nil { - return err + return diag.FromErr(err) } // Store specified target in configuration for reference. diff --git a/bundle/config/mutator/select_target_test.go b/bundle/config/mutator/select_target_test.go index 20467270b8..a7c5ac93c4 100644 --- a/bundle/config/mutator/select_target_test.go +++ b/bundle/config/mutator/select_target_test.go @@ -26,8 +26,8 @@ func TestSelectTarget(t *testing.T) { }, }, } - err := bundle.Apply(context.Background(), b, mutator.SelectTarget("default")) - require.NoError(t, err) + diags := bundle.Apply(context.Background(), b, mutator.SelectTarget("default")) + require.NoError(t, diags.Error()) assert.Equal(t, "bar", b.Config.Workspace.Host) } @@ -39,6 +39,6 @@ func TestSelectTargetNotFound(t *testing.T) { }, }, } - err := bundle.Apply(context.Background(), b, mutator.SelectTarget("doesnt-exist")) - require.Error(t, err, "no targets defined") + diags := bundle.Apply(context.Background(), b, mutator.SelectTarget("doesnt-exist")) + require.Error(t, diags.Error(), "no targets defined") } diff --git a/bundle/config/mutator/set_variables.go b/bundle/config/mutator/set_variables.go index 3b9ac8ae76..bb88379e0f 100644 --- a/bundle/config/mutator/set_variables.go +++ b/bundle/config/mutator/set_variables.go @@ -2,10 +2,10 @@ package mutator import ( "context" - "fmt" "github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle/config/variable" + "github.com/databricks/cli/libs/diag" "github.com/databricks/cli/libs/env" ) @@ -21,7 +21,7 @@ func (m *setVariables) Name() string { return "SetVariables" } -func setVariable(ctx context.Context, v *variable.Variable, name string) error { +func setVariable(ctx context.Context, v *variable.Variable, name string) diag.Diagnostics { // case: variable already has value initialized, so skip if v.HasValue() { return nil @@ -32,7 +32,7 @@ func setVariable(ctx context.Context, v *variable.Variable, name string) error { if val, ok := env.Lookup(ctx, envVarName); ok { err := v.Set(val) if err != nil { - return fmt.Errorf(`failed to assign value "%s" to variable %s from environment variable %s with error: %w`, val, name, envVarName, err) + return diag.Errorf(`failed to assign value "%s" to variable %s from environment variable %s with error: %v`, val, name, envVarName, err) } return nil } @@ -41,7 +41,7 @@ func setVariable(ctx context.Context, v *variable.Variable, name string) error { if v.HasDefault() { err := v.Set(*v.Default) if err != nil { - return fmt.Errorf(`failed to assign default value from config "%s" to variable %s with error: %w`, *v.Default, name, err) + return diag.Errorf(`failed to assign default value from config "%s" to variable %s with error: %v`, *v.Default, name, err) } return nil } @@ -55,15 +55,16 @@ func setVariable(ctx context.Context, v *variable.Variable, name string) error { // We should have had a value to set for the variable at this point. // TODO: use cmdio to request values for unassigned variables if current // terminal is a tty. Tracked in https://github.com/databricks/cli/issues/379 - return fmt.Errorf(`no value assigned to required variable %s. 
Assignment can be done through the "--var" flag or by setting the %s environment variable`, name, bundleVarPrefix+name) + return diag.Errorf(`no value assigned to required variable %s. Assignment can be done through the "--var" flag or by setting the %s environment variable`, name, bundleVarPrefix+name) } -func (m *setVariables) Apply(ctx context.Context, b *bundle.Bundle) error { +func (m *setVariables) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { + var diags diag.Diagnostics for name, variable := range b.Config.Variables { - err := setVariable(ctx, variable, name) - if err != nil { - return err + diags = diags.Extend(setVariable(ctx, variable, name)) + if diags.HasError() { + return diags } } - return nil + return diags } diff --git a/bundle/config/mutator/set_variables_test.go b/bundle/config/mutator/set_variables_test.go index 15a98e5cf6..ae4f798969 100644 --- a/bundle/config/mutator/set_variables_test.go +++ b/bundle/config/mutator/set_variables_test.go @@ -21,8 +21,8 @@ func TestSetVariableFromProcessEnvVar(t *testing.T) { // set value for variable as an environment variable t.Setenv("BUNDLE_VAR_foo", "process-env") - err := setVariable(context.Background(), &variable, "foo") - require.NoError(t, err) + diags := setVariable(context.Background(), &variable, "foo") + require.NoError(t, diags.Error()) assert.Equal(t, *variable.Value, "process-env") } @@ -33,8 +33,8 @@ func TestSetVariableUsingDefaultValue(t *testing.T) { Default: &defaultVal, } - err := setVariable(context.Background(), &variable, "foo") - require.NoError(t, err) + diags := setVariable(context.Background(), &variable, "foo") + require.NoError(t, diags.Error()) assert.Equal(t, *variable.Value, "default") } @@ -49,8 +49,8 @@ func TestSetVariableWhenAlreadyAValueIsAssigned(t *testing.T) { // since a value is already assigned to the variable, it would not be overridden // by the default value - err := setVariable(context.Background(), &variable, "foo") - require.NoError(t, err) + diags := setVariable(context.Background(), &variable, "foo") + require.NoError(t, diags.Error()) assert.Equal(t, *variable.Value, "assigned-value") } @@ -68,8 +68,8 @@ func TestSetVariableEnvVarValueDoesNotOverridePresetValue(t *testing.T) { // since a value is already assigned to the variable, it would not be overridden // by the value from environment - err := setVariable(context.Background(), &variable, "foo") - require.NoError(t, err) + diags := setVariable(context.Background(), &variable, "foo") + require.NoError(t, diags.Error()) assert.Equal(t, *variable.Value, "assigned-value") } @@ -79,8 +79,8 @@ func TestSetVariablesErrorsIfAValueCouldNotBeResolved(t *testing.T) { } // fails because we could not resolve a value for the variable - err := setVariable(context.Background(), &variable, "foo") - assert.ErrorContains(t, err, "no value assigned to required variable foo. Assignment can be done through the \"--var\" flag or by setting the BUNDLE_VAR_foo environment variable") + diags := setVariable(context.Background(), &variable, "foo") + assert.ErrorContains(t, diags.Error(), "no value assigned to required variable foo. 
Assignment can be done through the \"--var\" flag or by setting the BUNDLE_VAR_foo environment variable") } func TestSetVariablesMutator(t *testing.T) { @@ -108,8 +108,8 @@ func TestSetVariablesMutator(t *testing.T) { t.Setenv("BUNDLE_VAR_b", "env-var-b") - err := bundle.Apply(context.Background(), b, SetVariables()) - require.NoError(t, err) + diags := bundle.Apply(context.Background(), b, SetVariables()) + require.NoError(t, diags.Error()) assert.Equal(t, "default-a", *b.Config.Variables["a"].Value) assert.Equal(t, "env-var-b", *b.Config.Variables["b"].Value) assert.Equal(t, "assigned-val-c", *b.Config.Variables["c"].Value) diff --git a/bundle/config/mutator/trampoline.go b/bundle/config/mutator/trampoline.go index 24600f52fa..72c053b594 100644 --- a/bundle/config/mutator/trampoline.go +++ b/bundle/config/mutator/trampoline.go @@ -9,6 +9,7 @@ import ( "text/template" "github.com/databricks/cli/bundle" + "github.com/databricks/cli/libs/diag" "github.com/databricks/databricks-sdk-go/service/jobs" ) @@ -40,12 +41,12 @@ func (m *trampoline) Name() string { return fmt.Sprintf("trampoline(%s)", m.name) } -func (m *trampoline) Apply(ctx context.Context, b *bundle.Bundle) error { +func (m *trampoline) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { tasks := m.functions.GetTasks(b) for _, task := range tasks { err := m.generateNotebookWrapper(ctx, b, task) if err != nil { - return err + return diag.FromErr(err) } } return nil diff --git a/bundle/config/mutator/trampoline_test.go b/bundle/config/mutator/trampoline_test.go index a3e06b3033..8a375aa9ba 100644 --- a/bundle/config/mutator/trampoline_test.go +++ b/bundle/config/mutator/trampoline_test.go @@ -80,8 +80,8 @@ func TestGenerateTrampoline(t *testing.T) { funcs := functions{} trampoline := NewTrampoline("test_trampoline", &funcs, "Hello from {{.MyName}}") - err := bundle.Apply(ctx, b, trampoline) - require.NoError(t, err) + diags := bundle.Apply(ctx, b, trampoline) + require.NoError(t, diags.Error()) dir, err := b.InternalDir(ctx) require.NoError(t, err) diff --git a/bundle/config/mutator/translate_paths.go b/bundle/config/mutator/translate_paths.go index ac1da5bf21..af6896ee0d 100644 --- a/bundle/config/mutator/translate_paths.go +++ b/bundle/config/mutator/translate_paths.go @@ -11,6 +11,7 @@ import ( "strings" "github.com/databricks/cli/bundle" + "github.com/databricks/cli/libs/diag" "github.com/databricks/cli/libs/dyn" "github.com/databricks/cli/libs/notebook" ) @@ -185,10 +186,10 @@ func (m *translatePaths) rewriteRelativeTo(b *bundle.Bundle, p dyn.Path, v dyn.V return dyn.InvalidValue, err } -func (m *translatePaths) Apply(_ context.Context, b *bundle.Bundle) error { +func (m *translatePaths) Apply(_ context.Context, b *bundle.Bundle) diag.Diagnostics { m.seen = make(map[string]string) - return b.Config.Mutate(func(v dyn.Value) (dyn.Value, error) { + err := b.Config.Mutate(func(v dyn.Value) (dyn.Value, error) { var err error for _, fn := range []func(*bundle.Bundle, dyn.Value) (dyn.Value, error){ m.applyJobTranslations, @@ -202,4 +203,6 @@ func (m *translatePaths) Apply(_ context.Context, b *bundle.Bundle) error { } return v, nil }) + + return diag.FromErr(err) } diff --git a/bundle/config/mutator/translate_paths_test.go b/bundle/config/mutator/translate_paths_test.go index 7e2f12ab04..bd2ec809ba 100644 --- a/bundle/config/mutator/translate_paths_test.go +++ b/bundle/config/mutator/translate_paths_test.go @@ -78,8 +78,8 @@ func TestTranslatePathsSkippedWithGitSource(t *testing.T) { bundletest.SetLocation(b, ".", 
filepath.Join(dir, "resource.yml")) - err := bundle.Apply(context.Background(), b, mutator.TranslatePaths()) - require.NoError(t, err) + diags := bundle.Apply(context.Background(), b, mutator.TranslatePaths()) + require.NoError(t, diags.Error()) assert.Equal( t, @@ -201,8 +201,8 @@ func TestTranslatePaths(t *testing.T) { bundletest.SetLocation(b, ".", filepath.Join(dir, "resource.yml")) - err := bundle.Apply(context.Background(), b, mutator.TranslatePaths()) - require.NoError(t, err) + diags := bundle.Apply(context.Background(), b, mutator.TranslatePaths()) + require.NoError(t, diags.Error()) // Assert that the path in the tasks now refer to the artifact. assert.Equal( @@ -332,8 +332,8 @@ func TestTranslatePathsInSubdirectories(t *testing.T) { bundletest.SetLocation(b, "resources.jobs", filepath.Join(dir, "job/resource.yml")) bundletest.SetLocation(b, "resources.pipelines", filepath.Join(dir, "pipeline/resource.yml")) - err := bundle.Apply(context.Background(), b, mutator.TranslatePaths()) - require.NoError(t, err) + diags := bundle.Apply(context.Background(), b, mutator.TranslatePaths()) + require.NoError(t, diags.Error()) assert.Equal( t, @@ -392,8 +392,8 @@ func TestTranslatePathsOutsideBundleRoot(t *testing.T) { bundletest.SetLocation(b, ".", filepath.Join(dir, "../resource.yml")) - err := bundle.Apply(context.Background(), b, mutator.TranslatePaths()) - assert.ErrorContains(t, err, "is not contained in bundle root") + diags := bundle.Apply(context.Background(), b, mutator.TranslatePaths()) + assert.ErrorContains(t, diags.Error(), "is not contained in bundle root") } func TestJobNotebookDoesNotExistError(t *testing.T) { @@ -422,8 +422,8 @@ func TestJobNotebookDoesNotExistError(t *testing.T) { bundletest.SetLocation(b, ".", filepath.Join(dir, "fake.yml")) - err := bundle.Apply(context.Background(), b, mutator.TranslatePaths()) - assert.EqualError(t, err, "notebook ./doesnt_exist.py not found") + diags := bundle.Apply(context.Background(), b, mutator.TranslatePaths()) + assert.EqualError(t, diags.Error(), "notebook ./doesnt_exist.py not found") } func TestJobFileDoesNotExistError(t *testing.T) { @@ -452,8 +452,8 @@ func TestJobFileDoesNotExistError(t *testing.T) { bundletest.SetLocation(b, ".", filepath.Join(dir, "fake.yml")) - err := bundle.Apply(context.Background(), b, mutator.TranslatePaths()) - assert.EqualError(t, err, "file ./doesnt_exist.py not found") + diags := bundle.Apply(context.Background(), b, mutator.TranslatePaths()) + assert.EqualError(t, diags.Error(), "file ./doesnt_exist.py not found") } func TestPipelineNotebookDoesNotExistError(t *testing.T) { @@ -482,8 +482,8 @@ func TestPipelineNotebookDoesNotExistError(t *testing.T) { bundletest.SetLocation(b, ".", filepath.Join(dir, "fake.yml")) - err := bundle.Apply(context.Background(), b, mutator.TranslatePaths()) - assert.EqualError(t, err, "notebook ./doesnt_exist.py not found") + diags := bundle.Apply(context.Background(), b, mutator.TranslatePaths()) + assert.EqualError(t, diags.Error(), "notebook ./doesnt_exist.py not found") } func TestPipelineFileDoesNotExistError(t *testing.T) { @@ -512,8 +512,8 @@ func TestPipelineFileDoesNotExistError(t *testing.T) { bundletest.SetLocation(b, ".", filepath.Join(dir, "fake.yml")) - err := bundle.Apply(context.Background(), b, mutator.TranslatePaths()) - assert.EqualError(t, err, "file ./doesnt_exist.py not found") + diags := bundle.Apply(context.Background(), b, mutator.TranslatePaths()) + assert.EqualError(t, diags.Error(), "file ./doesnt_exist.py not found") } func 
TestJobSparkPythonTaskWithNotebookSourceError(t *testing.T) { @@ -546,8 +546,8 @@ func TestJobSparkPythonTaskWithNotebookSourceError(t *testing.T) { bundletest.SetLocation(b, ".", filepath.Join(dir, "resource.yml")) - err := bundle.Apply(context.Background(), b, mutator.TranslatePaths()) - assert.ErrorContains(t, err, `expected a file for "resources.jobs.job.tasks[0].spark_python_task.python_file" but got a notebook`) + diags := bundle.Apply(context.Background(), b, mutator.TranslatePaths()) + assert.ErrorContains(t, diags.Error(), `expected a file for "resources.jobs.job.tasks[0].spark_python_task.python_file" but got a notebook`) } func TestJobNotebookTaskWithFileSourceError(t *testing.T) { @@ -580,8 +580,8 @@ func TestJobNotebookTaskWithFileSourceError(t *testing.T) { bundletest.SetLocation(b, ".", filepath.Join(dir, "resource.yml")) - err := bundle.Apply(context.Background(), b, mutator.TranslatePaths()) - assert.ErrorContains(t, err, `expected a notebook for "resources.jobs.job.tasks[0].notebook_task.notebook_path" but got a file`) + diags := bundle.Apply(context.Background(), b, mutator.TranslatePaths()) + assert.ErrorContains(t, diags.Error(), `expected a notebook for "resources.jobs.job.tasks[0].notebook_task.notebook_path" but got a file`) } func TestPipelineNotebookLibraryWithFileSourceError(t *testing.T) { @@ -614,8 +614,8 @@ func TestPipelineNotebookLibraryWithFileSourceError(t *testing.T) { bundletest.SetLocation(b, ".", filepath.Join(dir, "resource.yml")) - err := bundle.Apply(context.Background(), b, mutator.TranslatePaths()) - assert.ErrorContains(t, err, `expected a notebook for "resources.pipelines.pipeline.libraries[0].notebook.path" but got a file`) + diags := bundle.Apply(context.Background(), b, mutator.TranslatePaths()) + assert.ErrorContains(t, diags.Error(), `expected a notebook for "resources.pipelines.pipeline.libraries[0].notebook.path" but got a file`) } func TestPipelineFileLibraryWithNotebookSourceError(t *testing.T) { @@ -648,6 +648,6 @@ func TestPipelineFileLibraryWithNotebookSourceError(t *testing.T) { bundletest.SetLocation(b, ".", filepath.Join(dir, "resource.yml")) - err := bundle.Apply(context.Background(), b, mutator.TranslatePaths()) - assert.ErrorContains(t, err, `expected a file for "resources.pipelines.pipeline.libraries[0].file.path" but got a notebook`) + diags := bundle.Apply(context.Background(), b, mutator.TranslatePaths()) + assert.ErrorContains(t, diags.Error(), `expected a file for "resources.pipelines.pipeline.libraries[0].file.path" but got a notebook`) } diff --git a/bundle/config/mutator/validate_git_details.go b/bundle/config/mutator/validate_git_details.go index 116498bfc1..69a4221fdc 100644 --- a/bundle/config/mutator/validate_git_details.go +++ b/bundle/config/mutator/validate_git_details.go @@ -2,9 +2,9 @@ package mutator import ( "context" - "fmt" "github.com/databricks/cli/bundle" + "github.com/databricks/cli/libs/diag" ) type validateGitDetails struct{} @@ -17,13 +17,13 @@ func (m *validateGitDetails) Name() string { return "ValidateGitDetails" } -func (m *validateGitDetails) Apply(ctx context.Context, b *bundle.Bundle) error { +func (m *validateGitDetails) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { if b.Config.Bundle.Git.Branch == "" || b.Config.Bundle.Git.ActualBranch == "" { return nil } if b.Config.Bundle.Git.Branch != b.Config.Bundle.Git.ActualBranch && !b.Config.Bundle.Force { - return fmt.Errorf("not on the right Git branch:\n expected according to configuration: %s\n actual: %s\nuse --force to 
override", b.Config.Bundle.Git.Branch, b.Config.Bundle.Git.ActualBranch) + return diag.Errorf("not on the right Git branch:\n expected according to configuration: %s\n actual: %s\nuse --force to override", b.Config.Bundle.Git.Branch, b.Config.Bundle.Git.ActualBranch) } return nil } diff --git a/bundle/config/mutator/validate_git_details_test.go b/bundle/config/mutator/validate_git_details_test.go index f207d9cf96..952e0b5721 100644 --- a/bundle/config/mutator/validate_git_details_test.go +++ b/bundle/config/mutator/validate_git_details_test.go @@ -22,9 +22,8 @@ func TestValidateGitDetailsMatchingBranches(t *testing.T) { } m := ValidateGitDetails() - err := bundle.Apply(context.Background(), b, m) - - assert.NoError(t, err) + diags := bundle.Apply(context.Background(), b, m) + assert.NoError(t, diags.Error()) } func TestValidateGitDetailsNonMatchingBranches(t *testing.T) { @@ -40,10 +39,10 @@ func TestValidateGitDetailsNonMatchingBranches(t *testing.T) { } m := ValidateGitDetails() - err := bundle.Apply(context.Background(), b, m) + diags := bundle.Apply(context.Background(), b, m) expectedError := "not on the right Git branch:\n expected according to configuration: main\n actual: feature\nuse --force to override" - assert.EqualError(t, err, expectedError) + assert.EqualError(t, diags.Error(), expectedError) } func TestValidateGitDetailsNotUsingGit(t *testing.T) { @@ -59,7 +58,6 @@ func TestValidateGitDetailsNotUsingGit(t *testing.T) { } m := ValidateGitDetails() - err := bundle.Apply(context.Background(), b, m) - - assert.NoError(t, err) + diags := bundle.Apply(context.Background(), b, m) + assert.NoError(t, diags.Error()) } diff --git a/bundle/deferred.go b/bundle/deferred.go index 5f3351fcfd..56c2bdca21 100644 --- a/bundle/deferred.go +++ b/bundle/deferred.go @@ -3,7 +3,7 @@ package bundle import ( "context" - "github.com/databricks/cli/libs/errs" + "github.com/databricks/cli/libs/diag" ) type DeferredMutator struct { @@ -22,12 +22,9 @@ func Defer(mutator Mutator, finally Mutator) Mutator { } } -func (d *DeferredMutator) Apply(ctx context.Context, b *Bundle) error { - mainErr := Apply(ctx, b, d.mutator) - errOnFinish := Apply(ctx, b, d.finally) - if mainErr != nil || errOnFinish != nil { - return errs.FromMany(mainErr, errOnFinish) - } - - return nil +func (d *DeferredMutator) Apply(ctx context.Context, b *Bundle) diag.Diagnostics { + var diags diag.Diagnostics + diags = diags.Extend(Apply(ctx, b, d.mutator)) + diags = diags.Extend(Apply(ctx, b, d.finally)) + return diags } diff --git a/bundle/deferred_test.go b/bundle/deferred_test.go index f75867d696..3abc4aa102 100644 --- a/bundle/deferred_test.go +++ b/bundle/deferred_test.go @@ -2,9 +2,9 @@ package bundle import ( "context" - "fmt" "testing" + "github.com/databricks/cli/libs/diag" "github.com/stretchr/testify/assert" ) @@ -17,9 +17,9 @@ func (t *mutatorWithError) Name() string { return "mutatorWithError" } -func (t *mutatorWithError) Apply(_ context.Context, b *Bundle) error { +func (t *mutatorWithError) Apply(_ context.Context, b *Bundle) diag.Diagnostics { t.applyCalled++ - return fmt.Errorf(t.errorMsg) + return diag.Errorf(t.errorMsg) } func TestDeferredMutatorWhenAllMutatorsSucceed(t *testing.T) { @@ -30,8 +30,8 @@ func TestDeferredMutatorWhenAllMutatorsSucceed(t *testing.T) { deferredMutator := Defer(Seq(m1, m2, m3), cleanup) b := &Bundle{} - err := Apply(context.Background(), b, deferredMutator) - assert.NoError(t, err) + diags := Apply(context.Background(), b, deferredMutator) + assert.NoError(t, diags.Error()) 
assert.Equal(t, 1, m1.applyCalled) assert.Equal(t, 1, m2.applyCalled) @@ -47,8 +47,8 @@ func TestDeferredMutatorWhenFirstFails(t *testing.T) { deferredMutator := Defer(Seq(mErr, m1, m2), cleanup) b := &Bundle{} - err := Apply(context.Background(), b, deferredMutator) - assert.ErrorContains(t, err, "mutator error occurred") + diags := Apply(context.Background(), b, deferredMutator) + assert.ErrorContains(t, diags.Error(), "mutator error occurred") assert.Equal(t, 1, mErr.applyCalled) assert.Equal(t, 0, m1.applyCalled) @@ -64,8 +64,8 @@ func TestDeferredMutatorWhenMiddleOneFails(t *testing.T) { deferredMutator := Defer(Seq(m1, mErr, m2), cleanup) b := &Bundle{} - err := Apply(context.Background(), b, deferredMutator) - assert.ErrorContains(t, err, "mutator error occurred") + diags := Apply(context.Background(), b, deferredMutator) + assert.ErrorContains(t, diags.Error(), "mutator error occurred") assert.Equal(t, 1, m1.applyCalled) assert.Equal(t, 1, mErr.applyCalled) @@ -81,8 +81,8 @@ func TestDeferredMutatorWhenLastOneFails(t *testing.T) { deferredMutator := Defer(Seq(m1, m2, mErr), cleanup) b := &Bundle{} - err := Apply(context.Background(), b, deferredMutator) - assert.ErrorContains(t, err, "mutator error occurred") + diags := Apply(context.Background(), b, deferredMutator) + assert.ErrorContains(t, diags.Error(), "mutator error occurred") assert.Equal(t, 1, m1.applyCalled) assert.Equal(t, 1, m2.applyCalled) @@ -98,8 +98,14 @@ func TestDeferredMutatorCombinesErrorMessages(t *testing.T) { deferredMutator := Defer(Seq(m1, m2, mErr), cleanupErr) b := &Bundle{} - err := Apply(context.Background(), b, deferredMutator) - assert.ErrorContains(t, err, "mutator error occurred\ncleanup error occurred") + diags := Apply(context.Background(), b, deferredMutator) + + var errs []string + for _, d := range diags { + errs = append(errs, d.Summary) + } + assert.Contains(t, errs, "mutator error occurred") + assert.Contains(t, errs, "cleanup error occurred") assert.Equal(t, 1, m1.applyCalled) assert.Equal(t, 1, m2.applyCalled) diff --git a/bundle/deploy/check_running_resources.go b/bundle/deploy/check_running_resources.go index deb7775c6b..7f7a9bcace 100644 --- a/bundle/deploy/check_running_resources.go +++ b/bundle/deploy/check_running_resources.go @@ -6,6 +6,7 @@ import ( "strconv" "github.com/databricks/cli/bundle" + "github.com/databricks/cli/libs/diag" "github.com/databricks/databricks-sdk-go" "github.com/databricks/databricks-sdk-go/service/jobs" "github.com/databricks/databricks-sdk-go/service/pipelines" @@ -30,29 +31,29 @@ func (l *checkRunningResources) Name() string { return "check-running-resources" } -func (l *checkRunningResources) Apply(ctx context.Context, b *bundle.Bundle) error { +func (l *checkRunningResources) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { if !b.Config.Bundle.Deployment.FailOnActiveRuns { return nil } tf := b.Terraform if tf == nil { - return fmt.Errorf("terraform not initialized") + return diag.Errorf("terraform not initialized") } err := tf.Init(ctx, tfexec.Upgrade(true)) if err != nil { - return fmt.Errorf("terraform init: %w", err) + return diag.Errorf("terraform init: %v", err) } state, err := b.Terraform.Show(ctx) if err != nil { - return err + return diag.FromErr(err) } err = checkAnyResourceRunning(ctx, b.WorkspaceClient(), state) if err != nil { - return fmt.Errorf("deployment aborted, err: %w", err) + return diag.Errorf("deployment aborted, err: %v", err) } return nil diff --git a/bundle/deploy/files/delete.go b/bundle/deploy/files/delete.go 
index 8585ec3c84..9367e2a624 100644 --- a/bundle/deploy/files/delete.go +++ b/bundle/deploy/files/delete.go @@ -6,6 +6,7 @@ import ( "github.com/databricks/cli/bundle" "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/cli/libs/diag" "github.com/databricks/databricks-sdk-go/service/workspace" "github.com/fatih/color" ) @@ -16,7 +17,7 @@ func (m *delete) Name() string { return "files.Delete" } -func (m *delete) Apply(ctx context.Context, b *bundle.Bundle) error { +func (m *delete) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { // Do not delete files if terraform destroy was not consented if !b.Plan.IsEmpty && !b.Plan.ConfirmApply { return nil @@ -29,7 +30,7 @@ func (m *delete) Apply(ctx context.Context, b *bundle.Bundle) error { if !b.AutoApprove { proceed, err := cmdio.AskYesOrNo(ctx, fmt.Sprintf("\n%s and all files in it will be %s Proceed?", b.Config.Workspace.RootPath, red("deleted permanently!"))) if err != nil { - return err + return diag.FromErr(err) } if !proceed { return nil @@ -41,17 +42,17 @@ func (m *delete) Apply(ctx context.Context, b *bundle.Bundle) error { Recursive: true, }) if err != nil { - return err + return diag.FromErr(err) } // Clean up sync snapshot file sync, err := GetSync(ctx, b) if err != nil { - return err + return diag.FromErr(err) } err = sync.DestroySnapshot(ctx) if err != nil { - return err + return diag.FromErr(err) } cmdio.LogString(ctx, fmt.Sprintf("Deleted snapshot file at %s", sync.SnapshotPath())) diff --git a/bundle/deploy/files/upload.go b/bundle/deploy/files/upload.go index 4da41e202e..58cb3c0f03 100644 --- a/bundle/deploy/files/upload.go +++ b/bundle/deploy/files/upload.go @@ -6,6 +6,7 @@ import ( "github.com/databricks/cli/bundle" "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/cli/libs/diag" "github.com/databricks/cli/libs/log" ) @@ -15,16 +16,16 @@ func (m *upload) Name() string { return "files.Upload" } -func (m *upload) Apply(ctx context.Context, b *bundle.Bundle) error { +func (m *upload) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { cmdio.LogString(ctx, fmt.Sprintf("Uploading bundle files to %s...", b.Config.Workspace.FilePath)) sync, err := GetSync(ctx, b) if err != nil { - return err + return diag.FromErr(err) } err = sync.RunOnce(ctx) if err != nil { - return err + return diag.FromErr(err) } log.Infof(ctx, "Uploaded bundle files") diff --git a/bundle/deploy/lock/acquire.go b/bundle/deploy/lock/acquire.go index 69e6663fc7..7d3d0eca85 100644 --- a/bundle/deploy/lock/acquire.go +++ b/bundle/deploy/lock/acquire.go @@ -3,9 +3,9 @@ package lock import ( "context" "errors" - "fmt" "github.com/databricks/cli/bundle" + "github.com/databricks/cli/libs/diag" "github.com/databricks/cli/libs/filer" "github.com/databricks/cli/libs/locker" "github.com/databricks/cli/libs/log" @@ -33,7 +33,7 @@ func (m *acquire) init(b *bundle.Bundle) error { return nil } -func (m *acquire) Apply(ctx context.Context, b *bundle.Bundle) error { +func (m *acquire) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { // Return early if locking is disabled. 
if !b.Config.Bundle.Deployment.Lock.IsEnabled() { log.Infof(ctx, "Skipping; locking is disabled") @@ -42,7 +42,7 @@ func (m *acquire) Apply(ctx context.Context, b *bundle.Bundle) error { err := m.init(b) if err != nil { - return err + return diag.FromErr(err) } force := b.Config.Bundle.Deployment.Lock.Force @@ -55,9 +55,9 @@ func (m *acquire) Apply(ctx context.Context, b *bundle.Bundle) error { if errors.As(err, ¬ExistsError) { // If we get a "doesn't exist" error from the API this indicates // we either don't have permissions or the path is invalid. - return fmt.Errorf("cannot write to deployment root (this can indicate a previous deploy was done with a different identity): %s", b.Config.Workspace.RootPath) + return diag.Errorf("cannot write to deployment root (this can indicate a previous deploy was done with a different identity): %s", b.Config.Workspace.RootPath) } - return err + return diag.FromErr(err) } return nil diff --git a/bundle/deploy/lock/release.go b/bundle/deploy/lock/release.go index 4ea47c2f97..26f95edfc9 100644 --- a/bundle/deploy/lock/release.go +++ b/bundle/deploy/lock/release.go @@ -2,9 +2,9 @@ package lock import ( "context" - "fmt" "github.com/databricks/cli/bundle" + "github.com/databricks/cli/libs/diag" "github.com/databricks/cli/libs/locker" "github.com/databricks/cli/libs/log" ) @@ -30,7 +30,7 @@ func (m *release) Name() string { return "lock:release" } -func (m *release) Apply(ctx context.Context, b *bundle.Bundle) error { +func (m *release) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { // Return early if locking is disabled. if !b.Config.Bundle.Deployment.Lock.IsEnabled() { log.Infof(ctx, "Skipping; locking is disabled") @@ -47,12 +47,12 @@ func (m *release) Apply(ctx context.Context, b *bundle.Bundle) error { log.Infof(ctx, "Releasing deployment lock") switch m.goal { case GoalDeploy: - return b.Locker.Unlock(ctx) + return diag.FromErr(b.Locker.Unlock(ctx)) case GoalBind, GoalUnbind: - return b.Locker.Unlock(ctx) + return diag.FromErr(b.Locker.Unlock(ctx)) case GoalDestroy: - return b.Locker.Unlock(ctx, locker.AllowLockFileNotExist) + return diag.FromErr(b.Locker.Unlock(ctx, locker.AllowLockFileNotExist)) default: - return fmt.Errorf("unknown goal for lock release: %s", m.goal) + return diag.Errorf("unknown goal for lock release: %s", m.goal) } } diff --git a/bundle/deploy/metadata/annotate_jobs.go b/bundle/deploy/metadata/annotate_jobs.go index 5b9ae5b885..372cbca13a 100644 --- a/bundle/deploy/metadata/annotate_jobs.go +++ b/bundle/deploy/metadata/annotate_jobs.go @@ -5,6 +5,7 @@ import ( "path" "github.com/databricks/cli/bundle" + "github.com/databricks/cli/libs/diag" "github.com/databricks/databricks-sdk-go/service/jobs" ) @@ -18,7 +19,7 @@ func (m *annotateJobs) Name() string { return "metadata.AnnotateJobs" } -func (m *annotateJobs) Apply(_ context.Context, b *bundle.Bundle) error { +func (m *annotateJobs) Apply(_ context.Context, b *bundle.Bundle) diag.Diagnostics { for _, job := range b.Config.Resources.Jobs { if job.JobSettings == nil { continue diff --git a/bundle/deploy/metadata/annotate_jobs_test.go b/bundle/deploy/metadata/annotate_jobs_test.go index c7a02e754c..8f2ab9c034 100644 --- a/bundle/deploy/metadata/annotate_jobs_test.go +++ b/bundle/deploy/metadata/annotate_jobs_test.go @@ -9,6 +9,7 @@ import ( "github.com/databricks/cli/bundle/config/resources" "github.com/databricks/databricks-sdk-go/service/jobs" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" ) func TestAnnotateJobsMutator(t *testing.T) 
{ @@ -34,8 +35,8 @@ func TestAnnotateJobsMutator(t *testing.T) { }, } - err := AnnotateJobs().Apply(context.Background(), b) - assert.NoError(t, err) + diags := AnnotateJobs().Apply(context.Background(), b) + require.NoError(t, diags.Error()) assert.Equal(t, &jobs.JobDeployment{ @@ -67,6 +68,6 @@ func TestAnnotateJobsMutatorJobWithoutSettings(t *testing.T) { }, } - err := AnnotateJobs().Apply(context.Background(), b) - assert.NoError(t, err) + diags := AnnotateJobs().Apply(context.Background(), b) + require.NoError(t, diags.Error()) } diff --git a/bundle/deploy/metadata/compute.go b/bundle/deploy/metadata/compute.go index c612d33a3f..5a46cd67f8 100644 --- a/bundle/deploy/metadata/compute.go +++ b/bundle/deploy/metadata/compute.go @@ -2,12 +2,12 @@ package metadata import ( "context" - "fmt" "path/filepath" "github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle/config" "github.com/databricks/cli/bundle/metadata" + "github.com/databricks/cli/libs/diag" ) type compute struct{} @@ -20,7 +20,7 @@ func (m *compute) Name() string { return "metadata.Compute" } -func (m *compute) Apply(_ context.Context, b *bundle.Bundle) error { +func (m *compute) Apply(_ context.Context, b *bundle.Bundle) diag.Diagnostics { b.Metadata = metadata.Metadata{ Version: metadata.Version, Config: metadata.Config{}, @@ -41,7 +41,7 @@ func (m *compute) Apply(_ context.Context, b *bundle.Bundle) error { // root relativePath, err := filepath.Rel(b.Config.Path, job.ConfigFilePath) if err != nil { - return fmt.Errorf("failed to compute relative path for job %s: %w", name, err) + return diag.Errorf("failed to compute relative path for job %s: %v", name, err) } // Metadata for the job jobsMetadata[name] = &metadata.Job{ diff --git a/bundle/deploy/metadata/compute_test.go b/bundle/deploy/metadata/compute_test.go index e717ebd53e..6d43f845b1 100644 --- a/bundle/deploy/metadata/compute_test.go +++ b/bundle/deploy/metadata/compute_test.go @@ -91,8 +91,8 @@ func TestComputeMetadataMutator(t *testing.T) { }, } - err := bundle.Apply(context.Background(), b, Compute()) - require.NoError(t, err) + diags := bundle.Apply(context.Background(), b, Compute()) + require.NoError(t, diags.Error()) assert.Equal(t, expectedMetadata, b.Metadata) } diff --git a/bundle/deploy/metadata/upload.go b/bundle/deploy/metadata/upload.go index f550a66e72..a040a0ae8f 100644 --- a/bundle/deploy/metadata/upload.go +++ b/bundle/deploy/metadata/upload.go @@ -6,6 +6,7 @@ import ( "encoding/json" "github.com/databricks/cli/bundle" + "github.com/databricks/cli/libs/diag" "github.com/databricks/cli/libs/filer" ) @@ -21,16 +22,16 @@ func (m *upload) Name() string { return "metadata.Upload" } -func (m *upload) Apply(ctx context.Context, b *bundle.Bundle) error { +func (m *upload) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { f, err := filer.NewWorkspaceFilesClient(b.WorkspaceClient(), b.Config.Workspace.StatePath) if err != nil { - return err + return diag.FromErr(err) } metadata, err := json.MarshalIndent(b.Metadata, "", " ") if err != nil { - return err + return diag.FromErr(err) } - return f.Write(ctx, MetadataFileName, bytes.NewReader(metadata), filer.CreateParentDirectories, filer.OverwriteIfExists) + return diag.FromErr(f.Write(ctx, MetadataFileName, bytes.NewReader(metadata), filer.CreateParentDirectories, filer.OverwriteIfExists)) } diff --git a/bundle/deploy/state_pull.go b/bundle/deploy/state_pull.go index 089a870cb0..61f5426a09 100644 --- a/bundle/deploy/state_pull.go +++ b/bundle/deploy/state_pull.go @@ -11,6 +11,7 @@ import 
( "github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle/deploy/files" + "github.com/databricks/cli/libs/diag" "github.com/databricks/cli/libs/filer" "github.com/databricks/cli/libs/log" "github.com/databricks/cli/libs/sync" @@ -20,10 +21,10 @@ type statePull struct { filerFactory FilerFactory } -func (s *statePull) Apply(ctx context.Context, b *bundle.Bundle) error { +func (s *statePull) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { f, err := s.filerFactory(b) if err != nil { - return err + return diag.FromErr(err) } // Download deployment state file from filer to local cache directory. @@ -31,7 +32,7 @@ func (s *statePull) Apply(ctx context.Context, b *bundle.Bundle) error { remote, err := s.remoteState(ctx, f) if err != nil { log.Infof(ctx, "Unable to open remote deployment state file: %s", err) - return err + return diag.FromErr(err) } if remote == nil { log.Infof(ctx, "Remote deployment state file does not exist") @@ -40,19 +41,19 @@ func (s *statePull) Apply(ctx context.Context, b *bundle.Bundle) error { statePath, err := getPathToStateFile(ctx, b) if err != nil { - return err + return diag.FromErr(err) } local, err := os.OpenFile(statePath, os.O_CREATE|os.O_RDWR, 0600) if err != nil { - return err + return diag.FromErr(err) } defer local.Close() data := remote.Bytes() err = validateRemoteStateCompatibility(bytes.NewReader(data)) if err != nil { - return err + return diag.FromErr(err) } if !isLocalStateStale(local, bytes.NewReader(data)) { @@ -68,30 +69,30 @@ func (s *statePull) Apply(ctx context.Context, b *bundle.Bundle) error { log.Infof(ctx, "Writing remote deployment state file to local cache directory") _, err = io.Copy(local, bytes.NewReader(data)) if err != nil { - return err + return diag.FromErr(err) } var state DeploymentState err = json.Unmarshal(data, &state) if err != nil { - return err + return diag.FromErr(err) } // Create a new snapshot based on the deployment state file. opts, err := files.GetSyncOptions(ctx, b) if err != nil { - return err + return diag.FromErr(err) } log.Infof(ctx, "Creating new snapshot") snapshot, err := sync.NewSnapshot(state.Files.ToSlice(b.Config.Path), opts) if err != nil { - return err + return diag.FromErr(err) } // Persist the snapshot to disk. 
log.Infof(ctx, "Persisting snapshot to disk") - return snapshot.Save(ctx) + return diag.FromErr(snapshot.Save(ctx)) } func (s *statePull) remoteState(ctx context.Context, f filer.Filer) (*bytes.Buffer, error) { diff --git a/bundle/deploy/state_pull_test.go b/bundle/deploy/state_pull_test.go index 50eb90916d..9716a1e04b 100644 --- a/bundle/deploy/state_pull_test.go +++ b/bundle/deploy/state_pull_test.go @@ -106,8 +106,8 @@ func testStatePull(t *testing.T, opts statePullOpts) { require.NoError(t, err) } - err := bundle.Apply(ctx, b, s) - require.NoError(t, err) + diags := bundle.Apply(ctx, b, s) + require.NoError(t, diags.Error()) // Check that deployment state was written statePath, err := getPathToStateFile(ctx, b) @@ -263,8 +263,8 @@ func TestStatePullNoState(t *testing.T) { } ctx := context.Background() - err := bundle.Apply(ctx, b, s) - require.NoError(t, err) + diags := bundle.Apply(ctx, b, s) + require.NoError(t, diags.Error()) // Check that deployment state was not written statePath, err := getPathToStateFile(ctx, b) @@ -451,7 +451,7 @@ func TestStatePullNewerDeploymentStateVersion(t *testing.T) { } ctx := context.Background() - err := bundle.Apply(ctx, b, s) - require.Error(t, err) - require.Contains(t, err.Error(), "remote deployment state is incompatible with the current version of the CLI, please upgrade to at least 1.2.3") + diags := bundle.Apply(ctx, b, s) + require.True(t, diags.HasError()) + require.ErrorContains(t, diags.Error(), "remote deployment state is incompatible with the current version of the CLI, please upgrade to at least 1.2.3") } diff --git a/bundle/deploy/state_push.go b/bundle/deploy/state_push.go index 8818d0a73b..176a907c8d 100644 --- a/bundle/deploy/state_push.go +++ b/bundle/deploy/state_push.go @@ -5,6 +5,7 @@ import ( "os" "github.com/databricks/cli/bundle" + "github.com/databricks/cli/libs/diag" "github.com/databricks/cli/libs/filer" "github.com/databricks/cli/libs/log" ) @@ -17,27 +18,27 @@ func (s *statePush) Name() string { return "deploy:state-push" } -func (s *statePush) Apply(ctx context.Context, b *bundle.Bundle) error { +func (s *statePush) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { f, err := s.filerFactory(b) if err != nil { - return err + return diag.FromErr(err) } statePath, err := getPathToStateFile(ctx, b) if err != nil { - return err + return diag.FromErr(err) } local, err := os.Open(statePath) if err != nil { - return err + return diag.FromErr(err) } defer local.Close() log.Infof(ctx, "Writing local deployment state file to remote state directory") err = f.Write(ctx, DeploymentStateFileName, local, filer.CreateParentDirectories, filer.OverwriteIfExists) if err != nil { - return err + return diag.FromErr(err) } return nil diff --git a/bundle/deploy/state_push_test.go b/bundle/deploy/state_push_test.go index 37b865ecba..c6d9f88f5a 100644 --- a/bundle/deploy/state_push_test.go +++ b/bundle/deploy/state_push_test.go @@ -77,6 +77,6 @@ func TestStatePush(t *testing.T) { err = os.WriteFile(statePath, data, 0644) require.NoError(t, err) - err = bundle.Apply(ctx, b, s) - require.NoError(t, err) + diags := bundle.Apply(ctx, b, s) + require.NoError(t, diags.Error()) } diff --git a/bundle/deploy/state_update.go b/bundle/deploy/state_update.go index 0ae61a6e2b..cf2e9ac9ee 100644 --- a/bundle/deploy/state_update.go +++ b/bundle/deploy/state_update.go @@ -11,6 +11,7 @@ import ( "github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle/deploy/files" "github.com/databricks/cli/internal/build" + 
"github.com/databricks/cli/libs/diag" "github.com/databricks/cli/libs/log" ) @@ -21,10 +22,10 @@ func (s *stateUpdate) Name() string { return "deploy:state-update" } -func (s *stateUpdate) Apply(ctx context.Context, b *bundle.Bundle) error { +func (s *stateUpdate) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { state, err := load(ctx, b) if err != nil { - return err + return diag.FromErr(err) } // Increment the state sequence. @@ -40,41 +41,41 @@ func (s *stateUpdate) Apply(ctx context.Context, b *bundle.Bundle) error { // Get the current file list. sync, err := files.GetSync(ctx, b) if err != nil { - return err + return diag.FromErr(err) } files, err := sync.GetFileList(ctx) if err != nil { - return err + return diag.FromErr(err) } // Update the state with the current file list. fl, err := FromSlice(files) if err != nil { - return err + return diag.FromErr(err) } state.Files = fl statePath, err := getPathToStateFile(ctx, b) if err != nil { - return err + return diag.FromErr(err) } // Write the state back to the file. f, err := os.OpenFile(statePath, os.O_CREATE|os.O_RDWR|os.O_TRUNC, 0600) if err != nil { log.Infof(ctx, "Unable to open deployment state file: %s", err) - return err + return diag.FromErr(err) } defer f.Close() data, err := json.Marshal(state) if err != nil { - return err + return diag.FromErr(err) } _, err = io.Copy(f, bytes.NewReader(data)) if err != nil { - return err + return diag.FromErr(err) } return nil diff --git a/bundle/deploy/state_update_test.go b/bundle/deploy/state_update_test.go index 5e16dd0087..73b7fe4b34 100644 --- a/bundle/deploy/state_update_test.go +++ b/bundle/deploy/state_update_test.go @@ -55,8 +55,8 @@ func TestStateUpdate(t *testing.T) { ctx := context.Background() - err := bundle.Apply(ctx, b, s) - require.NoError(t, err) + diags := bundle.Apply(ctx, b, s) + require.NoError(t, diags.Error()) // Check that the state file was updated. state, err := load(ctx, b) @@ -66,8 +66,8 @@ func TestStateUpdate(t *testing.T) { require.Len(t, state.Files, 3) require.Equal(t, build.GetInfo().Version, state.CliVersion) - err = bundle.Apply(ctx, b, s) - require.NoError(t, err) + diags = bundle.Apply(ctx, b, s) + require.NoError(t, diags.Error()) // Check that the state file was updated again. state, err = load(ctx, b) @@ -136,8 +136,8 @@ func TestStateUpdateWithExistingState(t *testing.T) { err = os.WriteFile(statePath, data, 0644) require.NoError(t, err) - err = bundle.Apply(ctx, b, s) - require.NoError(t, err) + diags := bundle.Apply(ctx, b, s) + require.NoError(t, diags.Error()) // Check that the state file was updated. 
state, err = load(ctx, b) diff --git a/bundle/deploy/terraform/apply.go b/bundle/deploy/terraform/apply.go index 117cdfc18c..e4acda852f 100644 --- a/bundle/deploy/terraform/apply.go +++ b/bundle/deploy/terraform/apply.go @@ -2,10 +2,10 @@ package terraform import ( "context" - "fmt" "github.com/databricks/cli/bundle" "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/cli/libs/diag" "github.com/databricks/cli/libs/log" "github.com/hashicorp/terraform-exec/tfexec" ) @@ -16,22 +16,22 @@ func (w *apply) Name() string { return "terraform.Apply" } -func (w *apply) Apply(ctx context.Context, b *bundle.Bundle) error { +func (w *apply) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { tf := b.Terraform if tf == nil { - return fmt.Errorf("terraform not initialized") + return diag.Errorf("terraform not initialized") } cmdio.LogString(ctx, "Deploying resources...") err := tf.Init(ctx, tfexec.Upgrade(true)) if err != nil { - return fmt.Errorf("terraform init: %w", err) + return diag.Errorf("terraform init: %v", err) } err = tf.Apply(ctx) if err != nil { - return fmt.Errorf("terraform apply: %w", err) + return diag.Errorf("terraform apply: %v", err) } log.Infof(ctx, "Resource deployment completed") diff --git a/bundle/deploy/terraform/destroy.go b/bundle/deploy/terraform/destroy.go index 0b3baba3bc..16f074a222 100644 --- a/bundle/deploy/terraform/destroy.go +++ b/bundle/deploy/terraform/destroy.go @@ -7,6 +7,7 @@ import ( "github.com/databricks/cli/bundle" "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/cli/libs/diag" "github.com/fatih/color" "github.com/hashicorp/terraform-exec/tfexec" tfjson "github.com/hashicorp/terraform-json" @@ -62,7 +63,7 @@ func (w *destroy) Name() string { return "terraform.Destroy" } -func (w *destroy) Apply(ctx context.Context, b *bundle.Bundle) error { +func (w *destroy) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { // return early if plan is empty if b.Plan.IsEmpty { cmdio.LogString(ctx, "No resources to destroy in plan. Skipping destroy!") @@ -71,19 +72,19 @@ func (w *destroy) Apply(ctx context.Context, b *bundle.Bundle) error { tf := b.Terraform if tf == nil { - return fmt.Errorf("terraform not initialized") + return diag.Errorf("terraform not initialized") } // read plan file plan, err := tf.ShowPlanFile(ctx, b.Plan.Path) if err != nil { - return err + return diag.FromErr(err) } // print the resources that will be destroyed err = logDestroyPlan(ctx, plan.ResourceChanges) if err != nil { - return err + return diag.FromErr(err) } // Ask for confirmation, if needed @@ -91,7 +92,7 @@ func (w *destroy) Apply(ctx context.Context, b *bundle.Bundle) error { red := color.New(color.FgRed).SprintFunc() b.Plan.ConfirmApply, err = cmdio.AskYesOrNo(ctx, fmt.Sprintf("\nThis will permanently %s resources! 
Proceed?", red("destroy"))) if err != nil { - return err + return diag.FromErr(err) } } @@ -101,7 +102,7 @@ func (w *destroy) Apply(ctx context.Context, b *bundle.Bundle) error { } if b.Plan.Path == "" { - return fmt.Errorf("no plan found") + return diag.Errorf("no plan found") } cmdio.LogString(ctx, "Starting to destroy resources") @@ -109,7 +110,7 @@ func (w *destroy) Apply(ctx context.Context, b *bundle.Bundle) error { // Apply terraform according to the computed destroy plan err = tf.Apply(ctx, tfexec.DirOrPlan(b.Plan.Path)) if err != nil { - return fmt.Errorf("terraform destroy: %w", err) + return diag.Errorf("terraform destroy: %v", err) } cmdio.LogString(ctx, "Successfully destroyed resources!") diff --git a/bundle/deploy/terraform/import.go b/bundle/deploy/terraform/import.go index 5fc436f201..7c1a681583 100644 --- a/bundle/deploy/terraform/import.go +++ b/bundle/deploy/terraform/import.go @@ -10,6 +10,7 @@ import ( "github.com/databricks/cli/bundle" "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/cli/libs/diag" "github.com/hashicorp/terraform-exec/tfexec" ) @@ -25,31 +26,31 @@ type importResource struct { } // Apply implements bundle.Mutator. -func (m *importResource) Apply(ctx context.Context, b *bundle.Bundle) error { +func (m *importResource) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { dir, err := Dir(ctx, b) if err != nil { - return err + return diag.FromErr(err) } tf := b.Terraform if tf == nil { - return fmt.Errorf("terraform not initialized") + return diag.Errorf("terraform not initialized") } err = tf.Init(ctx, tfexec.Upgrade(true)) if err != nil { - return fmt.Errorf("terraform init: %w", err) + return diag.Errorf("terraform init: %v", err) } tmpDir, err := os.MkdirTemp("", "state-*") if err != nil { - return fmt.Errorf("terraform init: %w", err) + return diag.Errorf("terraform init: %v", err) } tmpState := filepath.Join(tmpDir, TerraformStateFileName) importAddress := fmt.Sprintf("%s.%s", m.opts.ResourceType, m.opts.ResourceKey) err = tf.Import(ctx, importAddress, m.opts.ResourceId, tfexec.StateOut(tmpState)) if err != nil { - return fmt.Errorf("terraform import: %w", err) + return diag.Errorf("terraform import: %v", err) } buf := bytes.NewBuffer(nil) @@ -58,7 +59,7 @@ func (m *importResource) Apply(ctx context.Context, b *bundle.Bundle) error { //lint:ignore SA1019 We use legacy -state flag for now to plan the import changes based on temporary state file changed, err := tf.Plan(ctx, tfexec.State(tmpState), tfexec.Target(importAddress)) if err != nil { - return fmt.Errorf("terraform plan: %w", err) + return diag.Errorf("terraform plan: %v", err) } defer os.RemoveAll(tmpDir) @@ -70,29 +71,29 @@ func (m *importResource) Apply(ctx context.Context, b *bundle.Bundle) error { cmdio.LogString(ctx, output) ans, err := cmdio.AskYesOrNo(ctx, "Confirm import changes? 
Changes will be remotely applied only after running 'bundle deploy'.") if err != nil { - return err + return diag.FromErr(err) } if !ans { - return fmt.Errorf("import aborted") + return diag.Errorf("import aborted") } } // If user confirmed changes, move the state file from temp dir to state location f, err := os.Create(filepath.Join(dir, TerraformStateFileName)) if err != nil { - return err + return diag.FromErr(err) } defer f.Close() tmpF, err := os.Open(tmpState) if err != nil { - return err + return diag.FromErr(err) } defer tmpF.Close() _, err = io.Copy(f, tmpF) if err != nil { - return err + return diag.FromErr(err) } return nil diff --git a/bundle/deploy/terraform/init.go b/bundle/deploy/terraform/init.go index 503a1db24f..ca1fc8caf0 100644 --- a/bundle/deploy/terraform/init.go +++ b/bundle/deploy/terraform/init.go @@ -12,6 +12,7 @@ import ( "github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle/config" + "github.com/databricks/cli/libs/diag" "github.com/databricks/cli/libs/env" "github.com/databricks/cli/libs/log" "github.com/hashicorp/go-version" @@ -151,7 +152,7 @@ func setProxyEnvVars(ctx context.Context, environ map[string]string, b *bundle.B return nil } -func (m *initialize) Apply(ctx context.Context, b *bundle.Bundle) error { +func (m *initialize) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { tfConfig := b.Config.Bundle.Terraform if tfConfig == nil { tfConfig = &config.Terraform{} @@ -160,46 +161,46 @@ func (m *initialize) Apply(ctx context.Context, b *bundle.Bundle) error { execPath, err := m.findExecPath(ctx, b, tfConfig) if err != nil { - return err + return diag.FromErr(err) } workingDir, err := Dir(ctx, b) if err != nil { - return err + return diag.FromErr(err) } tf, err := tfexec.NewTerraform(workingDir, execPath) if err != nil { - return err + return diag.FromErr(err) } environ, err := b.AuthEnv() if err != nil { - return err + return diag.FromErr(err) } err = inheritEnvVars(ctx, environ) if err != nil { - return err + return diag.FromErr(err) } // Set the temporary directory environment variables err = setTempDirEnvVars(ctx, environ, b) if err != nil { - return err + return diag.FromErr(err) } // Set the proxy related environment variables err = setProxyEnvVars(ctx, environ, b) if err != nil { - return err + return diag.FromErr(err) } // Configure environment variables for auth for Terraform to use. 
log.Debugf(ctx, "Environment variables for Terraform: %s", strings.Join(maps.Keys(environ), ", ")) err = tf.SetEnv(environ) if err != nil { - return err + return diag.FromErr(err) } b.Terraform = tf diff --git a/bundle/deploy/terraform/init_test.go b/bundle/deploy/terraform/init_test.go index 4b00e18e47..bbef7f0f79 100644 --- a/bundle/deploy/terraform/init_test.go +++ b/bundle/deploy/terraform/init_test.go @@ -45,8 +45,8 @@ func TestInitEnvironmentVariables(t *testing.T) { t.Setenv("DATABRICKS_TOKEN", "foobar") b.WorkspaceClient() - err = bundle.Apply(context.Background(), b, Initialize()) - require.NoError(t, err) + diags := bundle.Apply(context.Background(), b, Initialize()) + require.NoError(t, diags.Error()) } func TestSetTempDirEnvVarsForUnixWithTmpDirSet(t *testing.T) { diff --git a/bundle/deploy/terraform/interpolate.go b/bundle/deploy/terraform/interpolate.go index 525a38fa88..358279a7af 100644 --- a/bundle/deploy/terraform/interpolate.go +++ b/bundle/deploy/terraform/interpolate.go @@ -5,6 +5,7 @@ import ( "fmt" "github.com/databricks/cli/bundle" + "github.com/databricks/cli/libs/diag" "github.com/databricks/cli/libs/dyn" "github.com/databricks/cli/libs/dyn/dynvar" ) @@ -20,8 +21,8 @@ func (m *interpolateMutator) Name() string { return "terraform.Interpolate" } -func (m *interpolateMutator) Apply(ctx context.Context, b *bundle.Bundle) error { - return b.Config.Mutate(func(root dyn.Value) (dyn.Value, error) { +func (m *interpolateMutator) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { + err := b.Config.Mutate(func(root dyn.Value) (dyn.Value, error) { prefix := dyn.MustPathFromString("resources") // Resolve variable references in all values. @@ -61,4 +62,6 @@ func (m *interpolateMutator) Apply(ctx context.Context, b *bundle.Bundle) error return dyn.V(fmt.Sprintf("${%s}", path.String())), nil }) }) + + return diag.FromErr(err) } diff --git a/bundle/deploy/terraform/interpolate_test.go b/bundle/deploy/terraform/interpolate_test.go index be905ad772..9af4a1443c 100644 --- a/bundle/deploy/terraform/interpolate_test.go +++ b/bundle/deploy/terraform/interpolate_test.go @@ -55,8 +55,8 @@ func TestInterpolate(t *testing.T) { }, } - err := bundle.Apply(context.Background(), b, Interpolate()) - require.NoError(t, err) + diags := bundle.Apply(context.Background(), b, Interpolate()) + require.NoError(t, diags.Error()) j := b.Config.Resources.Jobs["my_job"] assert.Equal(t, "${databricks_pipeline.other_pipeline.id}", j.Tags["other_pipeline"]) @@ -87,6 +87,6 @@ func TestInterpolateUnknownResourceType(t *testing.T) { }, } - err := bundle.Apply(context.Background(), b, Interpolate()) - assert.Contains(t, err.Error(), `reference does not exist: ${resources.unknown.other_unknown.id}`) + diags := bundle.Apply(context.Background(), b, Interpolate()) + assert.ErrorContains(t, diags.Error(), `reference does not exist: ${resources.unknown.other_unknown.id}`) } diff --git a/bundle/deploy/terraform/load.go b/bundle/deploy/terraform/load.go index 624bf7a50a..fa0cd5b4f6 100644 --- a/bundle/deploy/terraform/load.go +++ b/bundle/deploy/terraform/load.go @@ -6,6 +6,7 @@ import ( "slices" "github.com/databricks/cli/bundle" + "github.com/databricks/cli/libs/diag" "github.com/hashicorp/terraform-exec/tfexec" tfjson "github.com/hashicorp/terraform-json" ) @@ -22,31 +23,31 @@ func (l *load) Name() string { return "terraform.Load" } -func (l *load) Apply(ctx context.Context, b *bundle.Bundle) error { +func (l *load) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { tf := b.Terraform if tf 
== nil { - return fmt.Errorf("terraform not initialized") + return diag.Errorf("terraform not initialized") } err := tf.Init(ctx, tfexec.Upgrade(true)) if err != nil { - return fmt.Errorf("terraform init: %w", err) + return diag.Errorf("terraform init: %v", err) } state, err := b.Terraform.Show(ctx) if err != nil { - return err + return diag.FromErr(err) } err = l.validateState(state) if err != nil { - return err + return diag.FromErr(err) } // Merge state into configuration. err = TerraformToBundle(state, &b.Config) if err != nil { - return err + return diag.FromErr(err) } return nil diff --git a/bundle/deploy/terraform/load_test.go b/bundle/deploy/terraform/load_test.go index aeaffa14e9..a912c52133 100644 --- a/bundle/deploy/terraform/load_test.go +++ b/bundle/deploy/terraform/load_test.go @@ -32,10 +32,10 @@ func TestLoadWithNoState(t *testing.T) { t.Setenv("DATABRICKS_TOKEN", "foobar") b.WorkspaceClient() - err = bundle.Apply(context.Background(), b, bundle.Seq( + diags := bundle.Apply(context.Background(), b, bundle.Seq( Initialize(), Load(ErrorOnEmptyState), )) - require.ErrorContains(t, err, "Did you forget to run 'databricks bundle deploy'") + require.ErrorContains(t, diags.Error(), "Did you forget to run 'databricks bundle deploy'") } diff --git a/bundle/deploy/terraform/plan.go b/bundle/deploy/terraform/plan.go index ff841148cd..50e0f78ca2 100644 --- a/bundle/deploy/terraform/plan.go +++ b/bundle/deploy/terraform/plan.go @@ -7,6 +7,7 @@ import ( "github.com/databricks/cli/bundle" "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/cli/libs/diag" "github.com/databricks/cli/libs/terraform" "github.com/hashicorp/terraform-exec/tfexec" ) @@ -26,30 +27,30 @@ func (p *plan) Name() string { return "terraform.Plan" } -func (p *plan) Apply(ctx context.Context, b *bundle.Bundle) error { +func (p *plan) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { tf := b.Terraform if tf == nil { - return fmt.Errorf("terraform not initialized") + return diag.Errorf("terraform not initialized") } cmdio.LogString(ctx, "Starting plan computation") err := tf.Init(ctx, tfexec.Upgrade(true)) if err != nil { - return fmt.Errorf("terraform init: %w", err) + return diag.Errorf("terraform init: %v", err) } // Persist computed plan tfDir, err := Dir(ctx, b) if err != nil { - return err + return diag.FromErr(err) } planPath := filepath.Join(tfDir, "plan") destroy := p.goal == PlanDestroy notEmpty, err := tf.Plan(ctx, tfexec.Destroy(destroy), tfexec.Out(planPath)) if err != nil { - return err + return diag.FromErr(err) } // Set plan in main bundle struct for downstream mutators diff --git a/bundle/deploy/terraform/state_pull.go b/bundle/deploy/terraform/state_pull.go index 045222ae02..cc7d342747 100644 --- a/bundle/deploy/terraform/state_pull.go +++ b/bundle/deploy/terraform/state_pull.go @@ -11,6 +11,7 @@ import ( "github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle/deploy" + "github.com/databricks/cli/libs/diag" "github.com/databricks/cli/libs/filer" "github.com/databricks/cli/libs/log" ) @@ -45,15 +46,15 @@ func (l *statePull) remoteState(ctx context.Context, f filer.Filer) (*bytes.Buff return &buf, nil } -func (l *statePull) Apply(ctx context.Context, b *bundle.Bundle) error { +func (l *statePull) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { f, err := l.filerFactory(b) if err != nil { - return err + return diag.FromErr(err) } dir, err := Dir(ctx, b) if err != nil { - return err + return diag.FromErr(err) } // Download state file from filer to local 
cache directory. @@ -61,7 +62,7 @@ func (l *statePull) Apply(ctx context.Context, b *bundle.Bundle) error { remote, err := l.remoteState(ctx, f) if err != nil { log.Infof(ctx, "Unable to open remote state file: %s", err) - return err + return diag.FromErr(err) } if remote == nil { log.Infof(ctx, "Remote state file does not exist") @@ -71,7 +72,7 @@ func (l *statePull) Apply(ctx context.Context, b *bundle.Bundle) error { // Expect the state file to live under dir. local, err := os.OpenFile(filepath.Join(dir, TerraformStateFileName), os.O_CREATE|os.O_RDWR, 0600) if err != nil { - return err + return diag.FromErr(err) } defer local.Close() @@ -88,7 +89,7 @@ func (l *statePull) Apply(ctx context.Context, b *bundle.Bundle) error { log.Infof(ctx, "Writing remote state file to local cache directory") _, err = io.Copy(local, bytes.NewReader(remote.Bytes())) if err != nil { - return err + return diag.FromErr(err) } return nil diff --git a/bundle/deploy/terraform/state_pull_test.go b/bundle/deploy/terraform/state_pull_test.go index b7734a10fd..805b5af0fc 100644 --- a/bundle/deploy/terraform/state_pull_test.go +++ b/bundle/deploy/terraform/state_pull_test.go @@ -15,12 +15,11 @@ import ( "github.com/databricks/cli/libs/filer" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/mock" - "github.com/stretchr/testify/require" ) func mockStateFilerForPull(t *testing.T, contents map[string]int, merr error) filer.Filer { buf, err := json.Marshal(contents) - require.NoError(t, err) + assert.NoError(t, err) f := mockfiler.NewMockFiler(t) f. @@ -49,11 +48,11 @@ func TestStatePullLocalMissingRemoteMissing(t *testing.T) { ctx := context.Background() b := statePullTestBundle(t) - err := bundle.Apply(ctx, b, m) - assert.NoError(t, err) + diags := bundle.Apply(ctx, b, m) + assert.NoError(t, diags.Error()) // Confirm that no local state file has been written. - _, err = os.Stat(localStateFile(t, ctx, b)) + _, err := os.Stat(localStateFile(t, ctx, b)) assert.ErrorIs(t, err, fs.ErrNotExist) } @@ -64,8 +63,8 @@ func TestStatePullLocalMissingRemotePresent(t *testing.T) { ctx := context.Background() b := statePullTestBundle(t) - err := bundle.Apply(ctx, b, m) - assert.NoError(t, err) + diags := bundle.Apply(ctx, b, m) + assert.NoError(t, diags.Error()) // Confirm that the local state file has been updated. localState := readLocalState(t, ctx, b) @@ -82,8 +81,8 @@ func TestStatePullLocalStale(t *testing.T) { // Write a stale local state file. writeLocalState(t, ctx, b, map[string]int{"serial": 4}) - err := bundle.Apply(ctx, b, m) - assert.NoError(t, err) + diags := bundle.Apply(ctx, b, m) + assert.NoError(t, diags.Error()) // Confirm that the local state file has been updated. localState := readLocalState(t, ctx, b) @@ -100,8 +99,8 @@ func TestStatePullLocalEqual(t *testing.T) { // Write a local state file with the same serial as the remote. writeLocalState(t, ctx, b, map[string]int{"serial": 5}) - err := bundle.Apply(ctx, b, m) - assert.NoError(t, err) + diags := bundle.Apply(ctx, b, m) + assert.NoError(t, diags.Error()) // Confirm that the local state file has not been updated. localState := readLocalState(t, ctx, b) @@ -118,8 +117,8 @@ func TestStatePullLocalNewer(t *testing.T) { // Write a local state file with a newer serial as the remote. writeLocalState(t, ctx, b, map[string]int{"serial": 6}) - err := bundle.Apply(ctx, b, m) - assert.NoError(t, err) + diags := bundle.Apply(ctx, b, m) + assert.NoError(t, diags.Error()) // Confirm that the local state file has not been updated. 
localState := readLocalState(t, ctx, b) diff --git a/bundle/deploy/terraform/state_push.go b/bundle/deploy/terraform/state_push.go index f701db87d1..b50983bd4b 100644 --- a/bundle/deploy/terraform/state_push.go +++ b/bundle/deploy/terraform/state_push.go @@ -8,6 +8,7 @@ import ( "github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle/deploy" "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/cli/libs/diag" "github.com/databricks/cli/libs/filer" "github.com/databricks/cli/libs/log" ) @@ -20,21 +21,21 @@ func (l *statePush) Name() string { return "terraform:state-push" } -func (l *statePush) Apply(ctx context.Context, b *bundle.Bundle) error { +func (l *statePush) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { f, err := l.filerFactory(b) if err != nil { - return err + return diag.FromErr(err) } dir, err := Dir(ctx, b) if err != nil { - return err + return diag.FromErr(err) } // Expect the state file to live under dir. local, err := os.Open(filepath.Join(dir, TerraformStateFileName)) if err != nil { - return err + return diag.FromErr(err) } defer local.Close() @@ -43,7 +44,7 @@ func (l *statePush) Apply(ctx context.Context, b *bundle.Bundle) error { log.Infof(ctx, "Writing local state file to remote state directory") err = f.Write(ctx, TerraformStateFileName, local, filer.CreateParentDirectories, filer.OverwriteIfExists) if err != nil { - return err + return diag.FromErr(err) } return nil diff --git a/bundle/deploy/terraform/state_push_test.go b/bundle/deploy/terraform/state_push_test.go index bd4514a5fd..41d3849000 100644 --- a/bundle/deploy/terraform/state_push_test.go +++ b/bundle/deploy/terraform/state_push_test.go @@ -56,6 +56,6 @@ func TestStatePush(t *testing.T) { // Write a stale local state file. writeLocalState(t, ctx, b, map[string]int{"serial": 4}) - err := bundle.Apply(ctx, b, m) - assert.NoError(t, err) + diags := bundle.Apply(ctx, b, m) + assert.NoError(t, diags.Error()) } diff --git a/bundle/deploy/terraform/unbind.go b/bundle/deploy/terraform/unbind.go index 74e15e1844..49d65615ed 100644 --- a/bundle/deploy/terraform/unbind.go +++ b/bundle/deploy/terraform/unbind.go @@ -5,6 +5,7 @@ import ( "fmt" "github.com/databricks/cli/bundle" + "github.com/databricks/cli/libs/diag" "github.com/hashicorp/terraform-exec/tfexec" ) @@ -13,20 +14,20 @@ type unbind struct { resourceKey string } -func (m *unbind) Apply(ctx context.Context, b *bundle.Bundle) error { +func (m *unbind) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { tf := b.Terraform if tf == nil { - return fmt.Errorf("terraform not initialized") + return diag.Errorf("terraform not initialized") } err := tf.Init(ctx, tfexec.Upgrade(true)) if err != nil { - return fmt.Errorf("terraform init: %w", err) + return diag.Errorf("terraform init: %v", err) } err = tf.StateRm(ctx, fmt.Sprintf("%s.%s", m.resourceType, m.resourceKey)) if err != nil { - return fmt.Errorf("terraform state rm: %w", err) + return diag.Errorf("terraform state rm: %v", err) } return nil diff --git a/bundle/deploy/terraform/write.go b/bundle/deploy/terraform/write.go index e688f6a61c..bee777ffe0 100644 --- a/bundle/deploy/terraform/write.go +++ b/bundle/deploy/terraform/write.go @@ -8,6 +8,7 @@ import ( "github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle/internal/tf/schema" + "github.com/databricks/cli/libs/diag" "github.com/databricks/cli/libs/dyn" ) @@ -17,10 +18,10 @@ func (w *write) Name() string { return "terraform.Write" } -func (w *write) Apply(ctx context.Context, b *bundle.Bundle) 
error { +func (w *write) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { dir, err := Dir(ctx, b) if err != nil { - return err + return diag.FromErr(err) } var root *schema.Root @@ -29,12 +30,12 @@ func (w *write) Apply(ctx context.Context, b *bundle.Bundle) error { return v, err }) if err != nil { - return err + return diag.FromErr(err) } f, err := os.Create(filepath.Join(dir, TerraformConfigFileName)) if err != nil { - return err + return diag.FromErr(err) } defer f.Close() @@ -43,7 +44,7 @@ func (w *write) Apply(ctx context.Context, b *bundle.Bundle) error { enc.SetIndent("", " ") err = enc.Encode(root) if err != nil { - return err + return diag.FromErr(err) } return nil diff --git a/bundle/libraries/match.go b/bundle/libraries/match.go index c8fd2baecd..d051e163c5 100644 --- a/bundle/libraries/match.go +++ b/bundle/libraries/match.go @@ -2,9 +2,9 @@ package libraries import ( "context" - "fmt" "github.com/databricks/cli/bundle" + "github.com/databricks/cli/libs/diag" "github.com/databricks/databricks-sdk-go/service/jobs" ) @@ -19,17 +19,17 @@ func (a *match) Name() string { return "libraries.MatchWithArtifacts" } -func (a *match) Apply(ctx context.Context, b *bundle.Bundle) error { +func (a *match) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { tasks := findAllTasks(b) for _, task := range tasks { if isMissingRequiredLibraries(task) { - return fmt.Errorf("task '%s' is missing required libraries. Please include your package code in task libraries block", task.TaskKey) + return diag.Errorf("task '%s' is missing required libraries. Please include your package code in task libraries block", task.TaskKey) } for j := range task.Libraries { lib := &task.Libraries[j] _, err := findArtifactFiles(ctx, lib, b) if err != nil { - return err + return diag.FromErr(err) } } } diff --git a/bundle/log_string.go b/bundle/log_string.go index 63800d6dfe..f14e3a3ad5 100644 --- a/bundle/log_string.go +++ b/bundle/log_string.go @@ -4,6 +4,7 @@ import ( "context" "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/cli/libs/diag" ) type LogStringMutator struct { @@ -20,7 +21,7 @@ func LogString(message string) Mutator { } } -func (m *LogStringMutator) Apply(ctx context.Context, b *Bundle) error { +func (m *LogStringMutator) Apply(ctx context.Context, b *Bundle) diag.Diagnostics { cmdio.LogString(ctx, m.message) return nil diff --git a/bundle/mutator.go b/bundle/mutator.go index bd1615fd76..6c9968aacd 100644 --- a/bundle/mutator.go +++ b/bundle/mutator.go @@ -3,6 +3,7 @@ package bundle import ( "context" + "github.com/databricks/cli/libs/diag" "github.com/databricks/cli/libs/log" ) @@ -13,10 +14,10 @@ type Mutator interface { Name() string // Apply mutates the specified bundle object. - Apply(context.Context, *Bundle) error + Apply(context.Context, *Bundle) diag.Diagnostics } -func Apply(ctx context.Context, b *Bundle, m Mutator) error { +func Apply(ctx context.Context, b *Bundle, m Mutator) diag.Diagnostics { ctx = log.NewContext(ctx, log.GetLogger(ctx).With("mutator", m.Name())) log.Debugf(ctx, "Apply") @@ -24,7 +25,7 @@ func Apply(ctx context.Context, b *Bundle, m Mutator) error { err := b.Config.MarkMutatorEntry(ctx) if err != nil { log.Errorf(ctx, "entry error: %s", err) - return err + return diag.Errorf("entry error: %s", err) } defer func() { @@ -34,28 +35,32 @@ func Apply(ctx context.Context, b *Bundle, m Mutator) error { } }() - err = m.Apply(ctx, b) - if err != nil { + diags := m.Apply(ctx, b) + + // Log error in diagnostics if any. 
+ // Note: errors should be logged when constructing them + // such that they are not logged multiple times. + // If this is done, we can omit this block. + if err := diags.Error(); err != nil { log.Errorf(ctx, "Error: %s", err) - return err } - return nil + return diags } type funcMutator struct { - fn func(context.Context, *Bundle) error + fn func(context.Context, *Bundle) diag.Diagnostics } func (m funcMutator) Name() string { return "" } -func (m funcMutator) Apply(ctx context.Context, b *Bundle) error { +func (m funcMutator) Apply(ctx context.Context, b *Bundle) diag.Diagnostics { return m.fn(ctx, b) } // ApplyFunc applies an inline-specified function mutator. -func ApplyFunc(ctx context.Context, b *Bundle, fn func(context.Context, *Bundle) error) error { +func ApplyFunc(ctx context.Context, b *Bundle, fn func(context.Context, *Bundle) diag.Diagnostics) diag.Diagnostics { return Apply(ctx, b, funcMutator{fn}) } diff --git a/bundle/mutator_test.go b/bundle/mutator_test.go index c1f3c075f8..04ff19cff5 100644 --- a/bundle/mutator_test.go +++ b/bundle/mutator_test.go @@ -4,6 +4,7 @@ import ( "context" "testing" + "github.com/databricks/cli/libs/diag" "github.com/stretchr/testify/assert" ) @@ -16,7 +17,7 @@ func (t *testMutator) Name() string { return "test" } -func (t *testMutator) Apply(ctx context.Context, b *Bundle) error { +func (t *testMutator) Apply(ctx context.Context, b *Bundle) diag.Diagnostics { t.applyCalled++ return Apply(ctx, b, Seq(t.nestedMutators...)) } @@ -35,8 +36,8 @@ func TestMutator(t *testing.T) { } b := &Bundle{} - err := Apply(context.Background(), b, m) - assert.NoError(t, err) + diags := Apply(context.Background(), b, m) + assert.NoError(t, diags.Error()) assert.Equal(t, 1, m.applyCalled) assert.Equal(t, 1, nested[0].applyCalled) diff --git a/bundle/permissions/filter.go b/bundle/permissions/filter.go index f4834a6563..6d39630c81 100644 --- a/bundle/permissions/filter.go +++ b/bundle/permissions/filter.go @@ -4,6 +4,7 @@ import ( "context" "github.com/databricks/cli/bundle" + "github.com/databricks/cli/libs/diag" "github.com/databricks/cli/libs/dyn" ) @@ -59,10 +60,10 @@ func filter(currentUser string) dyn.WalkValueFunc { } } -func (m *filterCurrentUser) Apply(ctx context.Context, b *bundle.Bundle) error { +func (m *filterCurrentUser) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { currentUser := b.Config.Workspace.CurrentUser.UserName - return b.Config.Mutate(func(v dyn.Value) (dyn.Value, error) { + err := b.Config.Mutate(func(v dyn.Value) (dyn.Value, error) { rv, err := dyn.Get(v, "resources") if err != nil { return dyn.InvalidValue, err @@ -77,4 +78,6 @@ func (m *filterCurrentUser) Apply(ctx context.Context, b *bundle.Bundle) error { // Set the resources with the filtered permissions back into the bundle return dyn.Set(v, "resources", nv) }) + + return diag.FromErr(err) } diff --git a/bundle/permissions/filter_test.go b/bundle/permissions/filter_test.go index 07f5ae77db..410fa4be85 100644 --- a/bundle/permissions/filter_test.go +++ b/bundle/permissions/filter_test.go @@ -89,8 +89,8 @@ func testFixture(userName string) *bundle.Bundle { func TestFilterCurrentUser(t *testing.T) { b := testFixture("alice@databricks.com") - err := bundle.Apply(context.Background(), b, FilterCurrentUser()) - assert.NoError(t, err) + diags := bundle.Apply(context.Background(), b, FilterCurrentUser()) + assert.NoError(t, diags.Error()) // Assert current user is filtered out. 
assert.Equal(t, 2, len(b.Config.Resources.Jobs["job1"].Permissions)) @@ -124,8 +124,8 @@ func TestFilterCurrentUser(t *testing.T) { func TestFilterCurrentServicePrincipal(t *testing.T) { b := testFixture("i-Robot") - err := bundle.Apply(context.Background(), b, FilterCurrentUser()) - assert.NoError(t, err) + diags := bundle.Apply(context.Background(), b, FilterCurrentUser()) + assert.NoError(t, diags.Error()) // Assert current user is filtered out. assert.Equal(t, 2, len(b.Config.Resources.Jobs["job1"].Permissions)) @@ -169,6 +169,6 @@ func TestFilterCurrentUserDoesNotErrorWhenNoResources(t *testing.T) { }, } - err := bundle.Apply(context.Background(), b, FilterCurrentUser()) - assert.NoError(t, err) + diags := bundle.Apply(context.Background(), b, FilterCurrentUser()) + assert.NoError(t, diags.Error()) } diff --git a/bundle/permissions/mutator.go b/bundle/permissions/mutator.go index 54925d1c8d..7787bc0481 100644 --- a/bundle/permissions/mutator.go +++ b/bundle/permissions/mutator.go @@ -7,6 +7,7 @@ import ( "strings" "github.com/databricks/cli/bundle" + "github.com/databricks/cli/libs/diag" ) const CAN_MANAGE = "CAN_MANAGE" @@ -46,10 +47,10 @@ func ApplyBundlePermissions() bundle.Mutator { return &bundlePermissions{} } -func (m *bundlePermissions) Apply(ctx context.Context, b *bundle.Bundle) error { +func (m *bundlePermissions) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { err := validate(b) if err != nil { - return err + return diag.FromErr(err) } applyForJobs(ctx, b) diff --git a/bundle/permissions/mutator_test.go b/bundle/permissions/mutator_test.go index 62c0589d3e..438a150615 100644 --- a/bundle/permissions/mutator_test.go +++ b/bundle/permissions/mutator_test.go @@ -46,8 +46,8 @@ func TestApplyBundlePermissions(t *testing.T) { }, } - err := bundle.Apply(context.Background(), b, ApplyBundlePermissions()) - require.NoError(t, err) + diags := bundle.Apply(context.Background(), b, ApplyBundlePermissions()) + require.NoError(t, diags.Error()) require.Len(t, b.Config.Resources.Jobs["job_1"].Permissions, 3) require.Contains(t, b.Config.Resources.Jobs["job_1"].Permissions, resources.Permission{Level: "CAN_MANAGE", UserName: "TestUser"}) @@ -123,8 +123,8 @@ func TestWarningOnOverlapPermission(t *testing.T) { }, } - err := bundle.Apply(context.Background(), b, ApplyBundlePermissions()) - require.NoError(t, err) + diags := bundle.Apply(context.Background(), b, ApplyBundlePermissions()) + require.NoError(t, diags.Error()) require.Contains(t, b.Config.Resources.Jobs["job_1"].Permissions, resources.Permission{Level: "CAN_VIEW", UserName: "TestUser"}) require.Contains(t, b.Config.Resources.Jobs["job_1"].Permissions, resources.Permission{Level: "CAN_VIEW", GroupName: "TestGroup"}) diff --git a/bundle/permissions/workspace_root.go b/bundle/permissions/workspace_root.go index a8eb9e278c..a59a039f6f 100644 --- a/bundle/permissions/workspace_root.go +++ b/bundle/permissions/workspace_root.go @@ -5,6 +5,7 @@ import ( "fmt" "github.com/databricks/cli/bundle" + "github.com/databricks/cli/libs/diag" "github.com/databricks/databricks-sdk-go/service/workspace" ) @@ -16,10 +17,10 @@ func ApplyWorkspaceRootPermissions() bundle.Mutator { } // Apply implements bundle.Mutator. 
-func (*workspaceRootPermissions) Apply(ctx context.Context, b *bundle.Bundle) error { +func (*workspaceRootPermissions) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { err := giveAccessForWorkspaceRoot(ctx, b) if err != nil { - return err + return diag.FromErr(err) } return nil diff --git a/bundle/permissions/workspace_root_test.go b/bundle/permissions/workspace_root_test.go index 6f03204fa6..7dd97b62d2 100644 --- a/bundle/permissions/workspace_root_test.go +++ b/bundle/permissions/workspace_root_test.go @@ -69,6 +69,6 @@ func TestApplyWorkspaceRootPermissions(t *testing.T) { WorkspaceObjectType: "directories", }).Return(nil, nil) - err := bundle.Apply(context.Background(), b, ApplyWorkspaceRootPermissions()) - require.NoError(t, err) + diags := bundle.Apply(context.Background(), b, ApplyWorkspaceRootPermissions()) + require.NoError(t, diags.Error()) } diff --git a/bundle/phases/phase.go b/bundle/phases/phase.go index b594e1f624..1bb4f86a2e 100644 --- a/bundle/phases/phase.go +++ b/bundle/phases/phase.go @@ -5,6 +5,7 @@ import ( "context" "github.com/databricks/cli/bundle" + "github.com/databricks/cli/libs/diag" "github.com/databricks/cli/libs/log" ) @@ -26,7 +27,7 @@ func (p *phase) Name() string { return p.name } -func (p *phase) Apply(ctx context.Context, b *bundle.Bundle) error { +func (p *phase) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { log.Infof(ctx, "Phase: %s", p.Name()) return bundle.Apply(ctx, b, bundle.Seq(p.mutators...)) } diff --git a/bundle/python/conditional_transform_test.go b/bundle/python/conditional_transform_test.go index 4c7cad5c56..b4d7f9edb6 100644 --- a/bundle/python/conditional_transform_test.go +++ b/bundle/python/conditional_transform_test.go @@ -47,8 +47,8 @@ func TestNoTransformByDefault(t *testing.T) { } trampoline := TransformWheelTask() - err := bundle.Apply(context.Background(), b, trampoline) - require.NoError(t, err) + diags := bundle.Apply(context.Background(), b, trampoline) + require.NoError(t, diags.Error()) task := b.Config.Resources.Jobs["job1"].Tasks[0] require.NotNil(t, task.PythonWheelTask) @@ -96,8 +96,8 @@ func TestTransformWithExperimentalSettingSetToTrue(t *testing.T) { } trampoline := TransformWheelTask() - err := bundle.Apply(context.Background(), b, trampoline) - require.NoError(t, err) + diags := bundle.Apply(context.Background(), b, trampoline) + require.NoError(t, diags.Error()) task := b.Config.Resources.Jobs["job1"].Tasks[0] require.Nil(t, task.PythonWheelTask) diff --git a/bundle/python/transform_test.go b/bundle/python/transform_test.go index b6427ccd8e..729efe1a97 100644 --- a/bundle/python/transform_test.go +++ b/bundle/python/transform_test.go @@ -140,6 +140,6 @@ func TestNoPanicWithNoPythonWheelTasks(t *testing.T) { }, } trampoline := TransformWheelTask() - err := bundle.Apply(context.Background(), b, trampoline) - require.NoError(t, err) + diags := bundle.Apply(context.Background(), b, trampoline) + require.NoError(t, diags.Error()) } diff --git a/bundle/python/warning.go b/bundle/python/warning.go index 9b9fd8e59e..060509ad3f 100644 --- a/bundle/python/warning.go +++ b/bundle/python/warning.go @@ -2,11 +2,11 @@ package python import ( "context" - "fmt" "strings" "github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle/libraries" + "github.com/databricks/cli/libs/diag" "github.com/databricks/cli/libs/log" "github.com/databricks/databricks-sdk-go" "golang.org/x/mod/semver" @@ -19,13 +19,13 @@ func WrapperWarning() bundle.Mutator { return &wrapperWarning{} } -func (m 
*wrapperWarning) Apply(ctx context.Context, b *bundle.Bundle) error { +func (m *wrapperWarning) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { if isPythonWheelWrapperOn(b) { return nil } if hasIncompatibleWheelTasks(ctx, b) { - return fmt.Errorf("python wheel tasks with local libraries require compute with DBR 13.1+. Please change your cluster configuration or set experimental 'python_wheel_wrapper' setting to 'true'") + return diag.Errorf("python wheel tasks with local libraries require compute with DBR 13.1+. Please change your cluster configuration or set experimental 'python_wheel_wrapper' setting to 'true'") } return nil } diff --git a/bundle/python/warning_test.go b/bundle/python/warning_test.go index c8dde59ecb..f1fdf0bcf0 100644 --- a/bundle/python/warning_test.go +++ b/bundle/python/warning_test.go @@ -101,8 +101,8 @@ func TestIncompatibleWheelTasksWithJobClusterKey(t *testing.T) { require.True(t, hasIncompatibleWheelTasks(context.Background(), b)) - err := bundle.Apply(context.Background(), b, WrapperWarning()) - require.ErrorContains(t, err, "python wheel tasks with local libraries require compute with DBR 13.1+.") + diags := bundle.Apply(context.Background(), b, WrapperWarning()) + require.ErrorContains(t, diags.Error(), "python wheel tasks with local libraries require compute with DBR 13.1+.") } func TestIncompatibleWheelTasksWithExistingClusterId(t *testing.T) { @@ -280,8 +280,8 @@ func TestNoWarningWhenPythonWheelWrapperIsOn(t *testing.T) { }, } - err := bundle.Apply(context.Background(), b, WrapperWarning()) - require.NoError(t, err) + diags := bundle.Apply(context.Background(), b, WrapperWarning()) + require.NoError(t, diags.Error()) } func TestSparkVersionLowerThanExpected(t *testing.T) { diff --git a/bundle/scripts/scripts.go b/bundle/scripts/scripts.go index 2f13bc19fc..f8ed7d6a38 100644 --- a/bundle/scripts/scripts.go +++ b/bundle/scripts/scripts.go @@ -10,6 +10,7 @@ import ( "github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle/config" "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/cli/libs/diag" "github.com/databricks/cli/libs/exec" "github.com/databricks/cli/libs/log" ) @@ -28,15 +29,15 @@ func (m *script) Name() string { return fmt.Sprintf("scripts.%s", m.scriptHook) } -func (m *script) Apply(ctx context.Context, b *bundle.Bundle) error { +func (m *script) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { executor, err := exec.NewCommandExecutor(b.Config.Path) if err != nil { - return err + return diag.FromErr(err) } cmd, out, err := executeHook(ctx, executor, b, m.scriptHook) if err != nil { - return err + return diag.FromErr(err) } if cmd == nil { log.Debugf(ctx, "No script defined for %s, skipping", m.scriptHook) @@ -52,7 +53,7 @@ func (m *script) Apply(ctx context.Context, b *bundle.Bundle) error { line, err = reader.ReadString('\n') } - return cmd.Wait() + return diag.FromErr(cmd.Wait()) } func executeHook(ctx context.Context, executor *exec.Executor, b *bundle.Bundle, hook config.ScriptHook) (exec.Command, io.Reader, error) { diff --git a/bundle/scripts/scripts_test.go b/bundle/scripts/scripts_test.go index bc3202e066..fa5c239701 100644 --- a/bundle/scripts/scripts_test.go +++ b/bundle/scripts/scripts_test.go @@ -46,6 +46,6 @@ func TestExecuteMutator(t *testing.T) { }, } - err := bundle.Apply(context.Background(), b, Execute(config.ScriptPreInit)) - require.NoError(t, err) + diags := bundle.Apply(context.Background(), b, Execute(config.ScriptPreInit)) + require.NoError(t, diags.Error()) } diff 
--git a/bundle/seq.go b/bundle/seq.go index 89e760d1f4..c1260a3f08 100644 --- a/bundle/seq.go +++ b/bundle/seq.go @@ -1,6 +1,10 @@ package bundle -import "context" +import ( + "context" + + "github.com/databricks/cli/libs/diag" +) type seqMutator struct { mutators []Mutator @@ -10,14 +14,15 @@ func (s *seqMutator) Name() string { return "seq" } -func (s *seqMutator) Apply(ctx context.Context, b *Bundle) error { +func (s *seqMutator) Apply(ctx context.Context, b *Bundle) diag.Diagnostics { + var diags diag.Diagnostics for _, m := range s.mutators { - err := Apply(ctx, b, m) - if err != nil { - return err + diags = diags.Extend(Apply(ctx, b, m)) + if diags.HasError() { + break } } - return nil + return diags } func Seq(ms ...Mutator) Mutator { diff --git a/bundle/seq_test.go b/bundle/seq_test.go index d5c229e3cd..74f975ed8f 100644 --- a/bundle/seq_test.go +++ b/bundle/seq_test.go @@ -14,8 +14,8 @@ func TestSeqMutator(t *testing.T) { seqMutator := Seq(m1, m2, m3) b := &Bundle{} - err := Apply(context.Background(), b, seqMutator) - assert.NoError(t, err) + diags := Apply(context.Background(), b, seqMutator) + assert.NoError(t, diags.Error()) assert.Equal(t, 1, m1.applyCalled) assert.Equal(t, 1, m2.applyCalled) @@ -30,8 +30,8 @@ func TestSeqWithDeferredMutator(t *testing.T) { seqMutator := Seq(m1, Defer(m2, m3), m4) b := &Bundle{} - err := Apply(context.Background(), b, seqMutator) - assert.NoError(t, err) + diags := Apply(context.Background(), b, seqMutator) + assert.NoError(t, diags.Error()) assert.Equal(t, 1, m1.applyCalled) assert.Equal(t, 1, m2.applyCalled) @@ -47,8 +47,8 @@ func TestSeqWithErrorAndDeferredMutator(t *testing.T) { seqMutator := Seq(errorMut, Defer(m1, m2), m3) b := &Bundle{} - err := Apply(context.Background(), b, seqMutator) - assert.Error(t, err) + diags := Apply(context.Background(), b, seqMutator) + assert.Error(t, diags.Error()) assert.Equal(t, 1, errorMut.applyCalled) assert.Equal(t, 0, m1.applyCalled) @@ -64,8 +64,8 @@ func TestSeqWithErrorInsideDeferredMutator(t *testing.T) { seqMutator := Seq(m1, Defer(errorMut, m2), m3) b := &Bundle{} - err := Apply(context.Background(), b, seqMutator) - assert.Error(t, err) + diags := Apply(context.Background(), b, seqMutator) + assert.Error(t, diags.Error()) assert.Equal(t, 1, m1.applyCalled) assert.Equal(t, 1, errorMut.applyCalled) @@ -81,8 +81,8 @@ func TestSeqWithErrorInsideFinallyStage(t *testing.T) { seqMutator := Seq(m1, Defer(m2, errorMut), m3) b := &Bundle{} - err := Apply(context.Background(), b, seqMutator) - assert.Error(t, err) + diags := Apply(context.Background(), b, seqMutator) + assert.Error(t, diags.Error()) assert.Equal(t, 1, m1.applyCalled) assert.Equal(t, 1, m2.applyCalled) diff --git a/bundle/tests/bundle_permissions_test.go b/bundle/tests/bundle_permissions_test.go index 3ea9dc2e08..b55cbdd2b2 100644 --- a/bundle/tests/bundle_permissions_test.go +++ b/bundle/tests/bundle_permissions_test.go @@ -18,8 +18,9 @@ func TestBundlePermissions(t *testing.T) { assert.NotContains(t, b.Config.Permissions, resources.Permission{Level: "CAN_VIEW", ServicePrincipalName: "1234-abcd"}) assert.NotContains(t, b.Config.Permissions, resources.Permission{Level: "CAN_RUN", UserName: "bot@company.com"}) - err := bundle.Apply(context.Background(), b, permissions.ApplyBundlePermissions()) - require.NoError(t, err) + diags := bundle.Apply(context.Background(), b, permissions.ApplyBundlePermissions()) + require.NoError(t, diags.Error()) + pipelinePermissions := b.Config.Resources.Pipelines["nyc_taxi_pipeline"].Permissions 
assert.Contains(t, pipelinePermissions, resources.Permission{Level: "CAN_RUN", UserName: "test@company.com"}) assert.NotContains(t, pipelinePermissions, resources.Permission{Level: "CAN_MANAGE", GroupName: "devs"}) @@ -40,8 +41,9 @@ func TestBundlePermissionsDevTarget(t *testing.T) { assert.Contains(t, b.Config.Permissions, resources.Permission{Level: "CAN_VIEW", ServicePrincipalName: "1234-abcd"}) assert.Contains(t, b.Config.Permissions, resources.Permission{Level: "CAN_RUN", UserName: "bot@company.com"}) - err := bundle.Apply(context.Background(), b, permissions.ApplyBundlePermissions()) - require.NoError(t, err) + diags := bundle.Apply(context.Background(), b, permissions.ApplyBundlePermissions()) + require.NoError(t, diags.Error()) + pipelinePermissions := b.Config.Resources.Pipelines["nyc_taxi_pipeline"].Permissions assert.Contains(t, pipelinePermissions, resources.Permission{Level: "CAN_RUN", UserName: "test@company.com"}) assert.Contains(t, pipelinePermissions, resources.Permission{Level: "CAN_MANAGE", GroupName: "devs"}) diff --git a/bundle/tests/conflicting_resource_ids_test.go b/bundle/tests/conflicting_resource_ids_test.go index 704683ad56..16dd1c33ab 100644 --- a/bundle/tests/conflicting_resource_ids_test.go +++ b/bundle/tests/conflicting_resource_ids_test.go @@ -23,18 +23,18 @@ func TestConflictingResourceIdsOneSubconfig(t *testing.T) { ctx := context.Background() b, err := bundle.Load(ctx, "./conflicting_resource_ids/one_subconfiguration") require.NoError(t, err) - err = bundle.Apply(ctx, b, bundle.Seq(mutator.DefaultMutators()...)) + diags := bundle.Apply(ctx, b, bundle.Seq(mutator.DefaultMutators()...)) bundleConfigPath := filepath.FromSlash("conflicting_resource_ids/one_subconfiguration/databricks.yml") resourcesConfigPath := filepath.FromSlash("conflicting_resource_ids/one_subconfiguration/resources.yml") - assert.ErrorContains(t, err, fmt.Sprintf("multiple resources named foo (job at %s, pipeline at %s)", bundleConfigPath, resourcesConfigPath)) + assert.ErrorContains(t, diags.Error(), fmt.Sprintf("multiple resources named foo (job at %s, pipeline at %s)", bundleConfigPath, resourcesConfigPath)) } func TestConflictingResourceIdsTwoSubconfigs(t *testing.T) { ctx := context.Background() b, err := bundle.Load(ctx, "./conflicting_resource_ids/two_subconfigurations") require.NoError(t, err) - err = bundle.Apply(ctx, b, bundle.Seq(mutator.DefaultMutators()...)) + diags := bundle.Apply(ctx, b, bundle.Seq(mutator.DefaultMutators()...)) resources1ConfigPath := filepath.FromSlash("conflicting_resource_ids/two_subconfigurations/resources1.yml") resources2ConfigPath := filepath.FromSlash("conflicting_resource_ids/two_subconfigurations/resources2.yml") - assert.ErrorContains(t, err, fmt.Sprintf("multiple resources named foo (job at %s, pipeline at %s)", resources1ConfigPath, resources2ConfigPath)) + assert.ErrorContains(t, diags.Error(), fmt.Sprintf("multiple resources named foo (job at %s, pipeline at %s)", resources1ConfigPath, resources2ConfigPath)) } diff --git a/bundle/tests/git_test.go b/bundle/tests/git_test.go index c5ae83a20c..b33ffc2112 100644 --- a/bundle/tests/git_test.go +++ b/bundle/tests/git_test.go @@ -34,6 +34,6 @@ func TestGitBundleBranchValidation(t *testing.T) { assert.Equal(t, "feature-a", b.Config.Bundle.Git.Branch) assert.Equal(t, "feature-b", b.Config.Bundle.Git.ActualBranch) - err := bundle.Apply(context.Background(), b, mutator.ValidateGitDetails()) - assert.ErrorContains(t, err, "not on the right Git branch:") + diags := bundle.Apply(context.Background(), b, 
mutator.ValidateGitDetails()) + assert.ErrorContains(t, diags.Error(), "not on the right Git branch:") } diff --git a/bundle/tests/include_test.go b/bundle/tests/include_test.go index eb09d1aa05..fd8ae7198d 100644 --- a/bundle/tests/include_test.go +++ b/bundle/tests/include_test.go @@ -17,9 +17,9 @@ func TestIncludeInvalid(t *testing.T) { ctx := context.Background() b, err := bundle.Load(ctx, "./include_invalid") require.NoError(t, err) - err = bundle.Apply(ctx, b, bundle.Seq(mutator.DefaultMutators()...)) - require.Error(t, err) - assert.Contains(t, err.Error(), "notexists.yml defined in 'include' section does not match any files") + diags := bundle.Apply(ctx, b, bundle.Seq(mutator.DefaultMutators()...)) + require.Error(t, diags.Error()) + assert.ErrorContains(t, diags.Error(), "notexists.yml defined in 'include' section does not match any files") } func TestIncludeWithGlob(t *testing.T) { diff --git a/bundle/tests/interpolation_test.go b/bundle/tests/interpolation_test.go index a9659d33f8..920b9000d1 100644 --- a/bundle/tests/interpolation_test.go +++ b/bundle/tests/interpolation_test.go @@ -12,23 +12,22 @@ import ( func TestInterpolation(t *testing.T) { b := load(t, "./interpolation") - err := bundle.Apply(context.Background(), b, mutator.ResolveVariableReferences( + diags := bundle.Apply(context.Background(), b, mutator.ResolveVariableReferences( "bundle", "workspace", )) - require.NoError(t, err) + require.NoError(t, diags.Error()) assert.Equal(t, "foo bar", b.Config.Bundle.Name) assert.Equal(t, "foo bar | bar", b.Config.Resources.Jobs["my_job"].Name) } func TestInterpolationWithTarget(t *testing.T) { b := loadTarget(t, "./interpolation_target", "development") - err := bundle.Apply(context.Background(), b, mutator.ResolveVariableReferences( + diags := bundle.Apply(context.Background(), b, mutator.ResolveVariableReferences( "bundle", "workspace", )) - require.NoError(t, err) + require.NoError(t, diags.Error()) assert.Equal(t, "foo bar", b.Config.Bundle.Name) assert.Equal(t, "foo bar | bar | development | development", b.Config.Resources.Jobs["my_job"].Name) - } diff --git a/bundle/tests/loader.go b/bundle/tests/loader.go index 3a28d822a2..228763ce92 100644 --- a/bundle/tests/loader.go +++ b/bundle/tests/loader.go @@ -13,8 +13,8 @@ func load(t *testing.T, path string) *bundle.Bundle { ctx := context.Background() b, err := bundle.Load(ctx, path) require.NoError(t, err) - err = bundle.Apply(ctx, b, bundle.Seq(mutator.DefaultMutators()...)) - require.NoError(t, err) + diags := bundle.Apply(ctx, b, bundle.Seq(mutator.DefaultMutators()...)) + require.NoError(t, diags.Error()) return b } @@ -22,14 +22,14 @@ func loadTarget(t *testing.T, path, env string) *bundle.Bundle { ctx := context.Background() b, err := bundle.Load(ctx, path) require.NoError(t, err) - err = bundle.Apply(ctx, b, bundle.Seq(mutator.DefaultMutatorsForTarget(env)...)) - require.NoError(t, err) - err = bundle.Apply(ctx, b, bundle.Seq( + diags := bundle.Apply(ctx, b, bundle.Seq(mutator.DefaultMutatorsForTarget(env)...)) + require.NoError(t, diags.Error()) + diags = bundle.Apply(ctx, b, bundle.Seq( mutator.RewriteSyncPaths(), mutator.MergeJobClusters(), mutator.MergeJobTasks(), mutator.MergePipelineClusters(), )) - require.NoError(t, err) + require.NoError(t, diags.Error()) return b } diff --git a/bundle/tests/path_translation_test.go b/bundle/tests/path_translation_test.go index 6c33934506..05702d2a27 100644 --- a/bundle/tests/path_translation_test.go +++ b/bundle/tests/path_translation_test.go @@ -15,8 +15,8 @@ func 
TestPathTranslationFallback(t *testing.T) { b := loadTarget(t, "./path_translation/fallback", "development") m := mutator.TranslatePaths() - err := bundle.Apply(context.Background(), b, m) - require.NoError(t, err) + diags := bundle.Apply(context.Background(), b, m) + require.NoError(t, diags.Error()) j := b.Config.Resources.Jobs["my_job"] assert.Len(t, j.Tasks, 6) @@ -54,16 +54,16 @@ func TestPathTranslationFallbackError(t *testing.T) { b := loadTarget(t, "./path_translation/fallback", "error") m := mutator.TranslatePaths() - err := bundle.Apply(context.Background(), b, m) - assert.ErrorContains(t, err, `notebook this value is overridden not found`) + diags := bundle.Apply(context.Background(), b, m) + assert.ErrorContains(t, diags.Error(), `notebook this value is overridden not found`) } func TestPathTranslationNominal(t *testing.T) { b := loadTarget(t, "./path_translation/nominal", "development") m := mutator.TranslatePaths() - err := bundle.Apply(context.Background(), b, m) - assert.NoError(t, err) + diags := bundle.Apply(context.Background(), b, m) + assert.NoError(t, diags.Error()) j := b.Config.Resources.Jobs["my_job"] assert.Len(t, j.Tasks, 8) @@ -107,6 +107,6 @@ func TestPathTranslationNominalError(t *testing.T) { b := loadTarget(t, "./path_translation/nominal", "error") m := mutator.TranslatePaths() - err := bundle.Apply(context.Background(), b, m) - assert.ErrorContains(t, err, `notebook this value is overridden not found`) + diags := bundle.Apply(context.Background(), b, m) + assert.ErrorContains(t, diags.Error(), `notebook this value is overridden not found`) } diff --git a/bundle/tests/pipeline_glob_paths_test.go b/bundle/tests/pipeline_glob_paths_test.go index 85a1379263..bf5039b5ff 100644 --- a/bundle/tests/pipeline_glob_paths_test.go +++ b/bundle/tests/pipeline_glob_paths_test.go @@ -27,8 +27,8 @@ func TestExpandPipelineGlobPaths(t *testing.T) { b.SetWorkpaceClient(m.WorkspaceClient) ctx := context.Background() - err := bundle.Apply(ctx, b, phases.Initialize()) - require.NoError(t, err) + diags := bundle.Apply(ctx, b, phases.Initialize()) + require.NoError(t, diags.Error()) require.Equal( t, "/Users/user@domain.com/.bundle/pipeline_glob_paths/default/files/dlt/nyc_taxi_loader", @@ -50,6 +50,6 @@ func TestExpandPipelineGlobPathsWithNonExistent(t *testing.T) { b.SetWorkpaceClient(m.WorkspaceClient) ctx := context.Background() - err := bundle.Apply(ctx, b, phases.Initialize()) - require.ErrorContains(t, err, "notebook ./non-existent not found") + diags := bundle.Apply(ctx, b, phases.Initialize()) + require.ErrorContains(t, diags.Error(), "notebook ./non-existent not found") } diff --git a/bundle/tests/python_wheel_test.go b/bundle/tests/python_wheel_test.go index 8351e96ae4..c44e80a578 100644 --- a/bundle/tests/python_wheel_test.go +++ b/bundle/tests/python_wheel_test.go @@ -17,16 +17,16 @@ func TestPythonWheelBuild(t *testing.T) { require.NoError(t, err) m := phases.Build() - err = bundle.Apply(ctx, b, m) - require.NoError(t, err) + diags := bundle.Apply(ctx, b, m) + require.NoError(t, diags.Error()) matches, err := filepath.Glob("./python_wheel/python_wheel/my_test_code/dist/my_test_code-*.whl") require.NoError(t, err) require.Equal(t, 1, len(matches)) match := libraries.MatchWithArtifacts() - err = bundle.Apply(ctx, b, match) - require.NoError(t, err) + diags = bundle.Apply(ctx, b, match) + require.NoError(t, diags.Error()) } func TestPythonWheelBuildAutoDetect(t *testing.T) { @@ -35,16 +35,16 @@ func TestPythonWheelBuildAutoDetect(t *testing.T) { require.NoError(t, err) m 
:= phases.Build() - err = bundle.Apply(ctx, b, m) - require.NoError(t, err) + diags := bundle.Apply(ctx, b, m) + require.NoError(t, diags.Error()) matches, err := filepath.Glob("./python_wheel/python_wheel_no_artifact/dist/my_test_code-*.whl") require.NoError(t, err) require.Equal(t, 1, len(matches)) match := libraries.MatchWithArtifacts() - err = bundle.Apply(ctx, b, match) - require.NoError(t, err) + diags = bundle.Apply(ctx, b, match) + require.NoError(t, diags.Error()) } func TestPythonWheelWithDBFSLib(t *testing.T) { @@ -53,12 +53,12 @@ func TestPythonWheelWithDBFSLib(t *testing.T) { require.NoError(t, err) m := phases.Build() - err = bundle.Apply(ctx, b, m) - require.NoError(t, err) + diags := bundle.Apply(ctx, b, m) + require.NoError(t, diags.Error()) match := libraries.MatchWithArtifacts() - err = bundle.Apply(ctx, b, match) - require.NoError(t, err) + diags = bundle.Apply(ctx, b, match) + require.NoError(t, diags.Error()) } func TestPythonWheelBuildNoBuildJustUpload(t *testing.T) { @@ -67,12 +67,12 @@ func TestPythonWheelBuildNoBuildJustUpload(t *testing.T) { require.NoError(t, err) m := phases.Build() - err = bundle.Apply(ctx, b, m) - require.NoError(t, err) + diags := bundle.Apply(ctx, b, m) + require.NoError(t, diags.Error()) match := libraries.MatchWithArtifacts() - err = bundle.Apply(ctx, b, match) - require.ErrorContains(t, err, "./non-existing/*.whl") + diags = bundle.Apply(ctx, b, match) + require.ErrorContains(t, diags.Error(), "./non-existing/*.whl") require.NotZero(t, len(b.Config.Artifacts)) diff --git a/bundle/tests/relative_path_with_includes_test.go b/bundle/tests/relative_path_with_includes_test.go index 1d1f321d4b..6e13628be9 100644 --- a/bundle/tests/relative_path_with_includes_test.go +++ b/bundle/tests/relative_path_with_includes_test.go @@ -14,8 +14,8 @@ func TestRelativePathsWithIncludes(t *testing.T) { b := loadTarget(t, "./relative_path_with_includes", "default") m := mutator.TranslatePaths() - err := bundle.Apply(context.Background(), b, m) - assert.NoError(t, err) + diags := bundle.Apply(context.Background(), b, m) + assert.NoError(t, diags.Error()) assert.Equal(t, "artifact_a", b.Config.Artifacts["test_a"].Path) assert.Equal(t, filepath.Join("subfolder", "artifact_b"), b.Config.Artifacts["test_b"].Path) diff --git a/bundle/tests/run_as_test.go b/bundle/tests/run_as_test.go index 98aaf63580..321bb5130f 100644 --- a/bundle/tests/run_as_test.go +++ b/bundle/tests/run_as_test.go @@ -7,6 +7,7 @@ import ( "github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle/config" "github.com/databricks/cli/bundle/config/mutator" + "github.com/databricks/cli/libs/diag" "github.com/databricks/databricks-sdk-go/service/iam" "github.com/stretchr/testify/assert" ) @@ -15,7 +16,7 @@ func TestRunAsDefault(t *testing.T) { b := load(t, "./run_as") ctx := context.Background() - bundle.ApplyFunc(ctx, b, func(ctx context.Context, b *bundle.Bundle) error { + bundle.ApplyFunc(ctx, b, func(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { b.Config.Workspace.CurrentUser = &config.User{ User: &iam.User{ UserName: "jane@doe.com", @@ -24,8 +25,8 @@ func TestRunAsDefault(t *testing.T) { return nil }) - err := bundle.Apply(ctx, b, mutator.SetRunAs()) - assert.NoError(t, err) + diags := bundle.Apply(ctx, b, mutator.SetRunAs()) + assert.NoError(t, diags.Error()) assert.Len(t, b.Config.Resources.Jobs, 3) jobs := b.Config.Resources.Jobs @@ -55,7 +56,7 @@ func TestRunAsDevelopment(t *testing.T) { b := loadTarget(t, "./run_as", "development") ctx := context.Background() - 
bundle.ApplyFunc(ctx, b, func(ctx context.Context, b *bundle.Bundle) error { + bundle.ApplyFunc(ctx, b, func(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { b.Config.Workspace.CurrentUser = &config.User{ User: &iam.User{ UserName: "jane@doe.com", @@ -64,8 +65,8 @@ func TestRunAsDevelopment(t *testing.T) { return nil }) - err := bundle.Apply(ctx, b, mutator.SetRunAs()) - assert.NoError(t, err) + diags := bundle.Apply(ctx, b, mutator.SetRunAs()) + assert.NoError(t, diags.Error()) assert.Len(t, b.Config.Resources.Jobs, 3) jobs := b.Config.Resources.Jobs diff --git a/bundle/tests/variables_test.go b/bundle/tests/variables_test.go index 05314a8465..fde36344f9 100644 --- a/bundle/tests/variables_test.go +++ b/bundle/tests/variables_test.go @@ -13,97 +13,97 @@ import ( func TestVariables(t *testing.T) { t.Setenv("BUNDLE_VAR_b", "def") b := load(t, "./variables/vanilla") - err := bundle.Apply(context.Background(), b, bundle.Seq( + diags := bundle.Apply(context.Background(), b, bundle.Seq( mutator.SetVariables(), mutator.ResolveVariableReferences( "variables", ), )) - require.NoError(t, err) + require.NoError(t, diags.Error()) assert.Equal(t, "abc def", b.Config.Bundle.Name) } func TestVariablesLoadingFailsWhenRequiredVariableIsNotSpecified(t *testing.T) { b := load(t, "./variables/vanilla") - err := bundle.Apply(context.Background(), b, bundle.Seq( + diags := bundle.Apply(context.Background(), b, bundle.Seq( mutator.SetVariables(), mutator.ResolveVariableReferences( "variables", ), )) - assert.ErrorContains(t, err, "no value assigned to required variable b. Assignment can be done through the \"--var\" flag or by setting the BUNDLE_VAR_b environment variable") + assert.ErrorContains(t, diags.Error(), "no value assigned to required variable b. Assignment can be done through the \"--var\" flag or by setting the BUNDLE_VAR_b environment variable") } func TestVariablesTargetsBlockOverride(t *testing.T) { b := load(t, "./variables/env_overrides") - err := bundle.Apply(context.Background(), b, bundle.Seq( + diags := bundle.Apply(context.Background(), b, bundle.Seq( mutator.SelectTarget("env-with-single-variable-override"), mutator.SetVariables(), mutator.ResolveVariableReferences( "variables", ), )) - require.NoError(t, err) + require.NoError(t, diags.Error()) assert.Equal(t, "default-a dev-b", b.Config.Workspace.Profile) } func TestVariablesTargetsBlockOverrideForMultipleVariables(t *testing.T) { b := load(t, "./variables/env_overrides") - err := bundle.Apply(context.Background(), b, bundle.Seq( + diags := bundle.Apply(context.Background(), b, bundle.Seq( mutator.SelectTarget("env-with-two-variable-overrides"), mutator.SetVariables(), mutator.ResolveVariableReferences( "variables", ), )) - require.NoError(t, err) + require.NoError(t, diags.Error()) assert.Equal(t, "prod-a prod-b", b.Config.Workspace.Profile) } func TestVariablesTargetsBlockOverrideWithProcessEnvVars(t *testing.T) { t.Setenv("BUNDLE_VAR_b", "env-var-b") b := load(t, "./variables/env_overrides") - err := bundle.Apply(context.Background(), b, bundle.Seq( + diags := bundle.Apply(context.Background(), b, bundle.Seq( mutator.SelectTarget("env-with-two-variable-overrides"), mutator.SetVariables(), mutator.ResolveVariableReferences( "variables", ), )) - require.NoError(t, err) + require.NoError(t, diags.Error()) assert.Equal(t, "prod-a env-var-b", b.Config.Workspace.Profile) } func TestVariablesTargetsBlockOverrideWithMissingVariables(t *testing.T) { b := load(t, "./variables/env_overrides") - err := bundle.Apply(context.Background(), 
b, bundle.Seq( + diags := bundle.Apply(context.Background(), b, bundle.Seq( mutator.SelectTarget("env-missing-a-required-variable-assignment"), mutator.SetVariables(), mutator.ResolveVariableReferences( "variables", ), )) - assert.ErrorContains(t, err, "no value assigned to required variable b. Assignment can be done through the \"--var\" flag or by setting the BUNDLE_VAR_b environment variable") + assert.ErrorContains(t, diags.Error(), "no value assigned to required variable b. Assignment can be done through the \"--var\" flag or by setting the BUNDLE_VAR_b environment variable") } func TestVariablesTargetsBlockOverrideWithUndefinedVariables(t *testing.T) { b := load(t, "./variables/env_overrides") - err := bundle.Apply(context.Background(), b, bundle.Seq( + diags := bundle.Apply(context.Background(), b, bundle.Seq( mutator.SelectTarget("env-using-an-undefined-variable"), mutator.SetVariables(), mutator.ResolveVariableReferences( "variables", ), )) - assert.ErrorContains(t, err, "variable c is not defined but is assigned a value") + assert.ErrorContains(t, diags.Error(), "variable c is not defined but is assigned a value") } func TestVariablesWithoutDefinition(t *testing.T) { t.Setenv("BUNDLE_VAR_a", "foo") t.Setenv("BUNDLE_VAR_b", "bar") b := load(t, "./variables/without_definition") - err := bundle.Apply(context.Background(), b, mutator.SetVariables()) - require.NoError(t, err) + diags := bundle.Apply(context.Background(), b, mutator.SetVariables()) + require.NoError(t, diags.Error()) require.True(t, b.Config.Variables["a"].HasValue()) require.True(t, b.Config.Variables["b"].HasValue()) assert.Equal(t, "foo", *b.Config.Variables["a"].Value) @@ -112,11 +112,11 @@ func TestVariablesWithoutDefinition(t *testing.T) { func TestVariablesWithTargetLookupOverrides(t *testing.T) { b := load(t, "./variables/env_overrides") - err := bundle.Apply(context.Background(), b, bundle.Seq( + diags := bundle.Apply(context.Background(), b, bundle.Seq( mutator.SelectTarget("env-overrides-lookup"), mutator.SetVariables(), )) - require.NoError(t, err) + require.NoError(t, diags.Error()) assert.Equal(t, "cluster: some-test-cluster", b.Config.Variables["d"].Lookup.String()) assert.Equal(t, "instance-pool: some-test-instance-pool", b.Config.Variables["e"].Lookup.String()) } diff --git a/cmd/bundle/deploy.go b/cmd/bundle/deploy.go index 0ba8a187a6..8b8cb9f2ec 100644 --- a/cmd/bundle/deploy.go +++ b/cmd/bundle/deploy.go @@ -7,6 +7,7 @@ import ( "github.com/databricks/cli/bundle/phases" "github.com/databricks/cli/cmd/bundle/utils" "github.com/databricks/cli/cmd/root" + "github.com/databricks/cli/libs/diag" "github.com/spf13/cobra" ) @@ -31,7 +32,7 @@ func newDeployCommand() *cobra.Command { ctx := cmd.Context() b := bundle.Get(ctx) - bundle.ApplyFunc(ctx, b, func(context.Context, *bundle.Bundle) error { + bundle.ApplyFunc(ctx, b, func(context.Context, *bundle.Bundle) diag.Diagnostics { b.Config.Bundle.Force = force b.Config.Bundle.Deployment.Lock.Force = forceLock if cmd.Flag("compute-id").Changed { @@ -45,11 +46,15 @@ func newDeployCommand() *cobra.Command { return nil }) - return bundle.Apply(ctx, b, bundle.Seq( + diags := bundle.Apply(ctx, b, bundle.Seq( phases.Initialize(), phases.Build(), phases.Deploy(), )) + if err := diags.Error(); err != nil { + return err + } + return nil } return cmd diff --git a/cmd/bundle/deployment/bind.go b/cmd/bundle/deployment/bind.go index 184cac1d1e..11c560b12a 100644 --- a/cmd/bundle/deployment/bind.go +++ b/cmd/bundle/deployment/bind.go @@ -10,6 +10,7 @@ import ( 
"github.com/databricks/cli/cmd/bundle/utils" "github.com/databricks/cli/cmd/root" "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/cli/libs/diag" "github.com/spf13/cobra" ) @@ -44,12 +45,12 @@ func newBindCommand() *cobra.Command { return fmt.Errorf("%s with an id '%s' is not found", resource.TerraformResourceName(), args[1]) } - bundle.ApplyFunc(ctx, b, func(context.Context, *bundle.Bundle) error { + bundle.ApplyFunc(ctx, b, func(context.Context, *bundle.Bundle) diag.Diagnostics { b.Config.Bundle.Deployment.Lock.Force = forceLock return nil }) - err = bundle.Apply(ctx, b, bundle.Seq( + diags := bundle.Apply(ctx, b, bundle.Seq( phases.Initialize(), phases.Bind(&terraform.BindOptions{ AutoApprove: autoApprove, @@ -58,7 +59,7 @@ func newBindCommand() *cobra.Command { ResourceId: args[1], }), )) - if err != nil { + if err := diags.Error(); err != nil { return fmt.Errorf("failed to bind the resource, err: %w", err) } diff --git a/cmd/bundle/deployment/unbind.go b/cmd/bundle/deployment/unbind.go index b5fb69200d..76727877f8 100644 --- a/cmd/bundle/deployment/unbind.go +++ b/cmd/bundle/deployment/unbind.go @@ -7,6 +7,7 @@ import ( "github.com/databricks/cli/bundle/phases" "github.com/databricks/cli/cmd/bundle/utils" "github.com/databricks/cli/cmd/root" + "github.com/databricks/cli/libs/diag" "github.com/spf13/cobra" ) @@ -29,15 +30,19 @@ func newUnbindCommand() *cobra.Command { return err } - bundle.ApplyFunc(ctx, b, func(context.Context, *bundle.Bundle) error { + bundle.ApplyFunc(ctx, b, func(context.Context, *bundle.Bundle) diag.Diagnostics { b.Config.Bundle.Deployment.Lock.Force = forceLock return nil }) - return bundle.Apply(cmd.Context(), b, bundle.Seq( + diags := bundle.Apply(cmd.Context(), b, bundle.Seq( phases.Initialize(), phases.Unbind(resource.TerraformResourceName(), args[0]), )) + if err := diags.Error(); err != nil { + return err + } + return nil } return cmd diff --git a/cmd/bundle/destroy.go b/cmd/bundle/destroy.go index dc5ea45f87..38b717713d 100644 --- a/cmd/bundle/destroy.go +++ b/cmd/bundle/destroy.go @@ -10,6 +10,7 @@ import ( "github.com/databricks/cli/cmd/bundle/utils" "github.com/databricks/cli/cmd/root" "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/cli/libs/diag" "github.com/databricks/cli/libs/flags" "github.com/spf13/cobra" "golang.org/x/term" @@ -32,7 +33,7 @@ func newDestroyCommand() *cobra.Command { ctx := cmd.Context() b := bundle.Get(ctx) - bundle.ApplyFunc(ctx, b, func(ctx context.Context, b *bundle.Bundle) error { + bundle.ApplyFunc(ctx, b, func(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { // If `--force-lock` is specified, force acquisition of the deployment lock. 
b.Config.Bundle.Deployment.Lock.Force = forceDestroy @@ -57,11 +58,15 @@ func newDestroyCommand() *cobra.Command { return fmt.Errorf("please specify --auto-approve since selected logging format is json") } - return bundle.Apply(ctx, b, bundle.Seq( + diags := bundle.Apply(ctx, b, bundle.Seq( phases.Initialize(), phases.Build(), phases.Destroy(), )) + if err := diags.Error(); err != nil { + return err + } + return nil } return cmd diff --git a/cmd/bundle/run.go b/cmd/bundle/run.go index 9b4ad5c8d3..87ea8610cc 100644 --- a/cmd/bundle/run.go +++ b/cmd/bundle/run.go @@ -35,14 +35,14 @@ func newRunCommand() *cobra.Command { ctx := cmd.Context() b := bundle.Get(ctx) - err := bundle.Apply(ctx, b, bundle.Seq( + diags := bundle.Apply(ctx, b, bundle.Seq( phases.Initialize(), terraform.Interpolate(), terraform.Write(), terraform.StatePull(), terraform.Load(terraform.ErrorOnEmptyState), )) - if err != nil { + if err := diags.Error(); err != nil { return err } diff --git a/cmd/bundle/summary.go b/cmd/bundle/summary.go index 68354a0a23..a28ceede97 100644 --- a/cmd/bundle/summary.go +++ b/cmd/bundle/summary.go @@ -33,8 +33,8 @@ func newSummaryCommand() *cobra.Command { cmd.RunE = func(cmd *cobra.Command, args []string) error { b := bundle.Get(cmd.Context()) - err := bundle.Apply(cmd.Context(), b, phases.Initialize()) - if err != nil { + diags := bundle.Apply(cmd.Context(), b, phases.Initialize()) + if err := diags.Error(); err != nil { return err } @@ -47,18 +47,18 @@ func newSummaryCommand() *cobra.Command { noCache := errors.Is(stateFileErr, os.ErrNotExist) || errors.Is(configFileErr, os.ErrNotExist) if forcePull || noCache { - err = bundle.Apply(cmd.Context(), b, bundle.Seq( + diags = bundle.Apply(cmd.Context(), b, bundle.Seq( terraform.StatePull(), terraform.Interpolate(), terraform.Write(), )) - if err != nil { + if err := diags.Error(); err != nil { return err } } - err = bundle.Apply(cmd.Context(), b, terraform.Load()) - if err != nil { + diags = bundle.Apply(cmd.Context(), b, terraform.Load()) + if err := diags.Error(); err != nil { return err } diff --git a/cmd/bundle/sync.go b/cmd/bundle/sync.go index 0b7ab44737..0b7f9b3a90 100644 --- a/cmd/bundle/sync.go +++ b/cmd/bundle/sync.go @@ -49,8 +49,8 @@ func newSyncCommand() *cobra.Command { b := bundle.Get(cmd.Context()) // Run initialize phase to make sure paths are set. 
- err := bundle.Apply(cmd.Context(), b, phases.Initialize()) - if err != nil { + diags := bundle.Apply(cmd.Context(), b, phases.Initialize()) + if err := diags.Error(); err != nil { return err } diff --git a/cmd/bundle/utils/utils.go b/cmd/bundle/utils/utils.go index e900f47c38..e53a40b9d6 100644 --- a/cmd/bundle/utils/utils.go +++ b/cmd/bundle/utils/utils.go @@ -5,6 +5,7 @@ import ( "github.com/databricks/cli/bundle" "github.com/databricks/cli/cmd/root" + "github.com/databricks/cli/libs/diag" "github.com/spf13/cobra" ) @@ -22,7 +23,9 @@ func ConfigureBundleWithVariables(cmd *cobra.Command, args []string) error { // Initialize variables by assigning them values passed as command line flags b := bundle.Get(cmd.Context()) - return bundle.ApplyFunc(cmd.Context(), b, func(ctx context.Context, b *bundle.Bundle) error { - return b.Config.InitializeVariables(variables) + diags := bundle.ApplyFunc(cmd.Context(), b, func(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { + err := b.Config.InitializeVariables(variables) + return diag.FromErr(err) }) + return diags.Error() } diff --git a/cmd/bundle/validate.go b/cmd/bundle/validate.go index a650fcfdef..42686b3284 100644 --- a/cmd/bundle/validate.go +++ b/cmd/bundle/validate.go @@ -22,8 +22,8 @@ func newValidateCommand() *cobra.Command { cmd.RunE = func(cmd *cobra.Command, args []string) error { b := bundle.Get(cmd.Context()) - err := bundle.Apply(cmd.Context(), b, phases.Initialize()) - if err != nil { + diags := bundle.Apply(cmd.Context(), b, phases.Initialize()) + if err := diags.Error(); err != nil { return err } diff --git a/cmd/root/bundle.go b/cmd/root/bundle.go index edfc1f4315..6a6aeb4d2f 100644 --- a/cmd/root/bundle.go +++ b/cmd/root/bundle.go @@ -6,6 +6,7 @@ import ( "github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle/config/mutator" "github.com/databricks/cli/bundle/env" + "github.com/databricks/cli/libs/diag" envlib "github.com/databricks/cli/libs/env" "github.com/spf13/cobra" "golang.org/x/exp/maps" @@ -64,17 +65,17 @@ func loadBundle(cmd *cobra.Command, args []string, load func(ctx context.Context profile := getProfile(cmd) if profile != "" { - err = bundle.ApplyFunc(ctx, b, func(ctx context.Context, b *bundle.Bundle) error { + diags := bundle.ApplyFunc(ctx, b, func(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { b.Config.Workspace.Profile = profile return nil }) - if err != nil { + if err := diags.Error(); err != nil { return nil, err } } - err = bundle.Apply(ctx, b, bundle.Seq(mutator.DefaultMutators()...)) - if err != nil { + diags := bundle.Apply(ctx, b, bundle.Seq(mutator.DefaultMutators()...)) + if err := diags.Error(); err != nil { return nil, err } @@ -102,8 +103,8 @@ func configureBundle(cmd *cobra.Command, args []string, load func(ctx context.Co } ctx := cmd.Context() - err = bundle.Apply(ctx, b, m) - if err != nil { + diags := bundle.Apply(ctx, b, m) + if err := diags.Error(); err != nil { return err } diff --git a/internal/bundle/artifacts_test.go b/internal/bundle/artifacts_test.go index 0f3769ece3..2ced12fdd1 100644 --- a/internal/bundle/artifacts_test.go +++ b/internal/bundle/artifacts_test.go @@ -74,8 +74,8 @@ func TestAccUploadArtifactFileToCorrectRemotePath(t *testing.T) { }, } - err := bundle.Apply(ctx, b, artifacts.BasicUpload("test")) - require.NoError(t, err) + diags := bundle.Apply(ctx, b, artifacts.BasicUpload("test")) + require.NoError(t, diags.Error()) // The remote path attribute on the artifact file should have been set. 
require.Regexp(t, diff --git a/libs/diag/diagnostic.go b/libs/diag/diagnostic.go index 02d2e7c176..68b4ad611b 100644 --- a/libs/diag/diagnostic.go +++ b/libs/diag/diagnostic.go @@ -32,6 +32,19 @@ func Errorf(format string, args ...any) Diagnostics { } } +// FromErr returns a new error diagnostic from the specified error, if any. +func FromErr(err error) Diagnostics { + if err == nil { + return nil + } + return []Diagnostic{ + { + Severity: Error, + Summary: err.Error(), + }, + } +} + // Warningf creates a new warning diagnostic. func Warningf(format string, args ...any) Diagnostics { return []Diagnostic{ @@ -74,3 +87,13 @@ func (ds Diagnostics) HasError() bool { } return false } + +// Return first error in the set of diagnostics. +func (ds Diagnostics) Error() error { + for _, d := range ds { + if d.Severity == Error { + return fmt.Errorf(d.Summary) + } + } + return nil +} diff --git a/libs/template/renderer_test.go b/libs/template/renderer_test.go index dc287440ca..cad58a5326 100644 --- a/libs/template/renderer_test.go +++ b/libs/template/renderer_test.go @@ -17,6 +17,7 @@ import ( "github.com/databricks/cli/bundle/config/mutator" "github.com/databricks/cli/bundle/phases" "github.com/databricks/cli/cmd/root" + "github.com/databricks/cli/libs/diag" "github.com/databricks/cli/libs/tags" "github.com/databricks/databricks-sdk-go" workspaceConfig "github.com/databricks/databricks-sdk-go/config" @@ -69,7 +70,7 @@ func assertBuiltinTemplateValid(t *testing.T, template string, settings map[stri require.NoError(t, err) // Apply initialize / validation mutators - bundle.ApplyFunc(ctx, b, func(ctx context.Context, b *bundle.Bundle) error { + bundle.ApplyFunc(ctx, b, func(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { b.Config.Workspace.CurrentUser = &bundleConfig.User{User: cachedUser} return nil }) @@ -79,17 +80,17 @@ func assertBuiltinTemplateValid(t *testing.T, template string, settings map[stri b.Config.Bundle.Terraform = &bundleConfig.Terraform{ ExecPath: "sh", } - err = bundle.Apply(ctx, b, bundle.Seq( + diags := bundle.Apply(ctx, b, bundle.Seq( bundle.Seq(mutator.DefaultMutators()...), mutator.SelectTarget(target), phases.Initialize(), )) - require.NoError(t, err) + require.NoError(t, diags.Error()) // Apply build mutator if build { - err = bundle.Apply(ctx, b, phases.Build()) - require.NoError(t, err) + diags = bundle.Apply(ctx, b, phases.Build()) + require.NoError(t, diags.Error()) } } From e3717ba1c43cc423bac5dae8c17489cfebfbb4c3 Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Tue, 26 Mar 2024 08:57:48 +0100 Subject: [PATCH 08/17] Fix flaky test in `libs/process` (#1314) ## Changes The order of stdout and stderr being read into the buffer for combined output is not deterministic due to scheduling of the underlying goroutines that consume them. That's why this asserts on the contents and not the order. 
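To illustrate the approach outside of this repository, here is a minimal, self-contained sketch (the helper and test names are placeholders, not the exact code added below):

```
package process_test

import (
	"bufio"
	"bytes"
	"testing"

	"github.com/stretchr/testify/assert"
)

// splitCombined returns the non-empty lines of a combined stdout/stderr
// buffer so the assertion is independent of interleaving order.
func splitCombined(b []byte) (lines []string) {
	scan := bufio.NewScanner(bytes.NewReader(b))
	for scan.Scan() {
		if line := scan.Text(); line != "" {
			lines = append(lines, line)
		}
	}
	return lines
}

func TestCombinedOutputIsOrderIndependent(t *testing.T) {
	// Simulate stdout and stderr racing into the same buffer; the real test
	// gets this interleaving from goroutines draining the two pipes.
	var buf bytes.Buffer
	buf.WriteString("2\n1\n")

	// Assert on the set of lines, not on their order.
	assert.ElementsMatch(t, []string{"1", "2"}, splitCombined(buf.Bytes()))
}
```

The real test drains stdout and stderr from a spawned process; the only point is that the assertion compares line sets, not byte order.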
--- libs/process/background_test.go | 24 +++++++++++++++++++----- 1 file changed, 19 insertions(+), 5 deletions(-) diff --git a/libs/process/background_test.go b/libs/process/background_test.go index 5bf2400bcb..2ee6727a01 100644 --- a/libs/process/background_test.go +++ b/libs/process/background_test.go @@ -1,6 +1,7 @@ package process import ( + "bufio" "bytes" "context" "fmt" @@ -12,6 +13,17 @@ import ( "github.com/stretchr/testify/assert" ) +func splitLines(b []byte) (lines []string) { + scan := bufio.NewScanner(bytes.NewReader(b)) + for scan.Scan() { + line := scan.Text() + if line != "" { + lines = append(lines, line) + } + } + return lines +} + func TestBackgroundUnwrapsNotFound(t *testing.T) { ctx := context.Background() _, err := Background(ctx, []string{"/bin/meeecho", "1"}) @@ -46,7 +58,12 @@ func TestBackgroundCombinedOutput(t *testing.T) { }, WithCombinedOutput(&buf)) assert.NoError(t, err) assert.Equal(t, "2", strings.TrimSpace(res)) - assert.Equal(t, "1\n2\n", strings.ReplaceAll(buf.String(), "\r", "")) + + // The order of stdout and stderr being read into the buffer + // for combined output is not deterministic due to scheduling + // of the underlying goroutines that consume them. + // That's why this asserts on the contents and not the order. + assert.ElementsMatch(t, []string{"1", "2"}, splitLines(buf.Bytes())) } func TestBackgroundCombinedOutputFailure(t *testing.T) { @@ -66,10 +83,7 @@ func TestBackgroundCombinedOutputFailure(t *testing.T) { assert.Equal(t, "2", strings.TrimSpace(processErr.Stdout)) } assert.Equal(t, "2", strings.TrimSpace(res)) - - out := strings.ReplaceAll(buf.String(), "\r", "") - assert.Contains(t, out, "1\n") - assert.Contains(t, out, "2\n") + assert.ElementsMatch(t, []string{"1", "2"}, splitLines(buf.Bytes())) } func TestBackgroundNoStdin(t *testing.T) { From b50380471ed3661b25d7c4fe4f37ca433b8d45fe Mon Sep 17 00:00:00 2001 From: shreyas-goenka <88374338+shreyas-goenka@users.noreply.github.com> Date: Tue, 26 Mar 2024 18:32:09 +0530 Subject: [PATCH 09/17] Allow unknown properties in the config file for template initialization (#1315) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## Changes Previously, we would error if a property was defined in the config file that was not defined in the schema. ## Tests Unit tests. Also manually verified that the e2e flow works fine. Before: ``` shreyas.goenka@THW32HFW6T playground % cli bundle init default-python --config-file config.json Welcome to the default Python template for Databricks Asset Bundles! Error: failed to load config from file config.json: property include_pytho is not defined in the schema ``` After: ``` shreyas.goenka@THW32HFW6T playground % cli bundle init default-python --config-file config.json Welcome to the default Python template for Databricks Asset Bundles! Workspace to use (auto-detected, edit in 'test/databricks.yml'): https://dbc-a39a1eb1-ef95.cloud.databricks.com ✨ Your new project has been created in the 'test' directory! Please refer to the README.md file for "getting started" instructions. See also the documentation at https://docs.databricks.com/dev-tools/bundles/index.html.
``` --- libs/template/config.go | 13 ++++++++++++- libs/template/config_test.go | 11 +++++++++++ .../config.json | 3 ++- 3 files changed, 25 insertions(+), 2 deletions(-) diff --git a/libs/template/config.go b/libs/template/config.go index 970e74ca95..5470aefeb6 100644 --- a/libs/template/config.go +++ b/libs/template/config.go @@ -70,8 +70,14 @@ func validateSchema(schema *jsonschema.Schema) error { // Reads json file at path and assigns values from the file func (c *config) assignValuesFromFile(path string) error { - // Load the config file. + // It's valid to set additional properties in the config file that are not + // defined in the schema. They will be filtered below. Thus for the duration of + // the LoadInstance call, we disable the additional properties check, + // to allow those properties to be loaded. + c.schema.AdditionalProperties = true configFromFile, err := c.schema.LoadInstance(path) + c.schema.AdditionalProperties = false + if err != nil { return fmt.Errorf("failed to load config from file %s: %w", path, err) } @@ -79,6 +85,11 @@ func (c *config) assignValuesFromFile(path string) error { // Write configs from the file to the input map, not overwriting any existing // configurations. for name, val := range configFromFile { + // If a property is not defined in the schema, skip it. + if _, ok := c.schema.Properties[name]; !ok { + continue + } + // If a value is already assigned, keep the original value. if _, ok := c.values[name]; ok { continue } diff --git a/libs/template/config_test.go b/libs/template/config_test.go index 847c2615bb..1af2e5f5ae 100644 --- a/libs/template/config_test.go +++ b/libs/template/config_test.go @@ -52,6 +52,17 @@ func TestTemplateConfigAssignValuesFromFileDoesNotOverwriteExistingConfigs(t *te assert.Equal(t, "this-is-not-overwritten", c.values["string_val"]) } +func TestTemplateConfigAssignValuesFromFileFiltersPropertiesNotInTheSchema(t *testing.T) { + c := testConfig(t) + + err := c.assignValuesFromFile("./testdata/config-assign-from-file-unknown-property/config.json") + assert.NoError(t, err) + + // assert only the known property is loaded + assert.Len(t, c.values, 1) + assert.Equal(t, "i am a known property", c.values["string_val"]) +} + func TestTemplateConfigAssignDefaultValues(t *testing.T) { c := testConfig(t) diff --git a/libs/template/testdata/config-assign-from-file-unknown-property/config.json b/libs/template/testdata/config-assign-from-file-unknown-property/config.json index 518eaa6a26..69ed020cf9 100644 --- a/libs/template/testdata/config-assign-from-file-unknown-property/config.json +++ b/libs/template/testdata/config-assign-from-file-unknown-property/config.json @@ -1,3 +1,4 @@ { - "unknown_prop": 123 + "unknown_prop": 123, + "string_val": "i am a known property" } From 00d76d5afaa9fa7abfb57907e65e6be82debd3e9 Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Wed, 27 Mar 2024 10:03:24 +0100 Subject: [PATCH 10/17] Move path field to bundle type (#1316) ## Changes The bundle path was previously stored on the `config.Root` type under the assumption that the first configuration file being loaded would set it. This is slightly counterintuitive and we know what the path is upon construction of the bundle. The new location for this property reflects this. ## Tests Unit tests pass. 
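As a hedged sketch of the resulting shape (test name and paths are illustrative), fixtures now set the root directly on the bundle and anchor paths at `b.RootPath`:

```
package example_test

import (
	"path/filepath"
	"testing"

	"github.com/databricks/cli/bundle"
	"github.com/databricks/cli/bundle/config"
	"github.com/stretchr/testify/assert"
)

func TestRootPathLivesOnBundle(t *testing.T) {
	dir := t.TempDir()

	// The root directory is now a field on the Bundle struct itself,
	// set at construction time, rather than a field on config.Root.
	b := &bundle.Bundle{
		RootPath: dir,
		Config:   config.Root{},
	}

	// Code that previously anchored at b.Config.Path anchors at b.RootPath.
	full := filepath.Join(b.RootPath, "setup.py")
	rel, err := filepath.Rel(b.RootPath, full)
	assert.NoError(t, err)
	assert.Equal(t, "setup.py", rel)
}
```

Setting the field at construction time avoids relying on the first loaded configuration file to populate it.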
--- bundle/artifacts/build.go | 2 +- bundle/artifacts/upload_test.go | 4 +-- bundle/artifacts/whl/autodetect.go | 6 ++-- bundle/artifacts/whl/from_libraries.go | 2 +- bundle/bundle.go | 15 +++++--- bundle/bundle_test.go | 4 +-- .../expand_pipeline_glob_paths_test.go | 2 +- bundle/config/mutator/load_git_details.go | 4 +-- bundle/config/mutator/process_include_test.go | 4 +-- .../config/mutator/process_root_includes.go | 8 ++--- .../mutator/process_root_includes_test.go | 34 ++++++++----------- bundle/config/mutator/rewrite_sync_paths.go | 4 +-- .../config/mutator/rewrite_sync_paths_test.go | 10 +++--- bundle/config/mutator/trampoline.go | 2 +- bundle/config/mutator/trampoline_test.go | 2 +- bundle/config/mutator/translate_paths.go | 2 +- bundle/config/mutator/translate_paths_test.go | 24 ++++++------- bundle/config/root.go | 11 +----- bundle/deploy/files/sync.go | 2 +- bundle/deploy/metadata/compute.go | 2 +- bundle/deploy/state_pull.go | 2 +- bundle/deploy/state_pull_test.go | 10 +++--- bundle/deploy/state_push_test.go | 2 +- bundle/deploy/state_update_test.go | 12 +++---- bundle/deploy/terraform/init_test.go | 14 ++++---- bundle/deploy/terraform/load_test.go | 2 +- bundle/deploy/terraform/state_pull_test.go | 2 +- bundle/deploy/terraform/state_push_test.go | 2 +- bundle/libraries/libraries.go | 2 +- bundle/libraries/libraries_test.go | 2 +- bundle/python/conditional_transform_test.go | 6 ++-- bundle/python/transform_test.go | 2 +- bundle/root_test.go | 4 +-- bundle/scripts/scripts.go | 2 +- bundle/scripts/scripts_test.go | 2 +- bundle/tests/python_wheel_test.go | 4 +-- cmd/bundle/generate/generate_test.go | 9 ++--- cmd/sync/sync_test.go | 3 +- internal/bundle/artifacts_test.go | 2 +- 39 files changed, 104 insertions(+), 124 deletions(-) diff --git a/bundle/artifacts/build.go b/bundle/artifacts/build.go index f3ee097c28..349b1ff898 100644 --- a/bundle/artifacts/build.go +++ b/bundle/artifacts/build.go @@ -46,7 +46,7 @@ func (m *build) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { // If artifact path is not provided, use bundle root dir if artifact.Path == "" { - artifact.Path = b.Config.Path + artifact.Path = b.RootPath } if !filepath.IsAbs(artifact.Path) { diff --git a/bundle/artifacts/upload_test.go b/bundle/artifacts/upload_test.go index ec71100958..687d73b4a8 100644 --- a/bundle/artifacts/upload_test.go +++ b/bundle/artifacts/upload_test.go @@ -36,8 +36,8 @@ func TestExpandGlobFilesSource(t *testing.T) { t2.Close(t) b := &bundle.Bundle{ + RootPath: rootPath, Config: config.Root{ - Path: rootPath, Artifacts: map[string]*config.Artifact{ "test": { Type: "custom", @@ -72,8 +72,8 @@ func TestExpandGlobFilesSourceWithNoMatches(t *testing.T) { require.NoError(t, err) b := &bundle.Bundle{ + RootPath: rootPath, Config: config.Root{ - Path: rootPath, Artifacts: map[string]*config.Artifact{ "test": { Type: "custom", diff --git a/bundle/artifacts/whl/autodetect.go b/bundle/artifacts/whl/autodetect.go index d11db83110..ee77fff01b 100644 --- a/bundle/artifacts/whl/autodetect.go +++ b/bundle/artifacts/whl/autodetect.go @@ -35,21 +35,21 @@ func (m *detectPkg) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostic log.Infof(ctx, "Detecting Python wheel project...") // checking if there is setup.py in the bundle root - setupPy := filepath.Join(b.Config.Path, "setup.py") + setupPy := filepath.Join(b.RootPath, "setup.py") _, err := os.Stat(setupPy) if err != nil { log.Infof(ctx, "No Python wheel project found at bundle root folder") return nil } - log.Infof(ctx, 
fmt.Sprintf("Found Python wheel project at %s", b.Config.Path)) + log.Infof(ctx, fmt.Sprintf("Found Python wheel project at %s", b.RootPath)) module := extractModuleName(setupPy) if b.Config.Artifacts == nil { b.Config.Artifacts = make(map[string]*config.Artifact) } - pkgPath, err := filepath.Abs(b.Config.Path) + pkgPath, err := filepath.Abs(b.RootPath) if err != nil { return diag.FromErr(err) } diff --git a/bundle/artifacts/whl/from_libraries.go b/bundle/artifacts/whl/from_libraries.go index a2045aaf8c..84ef712acb 100644 --- a/bundle/artifacts/whl/from_libraries.go +++ b/bundle/artifacts/whl/from_libraries.go @@ -30,7 +30,7 @@ func (*fromLibraries) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnost tasks := libraries.FindAllWheelTasksWithLocalLibraries(b) for _, task := range tasks { for _, lib := range task.Libraries { - matches, err := filepath.Glob(filepath.Join(b.Config.Path, lib.Whl)) + matches, err := filepath.Glob(filepath.Join(b.RootPath, lib.Whl)) // File referenced from libraries section does not exists, skipping if err != nil { continue diff --git a/bundle/bundle.go b/bundle/bundle.go index a178ea090b..0aa44df0b9 100644 --- a/bundle/bundle.go +++ b/bundle/bundle.go @@ -30,6 +30,10 @@ import ( const internalFolder = ".internal" type Bundle struct { + // RootPath contains the directory path to the root of the bundle. + // It is set when we instantiate a new bundle instance. + RootPath string + Config config.Root // Metadata about the bundle deployment. This is the interface Databricks services @@ -63,7 +67,9 @@ type Bundle struct { } func Load(ctx context.Context, path string) (*Bundle, error) { - b := &Bundle{} + b := &Bundle{ + RootPath: filepath.Clean(path), + } stat, err := os.Stat(path) if err != nil { return nil, err @@ -75,7 +81,6 @@ func Load(ctx context.Context, path string) (*Bundle, error) { if hasRootEnv && hasIncludesEnv && stat.IsDir() { log.Debugf(ctx, "No bundle configuration; using bundle root: %s", path) b.Config = config.Root{ - Path: path, Bundle: config.Bundle{ Name: filepath.Base(path), }, @@ -158,7 +163,7 @@ func (b *Bundle) CacheDir(ctx context.Context, paths ...string) (string, error) if !exists || cacheDirName == "" { cacheDirName = filepath.Join( // Anchor at bundle root directory. - b.Config.Path, + b.RootPath, // Static cache directory. 
".databricks", "bundle", @@ -210,7 +215,7 @@ func (b *Bundle) GetSyncIncludePatterns(ctx context.Context) ([]string, error) { if err != nil { return nil, err } - internalDirRel, err := filepath.Rel(b.Config.Path, internalDir) + internalDirRel, err := filepath.Rel(b.RootPath, internalDir) if err != nil { return nil, err } @@ -218,7 +223,7 @@ func (b *Bundle) GetSyncIncludePatterns(ctx context.Context) ([]string, error) { } func (b *Bundle) GitRepository() (*git.Repository, error) { - rootPath, err := folders.FindDirWithLeaf(b.Config.Path, ".git") + rootPath, err := folders.FindDirWithLeaf(b.RootPath, ".git") if err != nil { return nil, fmt.Errorf("unable to locate repository root: %w", err) } diff --git a/bundle/bundle_test.go b/bundle/bundle_test.go index 887a4ee83f..be716a40ac 100644 --- a/bundle/bundle_test.go +++ b/bundle/bundle_test.go @@ -77,7 +77,7 @@ func TestBundleMustLoadSuccess(t *testing.T) { t.Setenv(env.RootVariable, "./tests/basic") b, err := MustLoad(context.Background()) require.NoError(t, err) - assert.Equal(t, "tests/basic", filepath.ToSlash(b.Config.Path)) + assert.Equal(t, "tests/basic", filepath.ToSlash(b.RootPath)) } func TestBundleMustLoadFailureWithEnv(t *testing.T) { @@ -96,7 +96,7 @@ func TestBundleTryLoadSuccess(t *testing.T) { t.Setenv(env.RootVariable, "./tests/basic") b, err := TryLoad(context.Background()) require.NoError(t, err) - assert.Equal(t, "tests/basic", filepath.ToSlash(b.Config.Path)) + assert.Equal(t, "tests/basic", filepath.ToSlash(b.RootPath)) } func TestBundleTryLoadFailureWithEnv(t *testing.T) { diff --git a/bundle/config/mutator/expand_pipeline_glob_paths_test.go b/bundle/config/mutator/expand_pipeline_glob_paths_test.go index db80be028a..d1671c256f 100644 --- a/bundle/config/mutator/expand_pipeline_glob_paths_test.go +++ b/bundle/config/mutator/expand_pipeline_glob_paths_test.go @@ -41,8 +41,8 @@ func TestExpandGlobPathsInPipelines(t *testing.T) { touchEmptyFile(t, filepath.Join(dir, "skip/test7.py")) b := &bundle.Bundle{ + RootPath: dir, Config: config.Root{ - Path: dir, Resources: config.Resources{ Pipelines: map[string]*resources.Pipeline{ "pipeline": { diff --git a/bundle/config/mutator/load_git_details.go b/bundle/config/mutator/load_git_details.go index 6ff9aad622..7ce8476f1f 100644 --- a/bundle/config/mutator/load_git_details.go +++ b/bundle/config/mutator/load_git_details.go @@ -22,7 +22,7 @@ func (m *loadGitDetails) Name() string { func (m *loadGitDetails) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { // Load relevant git repository - repo, err := git.NewRepository(b.Config.Path) + repo, err := git.NewRepository(b.RootPath) if err != nil { return diag.FromErr(err) } @@ -56,7 +56,7 @@ func (m *loadGitDetails) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagn } // Compute relative path of the bundle root from the Git repo root. 
- absBundlePath, err := filepath.Abs(b.Config.Path) + absBundlePath, err := filepath.Abs(b.RootPath) if err != nil { return diag.FromErr(err) } diff --git a/bundle/config/mutator/process_include_test.go b/bundle/config/mutator/process_include_test.go index 0e5351b634..b4fa3ccda5 100644 --- a/bundle/config/mutator/process_include_test.go +++ b/bundle/config/mutator/process_include_test.go @@ -16,8 +16,8 @@ import ( func TestProcessInclude(t *testing.T) { b := &bundle.Bundle{ + RootPath: t.TempDir(), Config: config.Root{ - Path: t.TempDir(), Workspace: config.Workspace{ Host: "foo", }, @@ -25,7 +25,7 @@ func TestProcessInclude(t *testing.T) { } relPath := "./file.yml" - fullPath := filepath.Join(b.Config.Path, relPath) + fullPath := filepath.Join(b.RootPath, relPath) f, err := os.Create(fullPath) require.NoError(t, err) fmt.Fprint(f, "workspace:\n host: bar\n") diff --git a/bundle/config/mutator/process_root_includes.go b/bundle/config/mutator/process_root_includes.go index dbf99f2dc6..4e4aeef43c 100644 --- a/bundle/config/mutator/process_root_includes.go +++ b/bundle/config/mutator/process_root_includes.go @@ -51,7 +51,7 @@ func (m *processRootIncludes) Apply(ctx context.Context, b *bundle.Bundle) diag. // Converts extra include paths from environment variable to relative paths for _, extraIncludePath := range getExtraIncludePaths(ctx) { if filepath.IsAbs(extraIncludePath) { - rel, err := filepath.Rel(b.Config.Path, extraIncludePath) + rel, err := filepath.Rel(b.RootPath, extraIncludePath) if err != nil { return diag.Errorf("unable to include file '%s': %v", extraIncludePath, err) } @@ -70,7 +70,7 @@ func (m *processRootIncludes) Apply(ctx context.Context, b *bundle.Bundle) diag. } // Anchor includes to the bundle root path. - matches, err := filepath.Glob(filepath.Join(b.Config.Path, entry)) + matches, err := filepath.Glob(filepath.Join(b.RootPath, entry)) if err != nil { return diag.FromErr(err) } @@ -84,7 +84,7 @@ func (m *processRootIncludes) Apply(ctx context.Context, b *bundle.Bundle) diag. // Filter matches to ones we haven't seen yet. var includes []string for _, match := range matches { - rel, err := filepath.Rel(b.Config.Path, match) + rel, err := filepath.Rel(b.RootPath, match) if err != nil { return diag.FromErr(err) } @@ -99,7 +99,7 @@ func (m *processRootIncludes) Apply(ctx context.Context, b *bundle.Bundle) diag. slices.Sort(includes) files = append(files, includes...) 
for _, include := range includes { - out = append(out, ProcessInclude(filepath.Join(b.Config.Path, include), include)) + out = append(out, ProcessInclude(filepath.Join(b.RootPath, include), include)) } } diff --git a/bundle/config/mutator/process_root_includes_test.go b/bundle/config/mutator/process_root_includes_test.go index 7b21945539..d3aaa974d6 100644 --- a/bundle/config/mutator/process_root_includes_test.go +++ b/bundle/config/mutator/process_root_includes_test.go @@ -19,9 +19,7 @@ import ( func TestProcessRootIncludesEmpty(t *testing.T) { b := &bundle.Bundle{ - Config: config.Root{ - Path: ".", - }, + RootPath: ".", } diags := bundle.Apply(context.Background(), b, mutator.ProcessRootIncludes()) require.NoError(t, diags.Error()) @@ -36,8 +34,8 @@ func TestProcessRootIncludesAbs(t *testing.T) { } b := &bundle.Bundle{ + RootPath: ".", Config: config.Root{ - Path: ".", Include: []string{ "/tmp/*.yml", }, @@ -50,17 +48,17 @@ func TestProcessRootIncludesAbs(t *testing.T) { func TestProcessRootIncludesSingleGlob(t *testing.T) { b := &bundle.Bundle{ + RootPath: t.TempDir(), Config: config.Root{ - Path: t.TempDir(), Include: []string{ "*.yml", }, }, } - testutil.Touch(t, b.Config.Path, "databricks.yml") - testutil.Touch(t, b.Config.Path, "a.yml") - testutil.Touch(t, b.Config.Path, "b.yml") + testutil.Touch(t, b.RootPath, "databricks.yml") + testutil.Touch(t, b.RootPath, "a.yml") + testutil.Touch(t, b.RootPath, "b.yml") diags := bundle.Apply(context.Background(), b, mutator.ProcessRootIncludes()) require.NoError(t, diags.Error()) @@ -69,8 +67,8 @@ func TestProcessRootIncludesSingleGlob(t *testing.T) { func TestProcessRootIncludesMultiGlob(t *testing.T) { b := &bundle.Bundle{ + RootPath: t.TempDir(), Config: config.Root{ - Path: t.TempDir(), Include: []string{ "a*.yml", "b*.yml", @@ -78,8 +76,8 @@ func TestProcessRootIncludesMultiGlob(t *testing.T) { }, } - testutil.Touch(t, b.Config.Path, "a1.yml") - testutil.Touch(t, b.Config.Path, "b1.yml") + testutil.Touch(t, b.RootPath, "a1.yml") + testutil.Touch(t, b.RootPath, "b1.yml") diags := bundle.Apply(context.Background(), b, mutator.ProcessRootIncludes()) require.NoError(t, diags.Error()) @@ -88,8 +86,8 @@ func TestProcessRootIncludesMultiGlob(t *testing.T) { func TestProcessRootIncludesRemoveDups(t *testing.T) { b := &bundle.Bundle{ + RootPath: t.TempDir(), Config: config.Root{ - Path: t.TempDir(), Include: []string{ "*.yml", "*.yml", @@ -97,7 +95,7 @@ func TestProcessRootIncludesRemoveDups(t *testing.T) { }, } - testutil.Touch(t, b.Config.Path, "a.yml") + testutil.Touch(t, b.RootPath, "a.yml") diags := bundle.Apply(context.Background(), b, mutator.ProcessRootIncludes()) require.NoError(t, diags.Error()) @@ -106,8 +104,8 @@ func TestProcessRootIncludesRemoveDups(t *testing.T) { func TestProcessRootIncludesNotExists(t *testing.T) { b := &bundle.Bundle{ + RootPath: t.TempDir(), Config: config.Root{ - Path: t.TempDir(), Include: []string{ "notexist.yml", }, @@ -125,9 +123,7 @@ func TestProcessRootIncludesExtrasFromEnvVar(t *testing.T) { t.Setenv(env.IncludesVariable, path.Join(rootPath, testYamlName)) b := &bundle.Bundle{ - Config: config.Root{ - Path: rootPath, - }, + RootPath: rootPath, } diags := bundle.Apply(context.Background(), b, mutator.ProcessRootIncludes()) @@ -148,9 +144,7 @@ func TestProcessRootIncludesDedupExtrasFromEnvVar(t *testing.T) { )) b := &bundle.Bundle{ - Config: config.Root{ - Path: rootPath, - }, + RootPath: rootPath, } diags := bundle.Apply(context.Background(), b, mutator.ProcessRootIncludes()) diff --git 
a/bundle/config/mutator/rewrite_sync_paths.go b/bundle/config/mutator/rewrite_sync_paths.go index 0785c64300..710190230e 100644 --- a/bundle/config/mutator/rewrite_sync_paths.go +++ b/bundle/config/mutator/rewrite_sync_paths.go @@ -45,11 +45,11 @@ func (m *rewriteSyncPaths) makeRelativeTo(root string) dyn.MapFunc { func (m *rewriteSyncPaths) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { err := b.Config.Mutate(func(v dyn.Value) (dyn.Value, error) { return dyn.Map(v, "sync", func(_ dyn.Path, v dyn.Value) (nv dyn.Value, err error) { - v, err = dyn.Map(v, "include", dyn.Foreach(m.makeRelativeTo(b.Config.Path))) + v, err = dyn.Map(v, "include", dyn.Foreach(m.makeRelativeTo(b.RootPath))) if err != nil { return dyn.NilValue, err } - v, err = dyn.Map(v, "exclude", dyn.Foreach(m.makeRelativeTo(b.Config.Path))) + v, err = dyn.Map(v, "exclude", dyn.Foreach(m.makeRelativeTo(b.RootPath))) if err != nil { return dyn.NilValue, err } diff --git a/bundle/config/mutator/rewrite_sync_paths_test.go b/bundle/config/mutator/rewrite_sync_paths_test.go index 667f811ac9..56ada19e67 100644 --- a/bundle/config/mutator/rewrite_sync_paths_test.go +++ b/bundle/config/mutator/rewrite_sync_paths_test.go @@ -14,8 +14,8 @@ import ( func TestRewriteSyncPathsRelative(t *testing.T) { b := &bundle.Bundle{ + RootPath: ".", Config: config.Root{ - Path: ".", Sync: config.Sync{ Include: []string{ "foo", @@ -45,8 +45,8 @@ func TestRewriteSyncPathsRelative(t *testing.T) { func TestRewriteSyncPathsAbsolute(t *testing.T) { b := &bundle.Bundle{ + RootPath: "/tmp/dir", Config: config.Root{ - Path: "/tmp/dir", Sync: config.Sync{ Include: []string{ "foo", @@ -77,9 +77,7 @@ func TestRewriteSyncPathsAbsolute(t *testing.T) { func TestRewriteSyncPathsErrorPaths(t *testing.T) { t.Run("no sync block", func(t *testing.T) { b := &bundle.Bundle{ - Config: config.Root{ - Path: ".", - }, + RootPath: ".", } diags := bundle.Apply(context.Background(), b, mutator.RewriteSyncPaths()) @@ -88,8 +86,8 @@ func TestRewriteSyncPathsErrorPaths(t *testing.T) { t.Run("empty include/exclude blocks", func(t *testing.T) { b := &bundle.Bundle{ + RootPath: ".", Config: config.Root{ - Path: ".", Sync: config.Sync{ Include: []string{}, Exclude: []string{}, diff --git a/bundle/config/mutator/trampoline.go b/bundle/config/mutator/trampoline.go index 72c053b594..dde9a299eb 100644 --- a/bundle/config/mutator/trampoline.go +++ b/bundle/config/mutator/trampoline.go @@ -82,7 +82,7 @@ func (m *trampoline) generateNotebookWrapper(ctx context.Context, b *bundle.Bund return err } - internalDirRel, err := filepath.Rel(b.Config.Path, internalDir) + internalDirRel, err := filepath.Rel(b.RootPath, internalDir) if err != nil { return err } diff --git a/bundle/config/mutator/trampoline_test.go b/bundle/config/mutator/trampoline_test.go index 8a375aa9ba..e39076647f 100644 --- a/bundle/config/mutator/trampoline_test.go +++ b/bundle/config/mutator/trampoline_test.go @@ -57,8 +57,8 @@ func TestGenerateTrampoline(t *testing.T) { } b := &bundle.Bundle{ + RootPath: tmpDir, Config: config.Root{ - Path: tmpDir, Bundle: config.Bundle{ Target: "development", }, diff --git a/bundle/config/mutator/translate_paths.go b/bundle/config/mutator/translate_paths.go index af6896ee0d..8fab3abb38 100644 --- a/bundle/config/mutator/translate_paths.go +++ b/bundle/config/mutator/translate_paths.go @@ -85,7 +85,7 @@ func (m *translatePaths) rewritePath( } // Remote path must be relative to the bundle root. 
- localRelPath, err := filepath.Rel(b.Config.Path, localPath) + localRelPath, err := filepath.Rel(b.RootPath, localPath) if err != nil { return err } diff --git a/bundle/config/mutator/translate_paths_test.go b/bundle/config/mutator/translate_paths_test.go index bd2ec809ba..9650ae8ba9 100644 --- a/bundle/config/mutator/translate_paths_test.go +++ b/bundle/config/mutator/translate_paths_test.go @@ -36,8 +36,8 @@ func touchEmptyFile(t *testing.T, path string) { func TestTranslatePathsSkippedWithGitSource(t *testing.T) { dir := t.TempDir() b := &bundle.Bundle{ + RootPath: dir, Config: config.Root{ - Path: dir, Workspace: config.Workspace{ FilePath: "/bundle", }, @@ -106,8 +106,8 @@ func TestTranslatePaths(t *testing.T) { touchEmptyFile(t, filepath.Join(dir, "dist", "task.jar")) b := &bundle.Bundle{ + RootPath: dir, Config: config.Root{ - Path: dir, Workspace: config.Workspace{ FilePath: "/bundle", }, @@ -273,8 +273,8 @@ func TestTranslatePathsInSubdirectories(t *testing.T) { touchEmptyFile(t, filepath.Join(dir, "job", "my_dbt_project", "dbt_project.yml")) b := &bundle.Bundle{ + RootPath: dir, Config: config.Root{ - Path: dir, Workspace: config.Workspace{ FilePath: "/bundle", }, @@ -367,8 +367,8 @@ func TestTranslatePathsOutsideBundleRoot(t *testing.T) { dir := t.TempDir() b := &bundle.Bundle{ + RootPath: dir, Config: config.Root{ - Path: dir, Workspace: config.Workspace{ FilePath: "/bundle", }, @@ -400,8 +400,8 @@ func TestJobNotebookDoesNotExistError(t *testing.T) { dir := t.TempDir() b := &bundle.Bundle{ + RootPath: dir, Config: config.Root{ - Path: dir, Resources: config.Resources{ Jobs: map[string]*resources.Job{ "job": { @@ -430,8 +430,8 @@ func TestJobFileDoesNotExistError(t *testing.T) { dir := t.TempDir() b := &bundle.Bundle{ + RootPath: dir, Config: config.Root{ - Path: dir, Resources: config.Resources{ Jobs: map[string]*resources.Job{ "job": { @@ -460,8 +460,8 @@ func TestPipelineNotebookDoesNotExistError(t *testing.T) { dir := t.TempDir() b := &bundle.Bundle{ + RootPath: dir, Config: config.Root{ - Path: dir, Resources: config.Resources{ Pipelines: map[string]*resources.Pipeline{ "pipeline": { @@ -490,8 +490,8 @@ func TestPipelineFileDoesNotExistError(t *testing.T) { dir := t.TempDir() b := &bundle.Bundle{ + RootPath: dir, Config: config.Root{ - Path: dir, Resources: config.Resources{ Pipelines: map[string]*resources.Pipeline{ "pipeline": { @@ -521,8 +521,8 @@ func TestJobSparkPythonTaskWithNotebookSourceError(t *testing.T) { touchNotebookFile(t, filepath.Join(dir, "my_notebook.py")) b := &bundle.Bundle{ + RootPath: dir, Config: config.Root{ - Path: dir, Workspace: config.Workspace{ FilePath: "/bundle", }, @@ -555,8 +555,8 @@ func TestJobNotebookTaskWithFileSourceError(t *testing.T) { touchEmptyFile(t, filepath.Join(dir, "my_file.py")) b := &bundle.Bundle{ + RootPath: dir, Config: config.Root{ - Path: dir, Workspace: config.Workspace{ FilePath: "/bundle", }, @@ -589,8 +589,8 @@ func TestPipelineNotebookLibraryWithFileSourceError(t *testing.T) { touchEmptyFile(t, filepath.Join(dir, "my_file.py")) b := &bundle.Bundle{ + RootPath: dir, Config: config.Root{ - Path: dir, Workspace: config.Workspace{ FilePath: "/bundle", }, @@ -623,8 +623,8 @@ func TestPipelineFileLibraryWithNotebookSourceError(t *testing.T) { touchNotebookFile(t, filepath.Join(dir, "my_notebook.py")) b := &bundle.Bundle{ + RootPath: dir, Config: config.Root{ - Path: dir, Workspace: config.Workspace{ FilePath: "/bundle", }, diff --git a/bundle/config/root.go b/bundle/config/root.go index 8e1ff65077..a3dd0d28bb 100644 --- 
a/bundle/config/root.go +++ b/bundle/config/root.go @@ -5,7 +5,6 @@ import ( "context" "fmt" "os" - "path/filepath" "strings" "github.com/databricks/cli/bundle/config/resources" @@ -24,10 +23,6 @@ type Root struct { diags diag.Diagnostics depth int - // Path contains the directory path to the root of the bundle. - // It is set when loading `databricks.yml`. - Path string `json:"-" bundle:"readonly"` - // Contains user defined variables Variables map[string]*variable.Variable `json:"variables,omitempty"` @@ -80,9 +75,7 @@ func Load(path string) (*Root, error) { return nil, err } - r := Root{ - Path: filepath.Dir(path), - } + r := Root{} // Load configuration tree from YAML. v, err := yamlloader.LoadYAML(path, bytes.NewBuffer(raw)) @@ -135,12 +128,10 @@ func (r *Root) updateWithDynamicValue(nv dyn.Value) error { // the configuration equals nil (happens in tests). diags := r.diags depth := r.depth - path := r.Path defer func() { r.diags = diags r.depth = depth - r.Path = path }() // Convert normalized configuration tree to typed configuration. diff --git a/bundle/deploy/files/sync.go b/bundle/deploy/files/sync.go index 8de80c22fa..e8c54c6332 100644 --- a/bundle/deploy/files/sync.go +++ b/bundle/deploy/files/sync.go @@ -28,7 +28,7 @@ func GetSyncOptions(ctx context.Context, b *bundle.Bundle) (*sync.SyncOptions, e } opts := &sync.SyncOptions{ - LocalPath: b.Config.Path, + LocalPath: b.RootPath, RemotePath: b.Config.Workspace.FilePath, Include: includes, Exclude: b.Config.Sync.Exclude, diff --git a/bundle/deploy/metadata/compute.go b/bundle/deploy/metadata/compute.go index 5a46cd67f8..0347654848 100644 --- a/bundle/deploy/metadata/compute.go +++ b/bundle/deploy/metadata/compute.go @@ -39,7 +39,7 @@ func (m *compute) Apply(_ context.Context, b *bundle.Bundle) diag.Diagnostics { for name, job := range b.Config.Resources.Jobs { // Compute config file path the job is defined in, relative to the bundle // root - relativePath, err := filepath.Rel(b.Config.Path, job.ConfigFilePath) + relativePath, err := filepath.Rel(b.RootPath, job.ConfigFilePath) if err != nil { return diag.Errorf("failed to compute relative path for job %s: %v", name, err) } diff --git a/bundle/deploy/state_pull.go b/bundle/deploy/state_pull.go index 61f5426a09..bae457ea09 100644 --- a/bundle/deploy/state_pull.go +++ b/bundle/deploy/state_pull.go @@ -85,7 +85,7 @@ func (s *statePull) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostic } log.Infof(ctx, "Creating new snapshot") - snapshot, err := sync.NewSnapshot(state.Files.ToSlice(b.Config.Path), opts) + snapshot, err := sync.NewSnapshot(state.Files.ToSlice(b.RootPath), opts) if err != nil { return diag.FromErr(err) } diff --git a/bundle/deploy/state_pull_test.go b/bundle/deploy/state_pull_test.go index 9716a1e04b..80acb254f4 100644 --- a/bundle/deploy/state_pull_test.go +++ b/bundle/deploy/state_pull_test.go @@ -59,8 +59,8 @@ func testStatePull(t *testing.T, opts statePullOpts) { }} b := &bundle.Bundle{ + RootPath: t.TempDir(), Config: config.Root{ - Path: t.TempDir(), Bundle: config.Bundle{ Target: "default", }, @@ -77,11 +77,11 @@ func testStatePull(t *testing.T, opts statePullOpts) { ctx := context.Background() for _, file := range opts.localFiles { - testutil.Touch(t, filepath.Join(b.Config.Path, "bar"), file) + testutil.Touch(t, filepath.Join(b.RootPath, "bar"), file) } for _, file := range opts.localNotebooks { - testutil.TouchNotebook(t, filepath.Join(b.Config.Path, "bar"), file) + testutil.TouchNotebook(t, filepath.Join(b.RootPath, "bar"), file) } if 
opts.withExistingSnapshot { @@ -251,8 +251,8 @@ func TestStatePullNoState(t *testing.T) { }} b := &bundle.Bundle{ + RootPath: t.TempDir(), Config: config.Root{ - Path: t.TempDir(), Bundle: config.Bundle{ Target: "default", }, @@ -439,8 +439,8 @@ func TestStatePullNewerDeploymentStateVersion(t *testing.T) { }} b := &bundle.Bundle{ + RootPath: t.TempDir(), Config: config.Root{ - Path: t.TempDir(), Bundle: config.Bundle{ Target: "default", }, diff --git a/bundle/deploy/state_push_test.go b/bundle/deploy/state_push_test.go index c6d9f88f5a..39e4d13a56 100644 --- a/bundle/deploy/state_push_test.go +++ b/bundle/deploy/state_push_test.go @@ -45,8 +45,8 @@ func TestStatePush(t *testing.T) { }} b := &bundle.Bundle{ + RootPath: t.TempDir(), Config: config.Root{ - Path: t.TempDir(), Bundle: config.Bundle{ Target: "default", }, diff --git a/bundle/deploy/state_update_test.go b/bundle/deploy/state_update_test.go index 73b7fe4b34..dd8a1336ec 100644 --- a/bundle/deploy/state_update_test.go +++ b/bundle/deploy/state_update_test.go @@ -22,8 +22,8 @@ func TestStateUpdate(t *testing.T) { s := &stateUpdate{} b := &bundle.Bundle{ + RootPath: t.TempDir(), Config: config.Root{ - Path: t.TempDir(), Bundle: config.Bundle{ Target: "default", }, @@ -39,8 +39,8 @@ func TestStateUpdate(t *testing.T) { }, } - testutil.Touch(t, b.Config.Path, "test1.py") - testutil.Touch(t, b.Config.Path, "test2.py") + testutil.Touch(t, b.RootPath, "test1.py") + testutil.Touch(t, b.RootPath, "test2.py") m := mocks.NewMockWorkspaceClient(t) m.WorkspaceClient.Config = &databrickscfg.Config{ @@ -82,8 +82,8 @@ func TestStateUpdateWithExistingState(t *testing.T) { s := &stateUpdate{} b := &bundle.Bundle{ + RootPath: t.TempDir(), Config: config.Root{ - Path: t.TempDir(), Bundle: config.Bundle{ Target: "default", }, @@ -99,8 +99,8 @@ func TestStateUpdateWithExistingState(t *testing.T) { }, } - testutil.Touch(t, b.Config.Path, "test1.py") - testutil.Touch(t, b.Config.Path, "test2.py") + testutil.Touch(t, b.RootPath, "test1.py") + testutil.Touch(t, b.RootPath, "test2.py") m := mocks.NewMockWorkspaceClient(t) m.WorkspaceClient.Config = &databrickscfg.Config{ diff --git a/bundle/deploy/terraform/init_test.go b/bundle/deploy/terraform/init_test.go index bbef7f0f79..29bd80a3e0 100644 --- a/bundle/deploy/terraform/init_test.go +++ b/bundle/deploy/terraform/init_test.go @@ -28,8 +28,8 @@ func TestInitEnvironmentVariables(t *testing.T) { } b := &bundle.Bundle{ + RootPath: t.TempDir(), Config: config.Root{ - Path: t.TempDir(), Bundle: config.Bundle{ Target: "whatever", Terraform: &config.Terraform{ @@ -55,8 +55,8 @@ func TestSetTempDirEnvVarsForUnixWithTmpDirSet(t *testing.T) { } b := &bundle.Bundle{ + RootPath: t.TempDir(), Config: config.Root{ - Path: t.TempDir(), Bundle: config.Bundle{ Target: "whatever", }, @@ -83,8 +83,8 @@ func TestSetTempDirEnvVarsForUnixWithTmpDirNotSet(t *testing.T) { } b := &bundle.Bundle{ + RootPath: t.TempDir(), Config: config.Root{ - Path: t.TempDir(), Bundle: config.Bundle{ Target: "whatever", }, @@ -109,8 +109,8 @@ func TestSetTempDirEnvVarsForWindowWithAllTmpDirEnvVarsSet(t *testing.T) { } b := &bundle.Bundle{ + RootPath: t.TempDir(), Config: config.Root{ - Path: t.TempDir(), Bundle: config.Bundle{ Target: "whatever", }, @@ -139,8 +139,8 @@ func TestSetTempDirEnvVarsForWindowWithUserProfileAndTempSet(t *testing.T) { } b := &bundle.Bundle{ + RootPath: t.TempDir(), Config: config.Root{ - Path: t.TempDir(), Bundle: config.Bundle{ Target: "whatever", }, @@ -169,8 +169,8 @@ func 
TestSetTempDirEnvVarsForWindowsWithoutAnyTempDirEnvVarsSet(t *testing.T) { } b := &bundle.Bundle{ + RootPath: t.TempDir(), Config: config.Root{ - Path: t.TempDir(), Bundle: config.Bundle{ Target: "whatever", }, @@ -197,8 +197,8 @@ func TestSetTempDirEnvVarsForWindowsWithoutAnyTempDirEnvVarsSet(t *testing.T) { func TestSetProxyEnvVars(t *testing.T) { b := &bundle.Bundle{ + RootPath: t.TempDir(), Config: config.Root{ - Path: t.TempDir(), Bundle: config.Bundle{ Target: "whatever", }, diff --git a/bundle/deploy/terraform/load_test.go b/bundle/deploy/terraform/load_test.go index a912c52133..c62217187d 100644 --- a/bundle/deploy/terraform/load_test.go +++ b/bundle/deploy/terraform/load_test.go @@ -17,8 +17,8 @@ func TestLoadWithNoState(t *testing.T) { } b := &bundle.Bundle{ + RootPath: t.TempDir(), Config: config.Root{ - Path: t.TempDir(), Bundle: config.Bundle{ Target: "whatever", Terraform: &config.Terraform{ diff --git a/bundle/deploy/terraform/state_pull_test.go b/bundle/deploy/terraform/state_pull_test.go index 805b5af0fc..26297bfcbe 100644 --- a/bundle/deploy/terraform/state_pull_test.go +++ b/bundle/deploy/terraform/state_pull_test.go @@ -32,11 +32,11 @@ func mockStateFilerForPull(t *testing.T, contents map[string]int, merr error) fi func statePullTestBundle(t *testing.T) *bundle.Bundle { return &bundle.Bundle{ + RootPath: t.TempDir(), Config: config.Root{ Bundle: config.Bundle{ Target: "default", }, - Path: t.TempDir(), }, } } diff --git a/bundle/deploy/terraform/state_push_test.go b/bundle/deploy/terraform/state_push_test.go index 41d3849000..e054773f31 100644 --- a/bundle/deploy/terraform/state_push_test.go +++ b/bundle/deploy/terraform/state_push_test.go @@ -29,11 +29,11 @@ func mockStateFilerForPush(t *testing.T, fn func(body io.Reader)) filer.Filer { func statePushTestBundle(t *testing.T) *bundle.Bundle { return &bundle.Bundle{ + RootPath: t.TempDir(), Config: config.Root{ Bundle: config.Bundle{ Target: "default", }, - Path: t.TempDir(), }, } } diff --git a/bundle/libraries/libraries.go b/bundle/libraries/libraries.go index e0cb3fa382..8dd63a75a6 100644 --- a/bundle/libraries/libraries.go +++ b/bundle/libraries/libraries.go @@ -65,7 +65,7 @@ func findLibraryMatches(lib *compute.Library, b *bundle.Bundle) ([]string, error return nil, nil } - fullPath := filepath.Join(b.Config.Path, path) + fullPath := filepath.Join(b.RootPath, path) return filepath.Glob(fullPath) } diff --git a/bundle/libraries/libraries_test.go b/bundle/libraries/libraries_test.go index 0bec2c6d01..3da10d47bb 100644 --- a/bundle/libraries/libraries_test.go +++ b/bundle/libraries/libraries_test.go @@ -15,8 +15,8 @@ import ( func TestMapFilesToTaskLibrariesNoGlob(t *testing.T) { b := &bundle.Bundle{ + RootPath: "testdata", Config: config.Root{ - Path: "testdata", Resources: config.Resources{ Jobs: map[string]*resources.Job{ "job1": { diff --git a/bundle/python/conditional_transform_test.go b/bundle/python/conditional_transform_test.go index b4d7f9edb6..677970d70a 100644 --- a/bundle/python/conditional_transform_test.go +++ b/bundle/python/conditional_transform_test.go @@ -18,8 +18,8 @@ func TestNoTransformByDefault(t *testing.T) { tmpDir := t.TempDir() b := &bundle.Bundle{ + RootPath: tmpDir, Config: config.Root{ - Path: tmpDir, Bundle: config.Bundle{ Target: "development", }, @@ -63,8 +63,8 @@ func TestTransformWithExperimentalSettingSetToTrue(t *testing.T) { tmpDir := t.TempDir() b := &bundle.Bundle{ + RootPath: tmpDir, Config: config.Root{ - Path: tmpDir, Bundle: config.Bundle{ Target: "development", }, @@ -106,7 
+106,7 @@ func TestTransformWithExperimentalSettingSetToTrue(t *testing.T) { dir, err := b.InternalDir(context.Background()) require.NoError(t, err) - internalDirRel, err := filepath.Rel(b.Config.Path, dir) + internalDirRel, err := filepath.Rel(b.RootPath, dir) require.NoError(t, err) require.Equal(t, path.Join(filepath.ToSlash(internalDirRel), "notebook_job1_key1"), task.NotebookTask.NotebookPath) diff --git a/bundle/python/transform_test.go b/bundle/python/transform_test.go index 729efe1a97..c15feb4241 100644 --- a/bundle/python/transform_test.go +++ b/bundle/python/transform_test.go @@ -116,8 +116,8 @@ func TestTransformFiltersWheelTasksOnly(t *testing.T) { func TestNoPanicWithNoPythonWheelTasks(t *testing.T) { tmpDir := t.TempDir() b := &bundle.Bundle{ + RootPath: tmpDir, Config: config.Root{ - Path: tmpDir, Bundle: config.Bundle{ Target: "development", }, diff --git a/bundle/root_test.go b/bundle/root_test.go index e6c53e8249..a83f36ace7 100644 --- a/bundle/root_test.go +++ b/bundle/root_test.go @@ -106,7 +106,7 @@ func TestLoadYamlWhenIncludesEnvPresent(t *testing.T) { cwd, err := os.Getwd() assert.NoError(t, err) - assert.Equal(t, cwd, bundle.Config.Path) + assert.Equal(t, cwd, bundle.RootPath) } func TestLoadDefautlBundleWhenNoYamlAndRootAndIncludesEnvPresent(t *testing.T) { @@ -118,7 +118,7 @@ func TestLoadDefautlBundleWhenNoYamlAndRootAndIncludesEnvPresent(t *testing.T) { bundle, err := MustLoad(ctx) assert.NoError(t, err) - assert.Equal(t, dir, bundle.Config.Path) + assert.Equal(t, dir, bundle.RootPath) } func TestErrorIfNoYamlNoRootEnvAndIncludesEnvPresent(t *testing.T) { diff --git a/bundle/scripts/scripts.go b/bundle/scripts/scripts.go index f8ed7d6a38..38d204f99e 100644 --- a/bundle/scripts/scripts.go +++ b/bundle/scripts/scripts.go @@ -30,7 +30,7 @@ func (m *script) Name() string { } func (m *script) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { - executor, err := exec.NewCommandExecutor(b.Config.Path) + executor, err := exec.NewCommandExecutor(b.RootPath) if err != nil { return diag.FromErr(err) } diff --git a/bundle/scripts/scripts_test.go b/bundle/scripts/scripts_test.go index fa5c239701..1bc216b610 100644 --- a/bundle/scripts/scripts_test.go +++ b/bundle/scripts/scripts_test.go @@ -23,7 +23,7 @@ func TestExecutesHook(t *testing.T) { }, } - executor, err := exec.NewCommandExecutor(b.Config.Path) + executor, err := exec.NewCommandExecutor(b.RootPath) require.NoError(t, err) _, out, err := executeHook(context.Background(), executor, b, config.ScriptPreBuild) require.NoError(t, err) diff --git a/bundle/tests/python_wheel_test.go b/bundle/tests/python_wheel_test.go index c44e80a578..412b507fe9 100644 --- a/bundle/tests/python_wheel_test.go +++ b/bundle/tests/python_wheel_test.go @@ -79,9 +79,7 @@ func TestPythonWheelBuildNoBuildJustUpload(t *testing.T) { artifact := b.Config.Artifacts["my_test_code-0.0.1-py3-none-any.whl"] require.NotNil(t, artifact) require.Empty(t, artifact.BuildCommand) - require.Contains(t, artifact.Files[0].Source, filepath.Join( - b.Config.Path, - "package", + require.Contains(t, artifact.Files[0].Source, filepath.Join(b.RootPath, "package", "my_test_code-0.0.1-py3-none-any.whl", )) } diff --git a/cmd/bundle/generate/generate_test.go b/cmd/bundle/generate/generate_test.go index b71f1edfde..69ef639ae0 100644 --- a/cmd/bundle/generate/generate_test.go +++ b/cmd/bundle/generate/generate_test.go @@ -10,7 +10,6 @@ import ( "testing" "github.com/databricks/cli/bundle" - "github.com/databricks/cli/bundle/config" 
"github.com/databricks/databricks-sdk-go/experimental/mocks" "github.com/databricks/databricks-sdk-go/service/compute" "github.com/databricks/databricks-sdk-go/service/jobs" @@ -25,9 +24,7 @@ func TestGeneratePipelineCommand(t *testing.T) { root := t.TempDir() b := &bundle.Bundle{ - Config: config.Root{ - Path: root, - }, + RootPath: root, } m := mocks.NewMockWorkspaceClient(t) @@ -125,9 +122,7 @@ func TestGenerateJobCommand(t *testing.T) { root := t.TempDir() b := &bundle.Bundle{ - Config: config.Root{ - Path: root, - }, + RootPath: root, } m := mocks.NewMockWorkspaceClient(t) diff --git a/cmd/sync/sync_test.go b/cmd/sync/sync_test.go index 827c4d5097..026d840f73 100644 --- a/cmd/sync/sync_test.go +++ b/cmd/sync/sync_test.go @@ -16,9 +16,8 @@ import ( func TestSyncOptionsFromBundle(t *testing.T) { tempDir := t.TempDir() b := &bundle.Bundle{ + RootPath: tempDir, Config: config.Root{ - Path: tempDir, - Bundle: config.Bundle{ Target: "default", }, diff --git a/internal/bundle/artifacts_test.go b/internal/bundle/artifacts_test.go index 2ced12fdd1..866a1f6e9e 100644 --- a/internal/bundle/artifacts_test.go +++ b/internal/bundle/artifacts_test.go @@ -36,8 +36,8 @@ func TestAccUploadArtifactFileToCorrectRemotePath(t *testing.T) { wsDir := internal.TemporaryWorkspaceDir(t, w) b := &bundle.Bundle{ + RootPath: dir, Config: config.Root{ - Path: dir, Bundle: config.Bundle{ Target: "whatever", }, From f195b844758a82050dfbd8873d58984aa62b4052 Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Wed, 27 Mar 2024 11:13:54 +0100 Subject: [PATCH 11/17] Remove support for DATABRICKS_BUNDLE_INCLUDES (#1317) ## Changes PR #604 added functionality to load a bundle without a `databricks.yml` if both the `DATABRICKS_BUNDLE_ROOT` and `DATABRICKS_BUNDLE_INCLUDES` environment variables were set. We never ended up using this in downstream tools so this can be removed. ## Tests Unit tests pass. 
--- bundle/bundle.go | 15 ------ .../config/mutator/process_root_includes.go | 23 --------- .../mutator/process_root_includes_test.go | 40 ---------------- bundle/env/includes.go | 14 ------ bundle/env/includes_test.go | 28 ----------- bundle/root_test.go | 47 ------------------- 6 files changed, 167 deletions(-) delete mode 100644 bundle/env/includes.go delete mode 100644 bundle/env/includes_test.go diff --git a/bundle/bundle.go b/bundle/bundle.go index 0aa44df0b9..2e193bbf39 100644 --- a/bundle/bundle.go +++ b/bundle/bundle.go @@ -70,23 +70,8 @@ func Load(ctx context.Context, path string) (*Bundle, error) { b := &Bundle{ RootPath: filepath.Clean(path), } - stat, err := os.Stat(path) - if err != nil { - return nil, err - } configFile, err := config.FileNames.FindInPath(path) if err != nil { - _, hasRootEnv := env.Root(ctx) - _, hasIncludesEnv := env.Includes(ctx) - if hasRootEnv && hasIncludesEnv && stat.IsDir() { - log.Debugf(ctx, "No bundle configuration; using bundle root: %s", path) - b.Config = config.Root{ - Bundle: config.Bundle{ - Name: filepath.Base(path), - }, - } - return b, nil - } return nil, err } log.Debugf(ctx, "Loading bundle configuration from: %s", configFile) diff --git a/bundle/config/mutator/process_root_includes.go b/bundle/config/mutator/process_root_includes.go index 4e4aeef43c..c5e0a22c5e 100644 --- a/bundle/config/mutator/process_root_includes.go +++ b/bundle/config/mutator/process_root_includes.go @@ -2,26 +2,15 @@ package mutator import ( "context" - "os" "path/filepath" "slices" "strings" "github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle/config" - "github.com/databricks/cli/bundle/env" "github.com/databricks/cli/libs/diag" ) -// Get extra include paths from environment variable -func getExtraIncludePaths(ctx context.Context) []string { - value, exists := env.Includes(ctx) - if !exists { - return nil - } - return strings.Split(value, string(os.PathListSeparator)) -} - type processRootIncludes struct{} // ProcessRootIncludes expands the patterns in the configuration's include list @@ -48,18 +37,6 @@ func (m *processRootIncludes) Apply(ctx context.Context, b *bundle.Bundle) diag. // This is stored in the bundle configuration for observability. var files []string - // Converts extra include paths from environment variable to relative paths - for _, extraIncludePath := range getExtraIncludePaths(ctx) { - if filepath.IsAbs(extraIncludePath) { - rel, err := filepath.Rel(b.RootPath, extraIncludePath) - if err != nil { - return diag.Errorf("unable to include file '%s': %v", extraIncludePath, err) - } - extraIncludePath = rel - } - b.Config.Include = append(b.Config.Include, extraIncludePath) - } - // For each glob, find all files to load. // Ordering of the list of globs is maintained in the output. // For matches that appear in multiple globs, only the first is kept. 
diff --git a/bundle/config/mutator/process_root_includes_test.go b/bundle/config/mutator/process_root_includes_test.go index d3aaa974d6..675dd9acfb 100644 --- a/bundle/config/mutator/process_root_includes_test.go +++ b/bundle/config/mutator/process_root_includes_test.go @@ -2,16 +2,12 @@ package mutator_test import ( "context" - "os" - "path" "runtime" - "strings" "testing" "github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle/config" "github.com/databricks/cli/bundle/config/mutator" - "github.com/databricks/cli/bundle/env" "github.com/databricks/cli/internal/testutil" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" @@ -115,39 +111,3 @@ func TestProcessRootIncludesNotExists(t *testing.T) { require.True(t, diags.HasError()) assert.ErrorContains(t, diags.Error(), "notexist.yml defined in 'include' section does not match any files") } - -func TestProcessRootIncludesExtrasFromEnvVar(t *testing.T) { - rootPath := t.TempDir() - testYamlName := "extra_include_path.yml" - testutil.Touch(t, rootPath, testYamlName) - t.Setenv(env.IncludesVariable, path.Join(rootPath, testYamlName)) - - b := &bundle.Bundle{ - RootPath: rootPath, - } - - diags := bundle.Apply(context.Background(), b, mutator.ProcessRootIncludes()) - require.NoError(t, diags.Error()) - assert.Contains(t, b.Config.Include, testYamlName) -} - -func TestProcessRootIncludesDedupExtrasFromEnvVar(t *testing.T) { - rootPath := t.TempDir() - testYamlName := "extra_include_path.yml" - testutil.Touch(t, rootPath, testYamlName) - t.Setenv(env.IncludesVariable, strings.Join( - []string{ - path.Join(rootPath, testYamlName), - path.Join(rootPath, testYamlName), - }, - string(os.PathListSeparator), - )) - - b := &bundle.Bundle{ - RootPath: rootPath, - } - - diags := bundle.Apply(context.Background(), b, mutator.ProcessRootIncludes()) - require.NoError(t, diags.Error()) - assert.Equal(t, []string{testYamlName}, b.Config.Include) -} diff --git a/bundle/env/includes.go b/bundle/env/includes.go deleted file mode 100644 index 4ade018776..0000000000 --- a/bundle/env/includes.go +++ /dev/null @@ -1,14 +0,0 @@ -package env - -import "context" - -// IncludesVariable names the environment variable that holds additional configuration paths to include -// during bundle configuration loading. Also see `bundle/config/mutator/process_root_includes.go`. -const IncludesVariable = "DATABRICKS_BUNDLE_INCLUDES" - -// Includes returns the bundle Includes environment variable. 
-func Includes(ctx context.Context) (string, bool) { - return get(ctx, []string{ - IncludesVariable, - }) -} diff --git a/bundle/env/includes_test.go b/bundle/env/includes_test.go deleted file mode 100644 index d9366a59ff..0000000000 --- a/bundle/env/includes_test.go +++ /dev/null @@ -1,28 +0,0 @@ -package env - -import ( - "context" - "testing" - - "github.com/databricks/cli/internal/testutil" - "github.com/stretchr/testify/assert" -) - -func TestIncludes(t *testing.T) { - ctx := context.Background() - - testutil.CleanupEnvironment(t) - - t.Run("set", func(t *testing.T) { - t.Setenv("DATABRICKS_BUNDLE_INCLUDES", "foo") - includes, ok := Includes(ctx) - assert.True(t, ok) - assert.Equal(t, "foo", includes) - }) - - t.Run("not set", func(t *testing.T) { - includes, ok := Includes(ctx) - assert.False(t, ok) - assert.Equal(t, "", includes) - }) -} diff --git a/bundle/root_test.go b/bundle/root_test.go index a83f36ace7..99bf58a00a 100644 --- a/bundle/root_test.go +++ b/bundle/root_test.go @@ -9,7 +9,6 @@ import ( "github.com/databricks/cli/bundle/config" "github.com/databricks/cli/bundle/env" "github.com/databricks/cli/internal/testutil" - "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) @@ -94,49 +93,3 @@ func TestRootLookupError(t *testing.T) { _, err := mustGetRoot(ctx) require.ErrorContains(t, err, "unable to locate bundle root") } - -func TestLoadYamlWhenIncludesEnvPresent(t *testing.T) { - ctx := context.Background() - testutil.Chdir(t, filepath.Join(".", "tests", "basic")) - t.Setenv(env.IncludesVariable, "test") - - bundle, err := MustLoad(ctx) - assert.NoError(t, err) - assert.Equal(t, "basic", bundle.Config.Bundle.Name) - - cwd, err := os.Getwd() - assert.NoError(t, err) - assert.Equal(t, cwd, bundle.RootPath) -} - -func TestLoadDefautlBundleWhenNoYamlAndRootAndIncludesEnvPresent(t *testing.T) { - ctx := context.Background() - dir := t.TempDir() - testutil.Chdir(t, dir) - t.Setenv(env.RootVariable, dir) - t.Setenv(env.IncludesVariable, "test") - - bundle, err := MustLoad(ctx) - assert.NoError(t, err) - assert.Equal(t, dir, bundle.RootPath) -} - -func TestErrorIfNoYamlNoRootEnvAndIncludesEnvPresent(t *testing.T) { - ctx := context.Background() - dir := t.TempDir() - testutil.Chdir(t, dir) - t.Setenv(env.IncludesVariable, "test") - - _, err := MustLoad(ctx) - assert.Error(t, err) -} - -func TestErrorIfNoYamlNoIncludesEnvAndRootEnvPresent(t *testing.T) { - ctx := context.Background() - dir := t.TempDir() - testutil.Chdir(t, dir) - t.Setenv(env.RootVariable, dir) - - _, err := MustLoad(ctx) - assert.Error(t, err) -} From ca534d596bb410dd1fbae3e90d951ca434fd2dd3 Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Wed, 27 Mar 2024 11:49:05 +0100 Subject: [PATCH 12/17] Load bundle configuration from mutator (#1318) ## Changes Prior to this change, the bundle configuration entry point was loaded from the function `bundle.Load`. Other configuration files were only loaded once the caller applied the first set of mutators. This separation was unnecessary and not ideal in light of gathering diagnostics while loading _any_ configuration file, not just the ones from the includes. This change: * Updates `bundle.Load` to only verify that the specified path is a valid bundle root. * Moves mutators that perform loading to `bundle/config/loader`. * Adds a "load" phase that takes the place of applying `DefaultMutators`. Follow ups: * Rename `bundle.Load` -> `bundle.Find` (because it no longer performs loading) This change depends on #1316 and #1317. ## Tests Tests pass. 
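A rough sketch of the resulting call pattern (the wiring below is illustrative; the new load phase composes these mutators for real callers):

```
package example

import (
	"context"

	"github.com/databricks/cli/bundle"
	"github.com/databricks/cli/bundle/config/loader"
)

// loadConfiguredBundle locates the bundle root and then reads its
// configuration by applying loader mutators.
func loadConfiguredBundle(ctx context.Context, path string) (*bundle.Bundle, error) {
	// Load only verifies that `path` is a valid bundle root.
	b, err := bundle.Load(ctx, path)
	if err != nil {
		return nil, err
	}

	// Reading databricks.yml and any `include` files is now done by mutators.
	diags := bundle.Apply(ctx, b, bundle.Seq(
		loader.EntryPoint(),          // loads the root configuration file
		loader.ProcessRootIncludes(), // expands and loads the include globs
	))
	if err := diags.Error(); err != nil {
		return nil, err
	}
	return b, nil
}
```

Because the mutators return diagnostics, warnings produced while reading any configuration file can be surfaced alongside errors.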
--- bundle/bundle.go | 7 +--- bundle/bundle_test.go | 4 +-- bundle/config/loader/entry_point.go | 34 +++++++++++++++++++ bundle/config/loader/entry_point_test.go | 26 ++++++++++++++ .../{mutator => loader}/process_include.go | 2 +- .../process_include_test.go | 21 +++++------- .../process_root_includes.go | 2 +- .../process_root_includes_test.go | 16 ++++----- bundle/config/loader/testdata/databricks.yml | 2 ++ bundle/config/loader/testdata/host.yml | 2 ++ bundle/config/mutator/mutator.go | 6 +++- bundle/phases/load.go | 29 ++++++++++++++++ bundle/tests/conflicting_resource_ids_test.go | 12 ++++--- bundle/tests/include_test.go | 4 +-- bundle/tests/loader.go | 8 ++--- bundle/tests/python_wheel_test.go | 12 +++---- cmd/root/bundle_test.go | 4 +++ libs/template/renderer_test.go | 15 ++++---- 18 files changed, 149 insertions(+), 57 deletions(-) create mode 100644 bundle/config/loader/entry_point.go create mode 100644 bundle/config/loader/entry_point_test.go rename bundle/config/{mutator => loader}/process_include.go (98%) rename bundle/config/{mutator => loader}/process_include_test.go (54%) rename bundle/config/{mutator => loader}/process_root_includes.go (99%) rename bundle/config/{mutator => loader}/process_root_includes_test.go (80%) create mode 100644 bundle/config/loader/testdata/databricks.yml create mode 100644 bundle/config/loader/testdata/host.yml create mode 100644 bundle/phases/load.go diff --git a/bundle/bundle.go b/bundle/bundle.go index 2e193bbf39..977ca2247c 100644 --- a/bundle/bundle.go +++ b/bundle/bundle.go @@ -74,12 +74,7 @@ func Load(ctx context.Context, path string) (*Bundle, error) { if err != nil { return nil, err } - log.Debugf(ctx, "Loading bundle configuration from: %s", configFile) - root, err := config.Load(configFile) - if err != nil { - return nil, err - } - b.Config = *root + log.Debugf(ctx, "Found bundle root at %s (file %s)", b.RootPath, configFile) return b, nil } diff --git a/bundle/bundle_test.go b/bundle/bundle_test.go index be716a40ac..908b446e24 100644 --- a/bundle/bundle_test.go +++ b/bundle/bundle_test.go @@ -20,8 +20,8 @@ func TestLoadNotExists(t *testing.T) { func TestLoadExists(t *testing.T) { b, err := Load(context.Background(), "./tests/basic") - require.Nil(t, err) - assert.Equal(t, "basic", b.Config.Bundle.Name) + assert.NoError(t, err) + assert.NotNil(t, b) } func TestBundleCacheDir(t *testing.T) { diff --git a/bundle/config/loader/entry_point.go b/bundle/config/loader/entry_point.go new file mode 100644 index 0000000000..24ba2f068e --- /dev/null +++ b/bundle/config/loader/entry_point.go @@ -0,0 +1,34 @@ +package loader + +import ( + "context" + + "github.com/databricks/cli/bundle" + "github.com/databricks/cli/bundle/config" + "github.com/databricks/cli/libs/diag" +) + +type entryPoint struct{} + +// EntryPoint loads the entry point configuration. +func EntryPoint() bundle.Mutator { + return &entryPoint{} +} + +func (m *entryPoint) Name() string { + return "EntryPoint" +} + +func (m *entryPoint) Apply(_ context.Context, b *bundle.Bundle) diag.Diagnostics { + path, err := config.FileNames.FindInPath(b.RootPath) + if err != nil { + return diag.FromErr(err) + } + this, err := config.Load(path) + if err != nil { + return diag.FromErr(err) + } + // TODO: Return actual warnings. 
+ err = b.Config.Merge(this) + return diag.FromErr(err) +} diff --git a/bundle/config/loader/entry_point_test.go b/bundle/config/loader/entry_point_test.go new file mode 100644 index 0000000000..80271f0b74 --- /dev/null +++ b/bundle/config/loader/entry_point_test.go @@ -0,0 +1,26 @@ +package loader_test + +import ( + "context" + "testing" + + "github.com/databricks/cli/bundle" + "github.com/databricks/cli/bundle/config/loader" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestEntryPointNoRootPath(t *testing.T) { + b := &bundle.Bundle{} + diags := bundle.Apply(context.Background(), b, loader.EntryPoint()) + require.Error(t, diags.Error()) +} + +func TestEntryPoint(t *testing.T) { + b := &bundle.Bundle{ + RootPath: "testdata", + } + diags := bundle.Apply(context.Background(), b, loader.EntryPoint()) + require.NoError(t, diags.Error()) + assert.Equal(t, "loader_test", b.Config.Bundle.Name) +} diff --git a/bundle/config/mutator/process_include.go b/bundle/config/loader/process_include.go similarity index 98% rename from bundle/config/mutator/process_include.go rename to bundle/config/loader/process_include.go index 23acdf12a0..328f4eacf3 100644 --- a/bundle/config/mutator/process_include.go +++ b/bundle/config/loader/process_include.go @@ -1,4 +1,4 @@ -package mutator +package loader import ( "context" diff --git a/bundle/config/mutator/process_include_test.go b/bundle/config/loader/process_include_test.go similarity index 54% rename from bundle/config/mutator/process_include_test.go rename to bundle/config/loader/process_include_test.go index b4fa3ccda5..da4da9ff66 100644 --- a/bundle/config/mutator/process_include_test.go +++ b/bundle/config/loader/process_include_test.go @@ -1,22 +1,20 @@ -package mutator_test +package loader_test import ( "context" - "fmt" - "os" "path/filepath" "testing" "github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle/config" - "github.com/databricks/cli/bundle/config/mutator" + "github.com/databricks/cli/bundle/config/loader" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) func TestProcessInclude(t *testing.T) { b := &bundle.Bundle{ - RootPath: t.TempDir(), + RootPath: "testdata", Config: config.Root{ Workspace: config.Workspace{ Host: "foo", @@ -24,15 +22,14 @@ func TestProcessInclude(t *testing.T) { }, } - relPath := "./file.yml" - fullPath := filepath.Join(b.RootPath, relPath) - f, err := os.Create(fullPath) - require.NoError(t, err) - fmt.Fprint(f, "workspace:\n host: bar\n") - f.Close() + m := loader.ProcessInclude(filepath.Join(b.RootPath, "host.yml"), "host.yml") + assert.Equal(t, "ProcessInclude(host.yml)", m.Name()) + // Assert the host value prior to applying the mutator assert.Equal(t, "foo", b.Config.Workspace.Host) - diags := bundle.Apply(context.Background(), b, mutator.ProcessInclude(fullPath, relPath)) + + // Apply the mutator and assert that the host value has been updated + diags := bundle.Apply(context.Background(), b, m) require.NoError(t, diags.Error()) assert.Equal(t, "bar", b.Config.Workspace.Host) } diff --git a/bundle/config/mutator/process_root_includes.go b/bundle/config/loader/process_root_includes.go similarity index 99% rename from bundle/config/mutator/process_root_includes.go rename to bundle/config/loader/process_root_includes.go index c5e0a22c5e..25f284fd3a 100644 --- a/bundle/config/mutator/process_root_includes.go +++ b/bundle/config/loader/process_root_includes.go @@ -1,4 +1,4 @@ -package mutator +package loader import ( "context" diff --git 
a/bundle/config/mutator/process_root_includes_test.go b/bundle/config/loader/process_root_includes_test.go similarity index 80% rename from bundle/config/mutator/process_root_includes_test.go rename to bundle/config/loader/process_root_includes_test.go index 675dd9acfb..737dbbefd1 100644 --- a/bundle/config/mutator/process_root_includes_test.go +++ b/bundle/config/loader/process_root_includes_test.go @@ -1,4 +1,4 @@ -package mutator_test +package loader_test import ( "context" @@ -7,7 +7,7 @@ import ( "github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle/config" - "github.com/databricks/cli/bundle/config/mutator" + "github.com/databricks/cli/bundle/config/loader" "github.com/databricks/cli/internal/testutil" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" @@ -17,7 +17,7 @@ func TestProcessRootIncludesEmpty(t *testing.T) { b := &bundle.Bundle{ RootPath: ".", } - diags := bundle.Apply(context.Background(), b, mutator.ProcessRootIncludes()) + diags := bundle.Apply(context.Background(), b, loader.ProcessRootIncludes()) require.NoError(t, diags.Error()) } @@ -37,7 +37,7 @@ func TestProcessRootIncludesAbs(t *testing.T) { }, }, } - diags := bundle.Apply(context.Background(), b, mutator.ProcessRootIncludes()) + diags := bundle.Apply(context.Background(), b, loader.ProcessRootIncludes()) require.True(t, diags.HasError()) assert.ErrorContains(t, diags.Error(), "must be relative paths") } @@ -56,7 +56,7 @@ func TestProcessRootIncludesSingleGlob(t *testing.T) { testutil.Touch(t, b.RootPath, "a.yml") testutil.Touch(t, b.RootPath, "b.yml") - diags := bundle.Apply(context.Background(), b, mutator.ProcessRootIncludes()) + diags := bundle.Apply(context.Background(), b, loader.ProcessRootIncludes()) require.NoError(t, diags.Error()) assert.Equal(t, []string{"a.yml", "b.yml"}, b.Config.Include) } @@ -75,7 +75,7 @@ func TestProcessRootIncludesMultiGlob(t *testing.T) { testutil.Touch(t, b.RootPath, "a1.yml") testutil.Touch(t, b.RootPath, "b1.yml") - diags := bundle.Apply(context.Background(), b, mutator.ProcessRootIncludes()) + diags := bundle.Apply(context.Background(), b, loader.ProcessRootIncludes()) require.NoError(t, diags.Error()) assert.Equal(t, []string{"a1.yml", "b1.yml"}, b.Config.Include) } @@ -93,7 +93,7 @@ func TestProcessRootIncludesRemoveDups(t *testing.T) { testutil.Touch(t, b.RootPath, "a.yml") - diags := bundle.Apply(context.Background(), b, mutator.ProcessRootIncludes()) + diags := bundle.Apply(context.Background(), b, loader.ProcessRootIncludes()) require.NoError(t, diags.Error()) assert.Equal(t, []string{"a.yml"}, b.Config.Include) } @@ -107,7 +107,7 @@ func TestProcessRootIncludesNotExists(t *testing.T) { }, }, } - diags := bundle.Apply(context.Background(), b, mutator.ProcessRootIncludes()) + diags := bundle.Apply(context.Background(), b, loader.ProcessRootIncludes()) require.True(t, diags.HasError()) assert.ErrorContains(t, diags.Error(), "notexist.yml defined in 'include' section does not match any files") } diff --git a/bundle/config/loader/testdata/databricks.yml b/bundle/config/loader/testdata/databricks.yml new file mode 100644 index 0000000000..1a0635b898 --- /dev/null +++ b/bundle/config/loader/testdata/databricks.yml @@ -0,0 +1,2 @@ +bundle: + name: loader_test diff --git a/bundle/config/loader/testdata/host.yml b/bundle/config/loader/testdata/host.yml new file mode 100644 index 0000000000..f83830d1d1 --- /dev/null +++ b/bundle/config/loader/testdata/host.yml @@ -0,0 +1,2 @@ +workspace: + host: bar diff --git 
a/bundle/config/mutator/mutator.go b/bundle/config/mutator/mutator.go index c45a6c15e1..99b7e9ac99 100644 --- a/bundle/config/mutator/mutator.go +++ b/bundle/config/mutator/mutator.go @@ -3,13 +3,17 @@ package mutator import ( "github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle/config" + "github.com/databricks/cli/bundle/config/loader" "github.com/databricks/cli/bundle/scripts" ) func DefaultMutators() []bundle.Mutator { return []bundle.Mutator{ + loader.EntryPoint(), + loader.ProcessRootIncludes(), + + // Execute preinit script after loading all configuration files. scripts.Execute(config.ScriptPreInit), - ProcessRootIncludes(), EnvironmentsToTargets(), InitializeVariables(), DefineDefaultTarget(), diff --git a/bundle/phases/load.go b/bundle/phases/load.go new file mode 100644 index 0000000000..fa06687754 --- /dev/null +++ b/bundle/phases/load.go @@ -0,0 +1,29 @@ +package phases + +import ( + "github.com/databricks/cli/bundle" + "github.com/databricks/cli/bundle/config/mutator" +) + +// The load phase loads configuration from disk and performs +// lightweight preprocessing (anything that can be done without network I/O). +func Load() bundle.Mutator { + return newPhase( + "load", + mutator.DefaultMutators(), + ) +} + +func LoadDefaultTarget() bundle.Mutator { + return newPhase( + "load", + append(mutator.DefaultMutators(), mutator.SelectDefaultTarget()), + ) +} + +func LoadNamedTarget(target string) bundle.Mutator { + return newPhase( + "load", + append(mutator.DefaultMutators(), mutator.SelectTarget(target)), + ) +} diff --git a/bundle/tests/conflicting_resource_ids_test.go b/bundle/tests/conflicting_resource_ids_test.go index 16dd1c33ab..e7f0aa28f2 100644 --- a/bundle/tests/conflicting_resource_ids_test.go +++ b/bundle/tests/conflicting_resource_ids_test.go @@ -7,23 +7,25 @@ import ( "testing" "github.com/databricks/cli/bundle" - "github.com/databricks/cli/bundle/config/mutator" + "github.com/databricks/cli/bundle/phases" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) func TestConflictingResourceIdsNoSubconfig(t *testing.T) { ctx := context.Background() - _, err := bundle.Load(ctx, "./conflicting_resource_ids/no_subconfigurations") + b, err := bundle.Load(ctx, "./conflicting_resource_ids/no_subconfigurations") + require.NoError(t, err) + diags := bundle.Apply(ctx, b, phases.Load()) bundleConfigPath := filepath.FromSlash("conflicting_resource_ids/no_subconfigurations/databricks.yml") - assert.ErrorContains(t, err, fmt.Sprintf("multiple resources named foo (job at %s, pipeline at %s)", bundleConfigPath, bundleConfigPath)) + assert.ErrorContains(t, diags.Error(), fmt.Sprintf("multiple resources named foo (job at %s, pipeline at %s)", bundleConfigPath, bundleConfigPath)) } func TestConflictingResourceIdsOneSubconfig(t *testing.T) { ctx := context.Background() b, err := bundle.Load(ctx, "./conflicting_resource_ids/one_subconfiguration") require.NoError(t, err) - diags := bundle.Apply(ctx, b, bundle.Seq(mutator.DefaultMutators()...)) + diags := bundle.Apply(ctx, b, phases.Load()) bundleConfigPath := filepath.FromSlash("conflicting_resource_ids/one_subconfiguration/databricks.yml") resourcesConfigPath := filepath.FromSlash("conflicting_resource_ids/one_subconfiguration/resources.yml") assert.ErrorContains(t, diags.Error(), fmt.Sprintf("multiple resources named foo (job at %s, pipeline at %s)", bundleConfigPath, resourcesConfigPath)) @@ -33,7 +35,7 @@ func TestConflictingResourceIdsTwoSubconfigs(t *testing.T) { ctx := context.Background() b, err := 
bundle.Load(ctx, "./conflicting_resource_ids/two_subconfigurations") require.NoError(t, err) - diags := bundle.Apply(ctx, b, bundle.Seq(mutator.DefaultMutators()...)) + diags := bundle.Apply(ctx, b, phases.Load()) resources1ConfigPath := filepath.FromSlash("conflicting_resource_ids/two_subconfigurations/resources1.yml") resources2ConfigPath := filepath.FromSlash("conflicting_resource_ids/two_subconfigurations/resources2.yml") assert.ErrorContains(t, diags.Error(), fmt.Sprintf("multiple resources named foo (job at %s, pipeline at %s)", resources1ConfigPath, resources2ConfigPath)) diff --git a/bundle/tests/include_test.go b/bundle/tests/include_test.go index fd8ae7198d..5b0235f605 100644 --- a/bundle/tests/include_test.go +++ b/bundle/tests/include_test.go @@ -7,7 +7,7 @@ import ( "testing" "github.com/databricks/cli/bundle" - "github.com/databricks/cli/bundle/config/mutator" + "github.com/databricks/cli/bundle/phases" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" "golang.org/x/exp/maps" @@ -17,7 +17,7 @@ func TestIncludeInvalid(t *testing.T) { ctx := context.Background() b, err := bundle.Load(ctx, "./include_invalid") require.NoError(t, err) - diags := bundle.Apply(ctx, b, bundle.Seq(mutator.DefaultMutators()...)) + diags := bundle.Apply(ctx, b, phases.Load()) require.Error(t, diags.Error()) assert.ErrorContains(t, diags.Error(), "notexists.yml defined in 'include' section does not match any files") } diff --git a/bundle/tests/loader.go b/bundle/tests/loader.go index 228763ce92..e7cf18f732 100644 --- a/bundle/tests/loader.go +++ b/bundle/tests/loader.go @@ -6,6 +6,7 @@ import ( "github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle/config/mutator" + "github.com/databricks/cli/bundle/phases" "github.com/stretchr/testify/require" ) @@ -13,7 +14,7 @@ func load(t *testing.T, path string) *bundle.Bundle { ctx := context.Background() b, err := bundle.Load(ctx, path) require.NoError(t, err) - diags := bundle.Apply(ctx, b, bundle.Seq(mutator.DefaultMutators()...)) + diags := bundle.Apply(ctx, b, phases.Load()) require.NoError(t, diags.Error()) return b } @@ -22,9 +23,8 @@ func loadTarget(t *testing.T, path, env string) *bundle.Bundle { ctx := context.Background() b, err := bundle.Load(ctx, path) require.NoError(t, err) - diags := bundle.Apply(ctx, b, bundle.Seq(mutator.DefaultMutatorsForTarget(env)...)) - require.NoError(t, diags.Error()) - diags = bundle.Apply(ctx, b, bundle.Seq( + diags := bundle.Apply(ctx, b, bundle.Seq( + phases.LoadNamedTarget(env), mutator.RewriteSyncPaths(), mutator.MergeJobClusters(), mutator.MergeJobTasks(), diff --git a/bundle/tests/python_wheel_test.go b/bundle/tests/python_wheel_test.go index 412b507fe9..e2266516ae 100644 --- a/bundle/tests/python_wheel_test.go +++ b/bundle/tests/python_wheel_test.go @@ -16,8 +16,7 @@ func TestPythonWheelBuild(t *testing.T) { b, err := bundle.Load(ctx, "./python_wheel/python_wheel") require.NoError(t, err) - m := phases.Build() - diags := bundle.Apply(ctx, b, m) + diags := bundle.Apply(ctx, b, bundle.Seq(phases.Load(), phases.Build())) require.NoError(t, diags.Error()) matches, err := filepath.Glob("./python_wheel/python_wheel/my_test_code/dist/my_test_code-*.whl") @@ -34,8 +33,7 @@ func TestPythonWheelBuildAutoDetect(t *testing.T) { b, err := bundle.Load(ctx, "./python_wheel/python_wheel_no_artifact") require.NoError(t, err) - m := phases.Build() - diags := bundle.Apply(ctx, b, m) + diags := bundle.Apply(ctx, b, bundle.Seq(phases.Load(), phases.Build())) require.NoError(t, diags.Error()) 
matches, err := filepath.Glob("./python_wheel/python_wheel_no_artifact/dist/my_test_code-*.whl") @@ -52,8 +50,7 @@ func TestPythonWheelWithDBFSLib(t *testing.T) { b, err := bundle.Load(ctx, "./python_wheel/python_wheel_dbfs_lib") require.NoError(t, err) - m := phases.Build() - diags := bundle.Apply(ctx, b, m) + diags := bundle.Apply(ctx, b, bundle.Seq(phases.Load(), phases.Build())) require.NoError(t, diags.Error()) match := libraries.MatchWithArtifacts() @@ -66,8 +63,7 @@ func TestPythonWheelBuildNoBuildJustUpload(t *testing.T) { b, err := bundle.Load(ctx, "./python_wheel/python_wheel_no_artifact_no_setup") require.NoError(t, err) - m := phases.Build() - diags := bundle.Apply(ctx, b, m) + diags := bundle.Apply(ctx, b, bundle.Seq(phases.Load(), phases.Build())) require.NoError(t, diags.Error()) match := libraries.MatchWithArtifacts() diff --git a/cmd/root/bundle_test.go b/cmd/root/bundle_test.go index a3dec491d2..97412ff69f 100644 --- a/cmd/root/bundle_test.go +++ b/cmd/root/bundle_test.go @@ -40,8 +40,12 @@ func emptyCommand(t *testing.T) *cobra.Command { func setup(t *testing.T, cmd *cobra.Command, host string) *bundle.Bundle { setupDatabricksCfg(t) + rootPath := t.TempDir() + testutil.Touch(t, rootPath, "databricks.yml") + err := configureBundle(cmd, []string{"validate"}, func(_ context.Context) (*bundle.Bundle, error) { return &bundle.Bundle{ + RootPath: rootPath, Config: config.Root{ Bundle: config.Bundle{ Name: "test", diff --git a/libs/template/renderer_test.go b/libs/template/renderer_test.go index cad58a5326..a8678a5251 100644 --- a/libs/template/renderer_test.go +++ b/libs/template/renderer_test.go @@ -14,7 +14,6 @@ import ( "github.com/databricks/cli/bundle" bundleConfig "github.com/databricks/cli/bundle/config" - "github.com/databricks/cli/bundle/config/mutator" "github.com/databricks/cli/bundle/phases" "github.com/databricks/cli/cmd/root" "github.com/databricks/cli/libs/diag" @@ -66,23 +65,25 @@ func assertBuiltinTemplateValid(t *testing.T, template string, settings map[stri require.NoError(t, err) err = renderer.persistToDisk() require.NoError(t, err) + b, err := bundle.Load(ctx, filepath.Join(tempDir, "template", "my_project")) require.NoError(t, err) + diags := bundle.Apply(ctx, b, phases.LoadNamedTarget(target)) + require.NoError(t, diags.Error()) // Apply initialize / validation mutators bundle.ApplyFunc(ctx, b, func(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { b.Config.Workspace.CurrentUser = &bundleConfig.User{User: cachedUser} + b.Config.Bundle.Terraform = &bundleConfig.Terraform{ + ExecPath: "sh", + } return nil }) b.Tagging = tags.ForCloud(w.Config) b.WorkspaceClient() - b.Config.Bundle.Terraform = &bundleConfig.Terraform{ - ExecPath: "sh", - } - diags := bundle.Apply(ctx, b, bundle.Seq( - bundle.Seq(mutator.DefaultMutators()...), - mutator.SelectTarget(target), + + diags = bundle.Apply(ctx, b, bundle.Seq( phases.Initialize(), )) require.NoError(t, diags.Error()) From 704d06945975a901cb16328337ac3a24e81a6a6d Mon Sep 17 00:00:00 2001 From: shreyas-goenka <88374338+shreyas-goenka@users.noreply.github.com> Date: Wed, 27 Mar 2024 19:07:59 +0530 Subject: [PATCH 13/17] Make `bundle.deployment` optional in the bundle schema (#1321) ## Changes Makes the field optional by adding the `omitempty` tag. This gets rid of the red squiggly lines in the bundle schema. 
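To illustrate why the tag matters, here is a hedged sketch of how a schema generator can treat `omitempty`-tagged fields as not required; the `isOptional` helper and the trimmed-down structs are hypothetical and are not the CLI's actual schema code:

```go
package example

import (
	"reflect"
	"strings"
)

type Deployment struct{}

type Bundle struct {
	// The ",omitempty" option lets a schema generator mark the field as optional.
	Deployment Deployment `json:"deployment,omitempty"`
}

// isOptional is a hypothetical helper: it reports whether a struct field
// carries the "omitempty" option in its json tag.
func isOptional(f reflect.StructField) bool {
	opts := strings.Split(f.Tag.Get("json"), ",")
	for _, o := range opts[1:] {
		if o == "omitempty" {
			return true
		}
	}
	return false
}
```

With the field tagged as above, `isOptional(reflect.TypeOf(Bundle{}).Field(0))` returns true, so the field is no longer reported as required.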
--- bundle/config/bundle.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bundle/config/bundle.go b/bundle/config/bundle.go index 21278151f9..6f991e5628 100644 --- a/bundle/config/bundle.go +++ b/bundle/config/bundle.go @@ -42,5 +42,5 @@ type Bundle struct { ComputeID string `json:"compute_id,omitempty"` // Deployment section specifies deployment related configuration for bundle - Deployment Deployment `json:"deployment"` + Deployment Deployment `json:"deployment,omitempty"` } From 5df4c7e134c563bf2ff6c9ac6fbea61511876180 Mon Sep 17 00:00:00 2001 From: shreyas-goenka <88374338+shreyas-goenka@users.noreply.github.com> Date: Wed, 27 Mar 2024 21:43:53 +0530 Subject: [PATCH 14/17] Add allow list for resources when bundle `run_as` is set (#1233) ## Changes This PR introduces an allow list for resource types that are allowed when the run_as for the bundle is not the same as the current deployment user. This PR also adds a test to ensure that any new resources added to DABs will have to add the resource to either the allow list or add an error to fail when run_as identity is not the same as deployment user. ## Tests Unit tests --- bundle/config/mutator/run_as.go | 117 ++++++++--- bundle/config/mutator/run_as_test.go | 188 ++++++++++++++++++ bundle/config/root.go | 11 + .../tests/run_as/{ => allowed}/databricks.yml | 26 ++- .../both_sp_and_user/databricks.yml | 17 ++ .../not_allowed/model_serving/databricks.yml | 15 ++ .../neither_sp_nor_user/databricks.yml | 4 + .../databricks.yml | 8 + .../neither_sp_nor_user_override/override.yml | 4 + .../not_allowed/pipelines/databricks.yml | 25 +++ bundle/tests/run_as_test.go | 176 ++++++++++++++-- 11 files changed, 534 insertions(+), 57 deletions(-) create mode 100644 bundle/config/mutator/run_as_test.go rename bundle/tests/run_as/{ => allowed}/databricks.yml (70%) create mode 100644 bundle/tests/run_as/not_allowed/both_sp_and_user/databricks.yml create mode 100644 bundle/tests/run_as/not_allowed/model_serving/databricks.yml create mode 100644 bundle/tests/run_as/not_allowed/neither_sp_nor_user/databricks.yml create mode 100644 bundle/tests/run_as/not_allowed/neither_sp_nor_user_override/databricks.yml create mode 100644 bundle/tests/run_as/not_allowed/neither_sp_nor_user_override/override.yml create mode 100644 bundle/tests/run_as/not_allowed/pipelines/databricks.yml diff --git a/bundle/config/mutator/run_as.go b/bundle/config/mutator/run_as.go index 243f8ef7d9..578591eb14 100644 --- a/bundle/config/mutator/run_as.go +++ b/bundle/config/mutator/run_as.go @@ -2,20 +2,24 @@ package mutator import ( "context" - "slices" + "fmt" "github.com/databricks/cli/bundle" - "github.com/databricks/cli/bundle/config/resources" "github.com/databricks/cli/libs/diag" + "github.com/databricks/cli/libs/dyn" "github.com/databricks/databricks-sdk-go/service/jobs" ) type setRunAs struct { } -// SetRunAs mutator is used to go over defined resources such as Jobs and DLT Pipelines -// And set correct execution identity ("run_as" for a job or "is_owner" permission for DLT) -// if top-level "run-as" section is defined in the configuration. +// This mutator does two things: +// +// 1. Sets the run_as field for jobs to the value of the run_as field in the bundle. +// +// 2. Validates that the bundle run_as configuration is valid in the context of the bundle. +// If the run_as user is different from the current deployment user, DABs only +// supports a subset of resources. 
func SetRunAs() bundle.Mutator { return &setRunAs{} } @@ -24,12 +28,94 @@ func (m *setRunAs) Name() string { return "SetRunAs" } +type errUnsupportedResourceTypeForRunAs struct { + resourceType string + resourceLocation dyn.Location + currentUser string + runAsUser string +} + +// TODO(6 March 2024): Link the docs page describing run_as semantics in the error below +// once the page is ready. +func (e errUnsupportedResourceTypeForRunAs) Error() string { + return fmt.Sprintf("%s are not supported when the current deployment user is different from the bundle's run_as identity. Please deploy as the run_as identity. Location of the unsupported resource: %s. Current identity: %s. Run as identity: %s", e.resourceType, e.resourceLocation, e.currentUser, e.runAsUser) +} + +type errBothSpAndUserSpecified struct { + spName string + spLoc dyn.Location + userName string + userLoc dyn.Location +} + +func (e errBothSpAndUserSpecified) Error() string { + return fmt.Sprintf("run_as section must specify exactly one identity. A service_principal_name %q is specified at %s. A user_name %q is defined at %s", e.spName, e.spLoc, e.userName, e.userLoc) +} + +func validateRunAs(b *bundle.Bundle) error { + runAs := b.Config.RunAs + + // Error if neither service_principal_name nor user_name are specified + if runAs.ServicePrincipalName == "" && runAs.UserName == "" { + return fmt.Errorf("run_as section must specify exactly one identity. Neither service_principal_name nor user_name is specified at %s", b.Config.GetLocation("run_as")) + } + + // Error if both service_principal_name and user_name are specified + if runAs.UserName != "" && runAs.ServicePrincipalName != "" { + return errBothSpAndUserSpecified{ + spName: runAs.ServicePrincipalName, + userName: runAs.UserName, + spLoc: b.Config.GetLocation("run_as.service_principal_name"), + userLoc: b.Config.GetLocation("run_as.user_name"), + } + } + + identity := runAs.ServicePrincipalName + if identity == "" { + identity = runAs.UserName + } + + // All resources are supported if the run_as identity is the same as the current deployment identity. + if identity == b.Config.Workspace.CurrentUser.UserName { + return nil + } + + // DLT pipelines do not support run_as in the API. + if len(b.Config.Resources.Pipelines) > 0 { + return errUnsupportedResourceTypeForRunAs{ + resourceType: "pipelines", + resourceLocation: b.Config.GetLocation("resources.pipelines"), + currentUser: b.Config.Workspace.CurrentUser.UserName, + runAsUser: identity, + } + } + + // Model serving endpoints do not support run_as in the API. 
+ if len(b.Config.Resources.ModelServingEndpoints) > 0 { + return errUnsupportedResourceTypeForRunAs{ + resourceType: "model_serving_endpoints", + resourceLocation: b.Config.GetLocation("resources.model_serving_endpoints"), + currentUser: b.Config.Workspace.CurrentUser.UserName, + runAsUser: identity, + } + } + + return nil +} + func (m *setRunAs) Apply(_ context.Context, b *bundle.Bundle) diag.Diagnostics { + // Mutator is a no-op if run_as is not specified in the bundle runAs := b.Config.RunAs if runAs == nil { return nil } + // Assert the run_as configuration is valid in the context of the bundle + if err := validateRunAs(b); err != nil { + return diag.FromErr(err) + } + + // Set run_as for jobs for i := range b.Config.Resources.Jobs { job := b.Config.Resources.Jobs[i] if job.RunAs != nil { @@ -41,26 +127,5 @@ func (m *setRunAs) Apply(_ context.Context, b *bundle.Bundle) diag.Diagnostics { } } - me := b.Config.Workspace.CurrentUser.UserName - // If user deploying the bundle and the one defined in run_as are the same - // Do not add IS_OWNER permission. Current user is implied to be an owner in this case. - // Otherwise, it will fail due to this bug https://github.com/databricks/terraform-provider-databricks/issues/2407 - if runAs.UserName == me || runAs.ServicePrincipalName == me { - return nil - } - - for i := range b.Config.Resources.Pipelines { - pipeline := b.Config.Resources.Pipelines[i] - pipeline.Permissions = slices.DeleteFunc(pipeline.Permissions, func(p resources.Permission) bool { - return (runAs.ServicePrincipalName != "" && p.ServicePrincipalName == runAs.ServicePrincipalName) || - (runAs.UserName != "" && p.UserName == runAs.UserName) - }) - pipeline.Permissions = append(pipeline.Permissions, resources.Permission{ - Level: "IS_OWNER", - ServicePrincipalName: runAs.ServicePrincipalName, - UserName: runAs.UserName, - }) - } - return nil } diff --git a/bundle/config/mutator/run_as_test.go b/bundle/config/mutator/run_as_test.go new file mode 100644 index 0000000000..d6fb2939f6 --- /dev/null +++ b/bundle/config/mutator/run_as_test.go @@ -0,0 +1,188 @@ +package mutator + +import ( + "context" + "slices" + "testing" + + "github.com/databricks/cli/bundle" + "github.com/databricks/cli/bundle/config" + "github.com/databricks/cli/bundle/config/resources" + "github.com/databricks/cli/libs/dyn" + "github.com/databricks/cli/libs/dyn/convert" + "github.com/databricks/databricks-sdk-go/service/iam" + "github.com/databricks/databricks-sdk-go/service/jobs" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func allResourceTypes(t *testing.T) []string { + // Compute supported resource types based on the `Resources{}` struct. + r := config.Resources{} + rv, err := convert.FromTyped(r, dyn.NilValue) + require.NoError(t, err) + normalized, _ := convert.Normalize(r, rv, convert.IncludeMissingFields) + resourceTypes := []string{} + for _, k := range normalized.MustMap().Keys() { + resourceTypes = append(resourceTypes, k.MustString()) + } + slices.Sort(resourceTypes) + + // Assert the total list of resource supported, as a sanity check that using + // the dyn library gives us the correct list of all resources supported. 
Please + // also update this check when adding a new resource + require.Equal(t, []string{ + "experiments", + "jobs", + "model_serving_endpoints", + "models", + "pipelines", + "registered_models", + }, + resourceTypes, + ) + + return resourceTypes +} + +func TestRunAsWorksForAllowedResources(t *testing.T) { + config := config.Root{ + Workspace: config.Workspace{ + CurrentUser: &config.User{ + User: &iam.User{ + UserName: "alice", + }, + }, + }, + RunAs: &jobs.JobRunAs{ + UserName: "bob", + }, + Resources: config.Resources{ + Jobs: map[string]*resources.Job{ + "job_one": { + JobSettings: &jobs.JobSettings{ + Name: "foo", + }, + }, + "job_two": { + JobSettings: &jobs.JobSettings{ + Name: "bar", + }, + }, + "job_three": { + JobSettings: &jobs.JobSettings{ + Name: "baz", + }, + }, + }, + Models: map[string]*resources.MlflowModel{ + "model_one": {}, + }, + RegisteredModels: map[string]*resources.RegisteredModel{ + "registered_model_one": {}, + }, + Experiments: map[string]*resources.MlflowExperiment{ + "experiment_one": {}, + }, + }, + } + + b := &bundle.Bundle{ + Config: config, + } + + diags := bundle.Apply(context.Background(), b, SetRunAs()) + assert.NoError(t, diags.Error()) + + for _, job := range b.Config.Resources.Jobs { + assert.Equal(t, "bob", job.RunAs.UserName) + } +} + +func TestRunAsErrorForUnsupportedResources(t *testing.T) { + // Bundle "run_as" has two modes of operation, each with a different set of + // resources that are supported. + // Cases: + // 1. When the bundle "run_as" identity is same as the current deployment + // identity. In this case all resources are supported. + // 2. When the bundle "run_as" identity is different from the current + // deployment identity. In this case only a subset of resources are + // supported. This subset of resources are defined in the allow list below. + // + // To be a part of the allow list, the resource must satisfy one of the following + // two conditions: + // 1. The resource supports setting a run_as identity to a different user + // from the owner/creator of the resource. For example, jobs. + // 2. Run as semantics do not apply to the resource. We do not plan to add + // platform side support for `run_as` for these resources. For example, + // experiments or registered models. + // + // Any resource that is not on the allow list cannot be used when the bundle + // run_as is different from the current deployment user. "bundle validate" must + // return an error if such a resource has been defined, and the run_as identity + // is different from the current deployment identity. + // + // Action Item: If you are adding a new resource to DABs, please check in with + // the relevant owning team whether the resource should be on the allow list or (implicitly) on + // the deny list. Any resources that could have run_as semantics in the future + // should be on the deny list. + // For example: Teams for pipelines, model serving endpoints or Lakeview dashboards + // are planning to add platform side support for `run_as` for these resources at + // some point in the future. These resources are (implicitly) on the deny list, since + // they are not on the allow list below. 
+ allowList := []string{ + "jobs", + "models", + "registered_models", + "experiments", + } + + base := config.Root{ + Workspace: config.Workspace{ + CurrentUser: &config.User{ + User: &iam.User{ + UserName: "alice", + }, + }, + }, + RunAs: &jobs.JobRunAs{ + UserName: "bob", + }, + } + + v, err := convert.FromTyped(base, dyn.NilValue) + require.NoError(t, err) + + for _, rt := range allResourceTypes(t) { + // Skip allowed resources + if slices.Contains(allowList, rt) { + continue + } + + // Add an instance of the resource type that is not on the allow list to + // the bundle configuration. + nv, err := dyn.SetByPath(v, dyn.NewPath(dyn.Key("resources"), dyn.Key(rt)), dyn.V(map[string]dyn.Value{ + "foo": dyn.V(map[string]dyn.Value{ + "path": dyn.V("bar"), + }), + })) + require.NoError(t, err) + + // Get back typed configuration from the newly created invalid bundle configuration. + r := &config.Root{} + err = convert.ToTyped(r, nv) + require.NoError(t, err) + + // Assert this invalid bundle configuration fails validation. + b := &bundle.Bundle{ + Config: *r, + } + diags := bundle.Apply(context.Background(), b, SetRunAs()) + assert.Equal(t, diags.Error().Error(), errUnsupportedResourceTypeForRunAs{ + resourceType: rt, + resourceLocation: dyn.Location{}, + currentUser: "alice", + runAsUser: "bob", + }.Error(), "expected run_as with a different identity than the current deployment user to not supported for resources of type: %s", rt) + } +} diff --git a/bundle/config/root.go b/bundle/config/root.go index a3dd0d28bb..0e54c04ce4 100644 --- a/bundle/config/root.go +++ b/bundle/config/root.go @@ -448,3 +448,14 @@ func validateVariableOverrides(root, target dyn.Value) (err error) { return nil } + +// Best effort to get the location of configuration value at the specified path. +// This function is useful to annotate error messages with the location, because +// we don't want to fail with a different error message if we cannot retrieve the location. 
+func (r *Root) GetLocation(path string) dyn.Location { + v, err := dyn.Get(r.value, path) + if err != nil { + return dyn.Location{} + } + return v.Location() +} diff --git a/bundle/tests/run_as/databricks.yml b/bundle/tests/run_as/allowed/databricks.yml similarity index 70% rename from bundle/tests/run_as/databricks.yml rename to bundle/tests/run_as/allowed/databricks.yml index 1cdc9e44b2..6cb9cd5a49 100644 --- a/bundle/tests/run_as/databricks.yml +++ b/bundle/tests/run_as/allowed/databricks.yml @@ -11,20 +11,6 @@ targets: user_name: "my_user_name" resources: - pipelines: - nyc_taxi_pipeline: - name: "nyc taxi loader" - - permissions: - - level: CAN_VIEW - service_principal_name: my_service_principal - - level: CAN_VIEW - user_name: my_user_name - - libraries: - - notebook: - path: ./dlt/nyc_taxi_loader - jobs: job_one: name: Job One @@ -52,3 +38,15 @@ resources: - task_key: "task_three" notebook_task: notebook_path: "./test.py" + + models: + model_one: + name: "skynet" + + registered_models: + model_two: + name: "skynet (in UC)" + + experiments: + experiment_one: + name: "experiment_one" diff --git a/bundle/tests/run_as/not_allowed/both_sp_and_user/databricks.yml b/bundle/tests/run_as/not_allowed/both_sp_and_user/databricks.yml new file mode 100644 index 0000000000..dfab50e94b --- /dev/null +++ b/bundle/tests/run_as/not_allowed/both_sp_and_user/databricks.yml @@ -0,0 +1,17 @@ +bundle: + name: "run_as" + +# This is not allowed because both service_principal_name and user_name are set +run_as: + service_principal_name: "my_service_principal" + user_name: "my_user_name" + +resources: + jobs: + job_one: + name: Job One + + tasks: + - task_key: "task_one" + notebook_task: + notebook_path: "./test.py" diff --git a/bundle/tests/run_as/not_allowed/model_serving/databricks.yml b/bundle/tests/run_as/not_allowed/model_serving/databricks.yml new file mode 100644 index 0000000000..cdd7e09135 --- /dev/null +++ b/bundle/tests/run_as/not_allowed/model_serving/databricks.yml @@ -0,0 +1,15 @@ +bundle: + name: "run_as" + +run_as: + service_principal_name: "my_service_principal" + +targets: + development: + run_as: + user_name: "my_user_name" + +resources: + model_serving_endpoints: + foo: + name: "skynet" diff --git a/bundle/tests/run_as/not_allowed/neither_sp_nor_user/databricks.yml b/bundle/tests/run_as/not_allowed/neither_sp_nor_user/databricks.yml new file mode 100644 index 0000000000..a328fbd8c2 --- /dev/null +++ b/bundle/tests/run_as/not_allowed/neither_sp_nor_user/databricks.yml @@ -0,0 +1,4 @@ +bundle: + name: "abc" + +run_as: diff --git a/bundle/tests/run_as/not_allowed/neither_sp_nor_user_override/databricks.yml b/bundle/tests/run_as/not_allowed/neither_sp_nor_user_override/databricks.yml new file mode 100644 index 0000000000..f7c1d728d8 --- /dev/null +++ b/bundle/tests/run_as/not_allowed/neither_sp_nor_user_override/databricks.yml @@ -0,0 +1,8 @@ +bundle: + name: "abc" + +run_as: + user_name: "my_user_name" + +include: + - ./override.yml diff --git a/bundle/tests/run_as/not_allowed/neither_sp_nor_user_override/override.yml b/bundle/tests/run_as/not_allowed/neither_sp_nor_user_override/override.yml new file mode 100644 index 0000000000..d093e4c95b --- /dev/null +++ b/bundle/tests/run_as/not_allowed/neither_sp_nor_user_override/override.yml @@ -0,0 +1,4 @@ +targets: + development: + default: true + run_as: diff --git a/bundle/tests/run_as/not_allowed/pipelines/databricks.yml b/bundle/tests/run_as/not_allowed/pipelines/databricks.yml new file mode 100644 index 0000000000..d59c34ab63 --- /dev/null 
+++ b/bundle/tests/run_as/not_allowed/pipelines/databricks.yml @@ -0,0 +1,25 @@ +bundle: + name: "run_as" + +run_as: + service_principal_name: "my_service_principal" + +targets: + development: + run_as: + user_name: "my_user_name" + +resources: + pipelines: + nyc_taxi_pipeline: + name: "nyc taxi loader" + + permissions: + - level: CAN_VIEW + service_principal_name: my_service_principal + - level: CAN_VIEW + user_name: my_user_name + + libraries: + - notebook: + path: ./dlt/nyc_taxi_loader diff --git a/bundle/tests/run_as_test.go b/bundle/tests/run_as_test.go index 321bb5130f..3b9deafe0d 100644 --- a/bundle/tests/run_as_test.go +++ b/bundle/tests/run_as_test.go @@ -2,18 +2,22 @@ package config_tests import ( "context" + "fmt" + "path/filepath" "testing" "github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle/config" "github.com/databricks/cli/bundle/config/mutator" "github.com/databricks/cli/libs/diag" + "github.com/databricks/databricks-sdk-go/service/catalog" "github.com/databricks/databricks-sdk-go/service/iam" + "github.com/databricks/databricks-sdk-go/service/ml" "github.com/stretchr/testify/assert" ) -func TestRunAsDefault(t *testing.T) { - b := load(t, "./run_as") +func TestRunAsForAllowed(t *testing.T) { + b := load(t, "./run_as/allowed") ctx := context.Background() bundle.ApplyFunc(ctx, b, func(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { @@ -31,6 +35,7 @@ func TestRunAsDefault(t *testing.T) { assert.Len(t, b.Config.Resources.Jobs, 3) jobs := b.Config.Resources.Jobs + // job_one and job_two should have the same run_as identity as the bundle. assert.NotNil(t, jobs["job_one"].RunAs) assert.Equal(t, "my_service_principal", jobs["job_one"].RunAs.ServicePrincipalName) assert.Equal(t, "", jobs["job_one"].RunAs.UserName) @@ -39,21 +44,19 @@ func TestRunAsDefault(t *testing.T) { assert.Equal(t, "my_service_principal", jobs["job_two"].RunAs.ServicePrincipalName) assert.Equal(t, "", jobs["job_two"].RunAs.UserName) + // job_three should retain the job level run_as identity. assert.NotNil(t, jobs["job_three"].RunAs) assert.Equal(t, "my_service_principal_for_job", jobs["job_three"].RunAs.ServicePrincipalName) assert.Equal(t, "", jobs["job_three"].RunAs.UserName) - pipelines := b.Config.Resources.Pipelines - assert.Len(t, pipelines["nyc_taxi_pipeline"].Permissions, 2) - assert.Equal(t, "CAN_VIEW", pipelines["nyc_taxi_pipeline"].Permissions[0].Level) - assert.Equal(t, "my_user_name", pipelines["nyc_taxi_pipeline"].Permissions[0].UserName) - - assert.Equal(t, "IS_OWNER", pipelines["nyc_taxi_pipeline"].Permissions[1].Level) - assert.Equal(t, "my_service_principal", pipelines["nyc_taxi_pipeline"].Permissions[1].ServicePrincipalName) + // Assert other resources are not affected. 
+ assert.Equal(t, ml.Model{Name: "skynet"}, *b.Config.Resources.Models["model_one"].Model) + assert.Equal(t, catalog.CreateRegisteredModelRequest{Name: "skynet (in UC)"}, *b.Config.Resources.RegisteredModels["model_two"].CreateRegisteredModelRequest) + assert.Equal(t, ml.Experiment{Name: "experiment_one"}, *b.Config.Resources.Experiments["experiment_one"].Experiment) } -func TestRunAsDevelopment(t *testing.T) { - b := loadTarget(t, "./run_as", "development") +func TestRunAsForAllowedWithTargetOverride(t *testing.T) { + b := loadTarget(t, "./run_as/allowed", "development") ctx := context.Background() bundle.ApplyFunc(ctx, b, func(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { @@ -71,6 +74,8 @@ func TestRunAsDevelopment(t *testing.T) { assert.Len(t, b.Config.Resources.Jobs, 3) jobs := b.Config.Resources.Jobs + // job_one and job_two should have the same run_as identity as the bundle's + // development target. assert.NotNil(t, jobs["job_one"].RunAs) assert.Equal(t, "", jobs["job_one"].RunAs.ServicePrincipalName) assert.Equal(t, "my_user_name", jobs["job_one"].RunAs.UserName) @@ -79,15 +84,152 @@ func TestRunAsDevelopment(t *testing.T) { assert.Equal(t, "", jobs["job_two"].RunAs.ServicePrincipalName) assert.Equal(t, "my_user_name", jobs["job_two"].RunAs.UserName) + // job_three should retain the job level run_as identity. assert.NotNil(t, jobs["job_three"].RunAs) assert.Equal(t, "my_service_principal_for_job", jobs["job_three"].RunAs.ServicePrincipalName) assert.Equal(t, "", jobs["job_three"].RunAs.UserName) - pipelines := b.Config.Resources.Pipelines - assert.Len(t, pipelines["nyc_taxi_pipeline"].Permissions, 2) - assert.Equal(t, "CAN_VIEW", pipelines["nyc_taxi_pipeline"].Permissions[0].Level) - assert.Equal(t, "my_service_principal", pipelines["nyc_taxi_pipeline"].Permissions[0].ServicePrincipalName) + // Assert other resources are not affected. + assert.Equal(t, ml.Model{Name: "skynet"}, *b.Config.Resources.Models["model_one"].Model) + assert.Equal(t, catalog.CreateRegisteredModelRequest{Name: "skynet (in UC)"}, *b.Config.Resources.RegisteredModels["model_two"].CreateRegisteredModelRequest) + assert.Equal(t, ml.Experiment{Name: "experiment_one"}, *b.Config.Resources.Experiments["experiment_one"].Experiment) + +} + +func TestRunAsErrorForPipelines(t *testing.T) { + b := load(t, "./run_as/not_allowed/pipelines") + + ctx := context.Background() + bundle.ApplyFunc(ctx, b, func(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { + b.Config.Workspace.CurrentUser = &config.User{ + User: &iam.User{ + UserName: "jane@doe.com", + }, + } + return nil + }) + + diags := bundle.Apply(ctx, b, mutator.SetRunAs()) + err := diags.Error() + + configPath := filepath.FromSlash("run_as/not_allowed/pipelines/databricks.yml") + assert.EqualError(t, err, fmt.Sprintf("pipelines are not supported when the current deployment user is different from the bundle's run_as identity. Please deploy as the run_as identity. Location of the unsupported resource: %s:14:5. Current identity: jane@doe.com. Run as identity: my_service_principal", configPath)) +} + +func TestRunAsNoErrorForPipelines(t *testing.T) { + b := load(t, "./run_as/not_allowed/pipelines") + + // We should not error because the pipeline is being deployed with the same + // identity as the bundle run_as identity. 
+ ctx := context.Background() + bundle.ApplyFunc(ctx, b, func(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { + b.Config.Workspace.CurrentUser = &config.User{ + User: &iam.User{ + UserName: "my_service_principal", + }, + } + return nil + }) + + diags := bundle.Apply(ctx, b, mutator.SetRunAs()) + assert.NoError(t, diags.Error()) +} + +func TestRunAsErrorForModelServing(t *testing.T) { + b := load(t, "./run_as/not_allowed/model_serving") + + ctx := context.Background() + bundle.ApplyFunc(ctx, b, func(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { + b.Config.Workspace.CurrentUser = &config.User{ + User: &iam.User{ + UserName: "jane@doe.com", + }, + } + return nil + }) + + diags := bundle.Apply(ctx, b, mutator.SetRunAs()) + err := diags.Error() + + configPath := filepath.FromSlash("run_as/not_allowed/model_serving/databricks.yml") + assert.EqualError(t, err, fmt.Sprintf("model_serving_endpoints are not supported when the current deployment user is different from the bundle's run_as identity. Please deploy as the run_as identity. Location of the unsupported resource: %s:14:5. Current identity: jane@doe.com. Run as identity: my_service_principal", configPath)) +} + +func TestRunAsNoErrorForModelServingEndpoints(t *testing.T) { + b := load(t, "./run_as/not_allowed/model_serving") + + // We should not error because the model serving endpoint is being deployed + // with the same identity as the bundle run_as identity. + ctx := context.Background() + bundle.ApplyFunc(ctx, b, func(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { + b.Config.Workspace.CurrentUser = &config.User{ + User: &iam.User{ + UserName: "my_service_principal", + }, + } + return nil + }) + + diags := bundle.Apply(ctx, b, mutator.SetRunAs()) + assert.NoError(t, diags.Error()) +} + +func TestRunAsErrorWhenBothUserAndSpSpecified(t *testing.T) { + b := load(t, "./run_as/not_allowed/both_sp_and_user") + + ctx := context.Background() + bundle.ApplyFunc(ctx, b, func(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { + b.Config.Workspace.CurrentUser = &config.User{ + User: &iam.User{ + UserName: "my_service_principal", + }, + } + return nil + }) + + diags := bundle.Apply(ctx, b, mutator.SetRunAs()) + err := diags.Error() + + configPath := filepath.FromSlash("run_as/not_allowed/both_sp_and_user/databricks.yml") + assert.EqualError(t, err, fmt.Sprintf("run_as section must specify exactly one identity. A service_principal_name \"my_service_principal\" is specified at %s:6:27. A user_name \"my_user_name\" is defined at %s:7:14", configPath, configPath)) +} + +func TestRunAsErrorNeitherUserOrSpSpecified(t *testing.T) { + b := load(t, "./run_as/not_allowed/neither_sp_nor_user") + + ctx := context.Background() + bundle.ApplyFunc(ctx, b, func(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { + b.Config.Workspace.CurrentUser = &config.User{ + User: &iam.User{ + UserName: "my_service_principal", + }, + } + return nil + }) + + diags := bundle.Apply(ctx, b, mutator.SetRunAs()) + err := diags.Error() + + configPath := filepath.FromSlash("run_as/not_allowed/neither_sp_nor_user/databricks.yml") + assert.EqualError(t, err, fmt.Sprintf("run_as section must specify exactly one identity. 
Neither service_principal_name nor user_name is specified at %s:4:8", configPath)) +} + +func TestRunAsErrorNeitherUserOrSpSpecifiedAtTargetOverride(t *testing.T) { + b := loadTarget(t, "./run_as/not_allowed/neither_sp_nor_user_override", "development") + + ctx := context.Background() + bundle.ApplyFunc(ctx, b, func(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { + b.Config.Workspace.CurrentUser = &config.User{ + User: &iam.User{ + UserName: "my_service_principal", + }, + } + return nil + }) + + diags := bundle.Apply(ctx, b, mutator.SetRunAs()) + err := diags.Error() - assert.Equal(t, "IS_OWNER", pipelines["nyc_taxi_pipeline"].Permissions[1].Level) - assert.Equal(t, "my_user_name", pipelines["nyc_taxi_pipeline"].Permissions[1].UserName) + configPath := filepath.FromSlash("run_as/not_allowed/neither_sp_nor_user_override/override.yml") + assert.EqualError(t, err, fmt.Sprintf("run_as section must specify exactly one identity. Neither service_principal_name nor user_name is specified at %s:4:12", configPath)) } From b21e3c81cdcd7462b51139b86193d367932410f4 Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Thu, 28 Mar 2024 11:32:34 +0100 Subject: [PATCH 15/17] Make bundle loaders return diagnostics (#1319) ## Changes The function signature of Cobra's `PreRunE` function has an `error` return value. We'd like to start returning `diag.Diagnostics` after loading a bundle, so this is incompatible. This change modifies all usage of `PreRunE` to load a bundle to inline function calls in the command's `RunE` function. ## Tests * Unit tests pass. * Integration tests pass. --- cmd/bundle/deploy.go | 14 +-- cmd/bundle/deployment/bind.go | 15 +-- cmd/bundle/deployment/unbind.go | 15 +-- cmd/bundle/destroy.go | 14 +-- cmd/bundle/generate.go | 8 +- cmd/bundle/generate/job.go | 13 +-- cmd/bundle/generate/pipeline.go | 13 +-- cmd/bundle/launch.go | 2 - cmd/bundle/run.go | 19 ++-- cmd/bundle/summary.go | 21 ++-- cmd/bundle/sync.go | 11 +- cmd/bundle/test.go | 3 - cmd/bundle/utils/utils.go | 29 ++++-- cmd/bundle/validate.go | 15 +-- cmd/labs/project/entrypoint.go | 7 +- cmd/root/auth.go | 7 +- cmd/root/bundle.go | 119 +++++++++++---------- cmd/root/bundle_test.go | 179 ++++++++++++++++++++++---------- 18 files changed, 305 insertions(+), 199 deletions(-) diff --git a/cmd/bundle/deploy.go b/cmd/bundle/deploy.go index 8b8cb9f2ec..919b15a723 100644 --- a/cmd/bundle/deploy.go +++ b/cmd/bundle/deploy.go @@ -13,10 +13,9 @@ import ( func newDeployCommand() *cobra.Command { cmd := &cobra.Command{ - Use: "deploy", - Short: "Deploy bundle", - Args: root.NoArgs, - PreRunE: utils.ConfigureBundleWithVariables, + Use: "deploy", + Short: "Deploy bundle", + Args: root.NoArgs, } var force bool @@ -30,7 +29,10 @@ func newDeployCommand() *cobra.Command { cmd.RunE = func(cmd *cobra.Command, args []string) error { ctx := cmd.Context() - b := bundle.Get(ctx) + b, diags := utils.ConfigureBundleWithVariables(cmd) + if err := diags.Error(); err != nil { + return diags.Error() + } bundle.ApplyFunc(ctx, b, func(context.Context, *bundle.Bundle) diag.Diagnostics { b.Config.Bundle.Force = force @@ -46,7 +48,7 @@ func newDeployCommand() *cobra.Command { return nil }) - diags := bundle.Apply(ctx, b, bundle.Seq( + diags = bundle.Apply(ctx, b, bundle.Seq( phases.Initialize(), phases.Build(), phases.Deploy(), diff --git a/cmd/bundle/deployment/bind.go b/cmd/bundle/deployment/bind.go index 11c560b12a..71f441d3db 100644 --- a/cmd/bundle/deployment/bind.go +++ b/cmd/bundle/deployment/bind.go @@ -16,10 +16,9 @@ import ( func 
newBindCommand() *cobra.Command { cmd := &cobra.Command{ - Use: "bind KEY RESOURCE_ID", - Short: "Bind bundle-defined resources to existing resources", - Args: root.ExactArgs(2), - PreRunE: utils.ConfigureBundleWithVariables, + Use: "bind KEY RESOURCE_ID", + Short: "Bind bundle-defined resources to existing resources", + Args: root.ExactArgs(2), } var autoApprove bool @@ -29,7 +28,11 @@ func newBindCommand() *cobra.Command { cmd.RunE = func(cmd *cobra.Command, args []string) error { ctx := cmd.Context() - b := bundle.Get(ctx) + b, diags := utils.ConfigureBundleWithVariables(cmd) + if err := diags.Error(); err != nil { + return diags.Error() + } + resource, err := b.Config.Resources.FindResourceByConfigKey(args[0]) if err != nil { return err @@ -50,7 +53,7 @@ func newBindCommand() *cobra.Command { return nil }) - diags := bundle.Apply(ctx, b, bundle.Seq( + diags = bundle.Apply(ctx, b, bundle.Seq( phases.Initialize(), phases.Bind(&terraform.BindOptions{ AutoApprove: autoApprove, diff --git a/cmd/bundle/deployment/unbind.go b/cmd/bundle/deployment/unbind.go index 76727877f8..9de5285a52 100644 --- a/cmd/bundle/deployment/unbind.go +++ b/cmd/bundle/deployment/unbind.go @@ -13,10 +13,9 @@ import ( func newUnbindCommand() *cobra.Command { cmd := &cobra.Command{ - Use: "unbind KEY", - Short: "Unbind bundle-defined resources from its managed remote resource", - Args: root.ExactArgs(1), - PreRunE: utils.ConfigureBundleWithVariables, + Use: "unbind KEY", + Short: "Unbind bundle-defined resources from its managed remote resource", + Args: root.ExactArgs(1), } var forceLock bool @@ -24,7 +23,11 @@ func newUnbindCommand() *cobra.Command { cmd.RunE = func(cmd *cobra.Command, args []string) error { ctx := cmd.Context() - b := bundle.Get(ctx) + b, diags := utils.ConfigureBundleWithVariables(cmd) + if err := diags.Error(); err != nil { + return diags.Error() + } + resource, err := b.Config.Resources.FindResourceByConfigKey(args[0]) if err != nil { return err @@ -35,7 +38,7 @@ func newUnbindCommand() *cobra.Command { return nil }) - diags := bundle.Apply(cmd.Context(), b, bundle.Seq( + diags = bundle.Apply(cmd.Context(), b, bundle.Seq( phases.Initialize(), phases.Unbind(resource.TerraformResourceName(), args[0]), )) diff --git a/cmd/bundle/destroy.go b/cmd/bundle/destroy.go index 38b717713d..cd7e630626 100644 --- a/cmd/bundle/destroy.go +++ b/cmd/bundle/destroy.go @@ -18,10 +18,9 @@ import ( func newDestroyCommand() *cobra.Command { cmd := &cobra.Command{ - Use: "destroy", - Short: "Destroy deployed bundle resources", - Args: root.NoArgs, - PreRunE: utils.ConfigureBundleWithVariables, + Use: "destroy", + Short: "Destroy deployed bundle resources", + Args: root.NoArgs, } var autoApprove bool @@ -31,7 +30,10 @@ func newDestroyCommand() *cobra.Command { cmd.RunE = func(cmd *cobra.Command, args []string) error { ctx := cmd.Context() - b := bundle.Get(ctx) + b, diags := utils.ConfigureBundleWithVariables(cmd) + if err := diags.Error(); err != nil { + return diags.Error() + } bundle.ApplyFunc(ctx, b, func(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { // If `--force-lock` is specified, force acquisition of the deployment lock. 
@@ -58,7 +60,7 @@ func newDestroyCommand() *cobra.Command { return fmt.Errorf("please specify --auto-approve since selected logging format is json") } - diags := bundle.Apply(ctx, b, bundle.Seq( + diags = bundle.Apply(ctx, b, bundle.Seq( phases.Initialize(), phases.Build(), phases.Destroy(), diff --git a/cmd/bundle/generate.go b/cmd/bundle/generate.go index 6c48b15866..1e3d56e430 100644 --- a/cmd/bundle/generate.go +++ b/cmd/bundle/generate.go @@ -2,7 +2,6 @@ package bundle import ( "github.com/databricks/cli/cmd/bundle/generate" - "github.com/databricks/cli/cmd/bundle/utils" "github.com/spf13/cobra" ) @@ -10,10 +9,9 @@ func newGenerateCommand() *cobra.Command { var key string cmd := &cobra.Command{ - Use: "generate", - Short: "Generate bundle configuration", - Long: "Generate bundle configuration", - PreRunE: utils.ConfigureBundleWithVariables, + Use: "generate", + Short: "Generate bundle configuration", + Long: "Generate bundle configuration", } cmd.AddCommand(generate.NewGenerateJobCommand()) diff --git a/cmd/bundle/generate/job.go b/cmd/bundle/generate/job.go index c5a94a8f6f..99bc616604 100644 --- a/cmd/bundle/generate/job.go +++ b/cmd/bundle/generate/job.go @@ -5,7 +5,6 @@ import ( "os" "path/filepath" - "github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle/config/generate" "github.com/databricks/cli/cmd/root" "github.com/databricks/cli/libs/cmdio" @@ -24,9 +23,8 @@ func NewGenerateJobCommand() *cobra.Command { var force bool cmd := &cobra.Command{ - Use: "job", - Short: "Generate bundle configuration for a job", - PreRunE: root.MustConfigureBundle, + Use: "job", + Short: "Generate bundle configuration for a job", } cmd.Flags().Int64Var(&jobId, "existing-job-id", 0, `Job ID of the job to generate config for`) @@ -43,9 +41,12 @@ func NewGenerateJobCommand() *cobra.Command { cmd.RunE = func(cmd *cobra.Command, args []string) error { ctx := cmd.Context() - b := bundle.Get(ctx) - w := b.WorkspaceClient() + b, diags := root.MustConfigureBundle(cmd) + if err := diags.Error(); err != nil { + return diags.Error() + } + w := b.WorkspaceClient() job, err := w.Jobs.Get(ctx, jobs.GetJobRequest{JobId: jobId}) if err != nil { return err diff --git a/cmd/bundle/generate/pipeline.go b/cmd/bundle/generate/pipeline.go index 4c5fcf4255..bd973fe0b3 100644 --- a/cmd/bundle/generate/pipeline.go +++ b/cmd/bundle/generate/pipeline.go @@ -5,7 +5,6 @@ import ( "os" "path/filepath" - "github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle/config/generate" "github.com/databricks/cli/cmd/root" "github.com/databricks/cli/libs/cmdio" @@ -24,9 +23,8 @@ func NewGeneratePipelineCommand() *cobra.Command { var force bool cmd := &cobra.Command{ - Use: "pipeline", - Short: "Generate bundle configuration for a pipeline", - PreRunE: root.MustConfigureBundle, + Use: "pipeline", + Short: "Generate bundle configuration for a pipeline", } cmd.Flags().StringVar(&pipelineId, "existing-pipeline-id", "", `ID of the pipeline to generate config for`) @@ -43,9 +41,12 @@ func NewGeneratePipelineCommand() *cobra.Command { cmd.RunE = func(cmd *cobra.Command, args []string) error { ctx := cmd.Context() - b := bundle.Get(ctx) - w := b.WorkspaceClient() + b, diags := root.MustConfigureBundle(cmd) + if err := diags.Error(); err != nil { + return diags.Error() + } + w := b.WorkspaceClient() pipeline, err := w.Pipelines.Get(ctx, pipelines.GetPipelineRequest{PipelineId: pipelineId}) if err != nil { return err diff --git a/cmd/bundle/launch.go b/cmd/bundle/launch.go index f376ebdae6..0d2b4233b3 100644 --- 
a/cmd/bundle/launch.go +++ b/cmd/bundle/launch.go @@ -16,8 +16,6 @@ func newLaunchCommand() *cobra.Command { // We're not ready to expose this command until we specify its semantics. Hidden: true, - - PreRunE: root.MustConfigureBundle, } cmd.RunE = func(cmd *cobra.Command, args []string) error { diff --git a/cmd/bundle/run.go b/cmd/bundle/run.go index 87ea8610cc..e6a8e1ba4a 100644 --- a/cmd/bundle/run.go +++ b/cmd/bundle/run.go @@ -17,10 +17,9 @@ import ( func newRunCommand() *cobra.Command { cmd := &cobra.Command{ - Use: "run [flags] KEY", - Short: "Run a resource (e.g. a job or a pipeline)", - Args: root.MaximumNArgs(1), - PreRunE: utils.ConfigureBundleWithVariables, + Use: "run [flags] KEY", + Short: "Run a resource (e.g. a job or a pipeline)", + Args: root.MaximumNArgs(1), } var runOptions run.Options @@ -33,9 +32,12 @@ func newRunCommand() *cobra.Command { cmd.RunE = func(cmd *cobra.Command, args []string) error { ctx := cmd.Context() - b := bundle.Get(ctx) + b, diags := utils.ConfigureBundleWithVariables(cmd) + if err := diags.Error(); err != nil { + return diags.Error() + } - diags := bundle.Apply(ctx, b, bundle.Seq( + diags = bundle.Apply(ctx, b, bundle.Seq( phases.Initialize(), terraform.Interpolate(), terraform.Write(), @@ -109,15 +111,14 @@ func newRunCommand() *cobra.Command { return nil, cobra.ShellCompDirectiveNoFileComp } - err := root.MustConfigureBundle(cmd, args) - if err != nil { + b, diags := root.MustConfigureBundle(cmd) + if err := diags.Error(); err != nil { cobra.CompErrorln(err.Error()) return nil, cobra.ShellCompDirectiveError } // No completion in the context of a bundle. // Source and destination paths are taken from bundle configuration. - b := bundle.GetOrNil(cmd.Context()) if b == nil { return nil, cobra.ShellCompDirectiveNoFileComp } diff --git a/cmd/bundle/summary.go b/cmd/bundle/summary.go index a28ceede97..5a64b46c0a 100644 --- a/cmd/bundle/summary.go +++ b/cmd/bundle/summary.go @@ -18,10 +18,9 @@ import ( func newSummaryCommand() *cobra.Command { cmd := &cobra.Command{ - Use: "summary", - Short: "Describe the bundle resources and their deployment states", - Args: root.NoArgs, - PreRunE: utils.ConfigureBundleWithVariables, + Use: "summary", + Short: "Describe the bundle resources and their deployment states", + Args: root.NoArgs, // This command is currently intended for the Databricks VSCode extension only Hidden: true, @@ -31,14 +30,18 @@ func newSummaryCommand() *cobra.Command { cmd.Flags().BoolVar(&forcePull, "force-pull", false, "Skip local cache and load the state from the remote workspace") cmd.RunE = func(cmd *cobra.Command, args []string) error { - b := bundle.Get(cmd.Context()) + ctx := cmd.Context() + b, diags := utils.ConfigureBundleWithVariables(cmd) + if err := diags.Error(); err != nil { + return diags.Error() + } - diags := bundle.Apply(cmd.Context(), b, phases.Initialize()) + diags = bundle.Apply(ctx, b, phases.Initialize()) if err := diags.Error(); err != nil { return err } - cacheDir, err := terraform.Dir(cmd.Context(), b) + cacheDir, err := terraform.Dir(ctx, b) if err != nil { return err } @@ -47,7 +50,7 @@ func newSummaryCommand() *cobra.Command { noCache := errors.Is(stateFileErr, os.ErrNotExist) || errors.Is(configFileErr, os.ErrNotExist) if forcePull || noCache { - diags = bundle.Apply(cmd.Context(), b, bundle.Seq( + diags = bundle.Apply(ctx, b, bundle.Seq( terraform.StatePull(), terraform.Interpolate(), terraform.Write(), @@ -57,7 +60,7 @@ func newSummaryCommand() *cobra.Command { } } - diags = bundle.Apply(cmd.Context(), b, 
terraform.Load()) + diags = bundle.Apply(ctx, b, terraform.Load()) if err := diags.Error(); err != nil { return err } diff --git a/cmd/bundle/sync.go b/cmd/bundle/sync.go index 0b7f9b3a90..0818aecf73 100644 --- a/cmd/bundle/sync.go +++ b/cmd/bundle/sync.go @@ -36,8 +36,6 @@ func newSyncCommand() *cobra.Command { Use: "sync [flags]", Short: "Synchronize bundle tree to the workspace", Args: root.NoArgs, - - PreRunE: utils.ConfigureBundleWithVariables, } var f syncFlags @@ -46,10 +44,14 @@ func newSyncCommand() *cobra.Command { cmd.Flags().BoolVar(&f.watch, "watch", false, "watch local file system for changes") cmd.RunE = func(cmd *cobra.Command, args []string) error { - b := bundle.Get(cmd.Context()) + ctx := cmd.Context() + b, diags := utils.ConfigureBundleWithVariables(cmd) + if err := diags.Error(); err != nil { + return diags.Error() + } // Run initialize phase to make sure paths are set. - diags := bundle.Apply(cmd.Context(), b, phases.Initialize()) + diags = bundle.Apply(ctx, b, phases.Initialize()) if err := diags.Error(); err != nil { return err } @@ -59,7 +61,6 @@ func newSyncCommand() *cobra.Command { return err } - ctx := cmd.Context() s, err := sync.New(ctx, *opts) if err != nil { return err diff --git a/cmd/bundle/test.go b/cmd/bundle/test.go index ea1a4b716e..4d30e727d5 100644 --- a/cmd/bundle/test.go +++ b/cmd/bundle/test.go @@ -3,7 +3,6 @@ package bundle import ( "fmt" - "github.com/databricks/cli/cmd/root" "github.com/spf13/cobra" ) @@ -15,8 +14,6 @@ func newTestCommand() *cobra.Command { // We're not ready to expose this command until we specify its semantics. Hidden: true, - - PreRunE: root.MustConfigureBundle, } cmd.RunE = func(cmd *cobra.Command, args []string) error { diff --git a/cmd/bundle/utils/utils.go b/cmd/bundle/utils/utils.go index e53a40b9d6..d585c62204 100644 --- a/cmd/bundle/utils/utils.go +++ b/cmd/bundle/utils/utils.go @@ -9,23 +9,30 @@ import ( "github.com/spf13/cobra" ) -func ConfigureBundleWithVariables(cmd *cobra.Command, args []string) error { +func configureVariables(cmd *cobra.Command, b *bundle.Bundle, variables []string) diag.Diagnostics { + return bundle.ApplyFunc(cmd.Context(), b, func(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { + err := b.Config.InitializeVariables(variables) + return diag.FromErr(err) + }) +} + +func ConfigureBundleWithVariables(cmd *cobra.Command) (*bundle.Bundle, diag.Diagnostics) { // Load bundle config and apply target - err := root.MustConfigureBundle(cmd, args) - if err != nil { - return err + b, diags := root.MustConfigureBundle(cmd) + if diags.HasError() { + return nil, diags } variables, err := cmd.Flags().GetStringSlice("var") if err != nil { - return err + return nil, diag.FromErr(err) } // Initialize variables by assigning them values passed as command line flags - b := bundle.Get(cmd.Context()) - diags := bundle.ApplyFunc(cmd.Context(), b, func(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { - err := b.Config.InitializeVariables(variables) - return diag.FromErr(err) - }) - return diags.Error() + diags = diags.Extend(configureVariables(cmd, b, variables)) + if diags.HasError() { + return nil, diags + } + + return b, diags } diff --git a/cmd/bundle/validate.go b/cmd/bundle/validate.go index 42686b3284..57bf6f7b9b 100644 --- a/cmd/bundle/validate.go +++ b/cmd/bundle/validate.go @@ -13,16 +13,19 @@ import ( func newValidateCommand() *cobra.Command { cmd := &cobra.Command{ - Use: "validate", - Short: "Validate configuration", - Args: root.NoArgs, - PreRunE: utils.ConfigureBundleWithVariables, + 
Use: "validate", + Short: "Validate configuration", + Args: root.NoArgs, } cmd.RunE = func(cmd *cobra.Command, args []string) error { - b := bundle.Get(cmd.Context()) + ctx := cmd.Context() + b, diags := utils.ConfigureBundleWithVariables(cmd) + if err := diags.Error(); err != nil { + return diags.Error() + } - diags := bundle.Apply(cmd.Context(), b, phases.Initialize()) + diags = bundle.Apply(ctx, b, phases.Initialize()) if err := diags.Error(); err != nil { return err } diff --git a/cmd/labs/project/entrypoint.go b/cmd/labs/project/entrypoint.go index 96f46d4b5c..99edf83c8c 100644 --- a/cmd/labs/project/entrypoint.go +++ b/cmd/labs/project/entrypoint.go @@ -10,7 +10,6 @@ import ( "path/filepath" "strings" - "github.com/databricks/cli/bundle" "github.com/databricks/cli/cmd/root" "github.com/databricks/cli/internal/build" "github.com/databricks/cli/libs/cmdio" @@ -203,11 +202,11 @@ func (e *Entrypoint) getLoginConfig(cmd *cobra.Command) (*loginConfig, *config.C return lc, cfg, nil } if e.IsBundleAware { - err = root.TryConfigureBundle(cmd, []string{}) - if err != nil { + b, diags := root.TryConfigureBundle(cmd) + if err := diags.Error(); err != nil { return nil, nil, fmt.Errorf("bundle: %w", err) } - if b := bundle.GetOrNil(cmd.Context()); b != nil { + if b != nil { log.Infof(ctx, "Using login configuration from Databricks Asset Bundle") return &loginConfig{}, b.WorkspaceClient().Config, nil } diff --git a/cmd/root/auth.go b/cmd/root/auth.go index 89c7641c54..0edfaaa838 100644 --- a/cmd/root/auth.go +++ b/cmd/root/auth.go @@ -6,7 +6,6 @@ import ( "fmt" "net/http" - "github.com/databricks/cli/bundle" "github.com/databricks/cli/libs/cmdio" "github.com/databricks/cli/libs/databrickscfg" "github.com/databricks/databricks-sdk-go" @@ -149,11 +148,11 @@ func MustWorkspaceClient(cmd *cobra.Command, args []string) error { // Try to load a bundle configuration if we're allowed to by the caller (see `./auth_options.go`). if !shouldSkipLoadBundle(cmd.Context()) { - err := TryConfigureBundle(cmd, args) - if err != nil { + b, diags := TryConfigureBundle(cmd) + if err := diags.Error(); err != nil { return err } - if b := bundle.GetOrNil(cmd.Context()); b != nil { + if b != nil { client, err := b.InitializeWorkspaceClient() if err != nil { return err diff --git a/cmd/root/bundle.go b/cmd/root/bundle.go index 6a6aeb4d2f..4ed89c57b8 100644 --- a/cmd/root/bundle.go +++ b/cmd/root/bundle.go @@ -4,8 +4,8 @@ import ( "context" "github.com/databricks/cli/bundle" - "github.com/databricks/cli/bundle/config/mutator" "github.com/databricks/cli/bundle/env" + "github.com/databricks/cli/bundle/phases" "github.com/databricks/cli/libs/diag" envlib "github.com/databricks/cli/libs/env" "github.com/spf13/cobra" @@ -50,87 +50,100 @@ func getProfile(cmd *cobra.Command) (value string) { return envlib.Get(cmd.Context(), "DATABRICKS_CONFIG_PROFILE") } -// loadBundle loads the bundle configuration and applies default mutators. -func loadBundle(cmd *cobra.Command, args []string, load func(ctx context.Context) (*bundle.Bundle, error)) (*bundle.Bundle, error) { - ctx := cmd.Context() - b, err := load(ctx) - if err != nil { - return nil, err - } - - // No bundle is fine in case of `TryConfigureBundle`. - if b == nil { - return nil, nil - } - +// configureProfile applies the profile flag to the bundle. 
+func configureProfile(cmd *cobra.Command, b *bundle.Bundle) diag.Diagnostics { profile := getProfile(cmd) - if profile != "" { - diags := bundle.ApplyFunc(ctx, b, func(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { - b.Config.Workspace.Profile = profile - return nil - }) - if err := diags.Error(); err != nil { - return nil, err - } - } - - diags := bundle.Apply(ctx, b, bundle.Seq(mutator.DefaultMutators()...)) - if err := diags.Error(); err != nil { - return nil, err - } - - return b, nil -} - -// configureBundle loads the bundle configuration and configures it on the command's context. -func configureBundle(cmd *cobra.Command, args []string, load func(ctx context.Context) (*bundle.Bundle, error)) error { - b, err := loadBundle(cmd, args, load) - if err != nil { - return err + if profile == "" { + return nil } - // No bundle is fine in case of `TryConfigureBundle`. - if b == nil { + return bundle.ApplyFunc(cmd.Context(), b, func(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { + b.Config.Workspace.Profile = profile return nil - } + }) +} +// configureBundle loads the bundle configuration and configures flag values, if any. +func configureBundle(cmd *cobra.Command, b *bundle.Bundle) (*bundle.Bundle, diag.Diagnostics) { var m bundle.Mutator - env := getTarget(cmd) - if env == "" { - m = mutator.SelectDefaultTarget() + if target := getTarget(cmd); target == "" { + m = phases.LoadDefaultTarget() } else { - m = mutator.SelectTarget(env) + m = phases.LoadNamedTarget(target) } + // Load bundle and select target. ctx := cmd.Context() diags := bundle.Apply(ctx, b, m) - if err := diags.Error(); err != nil { - return err + if diags.HasError() { + return nil, diags } - cmd.SetContext(bundle.Context(ctx, b)) - return nil + // Configure the workspace profile if the flag has been set. + diags = diags.Extend(configureProfile(cmd, b)) + if diags.HasError() { + return nil, diags + } + + return b, diags } // MustConfigureBundle configures a bundle on the command context. -func MustConfigureBundle(cmd *cobra.Command, args []string) error { - return configureBundle(cmd, args, bundle.MustLoad) +func MustConfigureBundle(cmd *cobra.Command) (*bundle.Bundle, diag.Diagnostics) { + // A bundle may be configured on the context when testing. + // If it is, return it immediately. + b := bundle.GetOrNil(cmd.Context()) + if b != nil { + return b, nil + } + + b, err := bundle.MustLoad(cmd.Context()) + if err != nil { + return nil, diag.FromErr(err) + } + + return configureBundle(cmd, b) } // TryConfigureBundle configures a bundle on the command context // if there is one, but doesn't fail if there isn't one. -func TryConfigureBundle(cmd *cobra.Command, args []string) error { - return configureBundle(cmd, args, bundle.TryLoad) +func TryConfigureBundle(cmd *cobra.Command) (*bundle.Bundle, diag.Diagnostics) { + // A bundle may be configured on the context when testing. + // If it is, return it immediately. + b := bundle.GetOrNil(cmd.Context()) + if b != nil { + return b, nil + } + + b, err := bundle.TryLoad(cmd.Context()) + if err != nil { + return nil, diag.FromErr(err) + } + + // No bundle is fine in this case. + if b == nil { + return nil, nil + } + + return configureBundle(cmd, b) } // targetCompletion executes to autocomplete the argument to the target flag. 
func targetCompletion(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) { - b, err := loadBundle(cmd, args, bundle.MustLoad) + ctx := cmd.Context() + b, err := bundle.MustLoad(ctx) if err != nil { cobra.CompErrorln(err.Error()) return nil, cobra.ShellCompDirectiveError } + // Load bundle but don't select a target (we're completing those). + diags := bundle.Apply(ctx, b, phases.Load()) + if err := diags.Error(); err != nil { + cobra.CompErrorln(err.Error()) + return nil, cobra.ShellCompDirectiveError + } + return maps.Keys(b.Config.Targets), cobra.ShellCompDirectiveDefault } diff --git a/cmd/root/bundle_test.go b/cmd/root/bundle_test.go index 97412ff69f..3018842870 100644 --- a/cmd/root/bundle_test.go +++ b/cmd/root/bundle_test.go @@ -2,16 +2,17 @@ package root import ( "context" + "fmt" "os" "path/filepath" "runtime" "testing" "github.com/databricks/cli/bundle" - "github.com/databricks/cli/bundle/config" "github.com/databricks/cli/internal/testutil" "github.com/spf13/cobra" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" ) func setupDatabricksCfg(t *testing.T) { @@ -37,47 +38,61 @@ func emptyCommand(t *testing.T) *cobra.Command { return cmd } -func setup(t *testing.T, cmd *cobra.Command, host string) *bundle.Bundle { +func setupWithHost(t *testing.T, cmd *cobra.Command, host string) *bundle.Bundle { setupDatabricksCfg(t) rootPath := t.TempDir() - testutil.Touch(t, rootPath, "databricks.yml") - - err := configureBundle(cmd, []string{"validate"}, func(_ context.Context) (*bundle.Bundle, error) { - return &bundle.Bundle{ - RootPath: rootPath, - Config: config.Root{ - Bundle: config.Bundle{ - Name: "test", - }, - Workspace: config.Workspace{ - Host: host, - }, - }, - }, nil - }) - assert.NoError(t, err) - return bundle.Get(cmd.Context()) + testutil.Chdir(t, rootPath) + + contents := fmt.Sprintf(` +workspace: + host: %q +`, host) + err := os.WriteFile(filepath.Join(rootPath, "databricks.yml"), []byte(contents), 0644) + require.NoError(t, err) + + b, diags := MustConfigureBundle(cmd) + require.NoError(t, diags.Error()) + return b +} + +func setupWithProfile(t *testing.T, cmd *cobra.Command, profile string) *bundle.Bundle { + setupDatabricksCfg(t) + + rootPath := t.TempDir() + testutil.Chdir(t, rootPath) + + contents := fmt.Sprintf(` +workspace: + profile: %q +`, profile) + err := os.WriteFile(filepath.Join(rootPath, "databricks.yml"), []byte(contents), 0644) + require.NoError(t, err) + + b, diags := MustConfigureBundle(cmd) + require.NoError(t, diags.Error()) + return b } func TestBundleConfigureDefault(t *testing.T) { testutil.CleanupEnvironment(t) cmd := emptyCommand(t) - b := setup(t, cmd, "https://x.com") - assert.NotPanics(t, func() { - b.WorkspaceClient() - }) + b := setupWithHost(t, cmd, "https://x.com") + + client, err := b.InitializeWorkspaceClient() + require.NoError(t, err) + assert.Equal(t, "https://x.com", client.Config.Host) } func TestBundleConfigureWithMultipleMatches(t *testing.T) { testutil.CleanupEnvironment(t) cmd := emptyCommand(t) - b := setup(t, cmd, "https://a.com") - assert.Panics(t, func() { - b.WorkspaceClient() - }) + b := setupWithHost(t, cmd, "https://a.com") + + _, err := b.InitializeWorkspaceClient() + assert.ErrorContains(t, err, "multiple profiles matched: PROFILE-1, PROFILE-2") } func TestBundleConfigureWithNonExistentProfileFlag(t *testing.T) { @@ -85,11 +100,10 @@ func TestBundleConfigureWithNonExistentProfileFlag(t *testing.T) { cmd := emptyCommand(t) 
cmd.Flag("profile").Value.Set("NOEXIST") + b := setupWithHost(t, cmd, "https://x.com") - b := setup(t, cmd, "https://x.com") - assert.Panics(t, func() { - b.WorkspaceClient() - }) + _, err := b.InitializeWorkspaceClient() + assert.ErrorContains(t, err, "has no NOEXIST profile configured") } func TestBundleConfigureWithMismatchedProfile(t *testing.T) { @@ -97,11 +111,10 @@ func TestBundleConfigureWithMismatchedProfile(t *testing.T) { cmd := emptyCommand(t) cmd.Flag("profile").Value.Set("PROFILE-1") + b := setupWithHost(t, cmd, "https://x.com") - b := setup(t, cmd, "https://x.com") - assert.PanicsWithError(t, "cannot resolve bundle auth configuration: config host mismatch: profile uses host https://a.com, but CLI configured to use https://x.com", func() { - b.WorkspaceClient() - }) + _, err := b.InitializeWorkspaceClient() + assert.ErrorContains(t, err, "config host mismatch: profile uses host https://a.com, but CLI configured to use https://x.com") } func TestBundleConfigureWithCorrectProfile(t *testing.T) { @@ -109,35 +122,97 @@ func TestBundleConfigureWithCorrectProfile(t *testing.T) { cmd := emptyCommand(t) cmd.Flag("profile").Value.Set("PROFILE-1") + b := setupWithHost(t, cmd, "https://a.com") - b := setup(t, cmd, "https://a.com") - assert.NotPanics(t, func() { - b.WorkspaceClient() - }) + client, err := b.InitializeWorkspaceClient() + require.NoError(t, err) + assert.Equal(t, "https://a.com", client.Config.Host) + assert.Equal(t, "PROFILE-1", client.Config.Profile) } func TestBundleConfigureWithMismatchedProfileEnvVariable(t *testing.T) { testutil.CleanupEnvironment(t) - t.Setenv("DATABRICKS_CONFIG_PROFILE", "PROFILE-1") + t.Setenv("DATABRICKS_CONFIG_PROFILE", "PROFILE-1") cmd := emptyCommand(t) - b := setup(t, cmd, "https://x.com") - assert.PanicsWithError(t, "cannot resolve bundle auth configuration: config host mismatch: profile uses host https://a.com, but CLI configured to use https://x.com", func() { - b.WorkspaceClient() - }) + b := setupWithHost(t, cmd, "https://x.com") + + _, err := b.InitializeWorkspaceClient() + assert.ErrorContains(t, err, "config host mismatch: profile uses host https://a.com, but CLI configured to use https://x.com") } func TestBundleConfigureWithProfileFlagAndEnvVariable(t *testing.T) { testutil.CleanupEnvironment(t) - t.Setenv("DATABRICKS_CONFIG_PROFILE", "NOEXIST") + t.Setenv("DATABRICKS_CONFIG_PROFILE", "NOEXIST") cmd := emptyCommand(t) cmd.Flag("profile").Value.Set("PROFILE-1") + b := setupWithHost(t, cmd, "https://a.com") + + client, err := b.InitializeWorkspaceClient() + require.NoError(t, err) + assert.Equal(t, "https://a.com", client.Config.Host) + assert.Equal(t, "PROFILE-1", client.Config.Profile) +} + +func TestBundleConfigureProfileDefault(t *testing.T) { + testutil.CleanupEnvironment(t) + + // The profile in the databricks.yml file is used + cmd := emptyCommand(t) + b := setupWithProfile(t, cmd, "PROFILE-1") + + client, err := b.InitializeWorkspaceClient() + require.NoError(t, err) + assert.Equal(t, "https://a.com", client.Config.Host) + assert.Equal(t, "a", client.Config.Token) + assert.Equal(t, "PROFILE-1", client.Config.Profile) +} + +func TestBundleConfigureProfileFlag(t *testing.T) { + testutil.CleanupEnvironment(t) + + // The --profile flag takes precedence over the profile in the databricks.yml file + cmd := emptyCommand(t) + cmd.Flag("profile").Value.Set("PROFILE-2") + b := setupWithProfile(t, cmd, "PROFILE-1") + + client, err := b.InitializeWorkspaceClient() + require.NoError(t, err) + assert.Equal(t, "https://a.com", 
client.Config.Host) + assert.Equal(t, "b", client.Config.Token) + assert.Equal(t, "PROFILE-2", client.Config.Profile) +} + +func TestBundleConfigureProfileEnvVariable(t *testing.T) { + testutil.CleanupEnvironment(t) + + // The DATABRICKS_CONFIG_PROFILE environment variable takes precedence over the profile in the databricks.yml file + t.Setenv("DATABRICKS_CONFIG_PROFILE", "PROFILE-2") + cmd := emptyCommand(t) + b := setupWithProfile(t, cmd, "PROFILE-1") + + client, err := b.InitializeWorkspaceClient() + require.NoError(t, err) + assert.Equal(t, "https://a.com", client.Config.Host) + assert.Equal(t, "b", client.Config.Token) + assert.Equal(t, "PROFILE-2", client.Config.Profile) +} - b := setup(t, cmd, "https://a.com") - assert.NotPanics(t, func() { - b.WorkspaceClient() - }) +func TestBundleConfigureProfileFlagAndEnvVariable(t *testing.T) { + testutil.CleanupEnvironment(t) + + // The --profile flag takes precedence over the DATABRICKS_CONFIG_PROFILE environment variable + t.Setenv("DATABRICKS_CONFIG_PROFILE", "NOEXIST") + cmd := emptyCommand(t) + cmd.Flag("profile").Value.Set("PROFILE-2") + b := setupWithProfile(t, cmd, "PROFILE-1") + + client, err := b.InitializeWorkspaceClient() + require.NoError(t, err) + assert.Equal(t, "https://a.com", client.Config.Host) + assert.Equal(t, "b", client.Config.Token) + assert.Equal(t, "PROFILE-2", client.Config.Profile) } func TestTargetFlagFull(t *testing.T) { @@ -149,7 +224,7 @@ func TestTargetFlagFull(t *testing.T) { err := cmd.ExecuteContext(ctx) assert.NoError(t, err) - assert.Equal(t, getTarget(cmd), "development") + assert.Equal(t, "development", getTarget(cmd)) } func TestTargetFlagShort(t *testing.T) { @@ -161,7 +236,7 @@ func TestTargetFlagShort(t *testing.T) { err := cmd.ExecuteContext(ctx) assert.NoError(t, err) - assert.Equal(t, getTarget(cmd), "production") + assert.Equal(t, "production", getTarget(cmd)) } // TODO: remove when environment flag is fully deprecated @@ -175,5 +250,5 @@ func TestTargetEnvironmentFlag(t *testing.T) { err := cmd.ExecuteContext(ctx) assert.NoError(t, err) - assert.Equal(t, getTarget(cmd), "development") + assert.Equal(t, "development", getTarget(cmd)) } From eea34b25040751862d6b720fa43f9e962970978d Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Thu, 28 Mar 2024 11:59:03 +0100 Subject: [PATCH 16/17] Return diagnostics from `config.Load` (#1324) ## Changes We no longer need to store load diagnostics on the `config.Root` type itself and instead can return them from the `config.Load` call directly. It is up to the caller of this function to append them to previous diagnostics, if any. Background: previous commits moved configuration loading of the entry point into a mutator, so now all diagnostics naturally flow from applying mutators. This PR depends on #1319. ## Tests Unit and manual validation of the debug statements in the validate command. 
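For illustration only (not part of the patch itself), a minimal sketch of the calling convention the diffs below adopt: `config.Load` now returns diagnostics alongside the parsed root, and the caller extends its own diagnostics instead of reading them back from `config.Root` afterwards. The helper name `mergeConfigFile` is hypothetical; the types and calls are the ones exercised by the hunks below.

```go
package loader

import (
	"github.com/databricks/cli/bundle"
	"github.com/databricks/cli/bundle/config"
	"github.com/databricks/cli/libs/diag"
)

// mergeConfigFile is an illustrative helper (not part of this change) that
// loads a configuration file and merges it into the bundle root, carrying
// warnings and errors forward as diagnostics.
func mergeConfigFile(b *bundle.Bundle, path string) diag.Diagnostics {
	// Load the file; warnings and errors now travel with the return value.
	other, diags := config.Load(path)
	if diags.HasError() {
		return diags
	}

	// Merge into the bundle root; a merge error is appended to the
	// diagnostics collected so far rather than returned on its own.
	if err := b.Config.Merge(other); err != nil {
		diags = diags.Extend(diag.FromErr(err))
	}
	return diags
}
```

This mirrors the pattern applied to `entry_point.go` and `process_include.go` in the diffs that follow.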
--- bundle/config/loader/entry_point.go | 12 +++++----- bundle/config/loader/process_include.go | 12 +++++----- bundle/config/root.go | 30 ++++++++----------------- bundle/config/root_test.go | 18 +++++++-------- cmd/bundle/validate.go | 2 +- 5 files changed, 33 insertions(+), 41 deletions(-) diff --git a/bundle/config/loader/entry_point.go b/bundle/config/loader/entry_point.go index 24ba2f068e..2c73a58255 100644 --- a/bundle/config/loader/entry_point.go +++ b/bundle/config/loader/entry_point.go @@ -24,11 +24,13 @@ func (m *entryPoint) Apply(_ context.Context, b *bundle.Bundle) diag.Diagnostics if err != nil { return diag.FromErr(err) } - this, err := config.Load(path) - if err != nil { - return diag.FromErr(err) + this, diags := config.Load(path) + if diags.HasError() { + return diags } - // TODO: Return actual warnings. err = b.Config.Merge(this) - return diag.FromErr(err) + if err != nil { + diags = diags.Extend(diag.FromErr(err)) + } + return diags } diff --git a/bundle/config/loader/process_include.go b/bundle/config/loader/process_include.go index 328f4eacf3..7cf9a17d77 100644 --- a/bundle/config/loader/process_include.go +++ b/bundle/config/loader/process_include.go @@ -27,11 +27,13 @@ func (m *processInclude) Name() string { } func (m *processInclude) Apply(_ context.Context, b *bundle.Bundle) diag.Diagnostics { - this, err := config.Load(m.fullPath) + this, diags := config.Load(m.fullPath) + if diags.HasError() { + return diags + } + err := b.Config.Merge(this) if err != nil { - return diag.FromErr(err) + diags = diags.Extend(diag.FromErr(err)) } - // TODO: Return actual warnings. - err = b.Config.Merge(this) - return diag.FromErr(err) + return diags } diff --git a/bundle/config/root.go b/bundle/config/root.go index 0e54c04ce4..18b548d643 100644 --- a/bundle/config/root.go +++ b/bundle/config/root.go @@ -20,7 +20,6 @@ import ( type Root struct { value dyn.Value - diags diag.Diagnostics depth int // Contains user defined variables @@ -69,10 +68,10 @@ type Root struct { } // Load loads the bundle configuration file at the specified path. -func Load(path string) (*Root, error) { +func Load(path string) (*Root, diag.Diagnostics) { raw, err := os.ReadFile(path) if err != nil { - return nil, err + return nil, diag.FromErr(err) } r := Root{} @@ -80,31 +79,29 @@ func Load(path string) (*Root, error) { // Load configuration tree from YAML. v, err := yamlloader.LoadYAML(path, bytes.NewBuffer(raw)) if err != nil { - return nil, fmt.Errorf("failed to load %s: %w", path, err) + return nil, diag.Errorf("failed to load %s: %v", path, err) } // Rewrite configuration tree where necessary. v, err = rewriteShorthands(v) if err != nil { - return nil, fmt.Errorf("failed to rewrite %s: %w", path, err) + return nil, diag.Errorf("failed to rewrite %s: %v", path, err) } // Normalize dynamic configuration tree according to configuration type. v, diags := convert.Normalize(r, v) - // Keep track of diagnostics (warnings and errors in the schema). - // We delay acting on diagnostics until we have loaded all - // configuration files and merged them together. - r.diags = diags - // Convert normalized configuration tree to typed configuration. 
err = r.updateWithDynamicValue(v) if err != nil { - return nil, fmt.Errorf("failed to load %s: %w", path, err) + return nil, diag.Errorf("failed to load %s: %v", path, err) } _, err = r.Resources.VerifyUniqueResourceIdentifiers() - return &r, err + if err != nil { + diags = diags.Extend(diag.FromErr(err)) + } + return &r, diags } func (r *Root) initializeDynamicValue() error { @@ -126,11 +123,9 @@ func (r *Root) initializeDynamicValue() error { func (r *Root) updateWithDynamicValue(nv dyn.Value) error { // Hack: restore state; it may be cleared by [ToTyped] if // the configuration equals nil (happens in tests). - diags := r.diags depth := r.depth defer func() { - r.diags = diags r.depth = depth }() @@ -224,10 +219,6 @@ func (r *Root) MarkMutatorExit(ctx context.Context) error { return nil } -func (r *Root) Diagnostics() diag.Diagnostics { - return r.diags -} - // SetConfigFilePath configures the path that its configuration // was loaded from in configuration leafs that require it. func (r *Root) ConfigureConfigFilePath() { @@ -261,9 +252,6 @@ func (r *Root) InitializeVariables(vars []string) error { } func (r *Root) Merge(other *Root) error { - // Merge diagnostics. - r.diags = append(r.diags, other.diags...) - // Check for safe merge, protecting against duplicate resource identifiers err := r.Resources.VerifySafeMerge(&other.Resources) if err != nil { diff --git a/bundle/config/root_test.go b/bundle/config/root_test.go index 3b25fb1f8e..b567688480 100644 --- a/bundle/config/root_test.go +++ b/bundle/config/root_test.go @@ -25,24 +25,24 @@ func TestRootMarshalUnmarshal(t *testing.T) { } func TestRootLoad(t *testing.T) { - root, err := Load("../tests/basic/databricks.yml") - require.NoError(t, err) + root, diags := Load("../tests/basic/databricks.yml") + require.NoError(t, diags.Error()) assert.Equal(t, "basic", root.Bundle.Name) } func TestDuplicateIdOnLoadReturnsError(t *testing.T) { - _, err := Load("./testdata/duplicate_resource_names_in_root/databricks.yml") - assert.ErrorContains(t, err, "multiple resources named foo (job at ./testdata/duplicate_resource_names_in_root/databricks.yml, pipeline at ./testdata/duplicate_resource_names_in_root/databricks.yml)") + _, diags := Load("./testdata/duplicate_resource_names_in_root/databricks.yml") + assert.ErrorContains(t, diags.Error(), "multiple resources named foo (job at ./testdata/duplicate_resource_names_in_root/databricks.yml, pipeline at ./testdata/duplicate_resource_names_in_root/databricks.yml)") } func TestDuplicateIdOnMergeReturnsError(t *testing.T) { - root, err := Load("./testdata/duplicate_resource_name_in_subconfiguration/databricks.yml") - require.NoError(t, err) + root, diags := Load("./testdata/duplicate_resource_name_in_subconfiguration/databricks.yml") + require.NoError(t, diags.Error()) - other, err := Load("./testdata/duplicate_resource_name_in_subconfiguration/resources.yml") - require.NoError(t, err) + other, diags := Load("./testdata/duplicate_resource_name_in_subconfiguration/resources.yml") + require.NoError(t, diags.Error()) - err = root.Merge(other) + err := root.Merge(other) assert.ErrorContains(t, err, "multiple resources named foo (job at ./testdata/duplicate_resource_name_in_subconfiguration/databricks.yml, pipeline at ./testdata/duplicate_resource_name_in_subconfiguration/resources.yml)") } diff --git a/cmd/bundle/validate.go b/cmd/bundle/validate.go index 57bf6f7b9b..e625539b41 100644 --- a/cmd/bundle/validate.go +++ b/cmd/bundle/validate.go @@ -32,7 +32,7 @@ func newValidateCommand() *cobra.Command { // Until 
we change up the output of this command to be a text representation, // we'll just output all diagnostics as debug logs. - for _, diag := range b.Config.Diagnostics() { + for _, diag := range diags { log.Debugf(cmd.Context(), "[%s]: %s", diag.Location, diag.Summary) } From cddc5f97f862805d885ea3b0d1d497d51e0a59fa Mon Sep 17 00:00:00 2001 From: shreyas-goenka <88374338+shreyas-goenka@users.noreply.github.com> Date: Thu, 28 Mar 2024 16:55:36 +0530 Subject: [PATCH 17/17] Fix the generated DABs JSON schema (#1322) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## Changes This PR fixes bundle schema being broken because `for_each_task: null` was set in the generated schema. This is not valid according to the JSON schema specification and thus the Red Hat YAML VSCode extension was failing to parse the YAML configuration. This PR fixes: https://github.com/databricks/cli/issues/1312 ## Tests The fix itself was tested manually. I asserted that the autocompletion works now. This was mistakenly overlooked the first time around when the regression was introduced in https://github.com/databricks/cli/pull/1204 because the YAML extension provides best-effort autocomplete suggestions even if the JSON schema fails to load. To prevent future regressions we also add a test to assert that the JSON schema generated itself is a valid JSON schema object. This is done via using the `ajv-cli` to validate the schema. This package is also used by the Red Hat YAML extension and thus provides a high fidelity check for ensuring the JSON schema is valid. Before, with the old schema: ``` shreyas.goenka@THW32HFW6T cli-versions % ajv validate -s proj/schema-216.json -d ../bundle-playground-3/databricks.yml schema proj/schema-216.json is invalid error: schema is invalid: data/properties/resources/properties/jobs/additionalProperties/properties/tasks/items/properties/for_each_task must be object,boolean, data/properties/resources/properties/jobs/additionalProperties/properties/tasks/items must be array, data/properties/resources/properties/jobs/additionalProperties/properties/tasks/items must match a schema in anyOf ``` After, with the new schema: ``` shreyas.goenka@THW32HFW6T cli-versions % ajv validate -s proj/schema-dev.json -d ../bundle-playground-3/databricks.yml ../bundle-playground-3/databricks.yml valid ``` After, autocomplete suggestions: Screenshot 2024-03-27 at 6 35 57 PM --- .github/workflows/push.yml | 26 ++++++++++++++++++++++++++ bundle/schema/schema.go | 4 +++- 2 files changed, 29 insertions(+), 1 deletion(-) diff --git a/.github/workflows/push.yml b/.github/workflows/push.yml index 18ba54a379..244bdeee5b 100644 --- a/.github/workflows/push.yml +++ b/.github/workflows/push.yml @@ -89,3 +89,29 @@ jobs: run: | # Exit with status code 1 if there are differences (i.e. unformatted files) git diff --exit-code + + validate-bundle-schema: + runs-on: ubuntu-latest + + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Setup Go + uses: actions/setup-go@v5 + with: + go-version: 1.21.x + + # Github repo: https://github.com/ajv-validator/ajv-cli + - name: Install ajv-cli + run: npm install -g ajv-cli@5.0.0 + + # Assert that the generated bundle schema is a valid JSON schema by using + # ajv-cli to validate it against a sample configuration file. + # By default the ajv-cli runs in strict mode which will fail if the schema + # itself is not valid. Strict mode is more strict than the JSON schema + # specification. 
See for details: https://ajv.js.org/options.html#strict-mode-options + - name: Validate bundle schema + run: | + go run main.go bundle schema > schema.json + ajv -s schema.json -d ./bundle/tests/basic/databricks.yml diff --git a/bundle/schema/schema.go b/bundle/schema/schema.go index 7153f38f60..b37f72d9b6 100644 --- a/bundle/schema/schema.go +++ b/bundle/schema/schema.go @@ -95,7 +95,9 @@ func safeToSchema(golangType reflect.Type, docs *Docs, traceId string, tracker * // HACK to unblock CLI release (13th Feb 2024). This is temporary until proper // support for recursive types is added to the schema generator. PR: https://github.com/databricks/cli/pull/1204 if traceId == "for_each_task" { - return nil, nil + return &jsonschema.Schema{ + Type: jsonschema.ObjectType, + }, nil } // WE ERROR OUT IF THERE ARE CYCLES IN THE JSON SCHEMA