From 97a9ed733f0b6dded8f553cb60a6cecee0dc64ed Mon Sep 17 00:00:00 2001 From: Anton Nekipelov <226657+anton-107@users.noreply.github.com> Date: Tue, 22 Apr 2025 11:56:42 +0200 Subject: [PATCH 01/40] Add secret scopes support in assets bundling --- .../deploy/secret-scope/databricks.yml.tmpl | 9 ++ .../bundle/deploy/secret-scope/output.txt | 91 +++++++++++++++++++ acceptance/bundle/deploy/secret-scope/script | 9 ++ .../bundle/deploy/secret-scope/test.toml | 1 + bundle/config/resources.go | 9 ++ bundle/config/resources/secret_scope.go | 63 +++++++++++++ bundle/deploy/terraform/convert.go | 11 +++ .../terraform/tfdyn/convert_secret_scope.go | 29 ++++++ 8 files changed, 222 insertions(+) create mode 100644 acceptance/bundle/deploy/secret-scope/databricks.yml.tmpl create mode 100644 acceptance/bundle/deploy/secret-scope/output.txt create mode 100644 acceptance/bundle/deploy/secret-scope/script create mode 100644 acceptance/bundle/deploy/secret-scope/test.toml create mode 100644 bundle/config/resources/secret_scope.go create mode 100644 bundle/deploy/terraform/tfdyn/convert_secret_scope.go diff --git a/acceptance/bundle/deploy/secret-scope/databricks.yml.tmpl b/acceptance/bundle/deploy/secret-scope/databricks.yml.tmpl new file mode 100644 index 0000000000..de8534a4e5 --- /dev/null +++ b/acceptance/bundle/deploy/secret-scope/databricks.yml.tmpl @@ -0,0 +1,9 @@ + +bundle: + name: deploy-secret-scope-test-$UNIQUE_NAME + +resources: + secret_scopes: + secret_scope1: + name: my-secrets + initial_manage_principal: users diff --git a/acceptance/bundle/deploy/secret-scope/output.txt b/acceptance/bundle/deploy/secret-scope/output.txt new file mode 100644 index 0000000000..30218dd7dd --- /dev/null +++ b/acceptance/bundle/deploy/secret-scope/output.txt @@ -0,0 +1,91 @@ + +>>> [CLI] bundle deploy +Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/deploy-secret-scope-test-[UNIQUE_NAME]/default/files... +Deploying resources... +Updating deployment state... 
+Deployment complete! + +>>> [CLI] bundle summary --output json +{ + "bundle": { + "name": "deploy-secret-scope-test-[UNIQUE_NAME]", + "target": "default", + "environment": "default", + "terraform": { + "exec_path": "[TERRAFORM]" + }, + "git": { + "bundle_root_path": "." + }, + "deployment": { + "lock": {} + } + }, + "workspace": { + "current_user": { + "active": true, + "displayName": "[USERNAME]", + "emails": [ + { + "primary": true, + "type": "work", + "value": "[USERNAME]" + } + ], + "entitlements": [ + { + "value": "allow-cluster-create" + }, + { + "value": "allow-instance-pool-create" + } + ], + "groups": [ + { + "$ref": "Groups/[USERGROUP]", + "display": "admins", + "type": "direct", + "value": "[USERGROUP]" + } + ], + "id": "[USERID]", + "name": { + "givenName": "[USERNAME]" + }, + "schemas": [ + "urn:ietf:params:scim:schemas:core:2.0:User", + "urn:ietf:params:scim:schemas:extension:workspace:2.0:User" + ], + "short_name": "[USERNAME]", + "userName": "[USERNAME]" + }, + "root_path": "/Workspace/Users/[USERNAME]/.bundle/deploy-secret-scope-test-[UNIQUE_NAME]/default", + "file_path": "/Workspace/Users/[USERNAME]/.bundle/deploy-secret-scope-test-[UNIQUE_NAME]/default/files", + "resource_path": "/Workspace/Users/[USERNAME]/.bundle/deploy-secret-scope-test-[UNIQUE_NAME]/default/resources", + "artifact_path": "/Workspace/Users/[USERNAME]/.bundle/deploy-secret-scope-test-[UNIQUE_NAME]/default/artifacts", + "state_path": "/Workspace/Users/[USERNAME]/.bundle/deploy-secret-scope-test-[UNIQUE_NAME]/default/state" + }, + "resources": { + "secret_scopes": { + "secret_scope1": { + "initial_manage_principal": "users", + "name": "my-secrets" + } + } + }, + "sync": { + "paths": [ + "." 
+ ] + }, + "presets": {} +} + +>>> [CLI] bundle destroy --auto-approve +The following resources will be deleted: + delete secret_scope secret_scope1 + +All files and directories at the following location will be deleted: /Workspace/Users/[USERNAME]/.bundle/deploy-secret-scope-test-[UNIQUE_NAME]/default + +Deleting files... +Destroy complete! diff --git a/acceptance/bundle/deploy/secret-scope/script b/acceptance/bundle/deploy/secret-scope/script new file mode 100644 index 0000000000..763e3fa207 --- /dev/null +++ b/acceptance/bundle/deploy/secret-scope/script @@ -0,0 +1,9 @@ +envsubst < databricks.yml.tmpl > databricks.yml + +cleanup() { + trace $CLI bundle destroy --auto-approve +} +trap cleanup EXIT + +trace $CLI bundle deploy +trace $CLI bundle summary --output json diff --git a/acceptance/bundle/deploy/secret-scope/test.toml b/acceptance/bundle/deploy/secret-scope/test.toml new file mode 100644 index 0000000000..c7c6f58ed6 --- /dev/null +++ b/acceptance/bundle/deploy/secret-scope/test.toml @@ -0,0 +1 @@ +Cloud = true diff --git a/bundle/config/resources.go b/bundle/config/resources.go index 204ca93127..a97fe6584d 100644 --- a/bundle/config/resources.go +++ b/bundle/config/resources.go @@ -24,6 +24,7 @@ type Resources struct { Clusters map[string]*resources.Cluster `json:"clusters,omitempty"` Dashboards map[string]*resources.Dashboard `json:"dashboards,omitempty"` Apps map[string]*resources.App `json:"apps,omitempty"` + SecretScopes map[string]*resources.SecretScope `json:"secret_scopes,omitempty"` } type ConfigResource interface { @@ -86,6 +87,7 @@ func (r *Resources) AllResources() []ResourceGroup { collectResourceMap(descriptions["dashboards"], r.Dashboards), collectResourceMap(descriptions["volumes"], r.Volumes), collectResourceMap(descriptions["apps"], r.Apps), + collectResourceMap(descriptions["secret_scopes"], r.SecretScopes), } } @@ -157,6 +159,12 @@ func (r *Resources) FindResourceByConfigKey(key string) (ConfigResource, error) } } + for k := range 
r.SecretScopes { + if k == key { + found = append(found, r.SecretScopes[k]) + } + } + if len(found) == 0 { return nil, fmt.Errorf("no such resource: %s", key) } @@ -187,5 +195,6 @@ func SupportedResources() map[string]resources.ResourceDescription { "dashboards": (&resources.Dashboard{}).ResourceDescription(), "volumes": (&resources.Volume{}).ResourceDescription(), "apps": (&resources.App{}).ResourceDescription(), + "secret_scopes": (&resources.SecretScope{}).ResourceDescription(), } } diff --git a/bundle/config/resources/secret_scope.go b/bundle/config/resources/secret_scope.go new file mode 100644 index 0000000000..91a5a2664a --- /dev/null +++ b/bundle/config/resources/secret_scope.go @@ -0,0 +1,63 @@ +package resources + +import ( + "context" + "github.com/databricks/databricks-sdk-go" + "github.com/databricks/databricks-sdk-go/marshal" + "github.com/databricks/databricks-sdk-go/service/workspace" + "net/url" +) + +type SecretScope struct { + Name string `json:"name"` + InitialManagePrincipal string `json:"initial_manage_principal"` + + ModifiedStatus ModifiedStatus `json:"modified_status,omitempty" bundle:"internal"` + URL string `json:"url,omitempty" bundle:"internal"` + + *workspace.SecretScope +} + +func (s *SecretScope) UnmarshalJSON(b []byte) error { + return marshal.Unmarshal(b, s) +} + +func (s SecretScope) MarshalJSON() ([]byte, error) { + return marshal.Marshal(s) +} + +func (s SecretScope) Exists(ctx context.Context, w *databricks.WorkspaceClient, id string) (bool, error) { + //TODO implement me + panic("implement me") +} + +func (s SecretScope) ResourceDescription() ResourceDescription { + return ResourceDescription{ + SingularName: "secret_scope", + PluralName: "secret_scopes", + SingularTitle: "Secret Scope", + PluralTitle: "Secret Scope", + TerraformResourceName: "databricks_secret_scope", + } +} + +func (s SecretScope) TerraformResourceName() string { + return "databricks_secret_scope" +} + +func (s SecretScope) GetName() string { + return s.Name 
+} + +func (s SecretScope) GetURL() string { + return s.URL +} + +func (s SecretScope) InitializeURL(baseURL url.URL) { + //TODO implement me + panic("implement me") +} + +func (s SecretScope) IsNil() bool { + return s.SecretScope == nil +} diff --git a/bundle/deploy/terraform/convert.go b/bundle/deploy/terraform/convert.go index a5821a240d..88731d081c 100644 --- a/bundle/deploy/terraform/convert.go +++ b/bundle/deploy/terraform/convert.go @@ -3,6 +3,7 @@ package terraform import ( "context" "fmt" + "github.com/databricks/databricks-sdk-go/service/workspace" "github.com/databricks/cli/bundle/config" "github.com/databricks/cli/bundle/config/resources" @@ -210,6 +211,16 @@ func TerraformToBundle(state *resourcesState, config *config.Root) error { } cur.Name = instance.Attributes.Name config.Resources.Apps[resource.Name] = cur + case "databricks_secret_scope": + if config.Resources.SecretScopes == nil { + config.Resources.SecretScopes = make(map[string]*resources.SecretScope) + } + cur := config.Resources.SecretScopes[resource.Name] + if cur == nil { + cur = &resources.SecretScope{ModifiedStatus: resources.ModifiedStatusDeleted, SecretScope: &workspace.SecretScope{}} + } + cur.Name = instance.Attributes.Name + config.Resources.SecretScopes[resource.Name] = cur case "databricks_permissions": case "databricks_grants": // Ignore; no need to pull these back into the configuration. 
diff --git a/bundle/deploy/terraform/tfdyn/convert_secret_scope.go b/bundle/deploy/terraform/tfdyn/convert_secret_scope.go new file mode 100644 index 0000000000..ebe0484e58 --- /dev/null +++ b/bundle/deploy/terraform/tfdyn/convert_secret_scope.go @@ -0,0 +1,29 @@ +package tfdyn + +import ( + "context" + "github.com/databricks/cli/bundle/internal/tf/schema" + "github.com/databricks/cli/libs/dyn" + "github.com/databricks/cli/libs/dyn/convert" + "github.com/databricks/cli/libs/log" + "github.com/databricks/databricks-sdk-go/service/workspace" +) + +type secretScopeConverter struct{} + +func (s secretScopeConverter) Convert(ctx context.Context, key string, vin dyn.Value, out *schema.Resources) error { + // Normalize the output value to the target schema. + vout, diags := convert.Normalize(workspace.SecretScope{}, vin) + for _, diag := range diags { + log.Debugf(ctx, "secret scope normalization diagnostic: %s", diag.Summary) + } + + // Add the converted resource to the output. + out.SecretScope[key] = vout.AsAny() + + return nil +} + +func init() { + registerConverter("secret_scopes", secretScopeConverter{}) +} From dbe8e111b37bf3936908987ccae808e9d88715f8 Mon Sep 17 00:00:00 2001 From: Anton Nekipelov <226657+anton-107@users.noreply.github.com> Date: Wed, 23 Apr 2025 14:42:10 +0200 Subject: [PATCH 02/40] test creating secrets in a newly created secret scope --- .../bundle/deploy/secret-scope/output.txt | 79 ++----------------- acceptance/bundle/deploy/secret-scope/script | 5 +- .../bundle/deploy/secret-scope/test.toml | 36 +++++++++ bundle/config/resources/secret_scope.go | 7 +- bundle/deploy/terraform/convert.go | 1 + .../terraform/tfdyn/convert_secret_scope.go | 1 + 6 files changed, 53 insertions(+), 76 deletions(-) diff --git a/acceptance/bundle/deploy/secret-scope/output.txt b/acceptance/bundle/deploy/secret-scope/output.txt index 30218dd7dd..126bbc9d79 100644 --- a/acceptance/bundle/deploy/secret-scope/output.txt +++ 
b/acceptance/bundle/deploy/secret-scope/output.txt @@ -6,79 +6,14 @@ Updating deployment state... Deployment complete! >>> [CLI] bundle summary --output json +"my-secrets" + +>>> [CLI] secrets put-secret my-secrets my-key --string-value my-secret-value + +>>> [CLI] secrets get-secret my-secrets my-key { - "bundle": { - "name": "deploy-secret-scope-test-[UNIQUE_NAME]", - "target": "default", - "environment": "default", - "terraform": { - "exec_path": "[TERRAFORM]" - }, - "git": { - "bundle_root_path": "." - }, - "deployment": { - "lock": {} - } - }, - "workspace": { - "current_user": { - "active": true, - "displayName": "[USERNAME]", - "emails": [ - { - "primary": true, - "type": "work", - "value": "[USERNAME]" - } - ], - "entitlements": [ - { - "value": "allow-cluster-create" - }, - { - "value": "allow-instance-pool-create" - } - ], - "groups": [ - { - "$ref": "Groups/[USERGROUP]", - "display": "admins", - "type": "direct", - "value": "[USERGROUP]" - } - ], - "id": "[USERID]", - "name": { - "givenName": "[USERNAME]" - }, - "schemas": [ - "urn:ietf:params:scim:schemas:core:2.0:User", - "urn:ietf:params:scim:schemas:extension:workspace:2.0:User" - ], - "short_name": "[USERNAME]", - "userName": "[USERNAME]" - }, - "root_path": "/Workspace/Users/[USERNAME]/.bundle/deploy-secret-scope-test-[UNIQUE_NAME]/default", - "file_path": "/Workspace/Users/[USERNAME]/.bundle/deploy-secret-scope-test-[UNIQUE_NAME]/default/files", - "resource_path": "/Workspace/Users/[USERNAME]/.bundle/deploy-secret-scope-test-[UNIQUE_NAME]/default/resources", - "artifact_path": "/Workspace/Users/[USERNAME]/.bundle/deploy-secret-scope-test-[UNIQUE_NAME]/default/artifacts", - "state_path": "/Workspace/Users/[USERNAME]/.bundle/deploy-secret-scope-test-[UNIQUE_NAME]/default/state" - }, - "resources": { - "secret_scopes": { - "secret_scope1": { - "initial_manage_principal": "users", - "name": "my-secrets" - } - } - }, - "sync": { - "paths": [ - "." 
- ] - }, - "presets": {} + "key":"my-key", + "value":"bXktc2VjcmV0LXZhbHVl" } >>> [CLI] bundle destroy --auto-approve diff --git a/acceptance/bundle/deploy/secret-scope/script b/acceptance/bundle/deploy/secret-scope/script index 763e3fa207..287c2eb01e 100644 --- a/acceptance/bundle/deploy/secret-scope/script +++ b/acceptance/bundle/deploy/secret-scope/script @@ -6,4 +6,7 @@ cleanup() { trap cleanup EXIT trace $CLI bundle deploy -trace $CLI bundle summary --output json +trace $CLI bundle summary --output json | jq '.resources.secret_scopes.secret_scope1.name' + +trace $CLI secrets put-secret my-secrets my-key --string-value "my-secret-value" +trace $CLI secrets get-secret my-secrets my-key diff --git a/acceptance/bundle/deploy/secret-scope/test.toml b/acceptance/bundle/deploy/secret-scope/test.toml index c7c6f58ed6..4504b8a1eb 100644 --- a/acceptance/bundle/deploy/secret-scope/test.toml +++ b/acceptance/bundle/deploy/secret-scope/test.toml @@ -1 +1,37 @@ Cloud = true +Local = true + +Ignore = [ + "databricks.yml", +] + +[[Server]] +Pattern = "POST /api/2.0/secrets/scopes/create" + +[[Server]] +Pattern = "GET /api/2.0/secrets/scopes/list" +Response.Body = ''' +{ + "scopes": [ + { + "backend_type": "DATABRICKS", + "name": "my-secrets" + } + ] +} +''' + +[[Server]] +Pattern = "POST /api/2.0/secrets/scopes/delete" + +[[Server]] +Pattern = "POST /api/2.0/secrets/put" + +[[Server]] +Pattern = "GET /api/2.0/secrets/get" +Response.Body = ''' +{ + "key":"my-key", + "value":"bXktc2VjcmV0LXZhbHVl" +} +''' diff --git a/bundle/config/resources/secret_scope.go b/bundle/config/resources/secret_scope.go index 91a5a2664a..a5a65d0361 100644 --- a/bundle/config/resources/secret_scope.go +++ b/bundle/config/resources/secret_scope.go @@ -2,10 +2,11 @@ package resources import ( "context" + "net/url" + "github.com/databricks/databricks-sdk-go" "github.com/databricks/databricks-sdk-go/marshal" "github.com/databricks/databricks-sdk-go/service/workspace" - "net/url" ) type SecretScope 
struct { @@ -27,7 +28,7 @@ func (s SecretScope) MarshalJSON() ([]byte, error) { } func (s SecretScope) Exists(ctx context.Context, w *databricks.WorkspaceClient, id string) (bool, error) { - //TODO implement me + // TODO implement me panic("implement me") } @@ -54,7 +55,7 @@ func (s SecretScope) GetURL() string { } func (s SecretScope) InitializeURL(baseURL url.URL) { - //TODO implement me + // TODO implement me panic("implement me") } diff --git a/bundle/deploy/terraform/convert.go b/bundle/deploy/terraform/convert.go index 88731d081c..fff32bc9b5 100644 --- a/bundle/deploy/terraform/convert.go +++ b/bundle/deploy/terraform/convert.go @@ -3,6 +3,7 @@ package terraform import ( "context" "fmt" + "github.com/databricks/databricks-sdk-go/service/workspace" "github.com/databricks/cli/bundle/config" diff --git a/bundle/deploy/terraform/tfdyn/convert_secret_scope.go b/bundle/deploy/terraform/tfdyn/convert_secret_scope.go index ebe0484e58..9dcd970309 100644 --- a/bundle/deploy/terraform/tfdyn/convert_secret_scope.go +++ b/bundle/deploy/terraform/tfdyn/convert_secret_scope.go @@ -2,6 +2,7 @@ package tfdyn import ( "context" + "github.com/databricks/cli/bundle/internal/tf/schema" "github.com/databricks/cli/libs/dyn" "github.com/databricks/cli/libs/dyn/convert" From 5116564b6f0ff1059e2ad887192b23639be482be Mon Sep 17 00:00:00 2001 From: Anton Nekipelov <226657+anton-107@users.noreply.github.com> Date: Wed, 23 Apr 2025 15:24:50 +0200 Subject: [PATCH 03/40] run `make docs` --- bundle/docsgen/output/reference.md | 10 +- bundle/docsgen/output/resources.md | 391 +++++++++++++++++++++-------- 2 files changed, 296 insertions(+), 105 deletions(-) diff --git a/bundle/docsgen/output/reference.md b/bundle/docsgen/output/reference.md index c51c5c190a..9476587d1a 100644 --- a/bundle/docsgen/output/reference.md +++ b/bundle/docsgen/output/reference.md @@ -1,7 +1,7 @@ --- description: 'Configuration reference for databricks.yml' last_update: - date: 2025-04-01 + date: 2025-04-23 --- @@ 
-438,6 +438,10 @@ resources: - Map - The schema definitions for the bundle, where each key is the name of the schema. See [\_](/dev-tools/bundles/resources.md#schemas). +- - `secret_scopes` + - Map + - + - - `volumes` - Map - The volume definitions for the bundle, where each key is the name of the volume. See [\_](/dev-tools/bundles/resources.md#volumes). @@ -921,6 +925,10 @@ The resource definitions for the target. - Map - The schema definitions for the bundle, where each key is the name of the schema. See [\_](/dev-tools/bundles/resources.md#schemas). +- - `secret_scopes` + - Map + - + - - `volumes` - Map - The volume definitions for the bundle, where each key is the name of the volume. See [\_](/dev-tools/bundles/resources.md#volumes). diff --git a/bundle/docsgen/output/resources.md b/bundle/docsgen/output/resources.md index b8f7ec5dd8..3738ca85a3 100644 --- a/bundle/docsgen/output/resources.md +++ b/bundle/docsgen/output/resources.md @@ -1,7 +1,7 @@ --- description: 'Learn about resources supported by Databricks Asset Bundles and how to configure them.' last_update: - date: 2025-04-01 + date: 2025-04-23 --- @@ -508,19 +508,19 @@ for deployment to the app compute. - - `group_name` - String - - The name of the group that has the permission set in level. + - - - `level` - String - - The allowed permission for user, group, service principal defined for this permission. + - - - `service_principal_name` - String - - The name of the service principal that has the permission set in level. + - - - `user_name` - String - - The name of the user that has the permission set in level. + - ::: @@ -1446,19 +1446,19 @@ destination needs to be provided, e.g. - - `group_name` - String - - The name of the group that has the permission set in level. + - - - `level` - String - - The allowed permission for user, group, service principal defined for this permission. + - - - `service_principal_name` - String - - The name of the service principal that has the permission set in level. 
+ - - - `user_name` - String - - The name of the user that has the permission set in level. + - ::: @@ -1615,19 +1615,19 @@ In addition, if you attempt to deploy a bundle that contains a dashboard JSON fi - - `group_name` - String - - The name of the group that has the permission set in level. + - - - `level` - String - - The allowed permission for user, group, service principal defined for this permission. + - - - `service_principal_name` - String - - The name of the service principal that has the permission set in level. + - - - `user_name` - String - - The name of the user that has the permission set in level. + - ::: @@ -1717,19 +1717,19 @@ resources: - - `group_name` - String - - The name of the group that has the permission set in level. + - - - `level` - String - - The allowed permission for user, group, service principal defined for this permission. + - - - `service_principal_name` - String - - The name of the service principal that has the permission set in level. + - - - `user_name` - String - - The name of the user that has the permission set in level. + - ::: @@ -1820,7 +1820,7 @@ jobs: - - `job_clusters` - Sequence - - A list of job cluster specifications that can be shared and reused by tasks of this job. Libraries cannot be declared in a shared job cluster. You must declare dependent libraries in task settings. If more than 100 job clusters are available, you can paginate through them using :method:jobs/get. See [\_](#jobsnamejob_clusters). + - A list of job cluster specifications that can be shared and reused by tasks of this job. Libraries cannot be declared in a shared job cluster. You must declare dependent libraries in task settings. See [\_](#jobsnamejob_clusters). - - `max_concurrent_runs` - Integer @@ -1840,7 +1840,7 @@ jobs: - - `performance_target` - String - - PerformanceTarget defines how performant or cost efficient the execution of run on serverless should be. + - The performance mode on a serverless job. 
The performance target determines the level of compute performance or cost-efficiency for the run. * `STANDARD`: Enables cost-efficient execution of serverless workloads. * `PERFORMANCE_OPTIMIZED`: Prioritizes fast startup and execution times through rapid scaling and optimized cluster performance. - - `permissions` - Sequence @@ -1864,7 +1864,7 @@ jobs: - - `tasks` - Sequence - - A list of task specifications to be executed by this job. If more than 100 tasks are available, you can paginate through them using :method:jobs/get. Use the `next_page_token` field at the object root to determine if more results are available. See [\_](#jobsnametasks). + - A list of task specifications to be executed by this job. It supports up to 1000 elements in write endpoints (:method:jobs/create, :method:jobs/reset, :method:jobs/update, :method:jobs/submit). Read endpoints return only 100 tasks. If more than 100 tasks are available, you can paginate through them using :method:jobs/get. Use the `next_page_token` field at the object root to determine if more results are available. See [\_](#jobsnametasks). - - `timeout_seconds` - Integer @@ -2036,6 +2036,10 @@ In this minimal environment spec, only pip dependencies are supported. - Sequence - List of pip dependencies, as supported by the version of pip in this environment. +- - `jar_dependencies` + - Sequence + - List of jar dependencies, should be string representing volume paths. For example: `/Volumes/path/to/test.jar`. + ::: @@ -2193,7 +2197,6 @@ An optional set of health rules that can be defined for this job. **`Type: Sequence`** A list of job cluster specifications that can be shared and reused by tasks of this job. Libraries cannot be declared in a shared job cluster. You must declare dependent libraries in task settings. -If more than 100 job clusters are available, you can paginate through them using :method:jobs/get. 
@@ -3049,19 +3052,19 @@ Job-level parameter definitions - - `group_name` - String - - The name of the group that has the permission set in level. + - - - `level` - String - - The allowed permission for user, group, service principal defined for this permission. + - - - `service_principal_name` - String - - The name of the service principal that has the permission set in level. + - - - `user_name` - String - - The name of the user that has the permission set in level. + - ::: @@ -3148,7 +3151,8 @@ An optional periodic schedule for this job. The default behavior is that the job **`Type: Sequence`** A list of task specifications to be executed by this job. -If more than 100 tasks are available, you can paginate through them using :method:jobs/get. Use the `next_page_token` field at the object root to determine if more results are available. +It supports up to 1000 elements in write endpoints (:method:jobs/create, :method:jobs/reset, :method:jobs/update, :method:jobs/submit). +Read endpoints return only 100 tasks. If more than 100 tasks are available, you can paginate through them using :method:jobs/get. Use the `next_page_token` field at the object root to determine if more results are available. @@ -3166,6 +3170,10 @@ If more than 100 tasks are available, you can paginate through them using :metho - Map - The task evaluates a condition that can be used to control the execution of other tasks when the `condition_task` field is present. The condition task does not require a cluster to execute and does not support retries or notifications. See [\_](#jobsnametaskscondition_task). +- - `dashboard_task` + - Map + - The task runs a DashboardTask when the `dashboard_task` field is present. See [\_](#jobsnametasksdashboard_task). + - - `dbt_task` - Map - The task runs one or more dbt commands when the `dbt_task` field is present. The dbt task requires both Databricks SQL and the ability to use a serverless or a pro SQL warehouse. See [\_](#jobsnametasksdbt_task). 
@@ -3200,7 +3208,7 @@ If more than 100 tasks are available, you can paginate through them using :metho - - `gen_ai_compute_task` - Map - - Next field: 9. See [\_](#jobsnametasksgen_ai_compute_task). + - See [\_](#jobsnametasksgen_ai_compute_task). - - `health` - Map @@ -3238,6 +3246,10 @@ If more than 100 tasks are available, you can paginate through them using :metho - Map - The task triggers a pipeline update when the `pipeline_task` field is present. Only pipelines configured to use triggered more are supported. See [\_](#jobsnametaskspipeline_task). +- - `power_bi_task` + - Map + - The task triggers a Power BI semantic model update when the `power_bi_task` field is present. See [\_](#jobsnametaskspower_bi_task). + - - `python_wheel_task` - Map - The task runs a Python wheel when the `python_wheel_task` field is present. See [\_](#jobsnametaskspython_wheel_task). @@ -3349,6 +3361,89 @@ The condition task does not require a cluster to execute and does not support re ::: +### jobs._name_.tasks.dashboard_task + +**`Type: Map`** + +The task runs a DashboardTask when the `dashboard_task` field is present. + + + +:::list-table + +- - Key + - Type + - Description + +- - `dashboard_id` + - String + - + +- - `subscription` + - Map + - See [\_](#jobsnametasksdashboard_tasksubscription). + +- - `warehouse_id` + - String + - The warehouse id to execute the dashboard with for the schedule + +::: + + +### jobs._name_.tasks.dashboard_task.subscription + +**`Type: Map`** + + + + + +:::list-table + +- - Key + - Type + - Description + +- - `custom_subject` + - String + - Optional: Allows users to specify a custom subject line on the email sent to subscribers. + +- - `paused` + - Boolean + - When true, the subscription will not send emails. + +- - `subscribers` + - Sequence + - See [\_](#jobsnametasksdashboard_tasksubscriptionsubscribers). 
+ +::: + + +### jobs._name_.tasks.dashboard_task.subscription.subscribers + +**`Type: Sequence`** + + + + + +:::list-table + +- - Key + - Type + - Description + +- - `destination_id` + - String + - + +- - `user_name` + - String + - + +::: + + ### jobs._name_.tasks.dbt_task **`Type: Map`** @@ -3494,7 +3589,7 @@ The task executes a nested task for every input provided when the `for_each_task **`Type: Map`** -Next field: 9 + @@ -3510,7 +3605,7 @@ Next field: 9 - - `compute` - Map - - Next field: 4. See [\_](#jobsnametasksgen_ai_compute_taskcompute). + - See [\_](#jobsnametasksgen_ai_compute_taskcompute). - - `dl_runtime_image` - String @@ -3543,7 +3638,7 @@ Next field: 9 **`Type: Map`** -Next field: 4 + @@ -4601,6 +4696,113 @@ The task triggers a pipeline update when the `pipeline_task` field is present. O ::: +### jobs._name_.tasks.power_bi_task + +**`Type: Map`** + +The task triggers a Power BI semantic model update when the `power_bi_task` field is present. + + + +:::list-table + +- - Key + - Type + - Description + +- - `connection_resource_name` + - String + - The resource name of the UC connection to authenticate from Databricks to Power BI + +- - `power_bi_model` + - Map + - The semantic model to update. See [\_](#jobsnametaskspower_bi_taskpower_bi_model). + +- - `refresh_after_update` + - Boolean + - Whether the model should be refreshed after the update + +- - `tables` + - Sequence + - The tables to be exported to Power BI. See [\_](#jobsnametaskspower_bi_tasktables). 
+ +- - `warehouse_id` + - String + - The SQL warehouse ID to use as the Power BI data source + +::: + + +### jobs._name_.tasks.power_bi_task.power_bi_model + +**`Type: Map`** + +The semantic model to update + + + +:::list-table + +- - Key + - Type + - Description + +- - `authentication_method` + - String + - How the published Power BI model authenticates to Databricks + +- - `model_name` + - String + - The name of the Power BI model + +- - `overwrite_existing` + - Boolean + - Whether to overwrite existing Power BI models + +- - `storage_mode` + - String + - The default storage mode of the Power BI model + +- - `workspace_name` + - String + - The name of the Power BI workspace of the model + +::: + + +### jobs._name_.tasks.power_bi_task.tables + +**`Type: Sequence`** + +The tables to be exported to Power BI + + + +:::list-table + +- - Key + - Type + - Description + +- - `catalog` + - String + - The catalog name in Databricks + +- - `name` + - String + - The table name in Databricks + +- - `schema` + - String + - The schema name in Databricks + +- - `storage_mode` + - String + - The Power BI storage mode of the table + +::: + + ### jobs._name_.tasks.python_wheel_task **`Type: Map`** @@ -5470,7 +5672,7 @@ model_serving_endpoints: - - `ai_gateway` - Map - - The AI Gateway configuration for the serving endpoint. NOTE: Only external model and provisioned throughput endpoints are currently supported. See [\_](#model_serving_endpointsnameai_gateway). + - The AI Gateway configuration for the serving endpoint. NOTE: External model, provisioned throughput, and pay-per-token endpoints are fully supported; agent endpoints currently only support inference tables. See [\_](#model_serving_endpointsnameai_gateway). - - `budget_policy_id` - String @@ -5531,7 +5733,7 @@ resources: **`Type: Map`** -The AI Gateway configuration for the serving endpoint. NOTE: Only external model and provisioned throughput endpoints are currently supported. 
+The AI Gateway configuration for the serving endpoint. NOTE: External model, provisioned throughput, and pay-per-token endpoints are fully supported; agent endpoints currently only support inference tables. @@ -6475,19 +6677,19 @@ The list of routes that define traffic to each served entity. - - `group_name` - String - - The name of the group that has the permission set in level. + - - - `level` - String - - The allowed permission for user, group, service principal defined for this permission. + - - - `service_principal_name` - String - - The name of the service principal that has the permission set in level. + - - - `user_name` - String - - The name of the user that has the permission set in level. + - ::: @@ -6711,19 +6913,19 @@ Tags: Additional metadata key-value pairs for this `model_version`. - - `group_name` - String - - The name of the group that has the permission set in level. + - - - `level` - String - - The allowed permission for user, group, service principal defined for this permission. + - - - `service_principal_name` - String - - The name of the service principal that has the permission set in level. + - - - `user_name` - String - - The name of the user that has the permission set in level. + - ::: @@ -6882,7 +7084,7 @@ pipelines: - - `trigger` - Map - - Which pipeline trigger to use. Deprecated: Use `continuous` instead. See [\_](#pipelinesnametrigger). + - Use continuous instead ::: @@ -8002,7 +8204,7 @@ Libraries or code needed by this deployment. - - `whl` - String - - URI of the whl to be installed. + - This field is deprecated ::: @@ -8122,19 +8324,19 @@ List of notification settings for this pipeline. - - `group_name` - String - - The name of the group that has the permission set in level. + - - - `level` - String - - The allowed permission for user, group, service principal defined for this permission. + - - - `service_principal_name` - String - - The name of the service principal that has the permission set in level. 
+ - - - `user_name` - String - - The name of the user that has the permission set in level. + - ::: @@ -8203,63 +8405,6 @@ Only `user_name` or `service_principal_name` can be specified. If both are speci ::: -### pipelines._name_.trigger - -**`Type: Map`** - -Which pipeline trigger to use. Deprecated: Use `continuous` instead. - - - -:::list-table - -- - Key - - Type - - Description - -- - `cron` - - Map - - See [\_](#pipelinesnametriggercron). - -- - `manual` - - Map - - - -::: - - -### pipelines._name_.trigger.cron - -**`Type: Map`** - - - - - -:::list-table - -- - Key - - Type - - Description - -- - `quartz_cron_schedule` - - String - - - -- - `timezone_id` - - String - - - -::: - - -### pipelines._name_.trigger.manual - -**`Type: Map`** - - - - ## quality_monitors **`Type: Map`** @@ -8800,6 +8945,44 @@ resources: ::: +## secret_scopes + +**`Type: Map`** + + + +```yaml +secret_scopes: + : + : +``` + + +:::list-table + +- - Key + - Type + - Description + +- - `backend_type` + - String + - + +- - `initial_manage_principal` + - String + - + +- - `keyvault_metadata` + - Map + - See [\_](#secret_scopesnamekeyvault_metadata). + +- - `name` + - String + - + +::: + + ## volumes **`Type: Map`** @@ -8849,7 +9032,7 @@ volumes: - - `volume_type` - String - - + - The type of the volume. An external volume is located in the specified external location. A managed volume is located in the default location which is specified by the parent schema, or the parent catalog, or the Metastore. 
[Learn more](https://docs.databricks.com/aws/en/volumes/managed-vs-external) ::: From 98e3587303ede521e128a6529cc5986926b45011 Mon Sep 17 00:00:00 2001 From: Anton Nekipelov <226657+anton-107@users.noreply.github.com> Date: Wed, 23 Apr 2025 15:34:15 +0200 Subject: [PATCH 04/40] run `make schema` --- bundle/internal/schema/annotations.yml | 16 ++++++ bundle/schema/jsonschema.json | 74 ++++++++++++++++++++++++++ 2 files changed, 90 insertions(+) diff --git a/bundle/internal/schema/annotations.yml b/bundle/internal/schema/annotations.yml index bc445055d5..4142801269 100644 --- a/bundle/internal/schema/annotations.yml +++ b/bundle/internal/schema/annotations.yml @@ -199,6 +199,9 @@ github.com/databricks/cli/bundle/config.Resources: The schema definitions for the bundle, where each key is the name of the schema. "markdown_description": |- The schema definitions for the bundle, where each key is the name of the schema. See [\_](/dev-tools/bundles/resources.md#schemas). + "secret_scopes": + "description": |- + PLACEHOLDER "volumes": "description": |- The volume definitions for the bundle, where each key is the name of the volume. 
@@ -532,6 +535,19 @@ github.com/databricks/cli/bundle/config/resources.PipelinePermission: "user_name": "description": |- PLACEHOLDER +github.com/databricks/cli/bundle/config/resources.SecretScope: + "backend_type": + "description": |- + PLACEHOLDER + "initial_manage_principal": + "description": |- + PLACEHOLDER + "keyvault_metadata": + "description": |- + PLACEHOLDER + "name": + "description": |- + PLACEHOLDER github.com/databricks/cli/bundle/config/variable.Lookup: "alert": "description": |- diff --git a/bundle/schema/jsonschema.json b/bundle/schema/jsonschema.json index ed7ab6bab3..ba6510c55c 100644 --- a/bundle/schema/jsonschema.json +++ b/bundle/schema/jsonschema.json @@ -1304,6 +1304,36 @@ } ] }, + "resources.SecretScope": { + "oneOf": [ + { + "type": "object", + "properties": { + "backend_type": { + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/workspace.ScopeBackendType" + }, + "initial_manage_principal": { + "$ref": "#/$defs/string" + }, + "keyvault_metadata": { + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/workspace.AzureKeyVaultSecretScopeMetadata" + }, + "name": { + "$ref": "#/$defs/string" + } + }, + "additionalProperties": false, + "required": [ + "name", + "initial_manage_principal" + ] + }, + { + "type": "string", + "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" + } + ] + }, "resources.Volume": { "oneOf": [ { @@ -1843,6 +1873,9 @@ "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.Schema", "markdownDescription": "The schema definitions for the bundle, where each key is the name of the schema. See [schemas](https://docs.databricks.com/dev-tools/bundles/resources.html#schemas)." 
}, + "secret_scopes": { + "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.SecretScope" + }, "volumes": { "description": "The volume definitions for the bundle, where each key is the name of the volume.", "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.Volume", @@ -7343,6 +7376,33 @@ "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" } ] + }, + "workspace.AzureKeyVaultSecretScopeMetadata": { + "oneOf": [ + { + "type": "object", + "properties": { + "dns_name": { + "$ref": "#/$defs/string" + }, + "resource_id": { + "$ref": "#/$defs/string" + } + }, + "additionalProperties": false, + "required": [ + "dns_name", + "resource_id" + ] + }, + { + "type": "string", + "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" + } + ] + }, + "workspace.ScopeBackendType": { + "type": "string" } } } @@ -7563,6 +7623,20 @@ } ] }, + "resources.SecretScope": { + "oneOf": [ + { + "type": "object", + "additionalProperties": { + "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.SecretScope" + } + }, + { + "type": "string", + "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" + } + ] + }, "resources.Volume": { "oneOf": [ { From 2af6e759de40fea04126ada3e7093f9bea29ccad Mon Sep 17 00:00:00 2001 From: Anton Nekipelov <226657+anton-107@users.noreply.github.com> Date: Wed, 23 Apr 2025 15:38:39 +0200 Subject: [PATCH 05/40] fix TestTerraformToBundleEmptyRemoteResources --- bundle/deploy/terraform/convert.go | 5 +++++ bundle/deploy/terraform/convert_test.go | 8 ++++++++ 2 files changed, 13 insertions(+) diff --git a/bundle/deploy/terraform/convert.go b/bundle/deploy/terraform/convert.go index fff32bc9b5..739ab8b79c 100644 --- a/bundle/deploy/terraform/convert.go +++ b/bundle/deploy/terraform/convert.go @@ -291,6 +291,11 @@ func TerraformToBundle(state *resourcesState, config *config.Root) error { src.ModifiedStatus = resources.ModifiedStatusCreated } } + for 
_, src := range config.Resources.SecretScopes { + if src.ModifiedStatus == "" { + src.ModifiedStatus = resources.ModifiedStatusCreated + } + } return nil } diff --git a/bundle/deploy/terraform/convert_test.go b/bundle/deploy/terraform/convert_test.go index bb7f149358..5c57af9ed3 100644 --- a/bundle/deploy/terraform/convert_test.go +++ b/bundle/deploy/terraform/convert_test.go @@ -2,6 +2,7 @@ package terraform import ( "context" + "github.com/databricks/databricks-sdk-go/service/workspace" "reflect" "testing" @@ -834,6 +835,13 @@ func TestTerraformToBundleEmptyRemoteResources(t *testing.T) { }, }, }, + SecretScopes: map[string]*resources.SecretScope{ + "test_secret_scope": { + SecretScope: &workspace.SecretScope{ + Name: "test_secret_scope", + }, + }, + }, }, } tfState := resourcesState{ From 88b7908f635587b950bbc43427295a528712b02e Mon Sep 17 00:00:00 2001 From: Anton Nekipelov <226657+anton-107@users.noreply.github.com> Date: Wed, 23 Apr 2025 15:41:18 +0200 Subject: [PATCH 06/40] fix TestTerraformToBundleModifiedResources --- bundle/deploy/terraform/convert_test.go | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/bundle/deploy/terraform/convert_test.go b/bundle/deploy/terraform/convert_test.go index 5c57af9ed3..623007a2f8 100644 --- a/bundle/deploy/terraform/convert_test.go +++ b/bundle/deploy/terraform/convert_test.go @@ -1036,6 +1036,18 @@ func TestTerraformToBundleModifiedResources(t *testing.T) { }, }, }, + SecretScopes: map[string]*resources.SecretScope{ + "test_secret_scope": { + SecretScope: &workspace.SecretScope{ + Name: "test_secret_scope", + }, + }, + "test_secret_scope_new": { + SecretScope: &workspace.SecretScope{ + Name: "test_secret_scope_new", + }, + }, + }, }, } tfState := resourcesState{ From faa796d0c3c9310f68aeb8be25d132d1d0bfacd2 Mon Sep 17 00:00:00 2001 From: Anton Nekipelov <226657+anton-107@users.noreply.github.com> Date: Wed, 23 Apr 2025 15:43:46 +0200 Subject: [PATCH 07/40] fix TestTerraformToBundleEmptyLocalResources --- 
bundle/deploy/terraform/convert_test.go | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/bundle/deploy/terraform/convert_test.go b/bundle/deploy/terraform/convert_test.go index 623007a2f8..eb479ffed8 100644 --- a/bundle/deploy/terraform/convert_test.go +++ b/bundle/deploy/terraform/convert_test.go @@ -704,6 +704,14 @@ func TestTerraformToBundleEmptyLocalResources(t *testing.T) { {Attributes: stateInstanceAttributes{Name: "app1"}}, }, }, + { + Type: "databricks_secret_scope", + Mode: "managed", + Name: "test_secret_scope", + Instances: []stateResourceInstance{ + {Attributes: stateInstanceAttributes{Name: "secret_scope1"}}, + }, + }, }, } err := TerraformToBundle(&tfState, &config) From 03bab9adcccaf72736e47f6491eaaae49a0f9565 Mon Sep 17 00:00:00 2001 From: Anton Nekipelov <226657+anton-107@users.noreply.github.com> Date: Wed, 23 Apr 2025 15:45:47 +0200 Subject: [PATCH 08/40] fix TestRunAsNoErrorForSupportedResources --- bundle/config/mutator/resourcemutator/run_as_test.go | 1 + 1 file changed, 1 insertion(+) diff --git a/bundle/config/mutator/resourcemutator/run_as_test.go b/bundle/config/mutator/resourcemutator/run_as_test.go index 9ff5d5faec..803f601c3f 100644 --- a/bundle/config/mutator/resourcemutator/run_as_test.go +++ b/bundle/config/mutator/resourcemutator/run_as_test.go @@ -43,6 +43,7 @@ func allResourceTypes(t *testing.T) []string { "quality_monitors", "registered_models", "schemas", + "secret_scopes", "volumes", }, resourceTypes, From c7befbaea045303b797ff2945b96ee207c295395 Mon Sep 17 00:00:00 2001 From: Anton Nekipelov <226657+anton-107@users.noreply.github.com> Date: Wed, 23 Apr 2025 15:57:11 +0200 Subject: [PATCH 09/40] fix TestRunAsErrorForUnsupportedResources --- bundle/config/mutator/resourcemutator/run_as.go | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/bundle/config/mutator/resourcemutator/run_as.go b/bundle/config/mutator/resourcemutator/run_as.go index fb5408bfbd..dc553a601c 100644 --- 
a/bundle/config/mutator/resourcemutator/run_as.go +++ b/bundle/config/mutator/resourcemutator/run_as.go @@ -129,6 +129,16 @@ func validateRunAs(b *bundle.Bundle) diag.Diagnostics { )) } + // Secret Scopes do not support run_as in the API. + if len(b.Config.Resources.SecretScopes) > 0 { + diags = diags.Extend(reportRunAsNotSupported( + "secret_scopes", + b.Config.GetLocation("resources.secret_scopes"), + b.Config.Workspace.CurrentUser.UserName, + identity, + )) + } + return diags } From 306440440509248d7a63d95bdd262840689cefe4 Mon Sep 17 00:00:00 2001 From: Anton Nekipelov <226657+anton-107@users.noreply.github.com> Date: Wed, 23 Apr 2025 16:00:51 +0200 Subject: [PATCH 10/40] fix TestAllResourcesMocked --- .../mutator/resourcemutator/apply_target_mode_test.go | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/bundle/config/mutator/resourcemutator/apply_target_mode_test.go b/bundle/config/mutator/resourcemutator/apply_target_mode_test.go index 6f32300631..c16e8e7058 100644 --- a/bundle/config/mutator/resourcemutator/apply_target_mode_test.go +++ b/bundle/config/mutator/resourcemutator/apply_target_mode_test.go @@ -2,6 +2,7 @@ package resourcemutator import ( "context" + "github.com/databricks/databricks-sdk-go/service/workspace" "reflect" "slices" "testing" @@ -152,6 +153,13 @@ func mockBundle(mode config.Mode) *bundle.Bundle { }, }, }, + SecretScopes: map[string]*resources.SecretScope{ + "secretScope1": { + SecretScope: &workspace.SecretScope{ + Name: "secretScope1", + }, + }, + }, }, }, SyncRoot: vfs.MustNew("/Users/lennart.kats@databricks.com"), From 57c72bf9f2991e414df2b990582de3c553262181 Mon Sep 17 00:00:00 2001 From: Anton Nekipelov <226657+anton-107@users.noreply.github.com> Date: Wed, 23 Apr 2025 16:12:00 +0200 Subject: [PATCH 11/40] fix TestAllNonUcResourcesAreRenamed --- .../config/mutator/resourcemutator/apply_target_mode_test.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git 
a/bundle/config/mutator/resourcemutator/apply_target_mode_test.go b/bundle/config/mutator/resourcemutator/apply_target_mode_test.go index c16e8e7058..6fc6dd93f8 100644 --- a/bundle/config/mutator/resourcemutator/apply_target_mode_test.go +++ b/bundle/config/mutator/resourcemutator/apply_target_mode_test.go @@ -326,8 +326,8 @@ func TestAllNonUcResourcesAreRenamed(t *testing.T) { nameField := resource.Elem().FieldByName("Name") resourceType := resources.Type().Field(i).Name - // Skip apps, as they are not renamed - if resourceType == "Apps" { + // Skip resources that are not renamed + if resourceType == "Apps" || resourceType == "SecretScopes" { continue } From 674823b236116bb5cc1df3e1436a723950ae8885 Mon Sep 17 00:00:00 2001 From: Anton Nekipelov <226657+anton-107@users.noreply.github.com> Date: Wed, 23 Apr 2025 16:57:48 +0200 Subject: [PATCH 12/40] add bundle permissions support to secret scopes --- .../mutator/resourcemutator/apply_bundle_permissions.go | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/bundle/config/mutator/resourcemutator/apply_bundle_permissions.go b/bundle/config/mutator/resourcemutator/apply_bundle_permissions.go index 3c438ce7b7..a015fdbfe1 100644 --- a/bundle/config/mutator/resourcemutator/apply_bundle_permissions.go +++ b/bundle/config/mutator/resourcemutator/apply_bundle_permissions.go @@ -51,6 +51,10 @@ var ( permissions.CAN_MANAGE: "CAN_MANAGE", permissions.CAN_VIEW: "CAN_USE", }, + "secret_scopes": { + permissions.CAN_MANAGE: "MANAGE", + permissions.CAN_VIEW: "READ", + }, } ) From 03740e0d27e3ffdf47947e0503aaed3f34f4c583 Mon Sep 17 00:00:00 2001 From: Anton Nekipelov <226657+anton-107@users.noreply.github.com> Date: Wed, 23 Apr 2025 17:35:24 +0200 Subject: [PATCH 13/40] implement Exists for SecretScopes --- bundle/config/resources/secret_scope.go | 17 ++++++++++++++--- bundle/config/resources_test.go | 13 +++++++++++++ 2 files changed, 27 insertions(+), 3 deletions(-) diff --git a/bundle/config/resources/secret_scope.go 
b/bundle/config/resources/secret_scope.go index a5a65d0361..211fde6a7b 100644 --- a/bundle/config/resources/secret_scope.go +++ b/bundle/config/resources/secret_scope.go @@ -27,9 +27,20 @@ func (s SecretScope) MarshalJSON() ([]byte, error) { return marshal.Marshal(s) } -func (s SecretScope) Exists(ctx context.Context, w *databricks.WorkspaceClient, id string) (bool, error) { - // TODO implement me - panic("implement me") +func (s SecretScope) Exists(ctx context.Context, w *databricks.WorkspaceClient, name string) (bool, error) { + scopes, err := w.Secrets.ListScopesAll(ctx) + + if err != nil { + return false, nil + } + + for _, scope := range scopes { + if scope.Name == name { + return true, nil + } + } + + return false, nil } func (s SecretScope) ResourceDescription() ResourceDescription { diff --git a/bundle/config/resources_test.go b/bundle/config/resources_test.go index 92bd32e5c1..c18ead2cfc 100644 --- a/bundle/config/resources_test.go +++ b/bundle/config/resources_test.go @@ -3,6 +3,7 @@ package config import ( "context" "encoding/json" + "github.com/databricks/databricks-sdk-go/service/workspace" "reflect" "strings" "testing" @@ -168,6 +169,12 @@ func TestResourcesBindSupport(t *testing.T) { CreateServingEndpoint: serving.CreateServingEndpoint{}, }, }, + SecretScopes: map[string]*resources.SecretScope{ + "my_secret_scope": { + Name: "0", + SecretScope: &workspace.SecretScope{}, + }, + }, } unbindableResources := map[string]bool{"model": true} @@ -184,6 +191,9 @@ func TestResourcesBindSupport(t *testing.T) { m.GetMockAppsAPI().EXPECT().GetByName(mock.Anything, mock.Anything).Return(nil, nil) m.GetMockQualityMonitorsAPI().EXPECT().Get(mock.Anything, mock.Anything).Return(nil, nil) m.GetMockServingEndpointsAPI().EXPECT().Get(mock.Anything, mock.Anything).Return(nil, nil) + m.GetMockSecretsAPI().EXPECT().ListScopesAll(mock.Anything).Return([]workspace.SecretScope{ + {Name: "0"}, + }, nil) allResources := supportedResources.AllResources() for _, group := range 
allResources { @@ -197,6 +207,9 @@ func TestResourcesBindSupport(t *testing.T) { // bind operation requires Exists to return true exists, err := r.Exists(ctx, m.WorkspaceClient, "0") + if err != nil { + panic(err) + } assert.NoError(t, err) assert.True(t, exists) } From 8c33d55bce0e0a6f19b3975126645dd48e5af143 Mon Sep 17 00:00:00 2001 From: Anton Nekipelov <226657+anton-107@users.noreply.github.com> Date: Wed, 23 Apr 2025 17:40:49 +0200 Subject: [PATCH 14/40] remove URL field from SecretScope; fix lint --- .../mutator/resourcemutator/apply_target_mode_test.go | 3 ++- bundle/config/resources/secret_scope.go | 10 ++++------ bundle/config/resources_test.go | 3 ++- bundle/deploy/terraform/convert_test.go | 3 ++- 4 files changed, 10 insertions(+), 9 deletions(-) diff --git a/bundle/config/mutator/resourcemutator/apply_target_mode_test.go b/bundle/config/mutator/resourcemutator/apply_target_mode_test.go index 6fc6dd93f8..24baef533a 100644 --- a/bundle/config/mutator/resourcemutator/apply_target_mode_test.go +++ b/bundle/config/mutator/resourcemutator/apply_target_mode_test.go @@ -2,11 +2,12 @@ package resourcemutator import ( "context" - "github.com/databricks/databricks-sdk-go/service/workspace" "reflect" "slices" "testing" + "github.com/databricks/databricks-sdk-go/service/workspace" + "github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle/config" "github.com/databricks/cli/bundle/config/resources" diff --git a/bundle/config/resources/secret_scope.go b/bundle/config/resources/secret_scope.go index 211fde6a7b..7d9aaa6ca9 100644 --- a/bundle/config/resources/secret_scope.go +++ b/bundle/config/resources/secret_scope.go @@ -14,7 +14,6 @@ type SecretScope struct { InitialManagePrincipal string `json:"initial_manage_principal"` ModifiedStatus ModifiedStatus `json:"modified_status,omitempty" bundle:"internal"` - URL string `json:"url,omitempty" bundle:"internal"` *workspace.SecretScope } @@ -29,7 +28,6 @@ func (s SecretScope) MarshalJSON() ([]byte, error) { 
func (s SecretScope) Exists(ctx context.Context, w *databricks.WorkspaceClient, name string) (bool, error) { scopes, err := w.Secrets.ListScopesAll(ctx) - if err != nil { return false, nil } @@ -62,12 +60,12 @@ func (s SecretScope) GetName() string { } func (s SecretScope) GetURL() string { - return s.URL + // Secret scopes do not have a URL + return "" } -func (s SecretScope) InitializeURL(baseURL url.URL) { - // TODO implement me - panic("implement me") +func (s SecretScope) InitializeURL(_ url.URL) { + // Secret scopes do not have a URL } func (s SecretScope) IsNil() bool { diff --git a/bundle/config/resources_test.go b/bundle/config/resources_test.go index c18ead2cfc..4d4c7f2266 100644 --- a/bundle/config/resources_test.go +++ b/bundle/config/resources_test.go @@ -3,11 +3,12 @@ package config import ( "context" "encoding/json" - "github.com/databricks/databricks-sdk-go/service/workspace" "reflect" "strings" "testing" + "github.com/databricks/databricks-sdk-go/service/workspace" + "github.com/databricks/databricks-sdk-go/service/serving" "github.com/databricks/cli/bundle/config/resources" diff --git a/bundle/deploy/terraform/convert_test.go b/bundle/deploy/terraform/convert_test.go index eb479ffed8..d5f0ec9606 100644 --- a/bundle/deploy/terraform/convert_test.go +++ b/bundle/deploy/terraform/convert_test.go @@ -2,10 +2,11 @@ package terraform import ( "context" - "github.com/databricks/databricks-sdk-go/service/workspace" "reflect" "testing" + "github.com/databricks/databricks-sdk-go/service/workspace" + "github.com/databricks/cli/bundle/config" "github.com/databricks/cli/bundle/config/resources" "github.com/databricks/cli/bundle/internal/tf/schema" From f421f6d253b9ebbe123ba48c9d4e62e65e07aea5 Mon Sep 17 00:00:00 2001 From: Anton Nekipelov <226657+anton-107@users.noreply.github.com> Date: Fri, 25 Apr 2025 15:53:23 +0200 Subject: [PATCH 15/40] list scopes and acls in the acceptance test --- .../bundle/deploy/secret-scope/output.txt | 20 ++++++++++++++++++- 
acceptance/bundle/deploy/secret-scope/script | 5 ++++- .../bundle/deploy/secret-scope/test.toml | 13 ++++++++++++ 3 files changed, 36 insertions(+), 2 deletions(-) diff --git a/acceptance/bundle/deploy/secret-scope/output.txt b/acceptance/bundle/deploy/secret-scope/output.txt index 126bbc9d79..3a165c2a1e 100644 --- a/acceptance/bundle/deploy/secret-scope/output.txt +++ b/acceptance/bundle/deploy/secret-scope/output.txt @@ -6,7 +6,25 @@ Updating deployment state... Deployment complete! >>> [CLI] bundle summary --output json -"my-secrets" +{ + "initial_manage_principal": "users", + "modified_status": "created", + "name": "my-secrets" +} + +>>> [CLI] secrets list-scopes -o json +{ + "backend_type": "DATABRICKS", + "name": "my-secrets" +} + +>>> [CLI] secrets list-acls my-secrets +[ + { + "permission": "MANAGE", + "principal": "[USERNAME]" + } +] >>> [CLI] secrets put-secret my-secrets my-key --string-value my-secret-value diff --git a/acceptance/bundle/deploy/secret-scope/script b/acceptance/bundle/deploy/secret-scope/script index 287c2eb01e..d52c0339e7 100644 --- a/acceptance/bundle/deploy/secret-scope/script +++ b/acceptance/bundle/deploy/secret-scope/script @@ -6,7 +6,10 @@ cleanup() { trap cleanup EXIT trace $CLI bundle deploy -trace $CLI bundle summary --output json | jq '.resources.secret_scopes.secret_scope1.name' +trace $CLI bundle summary --output json | jq '.resources.secret_scopes.secret_scope1' + +trace $CLI secrets list-scopes -o json | jq '.[] | select(.name == "my-secrets")' +trace $CLI secrets list-acls my-secrets trace $CLI secrets put-secret my-secrets my-key --string-value "my-secret-value" trace $CLI secrets get-secret my-secrets my-key diff --git a/acceptance/bundle/deploy/secret-scope/test.toml b/acceptance/bundle/deploy/secret-scope/test.toml index 4504b8a1eb..17c3300bf8 100644 --- a/acceptance/bundle/deploy/secret-scope/test.toml +++ b/acceptance/bundle/deploy/secret-scope/test.toml @@ -35,3 +35,16 @@ Response.Body = ''' 
"value":"bXktc2VjcmV0LXZhbHVl" } ''' + +[[Server]] +Pattern = "GET /api/2.0/secrets/acls/list" +Response.Body = ''' +{ + "items": [ + { + "permission": "MANAGE", + "principal": "[USERNAME]" + } + ] +} +''' From defc01b2bda7f9a0dbe4a0eae06253b04fa565ca Mon Sep 17 00:00:00 2001 From: Anton Nekipelov <226657+anton-107@users.noreply.github.com> Date: Fri, 25 Apr 2025 17:03:22 +0200 Subject: [PATCH 16/40] add acceptance test for binding secret scopes --- .../bind/secret-scope/databricks.yml.tmpl | 7 ++++ .../deployment/bind/secret-scope/output.txt | 35 +++++++++++++++++++ .../deployment/bind/secret-scope/script | 26 ++++++++++++++ .../deployment/bind/secret-scope/test.toml | 26 ++++++++++++++ 4 files changed, 94 insertions(+) create mode 100644 acceptance/bundle/deployment/bind/secret-scope/databricks.yml.tmpl create mode 100644 acceptance/bundle/deployment/bind/secret-scope/output.txt create mode 100644 acceptance/bundle/deployment/bind/secret-scope/script create mode 100644 acceptance/bundle/deployment/bind/secret-scope/test.toml diff --git a/acceptance/bundle/deployment/bind/secret-scope/databricks.yml.tmpl b/acceptance/bundle/deployment/bind/secret-scope/databricks.yml.tmpl new file mode 100644 index 0000000000..1377bb4923 --- /dev/null +++ b/acceptance/bundle/deployment/bind/secret-scope/databricks.yml.tmpl @@ -0,0 +1,7 @@ +bundle: + name: bind-dashboard-test-$UNIQUE_NAME + +resources: + secret_scopes: + secret_scope1: + name: $SECRET_SCOPE_NAME diff --git a/acceptance/bundle/deployment/bind/secret-scope/output.txt b/acceptance/bundle/deployment/bind/secret-scope/output.txt new file mode 100644 index 0000000000..d8e3368bda --- /dev/null +++ b/acceptance/bundle/deployment/bind/secret-scope/output.txt @@ -0,0 +1,35 @@ + +>>> [CLI] secrets create-scope test-secret-scope-[UUID] + +>>> [CLI] bundle deployment bind secret_scope1 test-secret-scope-[UUID] --auto-approve +Updating deployment state... +Successfully bound secret_scope with an id 'test-secret-scope-[UUID]'. 
Run 'bundle deploy' to deploy changes to your workspace + +>>> [CLI] bundle deploy +Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/bind-dashboard-test-[UNIQUE_NAME]/default/files... +Deploying resources... +Updating deployment state... +Deployment complete! + +>>> [CLI] secrets list-scopes -o json +{ + "backend_type": "DATABRICKS", + "name": "test-secret-scope-[UUID]" +} + +>>> [CLI] bundle deployment unbind secret_scope1 +Updating deployment state... + +>>> [CLI] bundle destroy --auto-approve +All files and directories at the following location will be deleted: /Workspace/Users/[USERNAME]/.bundle/bind-dashboard-test-[UNIQUE_NAME]/default + +Deleting files... +Destroy complete! + +>>> [CLI] secrets list-scopes -o json +{ + "backend_type": "DATABRICKS", + "name": "test-secret-scope-[UUID]" +} + +>>> [CLI] secrets delete-scope test-secret-scope-[UUID] diff --git a/acceptance/bundle/deployment/bind/secret-scope/script b/acceptance/bundle/deployment/bind/secret-scope/script new file mode 100644 index 0000000000..1dd2233851 --- /dev/null +++ b/acceptance/bundle/deployment/bind/secret-scope/script @@ -0,0 +1,26 @@ +SECRET_SCOPE_NAME="test-secret-scope-$(uuid)" +if [ -z "$CLOUD_ENV" ]; then + SECRET_SCOPE_NAME="test-secret-scope-6260d50f-e8ff-4905-8f28-812345678903" # use hard-coded uuid when running locally +fi +export SECRET_SCOPE_NAME +envsubst < databricks.yml.tmpl > databricks.yml + +# Create a pre-defined volume: +trace $CLI secrets create-scope "${SECRET_SCOPE_NAME}" + +cleanup() { + trace $CLI secrets delete-scope "${SECRET_SCOPE_NAME}" +} +trap cleanup EXIT + +trace $CLI bundle deployment bind secret_scope1 "${SECRET_SCOPE_NAME}" --auto-approve + +trace $CLI bundle deploy + +trace $CLI secrets list-scopes -o json | jq --arg value ${SECRET_SCOPE_NAME} '.[] | select(.name == $value)' + +trace $CLI bundle deployment unbind secret_scope1 + +trace $CLI bundle destroy --auto-approve + +trace $CLI secrets list-scopes -o json | jq --arg value 
${SECRET_SCOPE_NAME} '.[] | select(.name == $value)' diff --git a/acceptance/bundle/deployment/bind/secret-scope/test.toml b/acceptance/bundle/deployment/bind/secret-scope/test.toml new file mode 100644 index 0000000000..3c38ba78fc --- /dev/null +++ b/acceptance/bundle/deployment/bind/secret-scope/test.toml @@ -0,0 +1,26 @@ +Local = true +Cloud = true +RequiresUnityCatalog = true + +Ignore = [ + "databricks.yml", +] + +[[Server]] +Pattern = "POST /api/2.0/secrets/scopes/create" + +[[Server]] +Pattern = "GET /api/2.0/secrets/scopes/list" +Response.Body = ''' +{ + "scopes": [ + { + "backend_type": "DATABRICKS", + "name": "test-secret-scope-6260d50f-e8ff-4905-8f28-812345678903" + } + ] +} +''' + +[[Server]] +Pattern = "POST /api/2.0/secrets/scopes/delete" From 0452b8552b15ba360658e855f4d9ce34f25a4ac4 Mon Sep 17 00:00:00 2001 From: Anton Nekipelov <226657+anton-107@users.noreply.github.com> Date: Tue, 29 Apr 2025 14:57:50 +0200 Subject: [PATCH 17/40] [wip] todo statement to handle permissions --- acceptance/bundle/deploy/secret-scope/databricks.yml.tmpl | 2 ++ acceptance/bundle/deploy/secret-scope/test.toml | 2 ++ bundle/deploy/terraform/tfdyn/convert_secret_scope.go | 6 +++++- 3 files changed, 9 insertions(+), 1 deletion(-) diff --git a/acceptance/bundle/deploy/secret-scope/databricks.yml.tmpl b/acceptance/bundle/deploy/secret-scope/databricks.yml.tmpl index de8534a4e5..16b407f6ab 100644 --- a/acceptance/bundle/deploy/secret-scope/databricks.yml.tmpl +++ b/acceptance/bundle/deploy/secret-scope/databricks.yml.tmpl @@ -7,3 +7,5 @@ resources: secret_scope1: name: my-secrets initial_manage_principal: users + backend_type: "DATABRICKS" + diff --git a/acceptance/bundle/deploy/secret-scope/test.toml b/acceptance/bundle/deploy/secret-scope/test.toml index 17c3300bf8..d37623766d 100644 --- a/acceptance/bundle/deploy/secret-scope/test.toml +++ b/acceptance/bundle/deploy/secret-scope/test.toml @@ -1,6 +1,8 @@ Cloud = true Local = true +RecordRequests = true + Ignore = [ 
"databricks.yml", ] diff --git a/bundle/deploy/terraform/tfdyn/convert_secret_scope.go b/bundle/deploy/terraform/tfdyn/convert_secret_scope.go index 9dcd970309..760d95a7dd 100644 --- a/bundle/deploy/terraform/tfdyn/convert_secret_scope.go +++ b/bundle/deploy/terraform/tfdyn/convert_secret_scope.go @@ -2,7 +2,6 @@ package tfdyn import ( "context" - "github.com/databricks/cli/bundle/internal/tf/schema" "github.com/databricks/cli/libs/dyn" "github.com/databricks/cli/libs/dyn/convert" @@ -19,6 +18,11 @@ func (s secretScopeConverter) Convert(ctx context.Context, key string, vin dyn.V log.Debugf(ctx, "secret scope normalization diagnostic: %s", diag.Summary) } + // Configure permissions for this resource + // need to convert permissions to a new resource: https://registry.terraform.io/providers/databricks/databricks/latest/docs/resources/secret_acl + // TODO: add a validator to check that SecretScope has only one permission + out.SecretAcl = + // Add the converted resource to the output. out.SecretScope[key] = vout.AsAny() From 3d2ea8bf7fa3a791b3332bb7f5e8ee4dfbe08498 Mon Sep 17 00:00:00 2001 From: Anton Nekipelov <226657+anton-107@users.noreply.github.com> Date: Thu, 1 May 2025 11:07:50 +0200 Subject: [PATCH 18/40] add random suffix to the scope name in acceptance test --- acceptance/bundle/deploy/secret-scope/databricks.yml.tmpl | 7 ++++--- acceptance/bundle/deploy/secret-scope/script | 7 +++++++ 2 files changed, 11 insertions(+), 3 deletions(-) diff --git a/acceptance/bundle/deploy/secret-scope/databricks.yml.tmpl b/acceptance/bundle/deploy/secret-scope/databricks.yml.tmpl index 16b407f6ab..cc996ea255 100644 --- a/acceptance/bundle/deploy/secret-scope/databricks.yml.tmpl +++ b/acceptance/bundle/deploy/secret-scope/databricks.yml.tmpl @@ -1,11 +1,12 @@ - bundle: name: deploy-secret-scope-test-$UNIQUE_NAME resources: secret_scopes: secret_scope1: - name: my-secrets + name: $SECRET_SCOPE_NAME initial_manage_principal: users backend_type: "DATABRICKS" - + permissions: + - 
group_name: test_group1 + level: WRITE diff --git a/acceptance/bundle/deploy/secret-scope/script b/acceptance/bundle/deploy/secret-scope/script index d52c0339e7..4cf823da24 100644 --- a/acceptance/bundle/deploy/secret-scope/script +++ b/acceptance/bundle/deploy/secret-scope/script @@ -1,3 +1,10 @@ +SECRET_SCOPE_NAME="my-secrets-$(uuid)" +if [ -z "$CLOUD_ENV" ]; then + SECRET_SCOPE_NAME="my-secrets-6260d50f-e8ff-4905-8f28-812345678903" # use hard-coded uuid when running locally +fi + +export SECRET_SCOPE_NAME + envsubst < databricks.yml.tmpl > databricks.yml cleanup() { From af48f725e2a23f45d9fc7070bc4da90a7841f6f1 Mon Sep 17 00:00:00 2001 From: Anton Nekipelov <226657+anton-107@users.noreply.github.com> Date: Thu, 1 May 2025 17:02:37 +0200 Subject: [PATCH 19/40] map secret scope permissions to secret acl --- .../deploy/secret-scope/databricks.yml.tmpl | 5 +- .../bundle/deploy/secret-scope/output.txt | 33 ++++++++++--- acceptance/bundle/deploy/secret-scope/script | 9 ++-- .../bundle/deploy/secret-scope/test.toml | 21 ++++++-- bundle/config/resources/secret_scope.go | 16 +++++- bundle/deploy/terraform/convert.go | 1 + .../terraform/tfdyn/convert_secret_scope.go | 49 ++++++++++++++++--- 7 files changed, 111 insertions(+), 23 deletions(-) diff --git a/acceptance/bundle/deploy/secret-scope/databricks.yml.tmpl b/acceptance/bundle/deploy/secret-scope/databricks.yml.tmpl index cc996ea255..34ce83b56b 100644 --- a/acceptance/bundle/deploy/secret-scope/databricks.yml.tmpl +++ b/acceptance/bundle/deploy/secret-scope/databricks.yml.tmpl @@ -5,8 +5,9 @@ resources: secret_scopes: secret_scope1: name: $SECRET_SCOPE_NAME - initial_manage_principal: users backend_type: "DATABRICKS" permissions: - - group_name: test_group1 + - user_name: admins level: WRITE + - user_name: users + level: READ diff --git a/acceptance/bundle/deploy/secret-scope/output.txt b/acceptance/bundle/deploy/secret-scope/output.txt index 3a165c2a1e..12cac83f03 100644 --- 
a/acceptance/bundle/deploy/secret-scope/output.txt +++ b/acceptance/bundle/deploy/secret-scope/output.txt @@ -7,28 +7,47 @@ Deployment complete! >>> [CLI] bundle summary --output json { - "initial_manage_principal": "users", + "backend_type": "DATABRICKS", + "initial_manage_principal": "", "modified_status": "created", - "name": "my-secrets" + "name": "my-secrets-[UUID]", + "permissions": [ + { + "level": "WRITE", + "user_name": "admins" + }, + { + "level": "READ", + "user_name": "users" + } + ] } >>> [CLI] secrets list-scopes -o json { "backend_type": "DATABRICKS", - "name": "my-secrets" + "name": "my-secrets-[UUID]" } ->>> [CLI] secrets list-acls my-secrets +>>> [CLI] secrets list-acls my-secrets-[UUID] [ + { + "permission": "READ", + "principal": "users" + }, + { + "permission": "WRITE", + "principal": "admins" + }, { "permission": "MANAGE", "principal": "[USERNAME]" } ] ->>> [CLI] secrets put-secret my-secrets my-key --string-value my-secret-value +>>> [CLI] secrets put-secret my-secrets-[UUID] my-key --string-value my-secret-value ->>> [CLI] secrets get-secret my-secrets my-key +>>> [CLI] secrets get-secret my-secrets-[UUID] my-key { "key":"my-key", "value":"bXktc2VjcmV0LXZhbHVl" @@ -36,6 +55,8 @@ Deployment complete! 
>>> [CLI] bundle destroy --auto-approve The following resources will be deleted: + delete secret_acl secret_acl_secret_scope1_0 + delete secret_acl secret_acl_secret_scope1_1 delete secret_scope secret_scope1 All files and directories at the following location will be deleted: /Workspace/Users/[USERNAME]/.bundle/deploy-secret-scope-test-[UNIQUE_NAME]/default diff --git a/acceptance/bundle/deploy/secret-scope/script b/acceptance/bundle/deploy/secret-scope/script index 4cf823da24..5650622132 100644 --- a/acceptance/bundle/deploy/secret-scope/script +++ b/acceptance/bundle/deploy/secret-scope/script @@ -2,7 +2,6 @@ SECRET_SCOPE_NAME="my-secrets-$(uuid)" if [ -z "$CLOUD_ENV" ]; then SECRET_SCOPE_NAME="my-secrets-6260d50f-e8ff-4905-8f28-812345678903" # use hard-coded uuid when running locally fi - export SECRET_SCOPE_NAME envsubst < databricks.yml.tmpl > databricks.yml @@ -14,9 +13,9 @@ trap cleanup EXIT trace $CLI bundle deploy trace $CLI bundle summary --output json | jq '.resources.secret_scopes.secret_scope1' +trace $CLI secrets list-scopes -o json | jq --arg value ${SECRET_SCOPE_NAME} '.[] | select(.name == $value)' -trace $CLI secrets list-scopes -o json | jq '.[] | select(.name == "my-secrets")' -trace $CLI secrets list-acls my-secrets +trace $CLI secrets list-acls ${SECRET_SCOPE_NAME} -trace $CLI secrets put-secret my-secrets my-key --string-value "my-secret-value" -trace $CLI secrets get-secret my-secrets my-key +trace $CLI secrets put-secret ${SECRET_SCOPE_NAME} my-key --string-value "my-secret-value" +trace $CLI secrets get-secret ${SECRET_SCOPE_NAME} my-key diff --git a/acceptance/bundle/deploy/secret-scope/test.toml b/acceptance/bundle/deploy/secret-scope/test.toml index d37623766d..15d2e7c8d0 100644 --- a/acceptance/bundle/deploy/secret-scope/test.toml +++ b/acceptance/bundle/deploy/secret-scope/test.toml @@ -1,8 +1,6 @@ Cloud = true Local = true -RecordRequests = true - Ignore = [ "databricks.yml", ] @@ -17,7 +15,7 @@ Response.Body = ''' "scopes": [ { 
"backend_type": "DATABRICKS", - "name": "my-secrets" + "name": "my-secrets-6260d50f-e8ff-4905-8f28-812345678903" } ] } @@ -29,6 +27,9 @@ Pattern = "POST /api/2.0/secrets/scopes/delete" [[Server]] Pattern = "POST /api/2.0/secrets/put" +[[Server]] +Pattern = "POST /api/2.0/secrets/acls/put" + [[Server]] Pattern = "GET /api/2.0/secrets/get" Response.Body = ''' @@ -43,6 +44,14 @@ Pattern = "GET /api/2.0/secrets/acls/list" Response.Body = ''' { "items": [ + { + "permission": "READ", + "principal": "users" + }, + { + "permission": "WRITE", + "principal": "admins" + }, { "permission": "MANAGE", "principal": "[USERNAME]" @@ -50,3 +59,9 @@ Response.Body = ''' ] } ''' + +[[Server]] +Pattern = "GET /api/2.0/secrets/acls/get" + +[[Server]] +Pattern = "POST /api/2.0/secrets/acls/delete" diff --git a/bundle/config/resources/secret_scope.go b/bundle/config/resources/secret_scope.go index 7d9aaa6ca9..a68269fd54 100644 --- a/bundle/config/resources/secret_scope.go +++ b/bundle/config/resources/secret_scope.go @@ -9,11 +9,25 @@ import ( "github.com/databricks/databricks-sdk-go/service/workspace" ) +type SecretScopePermissionLevel string + +// SecretScopePermission holds the permission level setting for a single principal. +// Multiple of these can be defined on any secret scope. 
+// Secret scopes permissions are mapped to Secret ACLs +type SecretScopePermission struct { + Level SecretScopePermissionLevel `json:"level"` + + UserName string `json:"user_name,omitempty"` + ServicePrincipalName string `json:"service_principal_name,omitempty"` + GroupName string `json:"group_name,omitempty"` +} + type SecretScope struct { Name string `json:"name"` InitialManagePrincipal string `json:"initial_manage_principal"` - ModifiedStatus ModifiedStatus `json:"modified_status,omitempty" bundle:"internal"` + Permissions []SecretScopePermission `json:"permissions,omitempty"` + ModifiedStatus ModifiedStatus `json:"modified_status,omitempty" bundle:"internal"` *workspace.SecretScope } diff --git a/bundle/deploy/terraform/convert.go b/bundle/deploy/terraform/convert.go index 739ab8b79c..841ed2e67b 100644 --- a/bundle/deploy/terraform/convert.go +++ b/bundle/deploy/terraform/convert.go @@ -224,6 +224,7 @@ func TerraformToBundle(state *resourcesState, config *config.Root) error { config.Resources.SecretScopes[resource.Name] = cur case "databricks_permissions": case "databricks_grants": + case "databricks_secret_acl": // Ignore; no need to pull these back into the configuration. 
default: return fmt.Errorf("missing mapping for %s", resource.Type) diff --git a/bundle/deploy/terraform/tfdyn/convert_secret_scope.go b/bundle/deploy/terraform/tfdyn/convert_secret_scope.go index 760d95a7dd..97ebce7bba 100644 --- a/bundle/deploy/terraform/tfdyn/convert_secret_scope.go +++ b/bundle/deploy/terraform/tfdyn/convert_secret_scope.go @@ -2,6 +2,8 @@ package tfdyn import ( "context" + "fmt" + "github.com/databricks/cli/bundle/internal/tf/schema" "github.com/databricks/cli/libs/dyn" "github.com/databricks/cli/libs/dyn/convert" @@ -9,22 +11,57 @@ import ( "github.com/databricks/databricks-sdk-go/service/workspace" ) +type resourceSecretAcl struct { + schema.ResourceSecretAcl + DependsOn []string `json:"depends_on,omitempty"` +} + type secretScopeConverter struct{} +func convertPermissionsSecretScope(key, scopeName string, permissions []dyn.Value, out *schema.Resources) { + for idx, permission := range permissions { + level, _ := permission.Get("level").AsString() + userName, _ := permission.Get("user_name").AsString() + groupName, _ := permission.Get("group_name").AsString() + servicePrincipalName, _ := permission.Get("service_principal_name").AsString() + + principal := "" + if userName != "" { + principal = userName + } else if groupName != "" { + principal = groupName + } else if servicePrincipalName != "" { + principal = servicePrincipalName + } + + acl := &resourceSecretAcl{ + ResourceSecretAcl: schema.ResourceSecretAcl{ + Permission: level, + Principal: principal, + Scope: scopeName, + }, + DependsOn: []string{"databricks_secret_scope." + key}, + } + + aclKey := fmt.Sprintf("secret_acl_%s_%d", key, idx) + out.SecretAcl[aclKey] = acl + } +} + func (s secretScopeConverter) Convert(ctx context.Context, key string, vin dyn.Value, out *schema.Resources) error { // Normalize the output value to the target schema. 
vout, diags := convert.Normalize(workspace.SecretScope{}, vin) for _, diag := range diags { log.Debugf(ctx, "secret scope normalization diagnostic: %s", diag.Summary) } + out.SecretScope[key] = vout.AsAny() // Configure permissions for this resource - // need to convert permissions to a new resource: https://registry.terraform.io/providers/databricks/databricks/latest/docs/resources/secret_acl - // TODO: add a validator to check that SecretScope has only one permission - out.SecretAcl = - - // Add the converted resource to the output. - out.SecretScope[key] = vout.AsAny() + scopeName, _ := vin.Get("name").AsString() + permissions, ok := vin.Get("permissions").AsSequence() + if ok && len(permissions) > 0 { + convertPermissionsSecretScope(key, scopeName, permissions, out) + } return nil } From d7e1ceb4ace85856e317bffc8fae2b166f46043f Mon Sep 17 00:00:00 2001 From: Anton Nekipelov <226657+anton-107@users.noreply.github.com> Date: Thu, 1 May 2025 17:06:18 +0200 Subject: [PATCH 20/40] make schema --- bundle/internal/schema/annotations.yml | 16 +++++++++ bundle/schema/jsonschema.json | 49 ++++++++++++++++++++++++++ 2 files changed, 65 insertions(+) diff --git a/bundle/internal/schema/annotations.yml b/bundle/internal/schema/annotations.yml index 4142801269..9bfb0a0e76 100644 --- a/bundle/internal/schema/annotations.yml +++ b/bundle/internal/schema/annotations.yml @@ -548,6 +548,22 @@ github.com/databricks/cli/bundle/config/resources.SecretScope: "name": "description": |- PLACEHOLDER + "permissions": + "description": |- + PLACEHOLDER +github.com/databricks/cli/bundle/config/resources.SecretScopePermission: + "group_name": + "description": |- + PLACEHOLDER + "level": + "description": |- + PLACEHOLDER + "service_principal_name": + "description": |- + PLACEHOLDER + "user_name": + "description": |- + PLACEHOLDER github.com/databricks/cli/bundle/config/variable.Lookup: "alert": "description": |- diff --git a/bundle/schema/jsonschema.json b/bundle/schema/jsonschema.json index 
ba6510c55c..2913db1954 100644 --- a/bundle/schema/jsonschema.json +++ b/bundle/schema/jsonschema.json @@ -1320,6 +1320,9 @@ }, "name": { "$ref": "#/$defs/string" + }, + "permissions": { + "$ref": "#/$defs/slice/github.com/databricks/cli/bundle/config/resources.SecretScopePermission" } }, "additionalProperties": false, @@ -1334,6 +1337,38 @@ } ] }, + "resources.SecretScopePermission": { + "oneOf": [ + { + "type": "object", + "properties": { + "group_name": { + "$ref": "#/$defs/string" + }, + "level": { + "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.SecretScopePermissionLevel" + }, + "service_principal_name": { + "$ref": "#/$defs/string" + }, + "user_name": { + "$ref": "#/$defs/string" + } + }, + "additionalProperties": false, + "required": [ + "level" + ] + }, + { + "type": "string", + "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" + } + ] + }, + "resources.SecretScopePermissionLevel": { + "type": "string" + }, "resources.Volume": { "oneOf": [ { @@ -7900,6 +7935,20 @@ "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" } ] + }, + "resources.SecretScopePermission": { + "oneOf": [ + { + "type": "array", + "items": { + "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.SecretScopePermission" + } + }, + { + "type": "string", + "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" + } + ] } }, "config.ArtifactFile": { From 8a7b1ccb4207400d74763a13460e89f1ea5bb4e4 Mon Sep 17 00:00:00 2001 From: Anton Nekipelov <226657+anton-107@users.noreply.github.com> Date: Fri, 2 May 2025 15:38:53 +0200 Subject: [PATCH 21/40] add a test for a secret scope backed by azure keyvault --- .../secret-scope/backend-type/databricks.yml | 11 +++++++++++ .../deploy/secret-scope/backend-type/output.txt | 16 ++++++++++++++++ .../deploy/secret-scope/backend-type/script | 3 +++ .../deploy/secret-scope/backend-type/test.toml | 3 +++ acceptance/bundle/deploy/secret-scope/test.toml | 4 
++++ 5 files changed, 37 insertions(+) create mode 100644 acceptance/bundle/deploy/secret-scope/backend-type/databricks.yml create mode 100644 acceptance/bundle/deploy/secret-scope/backend-type/output.txt create mode 100644 acceptance/bundle/deploy/secret-scope/backend-type/script create mode 100644 acceptance/bundle/deploy/secret-scope/backend-type/test.toml diff --git a/acceptance/bundle/deploy/secret-scope/backend-type/databricks.yml b/acceptance/bundle/deploy/secret-scope/backend-type/databricks.yml new file mode 100644 index 0000000000..a8804c64c1 --- /dev/null +++ b/acceptance/bundle/deploy/secret-scope/backend-type/databricks.yml @@ -0,0 +1,11 @@ +bundle: + name: deploy-secret-scope-azure-backend + +resources: + secret_scopes: + secret_scope_azure: + name: test-secrets-azure-backend + backend_type: "AZURE_KEYVAULT" + keyvault_metadata: + resource_id: my_azure_keyvault_id + dns_name: my_azure_keyvault_dns_name diff --git a/acceptance/bundle/deploy/secret-scope/backend-type/output.txt b/acceptance/bundle/deploy/secret-scope/backend-type/output.txt new file mode 100644 index 0000000000..f66a537ff1 --- /dev/null +++ b/acceptance/bundle/deploy/secret-scope/backend-type/output.txt @@ -0,0 +1,16 @@ + +>>> [CLI] bundle deploy +Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/deploy-secret-scope-azure-backend/default/files... +Deploying resources... +Updating deployment state... +Deployment complete! 
+ +>>> jq -s .[] | select(.path=="/api/2.0/secrets/scopes/create") | .body out.requests.txt +{ + "backend_azure_keyvault": { + "dns_name": "my_azure_keyvault_dns_name", + "resource_id": "my_azure_keyvault_id" + }, + "scope": "test-secrets-azure-backend", + "scope_backend_type": "AZURE_KEYVAULT" +} diff --git a/acceptance/bundle/deploy/secret-scope/backend-type/script b/acceptance/bundle/deploy/secret-scope/backend-type/script new file mode 100644 index 0000000000..ae11605f3c --- /dev/null +++ b/acceptance/bundle/deploy/secret-scope/backend-type/script @@ -0,0 +1,3 @@ +trace $CLI bundle deploy +trace jq -s '.[] | select(.path=="/api/2.0/secrets/scopes/create") | .body' out.requests.txt +rm out.requests.txt diff --git a/acceptance/bundle/deploy/secret-scope/backend-type/test.toml b/acceptance/bundle/deploy/secret-scope/backend-type/test.toml new file mode 100644 index 0000000000..66e4b6b4ab --- /dev/null +++ b/acceptance/bundle/deploy/secret-scope/backend-type/test.toml @@ -0,0 +1,3 @@ +Local = true + +RecordRequests = true diff --git a/acceptance/bundle/deploy/secret-scope/test.toml b/acceptance/bundle/deploy/secret-scope/test.toml index 15d2e7c8d0..52ed70f0e8 100644 --- a/acceptance/bundle/deploy/secret-scope/test.toml +++ b/acceptance/bundle/deploy/secret-scope/test.toml @@ -16,6 +16,10 @@ Response.Body = ''' { "backend_type": "DATABRICKS", "name": "my-secrets-6260d50f-e8ff-4905-8f28-812345678903" + }, + { + "backend_type": "AZURE_KEYVAULT", + "name": "test-secrets-azure-backend" } ] } From bcafeaaf29a5a5521472e45f8c5101cf261efa56 Mon Sep 17 00:00:00 2001 From: Anton Nekipelov <226657+anton-107@users.noreply.github.com> Date: Fri, 2 May 2025 16:30:10 +0200 Subject: [PATCH 22/40] add a test for a setting top level permissions to a secret scope --- .../permissions/databricks.yml.tmpl | 15 +++++++++++++++ .../deploy/secret-scope/permissions/output.txt | 18 ++++++++++++++++++ .../deploy/secret-scope/permissions/script | 4 ++++ 
.../deploy/secret-scope/permissions/test.toml | 5 +++++ .../bundle/deploy/secret-scope/test.toml | 4 ++++ 5 files changed, 46 insertions(+) create mode 100644 acceptance/bundle/deploy/secret-scope/permissions/databricks.yml.tmpl create mode 100644 acceptance/bundle/deploy/secret-scope/permissions/output.txt create mode 100644 acceptance/bundle/deploy/secret-scope/permissions/script create mode 100644 acceptance/bundle/deploy/secret-scope/permissions/test.toml diff --git a/acceptance/bundle/deploy/secret-scope/permissions/databricks.yml.tmpl b/acceptance/bundle/deploy/secret-scope/permissions/databricks.yml.tmpl new file mode 100644 index 0000000000..5f6b4a8ec3 --- /dev/null +++ b/acceptance/bundle/deploy/secret-scope/permissions/databricks.yml.tmpl @@ -0,0 +1,15 @@ +bundle: + name: deploy-secret-scope-with-permissions + +resources: + secret_scopes: + secret_scope_azure: + name: test-secrets-permissions + +permissions: + - user_name: $CURRENT_USER_NAME + level: CAN_MANAGE + - group_name: users + level: CAN_VIEW + - group_name: admins + level: CAN_MANAGE diff --git a/acceptance/bundle/deploy/secret-scope/permissions/output.txt b/acceptance/bundle/deploy/secret-scope/permissions/output.txt new file mode 100644 index 0000000000..d5f32ec902 --- /dev/null +++ b/acceptance/bundle/deploy/secret-scope/permissions/output.txt @@ -0,0 +1,18 @@ + +>>> [CLI] bundle deploy +Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/deploy-secret-scope-with-permissions/default/files... +Deploying resources... +Updating deployment state... +Deployment complete! 
+ +>>> jq -s .[] | select(.path=="/api/2.0/secrets/acls/put") | .body out.requests.txt +{ + "permission": "MANAGE", + "principal": "admins", + "scope": "test-secrets-permissions" +} +{ + "permission": "READ", + "principal": "users", + "scope": "test-secrets-permissions" +} diff --git a/acceptance/bundle/deploy/secret-scope/permissions/script b/acceptance/bundle/deploy/secret-scope/permissions/script new file mode 100644 index 0000000000..51eaebcac3 --- /dev/null +++ b/acceptance/bundle/deploy/secret-scope/permissions/script @@ -0,0 +1,4 @@ +envsubst < databricks.yml.tmpl > databricks.yml +trace $CLI bundle deploy #--log-level TRACE +trace jq -s '.[] | select(.path=="/api/2.0/secrets/acls/put") | .body' out.requests.txt +rm out.requests.txt diff --git a/acceptance/bundle/deploy/secret-scope/permissions/test.toml b/acceptance/bundle/deploy/secret-scope/permissions/test.toml new file mode 100644 index 0000000000..19839c104e --- /dev/null +++ b/acceptance/bundle/deploy/secret-scope/permissions/test.toml @@ -0,0 +1,5 @@ +Local = true +RecordRequests = true + +[[Server]] +Pattern = "PUT /api/2.0/permissions/directories/{objectId}" diff --git a/acceptance/bundle/deploy/secret-scope/test.toml b/acceptance/bundle/deploy/secret-scope/test.toml index 52ed70f0e8..4d7598a227 100644 --- a/acceptance/bundle/deploy/secret-scope/test.toml +++ b/acceptance/bundle/deploy/secret-scope/test.toml @@ -20,6 +20,10 @@ Response.Body = ''' { "backend_type": "AZURE_KEYVAULT", "name": "test-secrets-azure-backend" + }, + { + "backend_type": "DATABRICKS", + "name": "test-secrets-permissions" } ] } From 9453ec94df6918cf4684cb141a531251eaba3267 Mon Sep 17 00:00:00 2001 From: Anton Nekipelov <226657+anton-107@users.noreply.github.com> Date: Fri, 2 May 2025 16:56:11 +0200 Subject: [PATCH 23/40] sort requests output to make the permissions test deterministic --- .../deploy/secret-scope/permissions/output.txt | 14 +++----------- .../bundle/deploy/secret-scope/permissions/script | 2 +- 2 files changed, 
4 insertions(+), 12 deletions(-) diff --git a/acceptance/bundle/deploy/secret-scope/permissions/output.txt b/acceptance/bundle/deploy/secret-scope/permissions/output.txt index d5f32ec902..f00594d235 100644 --- a/acceptance/bundle/deploy/secret-scope/permissions/output.txt +++ b/acceptance/bundle/deploy/secret-scope/permissions/output.txt @@ -5,14 +5,6 @@ Deploying resources... Updating deployment state... Deployment complete! ->>> jq -s .[] | select(.path=="/api/2.0/secrets/acls/put") | .body out.requests.txt -{ - "permission": "MANAGE", - "principal": "admins", - "scope": "test-secrets-permissions" -} -{ - "permission": "READ", - "principal": "users", - "scope": "test-secrets-permissions" -} +>>> jq -s -c .[] | select(.path=="/api/2.0/secrets/acls/put") | .body out.requests.txt +{"permission":"MANAGE","principal":"admins","scope":"test-secrets-permissions"} +{"permission":"READ","principal":"users","scope":"test-secrets-permissions"} diff --git a/acceptance/bundle/deploy/secret-scope/permissions/script b/acceptance/bundle/deploy/secret-scope/permissions/script index 51eaebcac3..b6e6e9d881 100644 --- a/acceptance/bundle/deploy/secret-scope/permissions/script +++ b/acceptance/bundle/deploy/secret-scope/permissions/script @@ -1,4 +1,4 @@ envsubst < databricks.yml.tmpl > databricks.yml trace $CLI bundle deploy #--log-level TRACE -trace jq -s '.[] | select(.path=="/api/2.0/secrets/acls/put") | .body' out.requests.txt +trace jq -s -c '.[] | select(.path=="/api/2.0/secrets/acls/put") | .body' out.requests.txt | sort rm out.requests.txt From 9c03d000894c653b8a7162aa04ae7abf460801d7 Mon Sep 17 00:00:00 2001 From: Anton Nekipelov <226657+anton-107@users.noreply.github.com> Date: Fri, 2 May 2025 17:26:53 +0200 Subject: [PATCH 24/40] disable local-only tests in the cloud; make cloud test deterministic --- .../deploy/secret-scope/backend-type/test.toml | 1 + .../bundle/deploy/secret-scope/output.txt | 17 +++-------------- .../deploy/secret-scope/permissions/test.toml | 1 + 
acceptance/bundle/deploy/secret-scope/script | 2 +- 4 files changed, 6 insertions(+), 15 deletions(-) diff --git a/acceptance/bundle/deploy/secret-scope/backend-type/test.toml b/acceptance/bundle/deploy/secret-scope/backend-type/test.toml index 66e4b6b4ab..34ad85ab0f 100644 --- a/acceptance/bundle/deploy/secret-scope/backend-type/test.toml +++ b/acceptance/bundle/deploy/secret-scope/backend-type/test.toml @@ -1,3 +1,4 @@ Local = true +Cloud = false RecordRequests = true diff --git a/acceptance/bundle/deploy/secret-scope/output.txt b/acceptance/bundle/deploy/secret-scope/output.txt index 12cac83f03..681cbf7335 100644 --- a/acceptance/bundle/deploy/secret-scope/output.txt +++ b/acceptance/bundle/deploy/secret-scope/output.txt @@ -30,20 +30,9 @@ Deployment complete! } >>> [CLI] secrets list-acls my-secrets-[UUID] -[ - { - "permission": "READ", - "principal": "users" - }, - { - "permission": "WRITE", - "principal": "admins" - }, - { - "permission": "MANAGE", - "principal": "[USERNAME]" - } -] +{"permission":"MANAGE","principal":"[USERNAME]"} +{"permission":"READ","principal":"users"} +{"permission":"WRITE","principal":"admins"} >>> [CLI] secrets put-secret my-secrets-[UUID] my-key --string-value my-secret-value diff --git a/acceptance/bundle/deploy/secret-scope/permissions/test.toml b/acceptance/bundle/deploy/secret-scope/permissions/test.toml index 19839c104e..aedaae4045 100644 --- a/acceptance/bundle/deploy/secret-scope/permissions/test.toml +++ b/acceptance/bundle/deploy/secret-scope/permissions/test.toml @@ -1,4 +1,5 @@ Local = true +Cloud = false RecordRequests = true [[Server]] diff --git a/acceptance/bundle/deploy/secret-scope/script b/acceptance/bundle/deploy/secret-scope/script index 5650622132..9a66467832 100644 --- a/acceptance/bundle/deploy/secret-scope/script +++ b/acceptance/bundle/deploy/secret-scope/script @@ -15,7 +15,7 @@ trace $CLI bundle deploy trace $CLI bundle summary --output json | jq '.resources.secret_scopes.secret_scope1' trace $CLI secrets 
list-scopes -o json | jq --arg value ${SECRET_SCOPE_NAME} '.[] | select(.name == $value)' -trace $CLI secrets list-acls ${SECRET_SCOPE_NAME} +trace $CLI secrets list-acls ${SECRET_SCOPE_NAME} | jq -c '.[]' | sort trace $CLI secrets put-secret ${SECRET_SCOPE_NAME} my-key --string-value "my-secret-value" trace $CLI secrets get-secret ${SECRET_SCOPE_NAME} my-key From c8b70a02abaabb830755d59d2e4309ae4dd7398a Mon Sep 17 00:00:00 2001 From: Anton Nekipelov <226657+anton-107@users.noreply.github.com> Date: Tue, 6 May 2025 11:37:44 +0200 Subject: [PATCH 25/40] add enum values for resource.SecretScopePermissionLevel in jsonschema --- .../schema/annotations_openapi_overrides.yml | 9 +++++++++ bundle/schema/jsonschema.json | 15 ++++++++++++++- 2 files changed, 23 insertions(+), 1 deletion(-) diff --git a/bundle/internal/schema/annotations_openapi_overrides.yml b/bundle/internal/schema/annotations_openapi_overrides.yml index 5bba842541..40e75055f0 100644 --- a/bundle/internal/schema/annotations_openapi_overrides.yml +++ b/bundle/internal/schema/annotations_openapi_overrides.yml @@ -45,6 +45,15 @@ github.com/databricks/cli/bundle/config/resources.AppPermissionLevel: CAN_MANAGE - |- CAN_USE +github.com/databricks/cli/bundle/config/resources.SecretScopePermissionLevel: + "_": + "enum": + - |- + READ + - |- + WRITE + - |- + MANAGE github.com/databricks/cli/bundle/config/resources.Cluster: "_": "markdown_description": |- diff --git a/bundle/schema/jsonschema.json b/bundle/schema/jsonschema.json index 2913db1954..03276d77fa 100644 --- a/bundle/schema/jsonschema.json +++ b/bundle/schema/jsonschema.json @@ -1367,7 +1367,20 @@ ] }, "resources.SecretScopePermissionLevel": { - "type": "string" + "oneOf": [ + { + "type": "string", + "enum": [ + "READ", + "WRITE", + "MANAGE" + ] + }, + { + "type": "string", + "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" + } + ] }, "resources.Volume": { "oneOf": [ From 6abed4e19c99fd7ef804d22d96c89fb196a3b044 Mon Sep 17 
00:00:00 2001 From: Anton Nekipelov <226657+anton-107@users.noreply.github.com> Date: Tue, 6 May 2025 13:03:23 +0200 Subject: [PATCH 26/40] minor cleanup: remove debug line and fix spelling in the comment --- bundle/config/mutator/resourcemutator/run_as.go | 2 +- bundle/config/resources_test.go | 3 --- 2 files changed, 1 insertion(+), 4 deletions(-) diff --git a/bundle/config/mutator/resourcemutator/run_as.go b/bundle/config/mutator/resourcemutator/run_as.go index dc553a601c..29be02fd66 100644 --- a/bundle/config/mutator/resourcemutator/run_as.go +++ b/bundle/config/mutator/resourcemutator/run_as.go @@ -129,7 +129,7 @@ func validateRunAs(b *bundle.Bundle) diag.Diagnostics { )) } - // Secret Scopes do not support run_as in the API. + // Secret scopes do not support run_as in the API. if len(b.Config.Resources.SecretScopes) > 0 { diags = diags.Extend(reportRunAsNotSupported( "secret_scopes", diff --git a/bundle/config/resources_test.go b/bundle/config/resources_test.go index 4d4c7f2266..d4544e777c 100644 --- a/bundle/config/resources_test.go +++ b/bundle/config/resources_test.go @@ -208,9 +208,6 @@ func TestResourcesBindSupport(t *testing.T) { // bind operation requires Exists to return true exists, err := r.Exists(ctx, m.WorkspaceClient, "0") - if err != nil { - panic(err) - } assert.NoError(t, err) assert.True(t, exists) } From e8491c50452481f5c784c3a89581db86ee4a2a9f Mon Sep 17 00:00:00 2001 From: Anton Nekipelov <226657+anton-107@users.noreply.github.com> Date: Tue, 6 May 2025 13:42:25 +0200 Subject: [PATCH 27/40] drop the initial_manage_principal field from SecretScope struct --- acceptance/bundle/deploy/secret-scope/output.txt | 1 - bundle/config/resources/secret_scope.go | 3 +-- 2 files changed, 1 insertion(+), 3 deletions(-) diff --git a/acceptance/bundle/deploy/secret-scope/output.txt b/acceptance/bundle/deploy/secret-scope/output.txt index 681cbf7335..9c38113bf4 100644 --- a/acceptance/bundle/deploy/secret-scope/output.txt +++ 
b/acceptance/bundle/deploy/secret-scope/output.txt @@ -8,7 +8,6 @@ Deployment complete! >>> [CLI] bundle summary --output json { "backend_type": "DATABRICKS", - "initial_manage_principal": "", "modified_status": "created", "name": "my-secrets-[UUID]", "permissions": [ diff --git a/bundle/config/resources/secret_scope.go b/bundle/config/resources/secret_scope.go index a68269fd54..40eae6812a 100644 --- a/bundle/config/resources/secret_scope.go +++ b/bundle/config/resources/secret_scope.go @@ -23,8 +23,7 @@ type SecretScopePermission struct { } type SecretScope struct { - Name string `json:"name"` - InitialManagePrincipal string `json:"initial_manage_principal"` + Name string `json:"name"` Permissions []SecretScopePermission `json:"permissions,omitempty"` ModifiedStatus ModifiedStatus `json:"modified_status,omitempty" bundle:"internal"` From 9668d172a8d7f875241df95c93fce019201220b0 Mon Sep 17 00:00:00 2001 From: Anton Nekipelov <226657+anton-107@users.noreply.github.com> Date: Tue, 6 May 2025 14:35:22 +0200 Subject: [PATCH 28/40] make schema --- bundle/schema/jsonschema.json | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/bundle/schema/jsonschema.json b/bundle/schema/jsonschema.json index 03276d77fa..5f7997c41b 100644 --- a/bundle/schema/jsonschema.json +++ b/bundle/schema/jsonschema.json @@ -1312,9 +1312,6 @@ "backend_type": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/workspace.ScopeBackendType" }, - "initial_manage_principal": { - "$ref": "#/$defs/string" - }, "keyvault_metadata": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/workspace.AzureKeyVaultSecretScopeMetadata" }, @@ -1327,8 +1324,7 @@ }, "additionalProperties": false, "required": [ - "name", - "initial_manage_principal" + "name" ] }, { From e53d0709b23104eadda4d43d6caa81e17652efee Mon Sep 17 00:00:00 2001 From: Anton Nekipelov <226657+anton-107@users.noreply.github.com> Date: Tue, 6 May 2025 15:26:38 +0200 Subject: [PATCH 29/40] make docs; 
fix lint --- bundle/docsgen/output/reference.md | 148 ++++++++++- bundle/docsgen/output/resources.md | 351 +++++++++---------------- bundle/internal/schema/annotations.yml | 3 - 3 files changed, 262 insertions(+), 240 deletions(-) diff --git a/bundle/docsgen/output/reference.md b/bundle/docsgen/output/reference.md index 9476587d1a..efc6598c4e 100644 --- a/bundle/docsgen/output/reference.md +++ b/bundle/docsgen/output/reference.md @@ -1,7 +1,7 @@ --- description: 'Configuration reference for databricks.yml' last_update: - date: 2025-04-23 + date: 2025-05-06 --- @@ -440,7 +440,7 @@ resources: - - `secret_scopes` - Map - - + - See [\_](#resourcessecret_scopes). - - `volumes` - Map @@ -449,6 +449,77 @@ resources: ::: +### resources.secret_scopes + +**`Type: Map`** + + + +```yaml +secret_scopes: + : + : +``` + + +:::list-table + +- - Key + - Type + - Description + +- - `backend_type` + - String + - + +- - `keyvault_metadata` + - Map + - See [\_](#resourcessecret_scopesnamekeyvault_metadata). + +- - `name` + - String + - + +- - `permissions` + - Sequence + - See [\_](#resourcessecret_scopesnamepermissions). + +::: + + +### resources.secret_scopes._name_.permissions + +**`Type: Sequence`** + + + + + +:::list-table + +- - Key + - Type + - Description + +- - `group_name` + - String + - + +- - `level` + - String + - + +- - `service_principal_name` + - String + - + +- - `user_name` + - String + - + +::: + + ## run_as **`Type: Map`** @@ -927,7 +998,7 @@ The resource definitions for the target. - - `secret_scopes` - Map - - + - See [\_](#targetsnameresourcessecret_scopes). - - `volumes` - Map @@ -936,6 +1007,77 @@ The resource definitions for the target. ::: +### targets._name_.resources.secret_scopes + +**`Type: Map`** + + + +```yaml +secret_scopes: + : + : +``` + + +:::list-table + +- - Key + - Type + - Description + +- - `backend_type` + - String + - + +- - `keyvault_metadata` + - Map + - See [\_](#targetsnameresourcessecret_scopesnamekeyvault_metadata). 
+ +- - `name` + - String + - + +- - `permissions` + - Sequence + - See [\_](#targetsnameresourcessecret_scopesnamepermissions). + +::: + + +### targets._name_.resources.secret_scopes._name_.permissions + +**`Type: Sequence`** + + + + + +:::list-table + +- - Key + - Type + - Description + +- - `group_name` + - String + - + +- - `level` + - String + - + +- - `service_principal_name` + - String + - + +- - `user_name` + - String + - + +::: + + ### targets._name_.run_as **`Type: Map`** diff --git a/bundle/docsgen/output/resources.md b/bundle/docsgen/output/resources.md index 3738ca85a3..fbdf4fb990 100644 --- a/bundle/docsgen/output/resources.md +++ b/bundle/docsgen/output/resources.md @@ -1,7 +1,7 @@ --- description: 'Learn about resources supported by Databricks Asset Bundles and how to configure them.' last_update: - date: 2025-04-23 + date: 2025-05-06 --- @@ -1195,7 +1195,7 @@ If not specified at cluster creation, a set of default values will be used. - - `use_preemptible_executors` - Boolean - - This field determines whether the spark executors will be scheduled to run on preemptible VMs (when set to true) versus standard compute engine VMs (when set to false; default). Note: Soon to be deprecated, use the 'availability' field instead. + - This field is deprecated - - `zone_id` - String @@ -1226,7 +1226,7 @@ If `cluster_log_conf` is specified, init script logs are sent to `/ - - `dbfs` - Map - - destination needs to be provided. e.g. `{ "dbfs": { "destination" : "dbfs:/home/cluster_log" } }`. See [\_](#clustersnameinit_scriptsdbfs). + - This field is deprecated - - `file` - Map @@ -1272,28 +1272,6 @@ Contains the Azure Data Lake Storage destination path ::: -### clusters._name_.init_scripts.dbfs - -**`Type: Map`** - -destination needs to be provided. e.g. -`{ "dbfs": { "destination" : "dbfs:/home/cluster_log" } }` - - - -:::list-table - -- - Key - - Type - - Description - -- - `destination` - - String - - dbfs destination, e.g. 
`dbfs:/my/path` - -::: - - ### clusters._name_.init_scripts.file **`Type: Map`** @@ -1808,7 +1786,7 @@ jobs: - - `format` - String - - Used to tell what is the format of the job. This field is ignored in Create/Update/Reset calls. When using the Jobs API 2.1 this value is always set to `"MULTI_TASK"`. + - This field is deprecated - - `git_source` - Map @@ -1840,7 +1818,7 @@ jobs: - - `performance_target` - String - - The performance mode on a serverless job. The performance target determines the level of compute performance or cost-efficiency for the run. * `STANDARD`: Enables cost-efficient execution of serverless workloads. * `PERFORMANCE_OPTIMIZED`: Prioritizes fast startup and execution times through rapid scaling and optimized cluster performance. + - The performance mode on a serverless job. This field determines the level of compute performance or cost-efficiency for the run. * `STANDARD`: Enables cost-efficient execution of serverless workloads. * `PERFORMANCE_OPTIMIZED`: Prioritizes fast startup and execution times through rapid scaling and optimized cluster performance. - - `permissions` - Sequence @@ -1960,7 +1938,7 @@ An optional set of email addresses that is notified when runs of this job begin - - `no_alert_for_skipped_runs` - Boolean - - If true, do not send email to recipients specified in `on_failure` if the run is skipped. This field is `deprecated`. Please use the `notification_settings.no_alert_for_skipped_runs` field. + - This field is deprecated - - `on_duration_warning_threshold_exceeded` - Sequence @@ -2008,7 +1986,7 @@ For other serverless tasks, the task environment is required to be specified usi - - `spec` - Map - - The environment entity used to preserve serverless environment side panel and jobs' environment for non-notebook task. In this minimal environment spec, only pip dependencies are supported. See [\_](#jobsnameenvironmentsspec). 
+ - The environment entity used to preserve serverless environment side panel, jobs' environment for non-notebook task, and DLT's environment for classic and serverless pipelines. (Note: DLT uses a copied version of the Environment proto below, at //spark/pipelines/api/protos/copied/libraries-environments-copy.proto) In this minimal environment spec, only pip dependencies are supported. See [\_](#jobsnameenvironmentsspec). ::: @@ -2017,7 +1995,8 @@ For other serverless tasks, the task environment is required to be specified usi **`Type: Map`** -The environment entity used to preserve serverless environment side panel and jobs' environment for non-notebook task. +The environment entity used to preserve serverless environment side panel, jobs' environment for non-notebook task, and DLT's environment for classic and serverless pipelines. +(Note: DLT uses a copied version of the Environment proto below, at //spark/pipelines/api/protos/copied/libraries-environments-copy.proto) In this minimal environment spec, only pip dependencies are supported. @@ -2087,7 +2066,7 @@ Note: dbt and SQL File tasks support only version-controlled sources. If dbt or - - `job_source` - Map - - The source of the job specification in the remote repository when the job is source controlled. See [\_](#jobsnamegit_sourcejob_source). + - This field is deprecated ::: @@ -2113,35 +2092,6 @@ Read-only state of the remote repository at the time the job was run. This field ::: -### jobs._name_.git_source.job_source - -**`Type: Map`** - -The source of the job specification in the remote repository when the job is source controlled. - - - -:::list-table - -- - Key - - Type - - Description - -- - `dirty_state` - - String - - Dirty state indicates the job is not fully synced with the job specification in the remote repository. Possible values are: * `NOT_SYNCED`: The job is not yet synced with the remote job specification. Import the remote job specification from UI to make the job fully synced. 
* `DISCONNECTED`: The job is temporary disconnected from the remote job specification and is allowed for live edit. Import the remote job specification again from UI to make the job fully synced. - -- - `import_from_git_branch` - - String - - Name of the branch which the job is imported from. - -- - `job_config_path` - - String - - Path of the job YAML file that contains the job specification. - -::: - - ### jobs._name_.health **`Type: Map`** @@ -2705,7 +2655,7 @@ If not specified at cluster creation, a set of default values will be used. - - `use_preemptible_executors` - Boolean - - This field determines whether the spark executors will be scheduled to run on preemptible VMs (when set to true) versus standard compute engine VMs (when set to false; default). Note: Soon to be deprecated, use the 'availability' field instead. + - This field is deprecated - - `zone_id` - String @@ -2736,7 +2686,7 @@ If `cluster_log_conf` is specified, init script logs are sent to `/ - - `dbfs` - Map - - destination needs to be provided. e.g. `{ "dbfs": { "destination" : "dbfs:/home/cluster_log" } }`. See [\_](#jobsnamejob_clustersnew_clusterinit_scriptsdbfs). + - This field is deprecated - - `file` - Map @@ -2782,28 +2732,6 @@ Contains the Azure Data Lake Storage destination path ::: -### jobs._name_.job_clusters.new_cluster.init_scripts.dbfs - -**`Type: Map`** - -destination needs to be provided. e.g. -`{ "dbfs": { "destination" : "dbfs:/home/cluster_log" } }` - - - -:::list-table - -- - Key - - Type - - Description - -- - `destination` - - String - - dbfs destination, e.g. `dbfs:/my/path` - -::: - - ### jobs._name_.job_clusters.new_cluster.init_scripts.file **`Type: Map`** @@ -3172,7 +3100,7 @@ Read endpoints return only 100 tasks. If more than 100 tasks are available, you - - `dashboard_task` - Map - - The task runs a DashboardTask when the `dashboard_task` field is present. See [\_](#jobsnametasksdashboard_task). 
+ - The task refreshes a dashboard and sends a snapshot to subscribers. See [\_](#jobsnametasksdashboard_task). - - `dbt_task` - Map @@ -3365,7 +3293,7 @@ The condition task does not require a cluster to execute and does not support re **`Type: Map`** -The task runs a DashboardTask when the `dashboard_task` field is present. +The task refreshes a dashboard and sends a snapshot to subscribers. @@ -3385,7 +3313,7 @@ The task runs a DashboardTask when the `dashboard_task` field is present. - - `warehouse_id` - String - - The warehouse id to execute the dashboard with for the schedule + - Optional: The warehouse id to execute the dashboard with for the schedule. If not specified, the default warehouse of the dashboard will be used. ::: @@ -3531,7 +3459,7 @@ An optional set of email addresses that is notified when runs of this task begin - - `no_alert_for_skipped_runs` - Boolean - - If true, do not send email to recipients specified in `on_failure` if the run is skipped. This field is `deprecated`. Please use the `notification_settings.no_alert_for_skipped_runs` field. + - This field is deprecated - - `on_duration_warning_threshold_exceeded` - Sequence @@ -3734,7 +3662,7 @@ The default value is an empty list. - - `egg` - String - - Deprecated. URI of the egg library to install. Installing Python egg files is deprecated and is not supported in Databricks Runtime 14.0 and above. + - This field is deprecated - - `jar` - String @@ -4328,7 +4256,7 @@ If not specified at cluster creation, a set of default values will be used. - - `use_preemptible_executors` - Boolean - - This field determines whether the spark executors will be scheduled to run on preemptible VMs (when set to true) versus standard compute engine VMs (when set to false; default). Note: Soon to be deprecated, use the 'availability' field instead. 
+ - This field is deprecated - - `zone_id` - String @@ -4359,7 +4287,7 @@ If `cluster_log_conf` is specified, init script logs are sent to `/ - - `dbfs` - Map - - destination needs to be provided. e.g. `{ "dbfs": { "destination" : "dbfs:/home/cluster_log" } }`. See [\_](#jobsnametasksnew_clusterinit_scriptsdbfs). + - This field is deprecated - - `file` - Map @@ -4405,28 +4333,6 @@ Contains the Azure Data Lake Storage destination path ::: -### jobs._name_.tasks.new_cluster.init_scripts.dbfs - -**`Type: Map`** - -destination needs to be provided. e.g. -`{ "dbfs": { "destination" : "dbfs:/home/cluster_log" } }` - - - -:::list-table - -- - Key - - Type - - Description - -- - `destination` - - String - - dbfs destination, e.g. `dbfs:/my/path` - -::: - - ### jobs._name_.tasks.new_cluster.init_scripts.file **`Type: Map`** @@ -4852,11 +4758,11 @@ The task triggers another job when the `run_job_task` field is present. - - `dbt_commands` - Sequence - - An array of commands to execute for jobs with the dbt task, for example `"dbt_commands": ["dbt deps", "dbt seed", "dbt deps", "dbt seed", "dbt run"]` + - This field is deprecated - - `jar_params` - Sequence - - A list of parameters for jobs with Spark JAR tasks, for example `"jar_params": ["john doe", "35"]`. The parameters are used to invoke the main function of the main class specified in the Spark JAR task. If not specified upon `run-now`, it defaults to an empty list. jar_params cannot be specified in conjunction with notebook_params. The JSON representation of this field (for example `{"jar_params":["john doe","35"]}`) cannot exceed 10,000 bytes. Use [Task parameter variables](https://docs.databricks.com/jobs.html#parameter-variables) to set parameters containing information about job runs. + - This field is deprecated - - `job_id` - Integer @@ -4868,7 +4774,7 @@ The task triggers another job when the `run_job_task` field is present. 
- - `notebook_params` - Map - - A map from keys to values for jobs with notebook task, for example `"notebook_params": {"name": "john doe", "age": "35"}`. The map is passed to the notebook and is accessible through the [dbutils.widgets.get](https://docs.databricks.com/dev-tools/databricks-utils.html) function. If not specified upon `run-now`, the triggered run uses the job’s base parameters. notebook_params cannot be specified in conjunction with jar_params. Use [Task parameter variables](https://docs.databricks.com/jobs.html#parameter-variables) to set parameters containing information about job runs. The JSON representation of this field (for example `{"notebook_params":{"name":"john doe","age":"35"}}`) cannot exceed 10,000 bytes. + - This field is deprecated - - `pipeline_params` - Map @@ -4876,19 +4782,19 @@ The task triggers another job when the `run_job_task` field is present. - - `python_named_params` - Map - - + - This field is deprecated - - `python_params` - Sequence - - A list of parameters for jobs with Python tasks, for example `"python_params": ["john doe", "35"]`. The parameters are passed to Python file as command-line parameters. If specified upon `run-now`, it would overwrite the parameters specified in job setting. The JSON representation of this field (for example `{"python_params":["john doe","35"]}`) cannot exceed 10,000 bytes. Use [Task parameter variables](https://docs.databricks.com/jobs.html#parameter-variables) to set parameters containing information about job runs. Important These parameters accept only Latin characters (ASCII character set). Using non-ASCII characters returns an error. Examples of invalid, non-ASCII characters are Chinese, Japanese kanjis, and emojis. + - This field is deprecated - - `spark_submit_params` - Sequence - - A list of parameters for jobs with spark submit task, for example `"spark_submit_params": ["--class", "org.apache.spark.examples.SparkPi"]`. 
The parameters are passed to spark-submit script as command-line parameters. If specified upon `run-now`, it would overwrite the parameters specified in job setting. The JSON representation of this field (for example `{"python_params":["john doe","35"]}`) cannot exceed 10,000 bytes. Use [Task parameter variables](https://docs.databricks.com/jobs.html#parameter-variables) to set parameters containing information about job runs Important These parameters accept only Latin characters (ASCII character set). Using non-ASCII characters returns an error. Examples of invalid, non-ASCII characters are Chinese, Japanese kanjis, and emojis. + - This field is deprecated - - `sql_params` - Map - - A map from keys to values for jobs with SQL task, for example `"sql_params": {"name": "john doe", "age": "35"}`. The SQL alert task does not support custom parameters. + - This field is deprecated ::: @@ -4930,7 +4836,7 @@ The task runs a JAR when the `spark_jar_task` field is present. - - `jar_uri` - String - - Deprecated since 04/2016. Provide a `jar` through the `libraries` field instead. For an example, see :method:jobs/create. + - This field is deprecated - - `main_class_name` - String @@ -4942,7 +4848,7 @@ The task runs a JAR when the `spark_jar_task` field is present. - - `run_as_repl` - Boolean - - Deprecated. A value of `false` is no longer supported. + - This field is deprecated ::: @@ -5377,7 +5283,7 @@ A configuration to trigger a run when certain conditions are met. The default be - - `table` - Map - - Old table trigger settings name. Deprecated in favor of `table_update`. See [\_](#jobsnametriggertable). + - This field is deprecated - - `table_update` - Map @@ -5440,39 +5346,6 @@ Periodic trigger settings. ::: -### jobs._name_.trigger.table - -**`Type: Map`** - -Old table trigger settings name. Deprecated in favor of `table_update`. 
- - - -:::list-table - -- - Key - - Type - - Description - -- - `condition` - - String - - The table(s) condition based on which to trigger a job run. - -- - `min_time_between_triggers_seconds` - - Integer - - If set, the trigger starts a run only after the specified amount of time has passed since the last time the trigger fired. The minimum allowed value is 60 seconds. - -- - `table_names` - - Sequence - - A list of Delta tables to monitor for changes. The table name must be in the format `catalog_name.schema_name.table_name`. - -- - `wait_after_last_change_seconds` - - Integer - - If set, the trigger starts a run only after no table updates have occurred for the specified time and can be used to wait for a series of table updates before triggering a run. The minimum allowed value is 60 seconds. - -::: - - ### jobs._name_.trigger.table_update **`Type: Map`** @@ -5692,7 +5565,7 @@ model_serving_endpoints: - - `rate_limits` - Sequence - - Rate limits to be applied to the serving endpoint. NOTE: this field is deprecated, please use AI Gateway to manage rate limits. See [\_](#model_serving_endpointsnamerate_limits). + - This field is deprecated - - `route_optimized` - Boolean @@ -6127,7 +6000,7 @@ The list of served entities under the serving endpoint config. - - `workload_size` - String - - The workload size of the served entity. The workload size corresponds to a range of provisioned concurrency that the compute autoscales between. A single unit of provisioned concurrency can process one request at a time. Valid workload sizes are "Small" (4 - 4 provisioned concurrency), "Medium" (8 - 16 provisioned concurrency), and "Large" (16 - 64 provisioned concurrency). If scale-to-zero is enabled, the lower bound of the provisioned concurrency for each workload size is 0. + - The workload size of the served entity. The workload size corresponds to a range of provisioned concurrency that the compute autoscales between. 
A single unit of provisioned concurrency can process one request at a time. Valid workload sizes are "Small" (4 - 4 provisioned concurrency), "Medium" (8 - 16 provisioned concurrency), and "Large" (16 - 64 provisioned concurrency). Additional custom workload sizes can also be used when available in the workspace. If scale-to-zero is enabled, the lower bound of the provisioned concurrency for each workload size is 0. - - `workload_type` - String @@ -6606,7 +6479,7 @@ PaLM Config. Only required if the provider is 'palm'. - - `workload_size` - String - - The workload size of the served entity. The workload size corresponds to a range of provisioned concurrency that the compute autoscales between. A single unit of provisioned concurrency can process one request at a time. Valid workload sizes are "Small" (4 - 4 provisioned concurrency), "Medium" (8 - 16 provisioned concurrency), and "Large" (16 - 64 provisioned concurrency). If scale-to-zero is enabled, the lower bound of the provisioned concurrency for each workload size is 0. + - The workload size of the served entity. The workload size corresponds to a range of provisioned concurrency that the compute autoscales between. A single unit of provisioned concurrency can process one request at a time. Valid workload sizes are "Small" (4 - 4 provisioned concurrency), "Medium" (8 - 16 provisioned concurrency), and "Large" (16 - 64 provisioned concurrency). Additional custom workload sizes can also be used when available in the workspace. If scale-to-zero is enabled, the lower bound of the provisioned concurrency for each workload size is 0. - - `workload_type` - String @@ -6694,35 +6567,6 @@ The list of routes that define traffic to each served entity. ::: -### model_serving_endpoints._name_.rate_limits - -**`Type: Sequence`** - -Rate limits to be applied to the serving endpoint. NOTE: this field is deprecated, please use AI Gateway to manage rate limits. 
- - - -:::list-table - -- - Key - - Type - - Description - -- - `calls` - - Integer - - Used to specify how many calls are allowed for a key within the renewal_period. - -- - `key` - - String - - Key field for a serving endpoint rate limit. Currently, only 'user' and 'endpoint' are supported, with 'endpoint' being the default if not specified. - -- - `renewal_period` - - String - - Renewal period field for a serving endpoint rate limit. Currently, only 'minute' is supported. - -::: - - ### model_serving_endpoints._name_.tags **`Type: Sequence`** @@ -7143,7 +6987,7 @@ Cluster settings for this pipeline deployment. - - `cluster_log_conf` - Map - - The configuration for delivering spark logs to a long-term storage destination. Only dbfs destinations are supported. Only one destination can be specified for one cluster. If the conf is given, the logs will be delivered to the destination every `5 mins`. The destination of driver logs is `$destination/$clusterId/driver`, while the destination of executor logs is `$destination/$clusterId/executor`. . See [\_](#pipelinesnameclusterscluster_log_conf). + - The configuration for delivering spark logs to a long-term storage destination. Only dbfs destinations are supported. Only one destination can be specified for one cluster. If the conf is given, the logs will be delivered to the destination every `5 mins`. The destination of driver logs is `$destination/$clusterId/driver`, while the destination of executor logs is `$destination/$clusterId/executor`. See [\_](#pipelinesnameclusterscluster_log_conf). - - `custom_tags` - Map @@ -7179,7 +7023,7 @@ Cluster settings for this pipeline deployment. - - `node_type_id` - String - - This field encodes, through a single value, the resources available to each of the Spark nodes in this cluster. For example, the Spark nodes can be provisioned and optimized for memory or compute intensive workloads. 
A list of available node types can be retrieved by using the :method:clusters/listNodeTypes API call. + - This field encodes, through a single value, the resources available to each of the Spark nodes in this cluster. For example, the Spark nodes can be provisioned and optimized for memory or compute intensive workloads. A list of available node types can be retrieved by using the :method:clusters/listNodeTypes API call. - - `num_workers` - Integer @@ -7191,7 +7035,7 @@ Cluster settings for this pipeline deployment. - - `spark_conf` - Map - - An object containing a set of optional, user-specified Spark configuration key-value pairs. See :method:clusters/create for more details. + - An object containing a set of optional, user-specified Spark configuration key-value pairs. See :method:clusters/create for more details. - - `spark_env_vars` - Map @@ -7229,7 +7073,7 @@ Note: autoscaling works best with DB runtime versions 3.0 or later. - - `mode` - String - - Databricks Enhanced Autoscaling optimizes cluster utilization by automatically allocating cluster resources based on workload volume, with minimal impact to the data processing latency of your pipelines. Enhanced Autoscaling is available for `updates` clusters only. The legacy autoscaling feature is used for `maintenance` clusters. + - Databricks Enhanced Autoscaling optimizes cluster utilization by automatically allocating cluster resources based on workload volume, with minimal impact to the data processing latency of your pipelines. Enhanced Autoscaling is available for `updates` clusters only. The legacy autoscaling feature is used for `maintenance` clusters. ::: @@ -7360,7 +7204,6 @@ Only dbfs destinations are supported. Only one destination can be specified for one cluster. If the conf is given, the logs will be delivered to the destination every `5 mins`. The destination of driver logs is `$destination/$clusterId/driver`, while the destination of executor logs is `$destination/$clusterId/executor`. 
- @@ -7510,7 +7353,7 @@ If not specified at cluster creation, a set of default values will be used. - - `use_preemptible_executors` - Boolean - - This field determines whether the spark executors will be scheduled to run on preemptible VMs (when set to true) versus standard compute engine VMs (when set to false; default). Note: Soon to be deprecated, use the 'availability' field instead. + - This field is deprecated - - `zone_id` - String @@ -7539,7 +7382,7 @@ The configuration for storing init scripts. Any number of destinations can be sp - - `dbfs` - Map - - destination needs to be provided. e.g. `{ "dbfs": { "destination" : "dbfs:/home/cluster_log" } }`. See [\_](#pipelinesnameclustersinit_scriptsdbfs). + - This field is deprecated - - `file` - Map @@ -7585,28 +7428,6 @@ Contains the Azure Data Lake Storage destination path ::: -### pipelines._name_.clusters.init_scripts.dbfs - -**`Type: Map`** - -destination needs to be provided. e.g. -`{ "dbfs": { "destination" : "dbfs:/home/cluster_log" } }` - - - -:::list-table - -- - Key - - Type - - Description - -- - `destination` - - String - - dbfs destination, e.g. `dbfs:/my/path` - -::: - - ### pipelines._name_.clusters.init_scripts.file **`Type: Map`** @@ -7838,7 +7659,7 @@ The definition of a gateway pipeline to support change data capture. - - `connection_id` - String - - [Deprecated, use connection_name instead] Immutable. The Unity Catalog connection that this gateway pipeline uses to communicate with the source. + - This field is deprecated - - `connection_name` - String @@ -7850,7 +7671,7 @@ The definition of a gateway pipeline to support change data capture. - - `gateway_storage_name` - String - - Optional. The Unity Catalog-compatible name for the gateway storage location. This is the destination to use for the data that is extracted by the gateway. Delta Live Tables system will automatically create the storage location under the catalog and schema. + - Optional. 
The Unity Catalog-compatible name for the gateway storage location. This is the destination to use for the data that is extracted by the gateway. Delta Live Tables system will automatically create the storage location under the catalog and schema. - - `gateway_storage_schema` - String @@ -7972,6 +7793,14 @@ Configuration settings to control the ingestion of tables. These settings overri - Type - Description +- - `exclude_columns` + - Sequence + - A list of column names to be excluded for the ingestion. When not specified, include_columns fully controls what columns to be ingested. When specified, all other columns including future ones will be automatically included for ingestion. This field in mutually exclusive with `include_columns`. + +- - `include_columns` + - Sequence + - A list of column names to be included for the ingestion. When not specified, all columns except ones in exclude_columns will be included. Future columns will be automatically included. When specified, all other future columns will be automatically excluded from ingestion. This field in mutually exclusive with `exclude_columns`. + - - `primary_keys` - Sequence - The primary key of the table used to apply changes. @@ -8042,6 +7871,14 @@ Configuration settings to control the ingestion of tables. These settings are ap - Type - Description +- - `exclude_columns` + - Sequence + - A list of column names to be excluded for the ingestion. When not specified, include_columns fully controls what columns to be ingested. When specified, all other columns including future ones will be automatically included for ingestion. This field in mutually exclusive with `include_columns`. + +- - `include_columns` + - Sequence + - A list of column names to be included for the ingestion. When not specified, all columns except ones in exclude_columns will be included. Future columns will be automatically included. When specified, all other future columns will be automatically excluded from ingestion. 
This field in mutually exclusive with `exclude_columns`. + - - `primary_keys` - Sequence - The primary key of the table used to apply changes. @@ -8120,6 +7957,14 @@ Configuration settings to control the ingestion of tables. These settings overri - Type - Description +- - `exclude_columns` + - Sequence + - A list of column names to be excluded for the ingestion. When not specified, include_columns fully controls what columns to be ingested. When specified, all other columns including future ones will be automatically included for ingestion. This field in mutually exclusive with `include_columns`. + +- - `include_columns` + - Sequence + - A list of column names to be included for the ingestion. When not specified, all columns except ones in exclude_columns will be included. Future columns will be automatically included. When specified, all other future columns will be automatically excluded from ingestion. This field in mutually exclusive with `exclude_columns`. + - - `primary_keys` - Sequence - The primary key of the table used to apply changes. @@ -8153,6 +7998,14 @@ Configuration settings to control the ingestion of tables. These settings are ap - Type - Description +- - `exclude_columns` + - Sequence + - A list of column names to be excluded for the ingestion. When not specified, include_columns fully controls what columns to be ingested. When specified, all other columns including future ones will be automatically included for ingestion. This field in mutually exclusive with `include_columns`. + +- - `include_columns` + - Sequence + - A list of column names to be included for the ingestion. When not specified, all columns except ones in exclude_columns will be included. Future columns will be automatically included. When specified, all other future columns will be automatically excluded from ingestion. This field in mutually exclusive with `exclude_columns`. + - - `primary_keys` - Sequence - The primary key of the table used to apply changes. 
@@ -8188,19 +8041,19 @@ Libraries or code needed by this deployment. - - `file` - Map - - The path to a file that defines a pipeline and is stored in the Databricks Repos. . See [\_](#pipelinesnamelibrariesfile). + - The path to a file that defines a pipeline and is stored in the Databricks Repos. See [\_](#pipelinesnamelibrariesfile). - - `jar` - String - - URI of the jar to be installed. Currently only DBFS is supported. + - URI of the jar to be installed. Currently only DBFS is supported. - - `maven` - Map - - Specification of a maven library to be installed. . See [\_](#pipelinesnamelibrariesmaven). + - Specification of a maven library to be installed. See [\_](#pipelinesnamelibrariesmaven). - - `notebook` - Map - - The path to a notebook that defines a pipeline and is stored in the Databricks workspace. . See [\_](#pipelinesnamelibrariesnotebook). + - The path to a notebook that defines a pipeline and is stored in the Databricks workspace. See [\_](#pipelinesnamelibrariesnotebook). - - `whl` - String @@ -8214,7 +8067,6 @@ Libraries or code needed by this deployment. **`Type: Map`** The path to a file that defines a pipeline and is stored in the Databricks Repos. - @@ -8226,7 +8078,7 @@ The path to a file that defines a pipeline and is stored in the Databricks Repos - - `path` - String - - The absolute path of the file. + - The absolute path of the source code. ::: @@ -8236,7 +8088,6 @@ The path to a file that defines a pipeline and is stored in the Databricks Repos **`Type: Map`** Specification of a maven library to be installed. - @@ -8266,7 +8117,6 @@ Specification of a maven library to be installed. **`Type: Map`** The path to a notebook that defines a pipeline and is stored in the Databricks workspace. - @@ -8278,7 +8128,7 @@ The path to a notebook that defines a pipeline and is stored in the Databricks w - - `path` - String - - The absolute path of the notebook. + - The absolute path of the source code. 
::: @@ -8299,11 +8149,11 @@ List of notification settings for this pipeline. - - `alerts` - Sequence - - A list of alerts that trigger the sending of notifications to the configured destinations. The supported alerts are: * `on-update-success`: A pipeline update completes successfully. * `on-update-failure`: Each time a pipeline update fails. * `on-update-fatal-failure`: A pipeline update fails with a non-retryable (fatal) error. * `on-flow-failure`: A single data flow fails. + - A list of alerts that trigger the sending of notifications to the configured destinations. The supported alerts are: * `on-update-success`: A pipeline update completes successfully. * `on-update-failure`: Each time a pipeline update fails. * `on-update-fatal-failure`: A pipeline update fails with a non-retryable (fatal) error. * `on-flow-failure`: A single data flow fails. - - `email_recipients` - Sequence - - A list of email addresses notified when a configured alert is triggered. + - A list of email addresses notified when a configured alert is triggered. ::: @@ -8968,10 +8818,6 @@ secret_scopes: - String - -- - `initial_manage_principal` - - String - - - - - `keyvault_metadata` - Map - See [\_](#secret_scopesnamekeyvault_metadata). @@ -8980,6 +8826,43 @@ secret_scopes: - String - +- - `permissions` + - Sequence + - See [\_](#secret_scopesnamepermissions). 
+ +::: + + +### secret_scopes._name_.permissions + +**`Type: Sequence`** + + + + + +:::list-table + +- - Key + - Type + - Description + +- - `group_name` + - String + - + +- - `level` + - String + - + +- - `service_principal_name` + - String + - + +- - `user_name` + - String + - + ::: diff --git a/bundle/internal/schema/annotations.yml b/bundle/internal/schema/annotations.yml index 9bfb0a0e76..f313993ad3 100644 --- a/bundle/internal/schema/annotations.yml +++ b/bundle/internal/schema/annotations.yml @@ -539,9 +539,6 @@ github.com/databricks/cli/bundle/config/resources.SecretScope: "backend_type": "description": |- PLACEHOLDER - "initial_manage_principal": - "description": |- - PLACEHOLDER "keyvault_metadata": "description": |- PLACEHOLDER From f6cfd0a48ec814ad9db4f62c6ad0ee4fdbb6431a Mon Sep 17 00:00:00 2001 From: Anton Nekipelov <226657+anton-107@users.noreply.github.com> Date: Tue, 6 May 2025 17:25:14 +0200 Subject: [PATCH 30/40] add fields descriptions to the annotations.yml --- bundle/internal/schema/annotations.yml | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/bundle/internal/schema/annotations.yml b/bundle/internal/schema/annotations.yml index f313993ad3..2b62c3eba1 100644 --- a/bundle/internal/schema/annotations.yml +++ b/bundle/internal/schema/annotations.yml @@ -538,29 +538,29 @@ github.com/databricks/cli/bundle/config/resources.PipelinePermission: github.com/databricks/cli/bundle/config/resources.SecretScope: "backend_type": "description": |- - PLACEHOLDER + The backend type the scope will be created with. If not specified, will default to `DATABRICKS` "keyvault_metadata": "description": |- - PLACEHOLDER + The metadata for the secret scope if the `backend_type` is `AZURE_KEYVAULT` "name": "description": |- - PLACEHOLDER + Scope name requested by the user. Scope names are unique. "permissions": "description": |- - PLACEHOLDER + The permissions to apply to the secret scope. Permissions are managed via secret scope ACLs. 
github.com/databricks/cli/bundle/config/resources.SecretScopePermission: "group_name": "description": |- - PLACEHOLDER + The name of the group that has the permission set in level. This field translates to a `principal` field in secret scope ACL. "level": "description": |- - PLACEHOLDER + The allowed permission for user, group, service principal defined for this permission. "service_principal_name": "description": |- - PLACEHOLDER + The name of the service principal that has the permission set in level. This field translates to a `principal` field in secret scope ACL. "user_name": "description": |- - PLACEHOLDER + The name of the user that has the permission set in level. This field translates to a `principal` field in secret scope ACL. github.com/databricks/cli/bundle/config/variable.Lookup: "alert": "description": |- From 77dd5260ec7a46e3b78dd0288806731711305fcf Mon Sep 17 00:00:00 2001 From: Anton Nekipelov <226657+anton-107@users.noreply.github.com> Date: Tue, 6 May 2025 17:27:23 +0200 Subject: [PATCH 31/40] make schema --- bundle/schema/jsonschema.json | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/bundle/schema/jsonschema.json b/bundle/schema/jsonschema.json index 5f7997c41b..c4b29ae09e 100644 --- a/bundle/schema/jsonschema.json +++ b/bundle/schema/jsonschema.json @@ -1310,15 +1310,19 @@ "type": "object", "properties": { "backend_type": { + "description": "The backend type the scope will be created with. If not specified, will default to `DATABRICKS`", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/workspace.ScopeBackendType" }, "keyvault_metadata": { + "description": "The metadata for the secret scope if the `backend_type` is `AZURE_KEYVAULT`", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/workspace.AzureKeyVaultSecretScopeMetadata" }, "name": { + "description": "Scope name requested by the user. 
Scope names are unique.", "$ref": "#/$defs/string" }, "permissions": { + "description": "The permissions to apply to the secret scope. Permissions are managed via secret scope ACLs.", "$ref": "#/$defs/slice/github.com/databricks/cli/bundle/config/resources.SecretScopePermission" } }, @@ -1339,15 +1343,19 @@ "type": "object", "properties": { "group_name": { + "description": "The name of the group that has the permission set in level. This field translates to a `principal` field in secret scope ACL.", "$ref": "#/$defs/string" }, "level": { + "description": "The allowed permission for user, group, service principal defined for this permission.", "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.SecretScopePermissionLevel" }, "service_principal_name": { + "description": "The name of the service principal that has the permission set in level. This field translates to a `principal` field in secret scope ACL.", "$ref": "#/$defs/string" }, "user_name": { + "description": "The name of the user that has the permission set in level. 
This field translates to a `principal` field in secret scope ACL.", "$ref": "#/$defs/string" } }, From c8b49be6d52758714c4c3d835b7c6efb60d8c8a2 Mon Sep 17 00:00:00 2001 From: Anton Nekipelov <226657+anton-107@users.noreply.github.com> Date: Fri, 9 May 2025 12:59:24 +0200 Subject: [PATCH 32/40] Update bundle/config/resources/secret_scope.go Co-authored-by: shreyas-goenka <88374338+shreyas-goenka@users.noreply.github.com> --- bundle/config/resources/secret_scope.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bundle/config/resources/secret_scope.go b/bundle/config/resources/secret_scope.go index 40eae6812a..20a5b09009 100644 --- a/bundle/config/resources/secret_scope.go +++ b/bundle/config/resources/secret_scope.go @@ -59,7 +59,7 @@ func (s SecretScope) ResourceDescription() ResourceDescription { SingularName: "secret_scope", PluralName: "secret_scopes", SingularTitle: "Secret Scope", - PluralTitle: "Secret Scope", + PluralTitle: "Secret Scopes", TerraformResourceName: "databricks_secret_scope", } } From ecc1e077a59bdcb8d65f6a06ea5d64888b2f9cbe Mon Sep 17 00:00:00 2001 From: Anton Nekipelov <226657+anton-107@users.noreply.github.com> Date: Fri, 9 May 2025 15:42:31 +0200 Subject: [PATCH 33/40] add secret_scopes to the allow list for run_as --- bundle/config/mutator/resourcemutator/run_as.go | 10 ---------- bundle/config/mutator/resourcemutator/run_as_test.go | 1 + 2 files changed, 1 insertion(+), 10 deletions(-) diff --git a/bundle/config/mutator/resourcemutator/run_as.go b/bundle/config/mutator/resourcemutator/run_as.go index 29be02fd66..fb5408bfbd 100644 --- a/bundle/config/mutator/resourcemutator/run_as.go +++ b/bundle/config/mutator/resourcemutator/run_as.go @@ -129,16 +129,6 @@ func validateRunAs(b *bundle.Bundle) diag.Diagnostics { )) } - // Secret scopes do not support run_as in the API. 
- if len(b.Config.Resources.SecretScopes) > 0 { - diags = diags.Extend(reportRunAsNotSupported( - "secret_scopes", - b.Config.GetLocation("resources.secret_scopes"), - b.Config.Workspace.CurrentUser.UserName, - identity, - )) - } - return diags } diff --git a/bundle/config/mutator/resourcemutator/run_as_test.go b/bundle/config/mutator/resourcemutator/run_as_test.go index 803f601c3f..0349959dc7 100644 --- a/bundle/config/mutator/resourcemutator/run_as_test.go +++ b/bundle/config/mutator/resourcemutator/run_as_test.go @@ -143,6 +143,7 @@ var allowList = []string{ "registered_models", "experiments", "schemas", + "secret_scopes", "volumes", } From 66b9260933c73665655b740ec0d66dd5084766e5 Mon Sep 17 00:00:00 2001 From: Anton Nekipelov <226657+anton-107@users.noreply.github.com> Date: Fri, 9 May 2025 16:10:48 +0200 Subject: [PATCH 34/40] add fields directly to SecretScope struct without embedding *workspace.SecretScope --- .../resourcemutator/apply_target_mode_test.go | 6 +----- bundle/config/resources/secret_scope.go | 10 +++++----- bundle/config/resources_test.go | 3 +-- bundle/deploy/terraform/convert.go | 4 +--- bundle/deploy/terraform/convert_test.go | 14 +++----------- 5 files changed, 11 insertions(+), 26 deletions(-) diff --git a/bundle/config/mutator/resourcemutator/apply_target_mode_test.go b/bundle/config/mutator/resourcemutator/apply_target_mode_test.go index 24baef533a..575b9fb6a0 100644 --- a/bundle/config/mutator/resourcemutator/apply_target_mode_test.go +++ b/bundle/config/mutator/resourcemutator/apply_target_mode_test.go @@ -6,8 +6,6 @@ import ( "slices" "testing" - "github.com/databricks/databricks-sdk-go/service/workspace" - "github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle/config" "github.com/databricks/cli/bundle/config/resources" @@ -156,9 +154,7 @@ func mockBundle(mode config.Mode) *bundle.Bundle { }, SecretScopes: map[string]*resources.SecretScope{ "secretScope1": { - SecretScope: &workspace.SecretScope{ - Name: "secretScope1", 
- }, + Name: "secretScope1", }, }, }, diff --git a/bundle/config/resources/secret_scope.go b/bundle/config/resources/secret_scope.go index 20a5b09009..8c6507def0 100644 --- a/bundle/config/resources/secret_scope.go +++ b/bundle/config/resources/secret_scope.go @@ -23,12 +23,16 @@ type SecretScopePermission struct { } type SecretScope struct { + // A unique name to identify the secret scope. Name string `json:"name"` Permissions []SecretScopePermission `json:"permissions,omitempty"` ModifiedStatus ModifiedStatus `json:"modified_status,omitempty" bundle:"internal"` - *workspace.SecretScope + // The type of secret scope backend. + BackendType workspace.ScopeBackendType `json:"backend_type,omitempty"` + // The metadata for the secret scope if the type is `AZURE_KEYVAULT` + KeyvaultMetadata *workspace.AzureKeyVaultSecretScopeMetadata `json:"keyvault_metadata,omitempty"` } func (s *SecretScope) UnmarshalJSON(b []byte) error { @@ -80,7 +84,3 @@ func (s SecretScope) GetURL() string { func (s SecretScope) InitializeURL(_ url.URL) { // Secret scopes do not have a URL } - -func (s SecretScope) IsNil() bool { - return s.SecretScope == nil -} diff --git a/bundle/config/resources_test.go b/bundle/config/resources_test.go index d4544e777c..656cb6a680 100644 --- a/bundle/config/resources_test.go +++ b/bundle/config/resources_test.go @@ -172,8 +172,7 @@ func TestResourcesBindSupport(t *testing.T) { }, SecretScopes: map[string]*resources.SecretScope{ "my_secret_scope": { - Name: "0", - SecretScope: &workspace.SecretScope{}, + Name: "0", }, }, } diff --git a/bundle/deploy/terraform/convert.go b/bundle/deploy/terraform/convert.go index 841ed2e67b..befee0b9d9 100644 --- a/bundle/deploy/terraform/convert.go +++ b/bundle/deploy/terraform/convert.go @@ -4,8 +4,6 @@ import ( "context" "fmt" - "github.com/databricks/databricks-sdk-go/service/workspace" - "github.com/databricks/cli/bundle/config" "github.com/databricks/cli/bundle/config/resources" 
"github.com/databricks/cli/bundle/deploy/terraform/tfdyn" @@ -218,7 +216,7 @@ func TerraformToBundle(state *resourcesState, config *config.Root) error { } cur := config.Resources.SecretScopes[resource.Name] if cur == nil { - cur = &resources.SecretScope{ModifiedStatus: resources.ModifiedStatusDeleted, SecretScope: &workspace.SecretScope{}} + cur = &resources.SecretScope{ModifiedStatus: resources.ModifiedStatusDeleted} } cur.Name = instance.Attributes.Name config.Resources.SecretScopes[resource.Name] = cur diff --git a/bundle/deploy/terraform/convert_test.go b/bundle/deploy/terraform/convert_test.go index d5f0ec9606..6467ce69e8 100644 --- a/bundle/deploy/terraform/convert_test.go +++ b/bundle/deploy/terraform/convert_test.go @@ -5,8 +5,6 @@ import ( "reflect" "testing" - "github.com/databricks/databricks-sdk-go/service/workspace" - "github.com/databricks/cli/bundle/config" "github.com/databricks/cli/bundle/config/resources" "github.com/databricks/cli/bundle/internal/tf/schema" @@ -846,9 +844,7 @@ func TestTerraformToBundleEmptyRemoteResources(t *testing.T) { }, SecretScopes: map[string]*resources.SecretScope{ "test_secret_scope": { - SecretScope: &workspace.SecretScope{ - Name: "test_secret_scope", - }, + Name: "test_secret_scope", }, }, }, @@ -1047,14 +1043,10 @@ func TestTerraformToBundleModifiedResources(t *testing.T) { }, SecretScopes: map[string]*resources.SecretScope{ "test_secret_scope": { - SecretScope: &workspace.SecretScope{ - Name: "test_secret_scope", - }, + Name: "test_secret_scope", }, "test_secret_scope_new": { - SecretScope: &workspace.SecretScope{ - Name: "test_secret_scope_new", - }, + Name: "test_secret_scope_new", }, }, }, From 54fc36fea63a8cb871bae259108cb092c4cdd80a Mon Sep 17 00:00:00 2001 From: Anton Nekipelov <226657+anton-107@users.noreply.github.com> Date: Mon, 12 May 2025 14:40:15 +0200 Subject: [PATCH 35/40] change the annotation for service_principal_name field for the secret scope resource --- bundle/docsgen/output/reference.md | 38 
+++---- bundle/docsgen/output/resources.md | 131 ++----------------------- bundle/internal/schema/annotations.yml | 2 +- 3 files changed, 30 insertions(+), 141 deletions(-) diff --git a/bundle/docsgen/output/reference.md b/bundle/docsgen/output/reference.md index efc6598c4e..052ad904e3 100644 --- a/bundle/docsgen/output/reference.md +++ b/bundle/docsgen/output/reference.md @@ -1,7 +1,7 @@ --- description: 'Configuration reference for databricks.yml' last_update: - date: 2025-05-06 + date: 2025-05-12 --- @@ -470,19 +470,19 @@ secret_scopes: - - `backend_type` - String - - + - The backend type the scope will be created with. If not specified, will default to `DATABRICKS` - - `keyvault_metadata` - Map - - See [\_](#resourcessecret_scopesnamekeyvault_metadata). + - The metadata for the secret scope if the `backend_type` is `AZURE_KEYVAULT`. See [\_](#resourcessecret_scopesnamekeyvault_metadata). - - `name` - String - - + - Scope name requested by the user. Scope names are unique. - - `permissions` - Sequence - - See [\_](#resourcessecret_scopesnamepermissions). + - The permissions to apply to the secret scope. Permissions are managed via secret scope ACLs. See [\_](#resourcessecret_scopesnamepermissions). ::: @@ -491,7 +491,7 @@ secret_scopes: **`Type: Sequence`** - +The permissions to apply to the secret scope. Permissions are managed via secret scope ACLs. @@ -503,19 +503,19 @@ secret_scopes: - - `group_name` - String - - + - The name of the group that has the permission set in level. This field translates to a `principal` field in secret scope ACL. - - `level` - String - - + - The allowed permission for user, group, service principal defined for this permission. - - `service_principal_name` - String - - + - The application ID of an active service principal. This field translates to a `principal` field in secret scope ACL. - - `user_name` - String - - + - The name of the user that has the permission set in level. 
This field translates to a `principal` field in secret scope ACL. ::: @@ -1028,19 +1028,19 @@ secret_scopes: - - `backend_type` - String - - + - The backend type the scope will be created with. If not specified, will default to `DATABRICKS` - - `keyvault_metadata` - Map - - See [\_](#targetsnameresourcessecret_scopesnamekeyvault_metadata). + - The metadata for the secret scope if the `backend_type` is `AZURE_KEYVAULT`. See [\_](#targetsnameresourcessecret_scopesnamekeyvault_metadata). - - `name` - String - - + - Scope name requested by the user. Scope names are unique. - - `permissions` - Sequence - - See [\_](#targetsnameresourcessecret_scopesnamepermissions). + - The permissions to apply to the secret scope. Permissions are managed via secret scope ACLs. See [\_](#targetsnameresourcessecret_scopesnamepermissions). ::: @@ -1049,7 +1049,7 @@ secret_scopes: **`Type: Sequence`** - +The permissions to apply to the secret scope. Permissions are managed via secret scope ACLs. @@ -1061,19 +1061,19 @@ secret_scopes: - - `group_name` - String - - + - The name of the group that has the permission set in level. This field translates to a `principal` field in secret scope ACL. - - `level` - String - - + - The allowed permission for user, group, service principal defined for this permission. - - `service_principal_name` - String - - + - The application ID of an active service principal. This field translates to a `principal` field in secret scope ACL. - - `user_name` - String - - + - The name of the user that has the permission set in level. This field translates to a `principal` field in secret scope ACL. ::: diff --git a/bundle/docsgen/output/resources.md b/bundle/docsgen/output/resources.md index fbdf4fb990..76df1a1783 100644 --- a/bundle/docsgen/output/resources.md +++ b/bundle/docsgen/output/resources.md @@ -1,7 +1,7 @@ --- description: 'Learn about resources supported by Databricks Asset Bundles and how to configure them.' 
last_update: - date: 2025-05-06 + date: 2025-05-12 --- @@ -6611,22 +6611,10 @@ models: - Type - Description -- - `creation_timestamp` - - Integer - - Timestamp recorded when this `registered_model` was created. - - - `description` - String - Description of this `registered_model`. -- - `last_updated_timestamp` - - Integer - - Timestamp recorded when metadata for this `registered_model` was last updated. - -- - `latest_versions` - - Sequence - - Collection of latest model versions for each stage. Only contains models with current `READY` status. See [\_](#modelsnamelatest_versions). - - - `name` - String - Unique name for the model. @@ -6639,105 +6627,6 @@ models: - Sequence - Tags: Additional metadata key-value pairs for this `registered_model`. See [\_](#modelsnametags). -- - `user_id` - - String - - User that created this `registered_model` - -::: - - -### models._name_.latest_versions - -**`Type: Sequence`** - -Collection of latest model versions for each stage. -Only contains models with current `READY` status. - - - -:::list-table - -- - Key - - Type - - Description - -- - `creation_timestamp` - - Integer - - Timestamp recorded when this `model_version` was created. - -- - `current_stage` - - String - - Current stage for this `model_version`. - -- - `description` - - String - - Description of this `model_version`. - -- - `last_updated_timestamp` - - Integer - - Timestamp recorded when metadata for this `model_version` was last updated. - -- - `name` - - String - - Unique name of the model - -- - `run_id` - - String - - MLflow run ID used when creating `model_version`, if `source` was generated by an experiment run stored in MLflow tracking server. 
- -- - `run_link` - - String - - Run Link: Direct link to the run that generated this version - -- - `source` - - String - - URI indicating the location of the source model artifacts, used when creating `model_version` - -- - `status` - - String - - Current status of `model_version` - -- - `status_message` - - String - - Details on current `status`, if it is pending or failed. - -- - `tags` - - Sequence - - Tags: Additional metadata key-value pairs for this `model_version`. See [\_](#modelsnamelatest_versionstags). - -- - `user_id` - - String - - User that created this `model_version`. - -- - `version` - - String - - Model's version number. - -::: - - -### models._name_.latest_versions.tags - -**`Type: Sequence`** - -Tags: Additional metadata key-value pairs for this `model_version`. - - - -:::list-table - -- - Key - - Type - - Description - -- - `key` - - String - - The tag key. - -- - `value` - - String - - The tag value. - ::: @@ -8816,19 +8705,19 @@ secret_scopes: - - `backend_type` - String - - + - The backend type the scope will be created with. If not specified, will default to `DATABRICKS` - - `keyvault_metadata` - Map - - See [\_](#secret_scopesnamekeyvault_metadata). + - The metadata for the secret scope if the `backend_type` is `AZURE_KEYVAULT`. See [\_](#secret_scopesnamekeyvault_metadata). - - `name` - String - - + - Scope name requested by the user. Scope names are unique. - - `permissions` - Sequence - - See [\_](#secret_scopesnamepermissions). + - The permissions to apply to the secret scope. Permissions are managed via secret scope ACLs. See [\_](#secret_scopesnamepermissions). ::: @@ -8837,7 +8726,7 @@ secret_scopes: **`Type: Sequence`** - +The permissions to apply to the secret scope. Permissions are managed via secret scope ACLs. @@ -8849,19 +8738,19 @@ secret_scopes: - - `group_name` - String - - + - The name of the group that has the permission set in level. This field translates to a `principal` field in secret scope ACL. 
- - `level` - String - - + - The allowed permission for user, group, service principal defined for this permission. - - `service_principal_name` - String - - + - The application ID of an active service principal. This field translates to a `principal` field in secret scope ACL. - - `user_name` - String - - + - The name of the user that has the permission set in level. This field translates to a `principal` field in secret scope ACL. ::: diff --git a/bundle/internal/schema/annotations.yml b/bundle/internal/schema/annotations.yml index 2b62c3eba1..b9955647a9 100644 --- a/bundle/internal/schema/annotations.yml +++ b/bundle/internal/schema/annotations.yml @@ -557,7 +557,7 @@ github.com/databricks/cli/bundle/config/resources.SecretScopePermission: The allowed permission for user, group, service principal defined for this permission. "service_principal_name": "description": |- - The name of the service principal that has the permission set in level. This field translates to a `principal` field in secret scope ACL. + The application ID of an active service principal. This field translates to a `principal` field in secret scope ACL. "user_name": "description": |- The name of the user that has the permission set in level. This field translates to a `principal` field in secret scope ACL. 
From c920f6121cef16fbe17e69a13deb474f671c8947 Mon Sep 17 00:00:00 2001
From: Anton Nekipelov <226657+anton-107@users.noreply.github.com>
Date: Mon, 12 May 2025 14:51:17 +0200
Subject: [PATCH 36/40] add a comment on using list api to check if a scope
 exists

---
 bundle/config/resources/secret_scope.go | 7 +++++++
 1 file changed, 7 insertions(+)

diff --git a/bundle/config/resources/secret_scope.go b/bundle/config/resources/secret_scope.go
index 8c6507def0..405d95f4ac 100644
--- a/bundle/config/resources/secret_scope.go
+++ b/bundle/config/resources/secret_scope.go
@@ -44,6 +44,13 @@ func (s SecretScope) MarshalJSON() ([]byte, error) {
 }
 
 func (s SecretScope) Exists(ctx context.Context, w *databricks.WorkspaceClient, name string) (bool, error) {
+	// NOTE: Scope lookup by name is not directly supported by the Secret scopes API.
+	// As of May 2025 there is no direct API method to retrieve a scope using its name as an identifier.
+	// While scope names serve as unique identifiers, the API only provides:
+	// - List operations that return a list of scopes
+	// - Other operational methods (e.g., reading a secret from a scope and parsing error messages)
+	//
+	// The indirect methods are not semantically ideal for simple existence checks, so we use the list API here.
 	scopes, err := w.Secrets.ListScopesAll(ctx)
 	if err != nil {
 		return false, nil

From ecb90df647e8846c6ae3767eb525be9df98e5be3 Mon Sep 17 00:00:00 2001
From: Anton Nekipelov <226657+anton-107@users.noreply.github.com>
Date: Mon, 12 May 2025 14:52:09 +0200
Subject: [PATCH 37/40] fix NEXT_CHANGELOG.md bullet formatting

---
 NEXT_CHANGELOG.md | 1 +
 1 file changed, 1 insertion(+)

diff --git a/NEXT_CHANGELOG.md b/NEXT_CHANGELOG.md
index 7565481c47..013a741375 100644
--- a/NEXT_CHANGELOG.md
+++ b/NEXT_CHANGELOG.md
@@ -15,5 +15,6 @@
 * Fixed normalising requirements file path in dependencies section ([#2861](https://github.com/databricks/cli/pull/2861))
 * Fix default-python template not to add environments when 
serverless=yes and include\_python=no ([#2866](https://github.com/databricks/cli/pull/2866)) * Fixed handling of Unicode characters in Python support ([#2873](https://github.com/databricks/cli/pull/2873)) +* Added support for secret scopes in DABs ([#2744](https://github.com/databricks/cli/pull/2744)) ### API Changes From ed2979ae66792b0a9c23919fb171f576bb015f3d Mon Sep 17 00:00:00 2001 From: Anton Nekipelov <226657+anton-107@users.noreply.github.com> Date: Mon, 12 May 2025 15:21:02 +0200 Subject: [PATCH 38/40] add a comment on inlining fields to SecretScope struct --- bundle/config/resources/secret_scope.go | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/bundle/config/resources/secret_scope.go b/bundle/config/resources/secret_scope.go index 405d95f4ac..d9922a1f97 100644 --- a/bundle/config/resources/secret_scope.go +++ b/bundle/config/resources/secret_scope.go @@ -29,6 +29,13 @@ type SecretScope struct { Permissions []SecretScopePermission `json:"permissions,omitempty"` ModifiedStatus ModifiedStatus `json:"modified_status,omitempty" bundle:"internal"` + // Secret scope configuration is explicitly defined here with individual fields + // to maintain API stability and prevent unintended configuration changes. + // This approach decouples our configuration from potential upstream model/SDK changes + // to `workspace.SecretScope`. While the upstream type serves as a response payload + // for workspace.ListScopesResponse, we adopt its field naming conventions + // for better developer experience compared to `workspace.CreateScope`. + // The type of secret scope backend. 
BackendType workspace.ScopeBackendType `json:"backend_type,omitempty"` // The metadata for the secret scope if the type is `AZURE_KEYVAULT` From 056a1cfe950a27e752a9157a334baf1b7564e1fc Mon Sep 17 00:00:00 2001 From: Anton Nekipelov <226657+anton-107@users.noreply.github.com> Date: Mon, 12 May 2025 15:39:07 +0200 Subject: [PATCH 39/40] make schema --- bundle/schema/jsonschema.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bundle/schema/jsonschema.json b/bundle/schema/jsonschema.json index c4b29ae09e..8d305809e2 100644 --- a/bundle/schema/jsonschema.json +++ b/bundle/schema/jsonschema.json @@ -1351,7 +1351,7 @@ "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.SecretScopePermissionLevel" }, "service_principal_name": { - "description": "The name of the service principal that has the permission set in level. This field translates to a `principal` field in secret scope ACL.", + "description": "The application ID of an active service principal. This field translates to a `principal` field in secret scope ACL.", "$ref": "#/$defs/string" }, "user_name": { From fe13c7e5f1a4826722969f1706e5b14938adb295 Mon Sep 17 00:00:00 2001 From: Anton Nekipelov <226657+anton-107@users.noreply.github.com> Date: Wed, 14 May 2025 11:55:30 +0200 Subject: [PATCH 40/40] fix bundle name in bundle/deployment/bind/secret-scope acc test --- .../bundle/deployment/bind/secret-scope/databricks.yml.tmpl | 2 +- acceptance/bundle/deployment/bind/secret-scope/output.txt | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/acceptance/bundle/deployment/bind/secret-scope/databricks.yml.tmpl b/acceptance/bundle/deployment/bind/secret-scope/databricks.yml.tmpl index 1377bb4923..b615c2c5f6 100644 --- a/acceptance/bundle/deployment/bind/secret-scope/databricks.yml.tmpl +++ b/acceptance/bundle/deployment/bind/secret-scope/databricks.yml.tmpl @@ -1,5 +1,5 @@ bundle: - name: bind-dashboard-test-$UNIQUE_NAME + name: bind-secret-scope-test-$UNIQUE_NAME 
resources: secret_scopes: diff --git a/acceptance/bundle/deployment/bind/secret-scope/output.txt b/acceptance/bundle/deployment/bind/secret-scope/output.txt index d8e3368bda..53f66c3c8c 100644 --- a/acceptance/bundle/deployment/bind/secret-scope/output.txt +++ b/acceptance/bundle/deployment/bind/secret-scope/output.txt @@ -6,7 +6,7 @@ Updating deployment state... Successfully bound secret_scope with an id 'test-secret-scope-[UUID]'. Run 'bundle deploy' to deploy changes to your workspace >>> [CLI] bundle deploy -Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/bind-dashboard-test-[UNIQUE_NAME]/default/files... +Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/bind-secret-scope-test-[UNIQUE_NAME]/default/files... Deploying resources... Updating deployment state... Deployment complete! @@ -21,7 +21,7 @@ Deployment complete! Updating deployment state... >>> [CLI] bundle destroy --auto-approve -All files and directories at the following location will be deleted: /Workspace/Users/[USERNAME]/.bundle/bind-dashboard-test-[UNIQUE_NAME]/default +All files and directories at the following location will be deleted: /Workspace/Users/[USERNAME]/.bundle/bind-secret-scope-test-[UNIQUE_NAME]/default Deleting files... Destroy complete!