Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions NEXT_CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -3,12 +3,14 @@
## Release v0.270.0

### Notable Changes
* Add 'databricks bundle plan' command. This command shows the deployment plan for the current bundle configuration without making any changes. ([#3530](https://github.com/databricks/cli/pull/3530))

### CLI

### Dependency updates

### Bundles
* Add 'databricks bundle plan' command ([#3530](https://github.com/databricks/cli/pull/3530))
* Add new Lakeflow Pipelines support for bundle generate ([#3568](https://github.com/databricks/cli/pull/3568))
* Introduce new bundle variable: `${workspace.current_user.domain_friendly_name}` ([#3623](https://github.com/databricks/cli/pull/3623))

Expand Down
1 change: 1 addition & 0 deletions acceptance/bundle/debug/output.txt
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@ Usage:
databricks bundle debug [command]

Available Commands:
plan Show deployment plan in JSON format (experimental)
refschema Dump all relevant fields all bundle resources

Flags:
Expand Down
1 change: 1 addition & 0 deletions acceptance/bundle/help/bundle/output.txt
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,7 @@ Available Commands:
generate Generate bundle configuration
init Initialize using a bundle template
open Open a resource in the browser
plan Show deployment plan
run Run a job, pipeline update or app
schema Generate JSON Schema for bundle configuration
summary Summarize resources deployed by this bundle
Expand Down
8 changes: 4 additions & 4 deletions acceptance/bundle/resources/jobs/update/output.txt
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ create jobs.foo

Plan: 1 to add, 0 to change, 0 to delete, 0 unchanged

>>> [CLI] bundle plan -o json
>>> [CLI] bundle debug plan
{
"plan": {
"resources.jobs.foo": {
Expand All @@ -22,7 +22,7 @@ Deployment complete!
>>> [CLI] bundle plan
Plan: 0 to add, 0 to change, 0 to delete, 1 unchanged

>>> [CLI] bundle plan -o json
>>> [CLI] bundle debug plan
{
"plan": {}
}
Expand Down Expand Up @@ -71,7 +71,7 @@ update jobs.foo

Plan: 0 to add, 1 to change, 0 to delete, 0 unchanged

>>> [CLI] bundle plan -o json
>>> [CLI] bundle debug plan
{
"plan": {
"resources.jobs.foo": {
Expand All @@ -89,7 +89,7 @@ Deployment complete!
>>> [CLI] bundle plan
Plan: 0 to add, 0 to change, 0 to delete, 1 unchanged

>>> [CLI] bundle plan -o json
>>> [CLI] bundle debug plan
{
"plan": {}
}
Expand Down
8 changes: 4 additions & 4 deletions acceptance/bundle/resources/jobs/update/script
Original file line number Diff line number Diff line change
@@ -1,9 +1,9 @@
echo "*" > .gitignore
trace $CLI bundle plan
trace $CLI bundle plan -o json
trace $CLI bundle debug plan
trace $CLI bundle deploy
trace $CLI bundle plan
trace $CLI bundle plan -o json
trace $CLI bundle debug plan

print_requests() {
jq --sort-keys 'select(.method != "GET" and (.path | contains("/jobs")))' < out.requests.txt
Expand All @@ -16,10 +16,10 @@ trace print_requests
title "Update trigger.periodic.unit and re-deploy"
trace update_file.py databricks.yml DAYS HOURS
trace $CLI bundle plan
trace $CLI bundle plan -o json
trace $CLI bundle debug plan
trace $CLI bundle deploy
trace $CLI bundle plan
trace $CLI bundle plan -o json
trace $CLI bundle debug plan
trace print_requests

title "Fetch job ID and verify remote state"
Expand Down
4 changes: 2 additions & 2 deletions acceptance/bundle/resources/pipelines/recreate/_script
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@ trace cat databricks.yml
touch foo.py
touch bar.py
trace $CLI bundle plan # should show 'create'
trace $CLI bundle plan -o json
trace $CLI bundle debug plan
trace $CLI bundle deploy

ppid1=`read_id.py pipelines my`
Expand All @@ -17,7 +17,7 @@ trace print_requests

trace update_file.py databricks.yml $CONFIG_UPDATE
trace $CLI bundle plan # should show 'recreate'
trace $CLI bundle plan -o json
trace $CLI bundle debug plan
trace $CLI bundle deploy --auto-approve
trace print_requests

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@ create pipelines.my

Plan: 1 to add, 0 to change, 0 to delete, 0 unchanged

>>> [CLI] bundle plan -o json
>>> [CLI] bundle debug plan
{
"plan": {
"resources.pipelines.my": {
Expand Down Expand Up @@ -64,7 +64,7 @@ recreate pipelines.my

Plan: 1 to add, 0 to change, 1 to delete, 0 unchanged

>>> [CLI] bundle plan -o json
>>> [CLI] bundle debug plan
{
"plan": {
"resources.pipelines.my": {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@ create pipelines.my

Plan: 1 to add, 0 to change, 0 to delete, 0 unchanged

>>> [CLI] bundle plan -o json
>>> [CLI] bundle debug plan
{
"plan": {
"resources.pipelines.my": {
Expand Down Expand Up @@ -69,7 +69,7 @@ recreate pipelines.my

Plan: 1 to add, 0 to change, 1 to delete, 0 unchanged

>>> [CLI] bundle plan -o json
>>> [CLI] bundle debug plan
{
"plan": {
"resources.pipelines.my": {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@ create pipelines.my

Plan: 1 to add, 0 to change, 0 to delete, 0 unchanged

>>> [CLI] bundle plan -o json
>>> [CLI] bundle debug plan
{
"plan": {
"resources.pipelines.my": {
Expand Down Expand Up @@ -64,7 +64,7 @@ recreate pipelines.my

Plan: 1 to add, 0 to change, 1 to delete, 0 unchanged

>>> [CLI] bundle plan -o json
>>> [CLI] bundle debug plan
{
"plan": {
"resources.pipelines.my": {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,7 @@ update volumes.volume1

Plan: 0 to add, 1 to change, 0 to delete, 0 unchanged

>>> [CLI] bundle plan -o json
>>> [CLI] bundle debug plan

>>> [CLI] bundle deploy
Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files...
Expand Down
2 changes: 1 addition & 1 deletion acceptance/bundle/resources/volumes/change-name/script
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@ trace update_file.py databricks.yml myvolume mynewvolume

trace $CLI bundle plan
# terraform marks this as "update", direct marks this as "update_with_id"
trace $CLI bundle plan -o json > out.plan.$DATABRICKS_BUNDLE_ENGINE.txt
trace $CLI bundle debug plan > out.plan.$DATABRICKS_BUNDLE_ENGINE.txt
trace $CLI bundle deploy
trace print_requests

Expand Down
1 change: 1 addition & 0 deletions cmd/bundle/debug.go
Original file line number Diff line number Diff line change
Expand Up @@ -15,5 +15,6 @@ func newDebugCommand() *cobra.Command {
}
cmd.AddCommand(debug.NewTerraformCommand())
cmd.AddCommand(debug.NewRefSchemaCommand())
cmd.AddCommand(debug.NewPlanCommand())
return cmd
}
44 changes: 44 additions & 0 deletions cmd/bundle/debug/plan.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,44 @@
package debug

import (
"encoding/json"
"fmt"

"github.com/databricks/cli/cmd/bundle/utils"
"github.com/databricks/cli/cmd/root"
"github.com/databricks/cli/libs/logdiag"
"github.com/spf13/cobra"
)

// NewPlanCommand returns the experimental "debug plan" command, which
// computes the deployment plan for the current bundle configuration and
// renders it as indented JSON on the command's stdout.
//
// It configures the bundle (with variable overrides from flags), delegates
// plan computation to utils.GetPlan, and reports accumulated diagnostics
// via root.ErrAlreadyPrinted so they are not printed twice.
func NewPlanCommand() *cobra.Command {
	return &cobra.Command{
		Use:   "plan",
		Short: "Show deployment plan in JSON format (experimental)",
		Long:  "Show the deployment plan for the current bundle configuration. This command is experimental and may change without notice.",
		Args:  root.NoArgs,
		RunE: func(cmd *cobra.Command, args []string) error {
			// Diagnostics are collected on the context; subsequent phases
			// check logdiag.HasError rather than returning errors directly.
			ctx := logdiag.InitContext(cmd.Context())
			cmd.SetContext(ctx)

			b := utils.ConfigureBundleWithVariables(cmd)
			if b == nil || logdiag.HasError(ctx) {
				return root.ErrAlreadyPrinted
			}

			plan, err := utils.GetPlan(ctx, b)
			if err != nil {
				return err
			}

			rendered, err := json.MarshalIndent(plan, "", " ")
			if err != nil {
				return err
			}
			fmt.Fprintln(cmd.OutOrStdout(), string(rendered))

			// Diagnostics may have been emitted while producing output;
			// surface them without double-printing.
			if logdiag.HasError(ctx) {
				return root.ErrAlreadyPrinted
			}
			return nil
		},
	}
}
43 changes: 15 additions & 28 deletions cmd/bundle/plan.go
Original file line number Diff line number Diff line change
Expand Up @@ -3,13 +3,12 @@ package bundle
import (
"context"
"encoding/json"
"errors"
"fmt"
"strings"

"github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/config/validate"
"github.com/databricks/cli/bundle/deployplan"
"github.com/databricks/cli/bundle/phases"
"github.com/databricks/cli/cmd/bundle/utils"
"github.com/databricks/cli/cmd/root"
"github.com/databricks/cli/libs/flags"
Expand All @@ -21,12 +20,11 @@ func newPlanCommand() *cobra.Command {
cmd := &cobra.Command{
Use: "plan",
Copy link
Copy Markdown
Contributor

@lennartkats-db lennartkats-db Sep 2, 2025

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

What is the exact expected behavior of bundle plan?

Running it locally:

$ databricks bundle plan
Building python_artifact...
Uploading dist/my_project-0.0.1-py3-none-any.whl...
create jobs.my_project_job
delete jobs.sample_job
create pipelines.my_project_pipeline
delete pipelines.sample_etl
  • It seems odd that it builds artifacts as a side effect (but maybe this is necessary?)
  • It seems odd that it uploads artifacts as a side effect (edit: this appears to be fixed in Fix bundle plan to not create workspace objects or upload the files #3442)
  • Should there not be a summary line, like in Terraform (Plan: 2 to add, 0 to change, 2 to destroy)?
  • Should we not use the term "destroy" instead of "delete"? Since that is also the term used for the databricks bundle destroy?

We need to be certain of the interface before making it public

cc @denik

Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Building an artifact is unfortunately required, because we don't know the final filename of the artifact and we need that filename in the resource configuration.

Uploading the artifact is certainly a bug and we should have acceptance test + fix for it. I think we should fix it before the release.

Summary is nice to have but could be a follow up.

Regarding delete/destroy, this is not new, this is what DABs always used in other places:

The following resources will be deleted:
  delete app myapp

Copy link
Copy Markdown
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

@lennartkats-db are you sure you're using latest main? Not uploading artifacts as part of the plan was fixed here #3442

Copy link
Copy Markdown
Contributor

@lennartkats-db lennartkats-db Sep 2, 2025

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Cool, yes, I'm not using the latest main, good to see uploading is fixed.

It still seems to build the artifact though, but I suppose that might be necessary? It's quite unfortunate it prints a message about but if it is necessary then it should offer that transparency.

And delete is indeed used in other places. I also kind of like that word. But we should treat this moment as the time where we decide to cast that in
stone.

The summary I'd really want to have and seems easy to add.

One more observation: the output of the command should go to stdout, not stderr.

Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Should we not use the term "destroy" instead of "delete"? Since that is also the term used for the databricks bundle destroy?

Deletion can happen as a result of "bundle deploy" as well. It's a more specific and recognizable term IMO. All APIs and SDKs call this deletion, not destruction.

Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Cool. So how about

So output could look like

$ databricks bundle plan
Building python_artifact...  ### this would still go to stderr
Plan: 2 to add, 0 to change, 2 to delete
  create jobs.my_project_job
  delete jobs.sample_job
  create pipelines.my_project_pipeline
  delete pipelines.sample_etl

(I'd also be up for variations of this exact output, but I'm proposing example output to try close on this.)

Copy link
Copy Markdown
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Here's PR to improve the plan output #3546

Short: "Show deployment plan",
Args: root.NoArgs,
Long: `Show the deployment plan for the current bundle configuration.

// Output format may change without notice; main use case is in acceptance tests.
// Today, this command also uploads libraries, which is not the intent here. We need to refactor
// libraries.Upload() mutator to separate config mutation with actual upload.
Hidden: true,
This command builds the bundle and displays the actions which will be done on resources that would be deployed, without making any changes.
It is useful for previewing changes before running 'bundle deploy'.`,
Args: root.NoArgs,
}

var force bool
Expand All @@ -36,6 +34,13 @@ func newPlanCommand() *cobra.Command {
cmd.Flags().StringVarP(&clusterId, "cluster-id", "c", "", "Override cluster in the deployment with the given cluster ID.")
cmd.Flags().MarkDeprecated("compute-id", "use --cluster-id instead")

cmd.PreRunE = func(cmd *cobra.Command, args []string) error {
if f := cmd.Flag("output"); f != nil && f.Changed {
return errors.New("the -o/--output flag is not supported for this command. Use an experimental 'databricks bundle debug plan' command instead")
}
return nil
}

cmd.RunE = func(cmd *cobra.Command, args []string) error {
ctx := logdiag.InitContext(cmd.Context())
cmd.SetContext(ctx)
Expand All @@ -57,27 +62,9 @@ func newPlanCommand() *cobra.Command {
}
})

phases.Initialize(ctx, b)

if logdiag.HasError(ctx) {
return root.ErrAlreadyPrinted
}

bundle.ApplyContext(ctx, b, validate.FastValidate())

if logdiag.HasError(ctx) {
return root.ErrAlreadyPrinted
}

phases.Build(ctx, b)

if logdiag.HasError(ctx) {
return root.ErrAlreadyPrinted
}

plan := phases.Plan(ctx, b)
if logdiag.HasError(ctx) {
return root.ErrAlreadyPrinted
plan, err := utils.GetPlan(ctx, b)
if err != nil {
return err
}

// Count actions by type and collect formatted actions
Expand Down
27 changes: 27 additions & 0 deletions cmd/bundle/utils/utils.go
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,9 @@ import (
"context"

"github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/config/validate"
"github.com/databricks/cli/bundle/deployplan"
"github.com/databricks/cli/bundle/phases"
"github.com/databricks/cli/cmd/root"
"github.com/databricks/cli/libs/diag"
"github.com/databricks/cli/libs/logdiag"
Expand Down Expand Up @@ -38,3 +41,27 @@ func ConfigureBundleWithVariables(cmd *cobra.Command) *bundle.Bundle {

return b
}

// GetPlan runs the phases required to produce a deployment plan for bundle b:
// initialize, fast validation, build, then plan computation. After each step
// it consults the context's diagnostic log; on any accumulated error it
// returns root.ErrAlreadyPrinted (the diagnostics have already been emitted).
func GetPlan(ctx context.Context, b *bundle.Bundle) (*deployplan.Plan, error) {
	// Preparatory steps executed in order; each may record diagnostics
	// on the context instead of returning an error.
	prepare := []func(){
		func() { phases.Initialize(ctx, b) },
		func() { bundle.ApplyContext(ctx, b, validate.FastValidate()) },
		func() { phases.Build(ctx, b) },
	}
	for _, step := range prepare {
		step()
		if logdiag.HasError(ctx) {
			return nil, root.ErrAlreadyPrinted
		}
	}

	plan := phases.Plan(ctx, b)
	if logdiag.HasError(ctx) {
		return nil, root.ErrAlreadyPrinted
	}
	return plan, nil
}
Loading