From ceb62c091b6b1702962f1db6acd8faadf4027a55 Mon Sep 17 00:00:00 2001 From: Andrew Nester Date: Thu, 18 Jan 2024 15:14:24 +0000 Subject: [PATCH 01/16] Added bundle bind command --- bundle/config/resources.go | 15 ++++++ bundle/deploy/terraform/import.go | 39 ++++++++++++++ bundle/phases/bind.go | 24 +++++++++ cmd/bundle/bind.go | 85 +++++++++++++++++++++++++++++++ cmd/bundle/bundle.go | 1 + 5 files changed, 164 insertions(+) create mode 100644 bundle/deploy/terraform/import.go create mode 100644 bundle/phases/bind.go create mode 100644 cmd/bundle/bind.go diff --git a/bundle/config/resources.go b/bundle/config/resources.go index 2b453c666e..7b91a2cef7 100644 --- a/bundle/config/resources.go +++ b/bundle/config/resources.go @@ -168,3 +168,18 @@ func (r *Resources) Merge() error { } return nil } + +func (r *Resources) FindResourceByConfigKey(key string) (string, error) { + for k := range r.Jobs { + if k == key { + return "databricks_job", nil + } + } + for k := range r.Pipelines { + if k == key { + return "databricks_pipeline", nil + } + } + + return "", fmt.Errorf("no such resource: %s", key) +} diff --git a/bundle/deploy/terraform/import.go b/bundle/deploy/terraform/import.go new file mode 100644 index 0000000000..a2313120eb --- /dev/null +++ b/bundle/deploy/terraform/import.go @@ -0,0 +1,39 @@ +package terraform + +import ( + "context" + "fmt" + + "github.com/databricks/cli/bundle" + "github.com/hashicorp/terraform-exec/tfexec" +) + +type importResource struct { + resourceType string + resourceKey string + resourceId string +} + +// Apply implements bundle.Mutator. 
+func (m *importResource) Apply(ctx context.Context, b *bundle.Bundle) error { + tf := b.Terraform + if tf == nil { + return fmt.Errorf("terraform not initialized") + } + + err := tf.Init(ctx, tfexec.Upgrade(true)) + if err != nil { + return fmt.Errorf("terraform init: %w", err) + } + + return tf.Import(ctx, fmt.Sprintf("%s.%s", m.resourceType, m.resourceKey), m.resourceId) +} + +// Name implements bundle.Mutator. +func (*importResource) Name() string { + return "terraform.Import" +} + +func Import(resourceType string, resourceKey string, resourceId string) bundle.Mutator { + return &importResource{resourceType: resourceType, resourceKey: resourceKey, resourceId: resourceId} +} diff --git a/bundle/phases/bind.go b/bundle/phases/bind.go new file mode 100644 index 0000000000..a22249ada6 --- /dev/null +++ b/bundle/phases/bind.go @@ -0,0 +1,24 @@ +package phases + +import ( + "github.com/databricks/cli/bundle" + "github.com/databricks/cli/bundle/deploy/terraform" +) + +type BindOptions struct { + ResourceType string + ResourceKey string + ResourceId string +} + +func Bind(opts *BindOptions) bundle.Mutator { + return newPhase( + "bind", + []bundle.Mutator{ + terraform.Interpolate(), + terraform.Write(), + terraform.StatePull(), + terraform.Import(opts.ResourceType, opts.ResourceKey, opts.ResourceId), + }, + ) +} diff --git a/cmd/bundle/bind.go b/cmd/bundle/bind.go new file mode 100644 index 0000000000..50b07c66eb --- /dev/null +++ b/cmd/bundle/bind.go @@ -0,0 +1,85 @@ +package bundle + +import ( + "context" + "fmt" + "strconv" + + "github.com/databricks/cli/bundle" + "github.com/databricks/cli/bundle/phases" + "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/databricks-sdk-go" + "github.com/databricks/databricks-sdk-go/service/jobs" + "github.com/databricks/databricks-sdk-go/service/pipelines" + "github.com/spf13/cobra" +) + +func newBindCommand() *cobra.Command { + cmd := &cobra.Command{ + Use: "bind KEY RESOURCE_ID", + Short: "Bind bundle-defined 
resources to existing resources", + PreRunE: ConfigureBundleWithVariables, + Args: cobra.ExactArgs(2), + } + + var autoApprove bool + cmd.Flags().BoolVar(&autoApprove, "auto-approve", false, "Automatically approve the binding") + + cmd.RunE = func(cmd *cobra.Command, args []string) error { + b := bundle.Get(cmd.Context()) + r := b.Config.Resources + resourceType, err := r.FindResourceByConfigKey(args[0]) + if err != nil { + return err + } + + w := b.WorkspaceClient() + err = checkResourceExists(cmd.Context(), w, resourceType, args[1]) + if err != nil { + return fmt.Errorf("%s with an id '%s' is not found, err: %w", resourceType, args[1], err) + } + + ctx := cmd.Context() + if !autoApprove { + answer, err := cmdio.AskYesOrNo(ctx, "Binding to existing resource means that the resource will be managed by the bundle which can lead to changes in the resource. Do you want to continue?") + if err != nil { + return err + } + if !answer { + return nil + } + } + + return bundle.Apply(cmd.Context(), b, bundle.Seq( + phases.Initialize(), + phases.Bind(&phases.BindOptions{ + ResourceType: resourceType, + ResourceKey: args[0], + ResourceId: args[1], + }), + )) + } + + return cmd +} + +func checkResourceExists(ctx context.Context, w *databricks.WorkspaceClient, resourceType string, resourceId string) error { + switch resourceType { + case "databricks_job": + id, err := strconv.Atoi(resourceId) + if err != nil { + return err + } + _, err = w.Jobs.Get(ctx, jobs.GetJobRequest{ + JobId: int64(id), + }) + return err + case "databricks_pipeline": + _, err := w.Pipelines.Get(ctx, pipelines.GetPipelineRequest{ + PipelineId: resourceId, + }) + return err + default: + return fmt.Errorf("unsupported resource type: %s", resourceType) + } +} diff --git a/cmd/bundle/bundle.go b/cmd/bundle/bundle.go index 3aa6945b11..5b6e12d61d 100644 --- a/cmd/bundle/bundle.go +++ b/cmd/bundle/bundle.go @@ -23,5 +23,6 @@ func New() *cobra.Command { cmd.AddCommand(newValidateCommand()) 
cmd.AddCommand(newInitCommand()) cmd.AddCommand(newGenerateCommand()) + cmd.AddCommand(newBindCommand()) return cmd } From 952d660cb94e9218acdf02bf89061f7c9697c8b6 Mon Sep 17 00:00:00 2001 From: Andrew Nester Date: Tue, 23 Jan 2024 12:51:56 +0000 Subject: [PATCH 02/16] added confirm plan and unbind command --- bundle/config/resources.go | 15 ++++-- bundle/config/resources/job.go | 19 +++++++ bundle/config/resources/pipeline.go | 13 +++++ bundle/deploy/lock/release.go | 3 ++ bundle/deploy/terraform/import.go | 78 ++++++++++++++++++++++++++--- bundle/phases/bind.go | 41 +++++++++++---- cmd/bundle/bundle.go | 3 +- cmd/bundle/deploy.go | 3 +- cmd/bundle/{ => deployment}/bind.go | 48 +++++------------- cmd/bundle/deployment/deployment.go | 17 +++++++ cmd/bundle/deployment/unbind.go | 38 ++++++++++++++ cmd/bundle/destroy.go | 3 +- cmd/bundle/generate.go | 3 +- cmd/bundle/run.go | 3 +- cmd/bundle/sync.go | 3 +- cmd/bundle/utils/utils.go | 24 +++++++++ cmd/bundle/validate.go | 3 +- cmd/bundle/variables.go | 19 ------- 18 files changed, 255 insertions(+), 81 deletions(-) rename cmd/bundle/{ => deployment}/bind.go (51%) create mode 100644 cmd/bundle/deployment/deployment.go create mode 100644 cmd/bundle/deployment/unbind.go create mode 100644 cmd/bundle/utils/utils.go diff --git a/bundle/config/resources.go b/bundle/config/resources.go index 7b91a2cef7..f27e5c3bdc 100644 --- a/bundle/config/resources.go +++ b/bundle/config/resources.go @@ -1,9 +1,11 @@ package config import ( + "context" "fmt" "github.com/databricks/cli/bundle/config/resources" + "github.com/databricks/databricks-sdk-go" ) // Resources defines Databricks resources associated with the bundle. 
@@ -169,17 +171,22 @@ func (r *Resources) Merge() error { return nil } -func (r *Resources) FindResourceByConfigKey(key string) (string, error) { +type ConfigResource interface { + Exists(ctx context.Context, w *databricks.WorkspaceClient, id string) bool + Type() string +} + +func (r *Resources) FindResourceByConfigKey(key string) (ConfigResource, error) { for k := range r.Jobs { if k == key { - return "databricks_job", nil + return r.Jobs[k], nil } } for k := range r.Pipelines { if k == key { - return "databricks_pipeline", nil + return r.Pipelines[k], nil } } - return "", fmt.Errorf("no such resource: %s", key) + return nil, fmt.Errorf("no such resource: %s", key) } diff --git a/bundle/config/resources/job.go b/bundle/config/resources/job.go index bf29106a03..cba76b9e1b 100644 --- a/bundle/config/resources/job.go +++ b/bundle/config/resources/job.go @@ -1,7 +1,11 @@ package resources import ( + "context" + "strconv" + "github.com/databricks/cli/bundle/config/paths" + "github.com/databricks/databricks-sdk-go" "github.com/databricks/databricks-sdk-go/marshal" "github.com/databricks/databricks-sdk-go/service/jobs" "github.com/imdario/mergo" @@ -89,3 +93,18 @@ func (j *Job) MergeTasks() error { j.Tasks = tasks return nil } + +func (j *Job) Exists(ctx context.Context, w *databricks.WorkspaceClient, id string) bool { + jobId, err := strconv.Atoi(id) + if err != nil { + return false + } + _, err = w.Jobs.Get(ctx, jobs.GetJobRequest{ + JobId: int64(jobId), + }) + return err == nil +} + +func (j *Job) Type() string { + return "databricks_job" +} diff --git a/bundle/config/resources/pipeline.go b/bundle/config/resources/pipeline.go index 5c741f8af6..84f09745fb 100644 --- a/bundle/config/resources/pipeline.go +++ b/bundle/config/resources/pipeline.go @@ -1,9 +1,11 @@ package resources import ( + "context" "strings" "github.com/databricks/cli/bundle/config/paths" + "github.com/databricks/databricks-sdk-go" "github.com/databricks/databricks-sdk-go/marshal" 
"github.com/databricks/databricks-sdk-go/service/pipelines" "github.com/imdario/mergo" @@ -72,3 +74,14 @@ func (p *Pipeline) MergeClusters() error { p.Clusters = output return nil } + +func (p *Pipeline) Exists(ctx context.Context, w *databricks.WorkspaceClient, id string) bool { + _, err := w.Pipelines.Get(ctx, pipelines.GetPipelineRequest{ + PipelineId: id, + }) + return err == nil +} + +func (p *Pipeline) Type() string { + return "databricks_pipeline" +} diff --git a/bundle/deploy/lock/release.go b/bundle/deploy/lock/release.go index 52d2719433..6fc4d65a41 100644 --- a/bundle/deploy/lock/release.go +++ b/bundle/deploy/lock/release.go @@ -12,6 +12,7 @@ import ( type Goal string const ( + GoalBind = Goal("bind") GoalDeploy = Goal("deploy") GoalDestroy = Goal("destroy") ) @@ -46,6 +47,8 @@ func (m *release) Apply(ctx context.Context, b *bundle.Bundle) error { switch m.goal { case GoalDeploy: return b.Locker.Unlock(ctx) + case GoalBind: + return b.Locker.Unlock(ctx) case GoalDestroy: return b.Locker.Unlock(ctx, locker.AllowLockFileNotExist) default: diff --git a/bundle/deploy/terraform/import.go b/bundle/deploy/terraform/import.go index a2313120eb..4fc47e321b 100644 --- a/bundle/deploy/terraform/import.go +++ b/bundle/deploy/terraform/import.go @@ -1,17 +1,24 @@ package terraform import ( + "bytes" "context" "fmt" "github.com/databricks/cli/bundle" + "github.com/databricks/cli/libs/cmdio" "github.com/hashicorp/terraform-exec/tfexec" ) +type BindOptions struct { + AutoApprove bool + ResourceType string + ResourceKey string + ResourceId string +} + type importResource struct { - resourceType string - resourceKey string - resourceId string + opts *BindOptions } // Apply implements bundle.Mutator. 
@@ -26,7 +33,34 @@ func (m *importResource) Apply(ctx context.Context, b *bundle.Bundle) error { return fmt.Errorf("terraform init: %w", err) } - return tf.Import(ctx, fmt.Sprintf("%s.%s", m.resourceType, m.resourceKey), m.resourceId) + err = tf.Import(ctx, fmt.Sprintf("%s.%s", m.opts.ResourceType, m.opts.ResourceKey), m.opts.ResourceId) + if err != nil { + return fmt.Errorf("terraform import: %w", err) + } + + buf := bytes.NewBuffer(nil) + tf.SetStdout(buf) + changed, err := tf.Plan(ctx) + if err != nil { + return fmt.Errorf("terraform plan: %w", err) + } + + if changed && !m.opts.AutoApprove { + cmdio.LogString(ctx, buf.String()) + ans, err := cmdio.AskYesOrNo(ctx, "Confirm import changes? Changes will be remotely only after running 'bundle deploy'.") + if err != nil { + return err + } + if !ans { + err = tf.StateRm(ctx, fmt.Sprintf("%s.%s", m.opts.ResourceType, m.opts.ResourceKey)) + if err != nil { + return err + } + return fmt.Errorf("import aborted") + } + } + + return nil } // Name implements bundle.Mutator. 
@@ -34,6 +68,38 @@ func (*importResource) Name() string { return "terraform.Import" } -func Import(resourceType string, resourceKey string, resourceId string) bundle.Mutator { - return &importResource{resourceType: resourceType, resourceKey: resourceKey, resourceId: resourceId} +func Import(opts *BindOptions) bundle.Mutator { + return &importResource{opts: opts} +} + +type unbind struct { + resourceType string + resourceKey string +} + +func (m *unbind) Apply(ctx context.Context, b *bundle.Bundle) error { + tf := b.Terraform + if tf == nil { + return fmt.Errorf("terraform not initialized") + } + + err := tf.Init(ctx, tfexec.Upgrade(true)) + if err != nil { + return fmt.Errorf("terraform init: %w", err) + } + + err = tf.StateRm(ctx, fmt.Sprintf("%s.%s", m.resourceType, m.resourceKey)) + if err != nil { + return fmt.Errorf("terraform state rm: %w", err) + } + + return nil +} + +func (*unbind) Name() string { + return "terraform.Unbind" +} + +func Unbind(resourceType string, resourceKey string) bundle.Mutator { + return &unbind{resourceType: resourceType, resourceKey: resourceKey} } diff --git a/bundle/phases/bind.go b/bundle/phases/bind.go index a22249ada6..0595d72d6e 100644 --- a/bundle/phases/bind.go +++ b/bundle/phases/bind.go @@ -2,23 +2,44 @@ package phases import ( "github.com/databricks/cli/bundle" + "github.com/databricks/cli/bundle/deploy/lock" "github.com/databricks/cli/bundle/deploy/terraform" ) -type BindOptions struct { - ResourceType string - ResourceKey string - ResourceId string +func Bind(opts *terraform.BindOptions) bundle.Mutator { + return newPhase( + "bind", + []bundle.Mutator{ + lock.Acquire(), + bundle.Defer( + bundle.Seq( + terraform.Interpolate(), + terraform.Write(), + terraform.StatePull(), + terraform.Import(opts), + terraform.StatePush(), + ), + lock.Release(lock.GoalBind), + ), + }, + ) } -func Bind(opts *BindOptions) bundle.Mutator { +func Unbind(resourceType string, resourceKey string) bundle.Mutator { return newPhase( - "bind", + 
"unbind", []bundle.Mutator{ - terraform.Interpolate(), - terraform.Write(), - terraform.StatePull(), - terraform.Import(opts.ResourceType, opts.ResourceKey, opts.ResourceId), + lock.Acquire(), + bundle.Defer( + bundle.Seq( + terraform.Interpolate(), + terraform.Write(), + terraform.StatePull(), + terraform.Unbind(resourceType, resourceKey), + terraform.StatePush(), + ), + lock.Release(lock.GoalBind), + ), }, ) } diff --git a/cmd/bundle/bundle.go b/cmd/bundle/bundle.go index 5b6e12d61d..d30887fa0f 100644 --- a/cmd/bundle/bundle.go +++ b/cmd/bundle/bundle.go @@ -1,6 +1,7 @@ package bundle import ( + "github.com/databricks/cli/cmd/bundle/deployment" "github.com/spf13/cobra" ) @@ -23,6 +24,6 @@ func New() *cobra.Command { cmd.AddCommand(newValidateCommand()) cmd.AddCommand(newInitCommand()) cmd.AddCommand(newGenerateCommand()) - cmd.AddCommand(newBindCommand()) + cmd.AddCommand(deployment.NewDeploymentCommand()) return cmd } diff --git a/cmd/bundle/deploy.go b/cmd/bundle/deploy.go index 8818bbbf4d..32779f2b5b 100644 --- a/cmd/bundle/deploy.go +++ b/cmd/bundle/deploy.go @@ -3,6 +3,7 @@ package bundle import ( "github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle/phases" + "github.com/databricks/cli/cmd/bundle/utils" "github.com/spf13/cobra" ) @@ -10,7 +11,7 @@ func newDeployCommand() *cobra.Command { cmd := &cobra.Command{ Use: "deploy", Short: "Deploy bundle", - PreRunE: ConfigureBundleWithVariables, + PreRunE: utils.ConfigureBundleWithVariables, } var force bool diff --git a/cmd/bundle/bind.go b/cmd/bundle/deployment/bind.go similarity index 51% rename from cmd/bundle/bind.go rename to cmd/bundle/deployment/bind.go index 50b07c66eb..67d5fc88c4 100644 --- a/cmd/bundle/bind.go +++ b/cmd/bundle/deployment/bind.go @@ -1,16 +1,13 @@ -package bundle +package deployment import ( - "context" "fmt" - "strconv" "github.com/databricks/cli/bundle" + "github.com/databricks/cli/bundle/deploy/terraform" "github.com/databricks/cli/bundle/phases" + 
"github.com/databricks/cli/cmd/bundle/utils" "github.com/databricks/cli/libs/cmdio" - "github.com/databricks/databricks-sdk-go" - "github.com/databricks/databricks-sdk-go/service/jobs" - "github.com/databricks/databricks-sdk-go/service/pipelines" "github.com/spf13/cobra" ) @@ -18,28 +15,29 @@ func newBindCommand() *cobra.Command { cmd := &cobra.Command{ Use: "bind KEY RESOURCE_ID", Short: "Bind bundle-defined resources to existing resources", - PreRunE: ConfigureBundleWithVariables, Args: cobra.ExactArgs(2), + PreRunE: utils.ConfigureBundleWithVariables, } var autoApprove bool + var forceLock bool cmd.Flags().BoolVar(&autoApprove, "auto-approve", false, "Automatically approve the binding") + cmd.Flags().BoolVar(&forceLock, "force-lock", false, "Force acquisition of deployment lock.") cmd.RunE = func(cmd *cobra.Command, args []string) error { b := bundle.Get(cmd.Context()) r := b.Config.Resources - resourceType, err := r.FindResourceByConfigKey(args[0]) + resource, err := r.FindResourceByConfigKey(args[0]) if err != nil { return err } w := b.WorkspaceClient() - err = checkResourceExists(cmd.Context(), w, resourceType, args[1]) - if err != nil { - return fmt.Errorf("%s with an id '%s' is not found, err: %w", resourceType, args[1], err) + ctx := cmd.Context() + if !resource.Exists(ctx, w, args[1]) { + return fmt.Errorf("%s with an id '%s' is not found", resource.Type(), args[1]) } - ctx := cmd.Context() if !autoApprove { answer, err := cmdio.AskYesOrNo(ctx, "Binding to existing resource means that the resource will be managed by the bundle which can lead to changes in the resource. 
Do you want to continue?") if err != nil { @@ -50,10 +48,11 @@ func newBindCommand() *cobra.Command { } } + b.Config.Bundle.Lock.Force = forceLock return bundle.Apply(cmd.Context(), b, bundle.Seq( phases.Initialize(), - phases.Bind(&phases.BindOptions{ - ResourceType: resourceType, + phases.Bind(&terraform.BindOptions{ + ResourceType: resource.Type(), ResourceKey: args[0], ResourceId: args[1], }), @@ -62,24 +61,3 @@ func newBindCommand() *cobra.Command { return cmd } - -func checkResourceExists(ctx context.Context, w *databricks.WorkspaceClient, resourceType string, resourceId string) error { - switch resourceType { - case "databricks_job": - id, err := strconv.Atoi(resourceId) - if err != nil { - return err - } - _, err = w.Jobs.Get(ctx, jobs.GetJobRequest{ - JobId: int64(id), - }) - return err - case "databricks_pipeline": - _, err := w.Pipelines.Get(ctx, pipelines.GetPipelineRequest{ - PipelineId: resourceId, - }) - return err - default: - return fmt.Errorf("unsupported resource type: %s", resourceType) - } -} diff --git a/cmd/bundle/deployment/deployment.go b/cmd/bundle/deployment/deployment.go new file mode 100644 index 0000000000..d29a8e72b1 --- /dev/null +++ b/cmd/bundle/deployment/deployment.go @@ -0,0 +1,17 @@ +package deployment + +import ( + "github.com/spf13/cobra" +) + +func NewDeploymentCommand() *cobra.Command { + cmd := &cobra.Command{ + Use: "deployment", + Short: "Deployment related commands", + Long: "Deployment related commands", + } + + cmd.AddCommand(newBindCommand()) + cmd.AddCommand(newUnbindCommand()) + return cmd +} diff --git a/cmd/bundle/deployment/unbind.go b/cmd/bundle/deployment/unbind.go new file mode 100644 index 0000000000..76ba447666 --- /dev/null +++ b/cmd/bundle/deployment/unbind.go @@ -0,0 +1,38 @@ +package deployment + +import ( + "github.com/databricks/cli/bundle" + "github.com/databricks/cli/bundle/deploy/terraform" + "github.com/databricks/cli/bundle/phases" + "github.com/databricks/cli/cmd/bundle/utils" + 
"github.com/spf13/cobra" +) + +func newUnbindCommand() *cobra.Command { + cmd := &cobra.Command{ + Use: "unbind KEY", + Short: "Unbind bundle-defined resources from its managed remote resource", + Args: cobra.ExactArgs(1), + PreRunE: utils.ConfigureBundleWithVariables, + } + + var forceLock bool + cmd.Flags().BoolVar(&forceLock, "force-lock", false, "Force acquisition of deployment lock.") + + cmd.RunE = func(cmd *cobra.Command, args []string) error { + b := bundle.Get(cmd.Context()) + r := b.Config.Resources + resource, err := r.FindResourceByConfigKey(args[0]) + if err != nil { + return err + } + + b.Config.Bundle.Lock.Force = forceLock + return bundle.Apply(cmd.Context(), b, bundle.Seq( + phases.Initialize(), + terraform.Unbind(resource.Type(), args[0]), + )) + } + + return cmd +} diff --git a/cmd/bundle/destroy.go b/cmd/bundle/destroy.go index 22d998abe2..37c9100141 100644 --- a/cmd/bundle/destroy.go +++ b/cmd/bundle/destroy.go @@ -6,6 +6,7 @@ import ( "github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle/phases" + "github.com/databricks/cli/cmd/bundle/utils" "github.com/databricks/cli/libs/cmdio" "github.com/databricks/cli/libs/flags" "github.com/spf13/cobra" @@ -17,7 +18,7 @@ func newDestroyCommand() *cobra.Command { Use: "destroy", Short: "Destroy deployed bundle resources", - PreRunE: ConfigureBundleWithVariables, + PreRunE: utils.ConfigureBundleWithVariables, } var autoApprove bool diff --git a/cmd/bundle/generate.go b/cmd/bundle/generate.go index a593f52f25..52d94031a1 100644 --- a/cmd/bundle/generate.go +++ b/cmd/bundle/generate.go @@ -2,6 +2,7 @@ package bundle import ( "github.com/databricks/cli/cmd/bundle/generate" + "github.com/databricks/cli/cmd/bundle/utils" "github.com/spf13/cobra" ) @@ -10,7 +11,7 @@ func newGenerateCommand() *cobra.Command { Use: "generate", Short: "Generate bundle configuration", Long: "Generate bundle configuration", - PreRunE: ConfigureBundleWithVariables, + PreRunE: utils.ConfigureBundleWithVariables, } 
cmd.AddCommand(generate.NewGenerateJobCommand()) diff --git a/cmd/bundle/run.go b/cmd/bundle/run.go index c9e35aa3ba..5ac9b4527f 100644 --- a/cmd/bundle/run.go +++ b/cmd/bundle/run.go @@ -8,6 +8,7 @@ import ( "github.com/databricks/cli/bundle/deploy/terraform" "github.com/databricks/cli/bundle/phases" "github.com/databricks/cli/bundle/run" + "github.com/databricks/cli/cmd/bundle/utils" "github.com/databricks/cli/cmd/root" "github.com/databricks/cli/libs/cmdio" "github.com/databricks/cli/libs/flags" @@ -20,7 +21,7 @@ func newRunCommand() *cobra.Command { Short: "Run a resource (e.g. a job or a pipeline)", Args: cobra.MaximumNArgs(1), - PreRunE: ConfigureBundleWithVariables, + PreRunE: utils.ConfigureBundleWithVariables, } var runOptions run.Options diff --git a/cmd/bundle/sync.go b/cmd/bundle/sync.go index ca81275b73..d9f8582c26 100644 --- a/cmd/bundle/sync.go +++ b/cmd/bundle/sync.go @@ -6,6 +6,7 @@ import ( "github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle/phases" + "github.com/databricks/cli/cmd/bundle/utils" "github.com/databricks/cli/libs/log" "github.com/databricks/cli/libs/sync" "github.com/spf13/cobra" @@ -48,7 +49,7 @@ func newSyncCommand() *cobra.Command { Short: "Synchronize bundle tree to the workspace", Args: cobra.NoArgs, - PreRunE: ConfigureBundleWithVariables, + PreRunE: utils.ConfigureBundleWithVariables, } var f syncFlags diff --git a/cmd/bundle/utils/utils.go b/cmd/bundle/utils/utils.go new file mode 100644 index 0000000000..f68ab06b01 --- /dev/null +++ b/cmd/bundle/utils/utils.go @@ -0,0 +1,24 @@ +package utils + +import ( + "github.com/databricks/cli/bundle" + "github.com/databricks/cli/cmd/root" + "github.com/spf13/cobra" +) + +func ConfigureBundleWithVariables(cmd *cobra.Command, args []string) error { + // Load bundle config and apply target + err := root.MustConfigureBundle(cmd, args) + if err != nil { + return err + } + + variables, err := cmd.Flags().GetStringSlice("var") + if err != nil { + return err + } + + // 
Initialize variables by assigning them values passed as command line flags + b := bundle.Get(cmd.Context()) + return b.Config.InitializeVariables(variables) +} diff --git a/cmd/bundle/validate.go b/cmd/bundle/validate.go index b98cbd52dc..01b8c18acc 100644 --- a/cmd/bundle/validate.go +++ b/cmd/bundle/validate.go @@ -5,6 +5,7 @@ import ( "github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle/phases" + "github.com/databricks/cli/cmd/bundle/utils" "github.com/spf13/cobra" ) @@ -13,7 +14,7 @@ func newValidateCommand() *cobra.Command { Use: "validate", Short: "Validate configuration", - PreRunE: ConfigureBundleWithVariables, + PreRunE: utils.ConfigureBundleWithVariables, } cmd.RunE = func(cmd *cobra.Command, args []string) error { diff --git a/cmd/bundle/variables.go b/cmd/bundle/variables.go index c3e4af6453..f8f5167ead 100644 --- a/cmd/bundle/variables.go +++ b/cmd/bundle/variables.go @@ -1,28 +1,9 @@ package bundle import ( - "github.com/databricks/cli/bundle" - "github.com/databricks/cli/cmd/root" "github.com/spf13/cobra" ) -func ConfigureBundleWithVariables(cmd *cobra.Command, args []string) error { - // Load bundle config and apply target - err := root.MustConfigureBundle(cmd, args) - if err != nil { - return err - } - - variables, err := cmd.Flags().GetStringSlice("var") - if err != nil { - return err - } - - // Initialize variables by assigning them values passed as command line flags - b := bundle.Get(cmd.Context()) - return b.Config.InitializeVariables(variables) -} - func initVariableFlag(cmd *cobra.Command) { cmd.PersistentFlags().StringSlice("var", []string{}, `set values for variables defined in bundle config. 
Example: --var="foo=bar"`) } From 8075ae12ecb62510b6cf5e95f2b77e1608d3f13e Mon Sep 17 00:00:00 2001 From: Andrew Nester Date: Thu, 25 Jan 2024 12:42:25 +0000 Subject: [PATCH 03/16] use import block --- bundle/config/resources.go | 2 +- bundle/config/resources/job.go | 2 +- bundle/config/resources/pipeline.go | 2 +- bundle/deploy/lock/release.go | 3 ++- bundle/deploy/terraform/import.go | 33 +++++++++++++++++++++++------ bundle/phases/bind.go | 2 +- cmd/bundle/deployment/bind.go | 16 +++----------- cmd/bundle/deployment/unbind.go | 2 +- 8 files changed, 37 insertions(+), 25 deletions(-) diff --git a/bundle/config/resources.go b/bundle/config/resources.go index f27e5c3bdc..725076f921 100644 --- a/bundle/config/resources.go +++ b/bundle/config/resources.go @@ -173,7 +173,7 @@ func (r *Resources) Merge() error { type ConfigResource interface { Exists(ctx context.Context, w *databricks.WorkspaceClient, id string) bool - Type() string + TerraformResourceName() string } func (r *Resources) FindResourceByConfigKey(key string) (ConfigResource, error) { diff --git a/bundle/config/resources/job.go b/bundle/config/resources/job.go index cba76b9e1b..c3726976e4 100644 --- a/bundle/config/resources/job.go +++ b/bundle/config/resources/job.go @@ -105,6 +105,6 @@ func (j *Job) Exists(ctx context.Context, w *databricks.WorkspaceClient, id stri return err == nil } -func (j *Job) Type() string { +func (j *Job) TerraformResourceName() string { return "databricks_job" } diff --git a/bundle/config/resources/pipeline.go b/bundle/config/resources/pipeline.go index 84f09745fb..cd1ffabadb 100644 --- a/bundle/config/resources/pipeline.go +++ b/bundle/config/resources/pipeline.go @@ -82,6 +82,6 @@ func (p *Pipeline) Exists(ctx context.Context, w *databricks.WorkspaceClient, id return err == nil } -func (p *Pipeline) Type() string { +func (p *Pipeline) TerraformResourceName() string { return "databricks_pipeline" } diff --git a/bundle/deploy/lock/release.go b/bundle/deploy/lock/release.go index 
6fc4d65a41..951bbd1aa4 100644 --- a/bundle/deploy/lock/release.go +++ b/bundle/deploy/lock/release.go @@ -13,6 +13,7 @@ type Goal string const ( GoalBind = Goal("bind") + GoalUnbind = Goal("unbind") GoalDeploy = Goal("deploy") GoalDestroy = Goal("destroy") ) @@ -47,7 +48,7 @@ func (m *release) Apply(ctx context.Context, b *bundle.Bundle) error { switch m.goal { case GoalDeploy: return b.Locker.Unlock(ctx) - case GoalBind: + case GoalBind, GoalUnbind: return b.Locker.Unlock(ctx) case GoalDestroy: return b.Locker.Unlock(ctx, locker.AllowLockFileNotExist) diff --git a/bundle/deploy/terraform/import.go b/bundle/deploy/terraform/import.go index 4fc47e321b..d238ae838d 100644 --- a/bundle/deploy/terraform/import.go +++ b/bundle/deploy/terraform/import.go @@ -4,6 +4,8 @@ import ( "bytes" "context" "fmt" + "os" + "path/filepath" "github.com/databricks/cli/bundle" "github.com/databricks/cli/libs/cmdio" @@ -33,9 +35,9 @@ func (m *importResource) Apply(ctx context.Context, b *bundle.Bundle) error { return fmt.Errorf("terraform init: %w", err) } - err = tf.Import(ctx, fmt.Sprintf("%s.%s", m.opts.ResourceType, m.opts.ResourceKey), m.opts.ResourceId) + importsFilePath, err := m.writeImportsFile(ctx, b) if err != nil { - return fmt.Errorf("terraform import: %w", err) + return fmt.Errorf("write imports file: %w", err) } buf := bytes.NewBuffer(nil) @@ -52,10 +54,8 @@ func (m *importResource) Apply(ctx context.Context, b *bundle.Bundle) error { return err } if !ans { - err = tf.StateRm(ctx, fmt.Sprintf("%s.%s", m.opts.ResourceType, m.opts.ResourceKey)) - if err != nil { - return err - } + // remove imports.tf file + _ = os.Remove(importsFilePath) return fmt.Errorf("import aborted") } } @@ -63,6 +63,27 @@ func (m *importResource) Apply(ctx context.Context, b *bundle.Bundle) error { return nil } +func (m *importResource) writeImportsFile(ctx context.Context, b *bundle.Bundle) (string, error) { + // Write imports.tf file to the terraform root directory + dir, err := Dir(ctx, b) + if err 
!= nil { + return "", err + } + + importsFilePath := filepath.Join(dir, "imports.tf") + f, err := os.Create(importsFilePath) + if err != nil { + return "", err + } + defer f.Close() + _, err = f.WriteString(fmt.Sprintf(`import { + to = %s.%s + id = "%s" +}`, m.opts.ResourceType, m.opts.ResourceKey, m.opts.ResourceId)) + + return importsFilePath, err +} + // Name implements bundle.Mutator. func (*importResource) Name() string { return "terraform.Import" diff --git a/bundle/phases/bind.go b/bundle/phases/bind.go index 0595d72d6e..21a408e037 100644 --- a/bundle/phases/bind.go +++ b/bundle/phases/bind.go @@ -38,7 +38,7 @@ func Unbind(resourceType string, resourceKey string) bundle.Mutator { terraform.Unbind(resourceType, resourceKey), terraform.StatePush(), ), - lock.Release(lock.GoalBind), + lock.Release(lock.GoalUnbind), ), }, ) diff --git a/cmd/bundle/deployment/bind.go b/cmd/bundle/deployment/bind.go index 67d5fc88c4..aba72b6466 100644 --- a/cmd/bundle/deployment/bind.go +++ b/cmd/bundle/deployment/bind.go @@ -7,7 +7,6 @@ import ( "github.com/databricks/cli/bundle/deploy/terraform" "github.com/databricks/cli/bundle/phases" "github.com/databricks/cli/cmd/bundle/utils" - "github.com/databricks/cli/libs/cmdio" "github.com/spf13/cobra" ) @@ -35,24 +34,15 @@ func newBindCommand() *cobra.Command { w := b.WorkspaceClient() ctx := cmd.Context() if !resource.Exists(ctx, w, args[1]) { - return fmt.Errorf("%s with an id '%s' is not found", resource.Type(), args[1]) - } - - if !autoApprove { - answer, err := cmdio.AskYesOrNo(ctx, "Binding to existing resource means that the resource will be managed by the bundle which can lead to changes in the resource. 
Do you want to continue?") - if err != nil { - return err - } - if !answer { - return nil - } + return fmt.Errorf("%s with an id '%s' is not found", resource.TerraformResourceName(), args[1]) } b.Config.Bundle.Lock.Force = forceLock return bundle.Apply(cmd.Context(), b, bundle.Seq( phases.Initialize(), phases.Bind(&terraform.BindOptions{ - ResourceType: resource.Type(), + AutoApprove: autoApprove, + ResourceType: resource.TerraformResourceName(), ResourceKey: args[0], ResourceId: args[1], }), diff --git a/cmd/bundle/deployment/unbind.go b/cmd/bundle/deployment/unbind.go index 76ba447666..d283692a39 100644 --- a/cmd/bundle/deployment/unbind.go +++ b/cmd/bundle/deployment/unbind.go @@ -30,7 +30,7 @@ func newUnbindCommand() *cobra.Command { b.Config.Bundle.Lock.Force = forceLock return bundle.Apply(cmd.Context(), b, bundle.Seq( phases.Initialize(), - terraform.Unbind(resource.Type(), args[0]), + terraform.Unbind(resource.TerraformResourceName(), args[0]), )) } From c65aa6c163a29e38918c018d52c96a561d78cad9 Mon Sep 17 00:00:00 2001 From: Andrew Nester Date: Thu, 25 Jan 2024 13:36:32 +0000 Subject: [PATCH 04/16] Added e2e test --- bundle/deploy/terraform/import.go | 3 ++ bundle/phases/bind.go | 1 - internal/bundle/bind_resource_test.go | 69 +++++++++++++++++++++++++++ 3 files changed, 72 insertions(+), 1 deletion(-) create mode 100644 internal/bundle/bind_resource_test.go diff --git a/bundle/deploy/terraform/import.go b/bundle/deploy/terraform/import.go index d238ae838d..fe227d0d39 100644 --- a/bundle/deploy/terraform/import.go +++ b/bundle/deploy/terraform/import.go @@ -9,6 +9,7 @@ import ( "github.com/databricks/cli/bundle" "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/cli/libs/log" "github.com/hashicorp/terraform-exec/tfexec" ) @@ -39,6 +40,7 @@ func (m *importResource) Apply(ctx context.Context, b *bundle.Bundle) error { if err != nil { return fmt.Errorf("write imports file: %w", err) } + log.Debugf(ctx, "imports.tf file written to %s", 
importsFilePath) buf := bytes.NewBuffer(nil) tf.SetStdout(buf) @@ -60,6 +62,7 @@ func (m *importResource) Apply(ctx context.Context, b *bundle.Bundle) error { } } + log.Debugf(ctx, "resource imports approved") return nil } diff --git a/bundle/phases/bind.go b/bundle/phases/bind.go index 21a408e037..9ff2f527b7 100644 --- a/bundle/phases/bind.go +++ b/bundle/phases/bind.go @@ -17,7 +17,6 @@ func Bind(opts *terraform.BindOptions) bundle.Mutator { terraform.Write(), terraform.StatePull(), terraform.Import(opts), - terraform.StatePush(), ), lock.Release(lock.GoalBind), ), diff --git a/internal/bundle/bind_resource_test.go b/internal/bundle/bind_resource_test.go new file mode 100644 index 0000000000..83b4593e62 --- /dev/null +++ b/internal/bundle/bind_resource_test.go @@ -0,0 +1,69 @@ +package bundle + +import ( + "context" + "fmt" + "testing" + + "github.com/databricks/cli/internal" + "github.com/databricks/databricks-sdk-go" + "github.com/databricks/databricks-sdk-go/service/jobs" + "github.com/google/uuid" + "github.com/stretchr/testify/require" +) + +func TestAccBindJobToExistingJob(t *testing.T) { + env := internal.GetEnvOrSkipTest(t, "CLOUD_ENV") + t.Log(env) + + nodeTypeId := internal.GetNodeTypeId(env) + uniqueId := uuid.New().String() + bundleRoot, err := initTestTemplate(t, "basic", map[string]any{ + "unique_id": uniqueId, + "spark_version": "13.3.x-scala2.12", + "node_type_id": nodeTypeId, + }) + require.NoError(t, err) + + jobId := createTestJob(t) + t.Cleanup(func() { + destroyJob(t, jobId) + require.NoError(t, err) + }) + + t.Setenv("BUNDLE_ROOT", bundleRoot) + c := internal.NewCobraTestRunner(t, "bundle", "deployment", "bind", "foo", fmt.Sprint(jobId), "--auto-approve") + _, _, err = c.Run() + require.NoError(t, err) + + err = deployBundle(t, bundleRoot) + require.NoError(t, err) + + w, err := databricks.NewWorkspaceClient() + require.NoError(t, err) + + ctx := context.Background() + // Check that job is bound and updated with config from bundle + job, err 
:= w.Jobs.Get(ctx, jobs.GetJobRequest{ + JobId: jobId, + }) + require.NoError(t, err) + require.Equal(t, job.Settings.Name, fmt.Sprintf("test-job-basic-%s", uniqueId)) + require.Contains(t, job.Settings.Tasks[0].SparkPythonTask.PythonFile, "hello_world.py") + + c = internal.NewCobraTestRunner(t, "bundle", "deployment", "unbind", "foo") + _, _, err = c.Run() + require.NoError(t, err) + + err = destroyBundle(t, bundleRoot) + require.NoError(t, err) + + // Check that job is unbound and exists after bundle is destroyed + job, err = w.Jobs.Get(ctx, jobs.GetJobRequest{ + JobId: jobId, + }) + require.NoError(t, err) + require.Equal(t, job.Settings.Name, fmt.Sprintf("test-job-basic-%s", uniqueId)) + require.Contains(t, job.Settings.Tasks[0].SparkPythonTask.PythonFile, "hello_world.py") + +} From 852e7bcf841fda2e14ce1225d6d91148c806c4fb Mon Sep 17 00:00:00 2001 From: Andrew Nester Date: Thu, 25 Jan 2024 13:40:32 +0000 Subject: [PATCH 05/16] fix build --- cmd/bundle/summary.go | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/cmd/bundle/summary.go b/cmd/bundle/summary.go index efa3c679d8..596f7d3d8c 100644 --- a/cmd/bundle/summary.go +++ b/cmd/bundle/summary.go @@ -10,6 +10,7 @@ import ( "github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle/deploy/terraform" "github.com/databricks/cli/bundle/phases" + "github.com/databricks/cli/cmd/bundle/utils" "github.com/databricks/cli/cmd/root" "github.com/databricks/cli/libs/flags" "github.com/spf13/cobra" @@ -20,7 +21,7 @@ func newSummaryCommand() *cobra.Command { Use: "summary", Short: "Describe the bundle resources and their deployment states", - PreRunE: ConfigureBundleWithVariables, + PreRunE: utils.ConfigureBundleWithVariables, // This command is currently intended for the Databricks VSCode extension only Hidden: true, From 2cfee7342471cec835786dfda81d86801d3ec3aa Mon Sep 17 00:00:00 2001 From: Andrew Nester Date: Wed, 7 Feb 2024 15:12:55 +0100 Subject: [PATCH 06/16] push state --- 
bundle/config/resources.go | 2 +- bundle/config/resources/job.go | 6 +- bundle/config/resources/pipeline.go | 4 +- bundle/deploy/terraform/import.go | 83 ++++++++------------------- bundle/deploy/terraform/unbind.go | 41 +++++++++++++ bundle/phases/bind.go | 5 +- bundle/phases/destroy.go | 2 +- cmd/bundle/deployment/bind.go | 7 ++- cmd/bundle/deployment/unbind.go | 3 +- internal/bundle/bind_resource_test.go | 10 ++++ 10 files changed, 89 insertions(+), 74 deletions(-) create mode 100644 bundle/deploy/terraform/unbind.go diff --git a/bundle/config/resources.go b/bundle/config/resources.go index 725076f921..af27d88dd4 100644 --- a/bundle/config/resources.go +++ b/bundle/config/resources.go @@ -172,7 +172,7 @@ func (r *Resources) Merge() error { } type ConfigResource interface { - Exists(ctx context.Context, w *databricks.WorkspaceClient, id string) bool + Exists(ctx context.Context, w *databricks.WorkspaceClient, id string) (bool, error) TerraformResourceName() string } diff --git a/bundle/config/resources/job.go b/bundle/config/resources/job.go index 8e1aa10846..bd6ab76cf2 100644 --- a/bundle/config/resources/job.go +++ b/bundle/config/resources/job.go @@ -95,15 +95,15 @@ func (j *Job) MergeTasks() error { return nil } -func (j *Job) Exists(ctx context.Context, w *databricks.WorkspaceClient, id string) bool { +func (j *Job) Exists(ctx context.Context, w *databricks.WorkspaceClient, id string) (bool, error) { jobId, err := strconv.Atoi(id) if err != nil { - return false + return false, err } _, err = w.Jobs.Get(ctx, jobs.GetJobRequest{ JobId: int64(jobId), }) - return err == nil + return err == nil, err } func (j *Job) TerraformResourceName() string { diff --git a/bundle/config/resources/pipeline.go b/bundle/config/resources/pipeline.go index 16583a4b24..4a896fa12c 100644 --- a/bundle/config/resources/pipeline.go +++ b/bundle/config/resources/pipeline.go @@ -76,11 +76,11 @@ func (p *Pipeline) MergeClusters() error { return nil } -func (p *Pipeline) Exists(ctx 
context.Context, w *databricks.WorkspaceClient, id string) bool { +func (p *Pipeline) Exists(ctx context.Context, w *databricks.WorkspaceClient, id string) (bool, error) { _, err := w.Pipelines.Get(ctx, pipelines.GetPipelineRequest{ PipelineId: id, }) - return err == nil + return err == nil, err } func (p *Pipeline) TerraformResourceName() string { diff --git a/bundle/deploy/terraform/import.go b/bundle/deploy/terraform/import.go index fe227d0d39..f4b17e0e75 100644 --- a/bundle/deploy/terraform/import.go +++ b/bundle/deploy/terraform/import.go @@ -26,21 +26,35 @@ type importResource struct { // Apply implements bundle.Mutator. func (m *importResource) Apply(ctx context.Context, b *bundle.Bundle) error { + dir, err := Dir(ctx, b) + if err != nil { + return err + } + + // If the bundle.tf.json file does not exist, write it. + // This is necessary because the import operation requires the resource to be defined in the Terraform configuration. + _, err = os.Stat(filepath.Join(dir, "bundle.tf.json")) + if err != nil { + err = bundle.Apply(ctx, b, Write()) + if err != nil { + return fmt.Errorf("terraform write: %w", err) + } + } + tf := b.Terraform if tf == nil { return fmt.Errorf("terraform not initialized") } - err := tf.Init(ctx, tfexec.Upgrade(true)) + err = tf.Init(ctx, tfexec.Upgrade(true)) if err != nil { return fmt.Errorf("terraform init: %w", err) } - importsFilePath, err := m.writeImportsFile(ctx, b) + err = tf.Import(ctx, fmt.Sprintf("%s.%s", m.opts.ResourceType, m.opts.ResourceKey), m.opts.ResourceId) if err != nil { - return fmt.Errorf("write imports file: %w", err) + return fmt.Errorf("terraform import: %w", err) } - log.Debugf(ctx, "imports.tf file written to %s", importsFilePath) buf := bytes.NewBuffer(nil) tf.SetStdout(buf) @@ -51,13 +65,15 @@ func (m *importResource) Apply(ctx context.Context, b *bundle.Bundle) error { if changed && !m.opts.AutoApprove { cmdio.LogString(ctx, buf.String()) - ans, err := cmdio.AskYesOrNo(ctx, "Confirm import changes? 
Changes will be remotely only after running 'bundle deploy'.") + ans, err := cmdio.AskYesOrNo(ctx, "Confirm import changes? Changes will be remotely applied only after running 'bundle deploy'.") if err != nil { return err } if !ans { - // remove imports.tf file - _ = os.Remove(importsFilePath) + err = tf.StateRm(ctx, fmt.Sprintf("%s.%s", m.opts.ResourceType, m.opts.ResourceKey)) + if err != nil { + return err + } return fmt.Errorf("import aborted") } } @@ -66,27 +82,6 @@ func (m *importResource) Apply(ctx context.Context, b *bundle.Bundle) error { return nil } -func (m *importResource) writeImportsFile(ctx context.Context, b *bundle.Bundle) (string, error) { - // Write imports.tf file to the terraform root directory - dir, err := Dir(ctx, b) - if err != nil { - return "", err - } - - importsFilePath := filepath.Join(dir, "imports.tf") - f, err := os.Create(importsFilePath) - if err != nil { - return "", err - } - defer f.Close() - _, err = f.WriteString(fmt.Sprintf(`import { - to = %s.%s - id = "%s" -}`, m.opts.ResourceType, m.opts.ResourceKey, m.opts.ResourceId)) - - return importsFilePath, err -} - // Name implements bundle.Mutator. 
func (*importResource) Name() string { return "terraform.Import" @@ -95,35 +90,3 @@ func (*importResource) Name() string { func Import(opts *BindOptions) bundle.Mutator { return &importResource{opts: opts} } - -type unbind struct { - resourceType string - resourceKey string -} - -func (m *unbind) Apply(ctx context.Context, b *bundle.Bundle) error { - tf := b.Terraform - if tf == nil { - return fmt.Errorf("terraform not initialized") - } - - err := tf.Init(ctx, tfexec.Upgrade(true)) - if err != nil { - return fmt.Errorf("terraform init: %w", err) - } - - err = tf.StateRm(ctx, fmt.Sprintf("%s.%s", m.resourceType, m.resourceKey)) - if err != nil { - return fmt.Errorf("terraform state rm: %w", err) - } - - return nil -} - -func (*unbind) Name() string { - return "terraform.Unbind" -} - -func Unbind(resourceType string, resourceKey string) bundle.Mutator { - return &unbind{resourceType: resourceType, resourceKey: resourceKey} -} diff --git a/bundle/deploy/terraform/unbind.go b/bundle/deploy/terraform/unbind.go new file mode 100644 index 0000000000..74e15e1844 --- /dev/null +++ b/bundle/deploy/terraform/unbind.go @@ -0,0 +1,41 @@ +package terraform + +import ( + "context" + "fmt" + + "github.com/databricks/cli/bundle" + "github.com/hashicorp/terraform-exec/tfexec" +) + +type unbind struct { + resourceType string + resourceKey string +} + +func (m *unbind) Apply(ctx context.Context, b *bundle.Bundle) error { + tf := b.Terraform + if tf == nil { + return fmt.Errorf("terraform not initialized") + } + + err := tf.Init(ctx, tfexec.Upgrade(true)) + if err != nil { + return fmt.Errorf("terraform init: %w", err) + } + + err = tf.StateRm(ctx, fmt.Sprintf("%s.%s", m.resourceType, m.resourceKey)) + if err != nil { + return fmt.Errorf("terraform state rm: %w", err) + } + + return nil +} + +func (*unbind) Name() string { + return "terraform.Unbind" +} + +func Unbind(resourceType string, resourceKey string) bundle.Mutator { + return &unbind{resourceType: resourceType, resourceKey: 
resourceKey} +} diff --git a/bundle/phases/bind.go b/bundle/phases/bind.go index 9ff2f527b7..94b7f2d436 100644 --- a/bundle/phases/bind.go +++ b/bundle/phases/bind.go @@ -13,10 +13,9 @@ func Bind(opts *terraform.BindOptions) bundle.Mutator { lock.Acquire(), bundle.Defer( bundle.Seq( - terraform.Interpolate(), - terraform.Write(), terraform.StatePull(), terraform.Import(opts), + terraform.StatePush(), ), lock.Release(lock.GoalBind), ), @@ -31,8 +30,6 @@ func Unbind(resourceType string, resourceKey string) bundle.Mutator { lock.Acquire(), bundle.Defer( bundle.Seq( - terraform.Interpolate(), - terraform.Write(), terraform.StatePull(), terraform.Unbind(resourceType, resourceKey), terraform.StatePush(), diff --git a/bundle/phases/destroy.go b/bundle/phases/destroy.go index 216d292101..f974a05659 100644 --- a/bundle/phases/destroy.go +++ b/bundle/phases/destroy.go @@ -14,9 +14,9 @@ func Destroy() bundle.Mutator { lock.Acquire(), bundle.Defer( bundle.Seq( + terraform.StatePull(), terraform.Interpolate(), terraform.Write(), - terraform.StatePull(), terraform.Plan(terraform.PlanGoal("destroy")), terraform.Destroy(), terraform.StatePush(), diff --git a/cmd/bundle/deployment/bind.go b/cmd/bundle/deployment/bind.go index aba72b6466..12d3706115 100644 --- a/cmd/bundle/deployment/bind.go +++ b/cmd/bundle/deployment/bind.go @@ -33,7 +33,12 @@ func newBindCommand() *cobra.Command { w := b.WorkspaceClient() ctx := cmd.Context() - if !resource.Exists(ctx, w, args[1]) { + exists, err := resource.Exists(ctx, w, args[1]) + if err != nil { + return fmt.Errorf("failed to fetch the resource, err: %w", err) + } + + if !exists { return fmt.Errorf("%s with an id '%s' is not found", resource.TerraformResourceName(), args[1]) } diff --git a/cmd/bundle/deployment/unbind.go b/cmd/bundle/deployment/unbind.go index d283692a39..e192810c33 100644 --- a/cmd/bundle/deployment/unbind.go +++ b/cmd/bundle/deployment/unbind.go @@ -2,7 +2,6 @@ package deployment import ( "github.com/databricks/cli/bundle" 
- "github.com/databricks/cli/bundle/deploy/terraform" "github.com/databricks/cli/bundle/phases" "github.com/databricks/cli/cmd/bundle/utils" "github.com/spf13/cobra" @@ -30,7 +29,7 @@ func newUnbindCommand() *cobra.Command { b.Config.Bundle.Lock.Force = forceLock return bundle.Apply(cmd.Context(), b, bundle.Seq( phases.Initialize(), - terraform.Unbind(resource.TerraformResourceName(), args[0]), + phases.Unbind(resource.TerraformResourceName(), args[0]), )) } diff --git a/internal/bundle/bind_resource_test.go b/internal/bundle/bind_resource_test.go index 83b4593e62..0a11388cf4 100644 --- a/internal/bundle/bind_resource_test.go +++ b/internal/bundle/bind_resource_test.go @@ -3,6 +3,8 @@ package bundle import ( "context" "fmt" + "os" + "path/filepath" "testing" "github.com/databricks/cli/internal" @@ -36,6 +38,10 @@ func TestAccBindJobToExistingJob(t *testing.T) { _, _, err = c.Run() require.NoError(t, err) + // Remove .databricks directory to simulate a fresh deployment + err = os.RemoveAll(filepath.Join(bundleRoot, ".databricks")) + require.NoError(t, err) + err = deployBundle(t, bundleRoot) require.NoError(t, err) @@ -55,6 +61,10 @@ func TestAccBindJobToExistingJob(t *testing.T) { _, _, err = c.Run() require.NoError(t, err) + // Remove .databricks directory to simulate a fresh deployment + err = os.RemoveAll(filepath.Join(bundleRoot, ".databricks")) + require.NoError(t, err) + err = destroyBundle(t, bundleRoot) require.NoError(t, err) From 7f795ecc6489f044e7e30fca2ca9d6bac2064275 Mon Sep 17 00:00:00 2001 From: Andrew Nester Date: Wed, 7 Feb 2024 15:23:44 +0100 Subject: [PATCH 07/16] fix tests --- cmd/bundle/deployment/bind.go | 2 +- cmd/bundle/deployment/unbind.go | 2 +- internal/bundle/bind_resource_test.go | 16 +++++++++------- 3 files changed, 11 insertions(+), 9 deletions(-) diff --git a/cmd/bundle/deployment/bind.go b/cmd/bundle/deployment/bind.go index 12d3706115..eff8c998c7 100644 --- a/cmd/bundle/deployment/bind.go +++ b/cmd/bundle/deployment/bind.go @@ 
-42,7 +42,7 @@ func newBindCommand() *cobra.Command { return fmt.Errorf("%s with an id '%s' is not found", resource.TerraformResourceName(), args[1]) } - b.Config.Bundle.Lock.Force = forceLock + b.Config.Bundle.Deployment.Lock.Force = forceLock return bundle.Apply(cmd.Context(), b, bundle.Seq( phases.Initialize(), phases.Bind(&terraform.BindOptions{ diff --git a/cmd/bundle/deployment/unbind.go b/cmd/bundle/deployment/unbind.go index e192810c33..e7de8a3d47 100644 --- a/cmd/bundle/deployment/unbind.go +++ b/cmd/bundle/deployment/unbind.go @@ -26,7 +26,7 @@ func newUnbindCommand() *cobra.Command { return err } - b.Config.Bundle.Lock.Force = forceLock + b.Config.Bundle.Deployment.Lock.Force = forceLock return bundle.Apply(cmd.Context(), b, bundle.Seq( phases.Initialize(), phases.Unbind(resource.TerraformResourceName(), args[0]), diff --git a/internal/bundle/bind_resource_test.go b/internal/bundle/bind_resource_test.go index 0a11388cf4..6337e2dfd4 100644 --- a/internal/bundle/bind_resource_test.go +++ b/internal/bundle/bind_resource_test.go @@ -1,13 +1,13 @@ package bundle import ( - "context" "fmt" "os" "path/filepath" "testing" "github.com/databricks/cli/internal" + "github.com/databricks/cli/internal/acc" "github.com/databricks/databricks-sdk-go" "github.com/databricks/databricks-sdk-go/service/jobs" "github.com/google/uuid" @@ -18,18 +18,21 @@ func TestAccBindJobToExistingJob(t *testing.T) { env := internal.GetEnvOrSkipTest(t, "CLOUD_ENV") t.Log(env) + ctx, wt := acc.WorkspaceTest(t) + gt := &generateJobTest{T: t, w: wt.W} + nodeTypeId := internal.GetNodeTypeId(env) uniqueId := uuid.New().String() - bundleRoot, err := initTestTemplate(t, "basic", map[string]any{ + bundleRoot, err := initTestTemplate(t, ctx, "basic", map[string]any{ "unique_id": uniqueId, "spark_version": "13.3.x-scala2.12", "node_type_id": nodeTypeId, }) require.NoError(t, err) - jobId := createTestJob(t) + jobId := gt.createTestJob(ctx) t.Cleanup(func() { - destroyJob(t, jobId) + gt.destroyJob(ctx, 
jobId) require.NoError(t, err) }) @@ -42,13 +45,12 @@ func TestAccBindJobToExistingJob(t *testing.T) { err = os.RemoveAll(filepath.Join(bundleRoot, ".databricks")) require.NoError(t, err) - err = deployBundle(t, bundleRoot) + err = deployBundle(t, ctx, bundleRoot) require.NoError(t, err) w, err := databricks.NewWorkspaceClient() require.NoError(t, err) - ctx := context.Background() // Check that job is bound and updated with config from bundle job, err := w.Jobs.Get(ctx, jobs.GetJobRequest{ JobId: jobId, @@ -65,7 +67,7 @@ func TestAccBindJobToExistingJob(t *testing.T) { err = os.RemoveAll(filepath.Join(bundleRoot, ".databricks")) require.NoError(t, err) - err = destroyBundle(t, bundleRoot) + err = destroyBundle(t, ctx, bundleRoot) require.NoError(t, err) // Check that job is unbound and exists after bundle is destroyed From 4e8d030c6ffbbf1d60b20eadcda0937b610dc294 Mon Sep 17 00:00:00 2001 From: Andrew Nester Date: Mon, 12 Feb 2024 13:53:10 +0100 Subject: [PATCH 08/16] addressed feedback --- bundle/config/resources.go | 19 +++++++++++++--- bundle/config/resources/job.go | 7 +++++- bundle/config/resources/pipeline.go | 7 +++++- bundle/deploy/terraform/import.go | 35 +++++++++++++++++++++-------- cmd/bundle/deployment/bind.go | 9 +++++++- 5 files changed, 62 insertions(+), 15 deletions(-) diff --git a/bundle/config/resources.go b/bundle/config/resources.go index af27d88dd4..d0b64d1a36 100644 --- a/bundle/config/resources.go +++ b/bundle/config/resources.go @@ -177,16 +177,29 @@ type ConfigResource interface { } func (r *Resources) FindResourceByConfigKey(key string) (ConfigResource, error) { + found := make([]ConfigResource, 0) for k := range r.Jobs { if k == key { - return r.Jobs[k], nil + found = append(found, r.Jobs[k]) } } for k := range r.Pipelines { if k == key { - return r.Pipelines[k], nil + found = append(found, r.Pipelines[k]) } } - return nil, fmt.Errorf("no such resource: %s", key) + if len(found) == 0 { + return nil, fmt.Errorf("no such resource: %s", 
key) + } + + if len(found) > 1 { + keys := make([]string, 0, len(found)) + for _, r := range found { + keys = append(keys, fmt.Sprintf("%s:%s", r.TerraformResourceName(), key)) + } + return nil, fmt.Errorf("ambiguous: %s (can resolve to all of %s)", key, keys) + } + + return found[0], nil } diff --git a/bundle/config/resources/job.go b/bundle/config/resources/job.go index bd6ab76cf2..da85f94dcc 100644 --- a/bundle/config/resources/job.go +++ b/bundle/config/resources/job.go @@ -5,6 +5,7 @@ import ( "strconv" "github.com/databricks/cli/bundle/config/paths" + "github.com/databricks/cli/libs/log" "github.com/databricks/databricks-sdk-go" "github.com/databricks/databricks-sdk-go/marshal" "github.com/databricks/databricks-sdk-go/service/jobs" @@ -103,7 +104,11 @@ func (j *Job) Exists(ctx context.Context, w *databricks.WorkspaceClient, id stri _, err = w.Jobs.Get(ctx, jobs.GetJobRequest{ JobId: int64(jobId), }) - return err == nil, err + if err != nil { + log.Debugf(ctx, "job %s does not exist", id) + return false, err + } + return true, nil } func (j *Job) TerraformResourceName() string { diff --git a/bundle/config/resources/pipeline.go b/bundle/config/resources/pipeline.go index 4a896fa12c..97aeef156d 100644 --- a/bundle/config/resources/pipeline.go +++ b/bundle/config/resources/pipeline.go @@ -5,6 +5,7 @@ import ( "strings" "github.com/databricks/cli/bundle/config/paths" + "github.com/databricks/cli/libs/log" "github.com/databricks/databricks-sdk-go" "github.com/databricks/databricks-sdk-go/marshal" "github.com/databricks/databricks-sdk-go/service/pipelines" @@ -80,7 +81,11 @@ func (p *Pipeline) Exists(ctx context.Context, w *databricks.WorkspaceClient, id _, err := w.Pipelines.Get(ctx, pipelines.GetPipelineRequest{ PipelineId: id, }) - return err == nil, err + if err != nil { + log.Debugf(ctx, "pipeline %s does not exist", id) + return false, err + } + return true, nil } func (p *Pipeline) TerraformResourceName() string { diff --git 
a/bundle/deploy/terraform/import.go b/bundle/deploy/terraform/import.go index f4b17e0e75..a046adf607 100644 --- a/bundle/deploy/terraform/import.go +++ b/bundle/deploy/terraform/import.go @@ -4,12 +4,12 @@ import ( "bytes" "context" "fmt" + "io" "os" "path/filepath" "github.com/databricks/cli/bundle" "github.com/databricks/cli/libs/cmdio" - "github.com/databricks/cli/libs/log" "github.com/hashicorp/terraform-exec/tfexec" ) @@ -35,7 +35,7 @@ func (m *importResource) Apply(ctx context.Context, b *bundle.Bundle) error { // This is necessary because the import operation requires the resource to be defined in the Terraform configuration. _, err = os.Stat(filepath.Join(dir, "bundle.tf.json")) if err != nil { - err = bundle.Apply(ctx, b, Write()) + err = bundle.Apply(ctx, b, bundle.Seq(Interpolate(), Write())) if err != nil { return fmt.Errorf("terraform write: %w", err) } @@ -51,18 +51,22 @@ func (m *importResource) Apply(ctx context.Context, b *bundle.Bundle) error { return fmt.Errorf("terraform init: %w", err) } - err = tf.Import(ctx, fmt.Sprintf("%s.%s", m.opts.ResourceType, m.opts.ResourceKey), m.opts.ResourceId) + tmpState := filepath.Join(os.TempDir(), TerraformStateFileName) + + err = tf.Import(ctx, fmt.Sprintf("%s.%s", m.opts.ResourceType, m.opts.ResourceKey), m.opts.ResourceId, tfexec.StateOut(tmpState)) if err != nil { return fmt.Errorf("terraform import: %w", err) } buf := bytes.NewBuffer(nil) tf.SetStdout(buf) - changed, err := tf.Plan(ctx) + changed, err := tf.Plan(ctx, tfexec.State(tmpState)) if err != nil { return fmt.Errorf("terraform plan: %w", err) } + defer os.Remove(tmpState) + if changed && !m.opts.AutoApprove { cmdio.LogString(ctx, buf.String()) ans, err := cmdio.AskYesOrNo(ctx, "Confirm import changes? 
Changes will be remotely applied only after running 'bundle deploy'.") @@ -70,15 +74,28 @@ func (m *importResource) Apply(ctx context.Context, b *bundle.Bundle) error { return err } if !ans { - err = tf.StateRm(ctx, fmt.Sprintf("%s.%s", m.opts.ResourceType, m.opts.ResourceKey)) - if err != nil { - return err - } return fmt.Errorf("import aborted") } } - log.Debugf(ctx, "resource imports approved") + // If user confirmed changes, move the state file from temp dir to state location + f, err := os.Create(filepath.Join(dir, TerraformStateFileName)) + if err != nil { + return err + } + defer f.Close() + + tmpF, err := os.Open(tmpState) + if err != nil { + return err + } + defer tmpF.Close() + + _, err = io.Copy(f, tmpF) + if err != nil { + return err + } + return nil } diff --git a/cmd/bundle/deployment/bind.go b/cmd/bundle/deployment/bind.go index eff8c998c7..5412928070 100644 --- a/cmd/bundle/deployment/bind.go +++ b/cmd/bundle/deployment/bind.go @@ -7,6 +7,7 @@ import ( "github.com/databricks/cli/bundle/deploy/terraform" "github.com/databricks/cli/bundle/phases" "github.com/databricks/cli/cmd/bundle/utils" + "github.com/databricks/cli/libs/cmdio" "github.com/spf13/cobra" ) @@ -43,7 +44,7 @@ func newBindCommand() *cobra.Command { } b.Config.Bundle.Deployment.Lock.Force = forceLock - return bundle.Apply(cmd.Context(), b, bundle.Seq( + err = bundle.Apply(cmd.Context(), b, bundle.Seq( phases.Initialize(), phases.Bind(&terraform.BindOptions{ AutoApprove: autoApprove, @@ -52,6 +53,12 @@ func newBindCommand() *cobra.Command { ResourceId: args[1], }), )) + if err != nil { + return fmt.Errorf("failed to bind the resource, err: %w", err) + } + + cmdio.LogString(ctx, fmt.Sprintf("Successfully bound %s with an id '%s'. 
Run 'bundle deploy' to deploy changes to your workspace", resource.TerraformResourceName(), args[1])) + return nil } return cmd From 637ceb387835182cdc58d689ef78d7229a06b9e6 Mon Sep 17 00:00:00 2001 From: Andrew Nester Date: Mon, 12 Feb 2024 14:04:48 +0100 Subject: [PATCH 09/16] added lint ignore --- bundle/deploy/terraform/import.go | 2 ++ 1 file changed, 2 insertions(+) diff --git a/bundle/deploy/terraform/import.go b/bundle/deploy/terraform/import.go index a046adf607..5c3149f404 100644 --- a/bundle/deploy/terraform/import.go +++ b/bundle/deploy/terraform/import.go @@ -60,6 +60,8 @@ func (m *importResource) Apply(ctx context.Context, b *bundle.Bundle) error { buf := bytes.NewBuffer(nil) tf.SetStdout(buf) + + //lint:ignore SA1019 We use legacy -state flag for now to plan the import changes based on temporary state file changed, err := tf.Plan(ctx, tfexec.State(tmpState)) if err != nil { return fmt.Errorf("terraform plan: %w", err) From b837d0b1a35cb6759142868d202b427cb6d888be Mon Sep 17 00:00:00 2001 From: Andrew Nester Date: Wed, 14 Feb 2024 11:15:34 +0100 Subject: [PATCH 10/16] Added test for bind abort --- internal/bundle/bind_resource_test.go | 45 +++++++++++++++++++++++++++ internal/helpers.go | 8 +++++ 2 files changed, 53 insertions(+) diff --git a/internal/bundle/bind_resource_test.go b/internal/bundle/bind_resource_test.go index 6337e2dfd4..eb2f566d19 100644 --- a/internal/bundle/bind_resource_test.go +++ b/internal/bundle/bind_resource_test.go @@ -79,3 +79,48 @@ func TestAccBindJobToExistingJob(t *testing.T) { require.Contains(t, job.Settings.Tasks[0].SparkPythonTask.PythonFile, "hello_world.py") } + +func TestAccAbortBind(t *testing.T) { + env := internal.GetEnvOrSkipTest(t, "CLOUD_ENV") + t.Log(env) + + ctx, wt := acc.WorkspaceTest(t) + gt := &generateJobTest{T: t, w: wt.W} + + nodeTypeId := internal.GetNodeTypeId(env) + uniqueId := uuid.New().String() + bundleRoot, err := initTestTemplate(t, ctx, "basic", map[string]any{ + "unique_id": uniqueId, + 
"spark_version": "13.3.x-scala2.12", + "node_type_id": nodeTypeId, + }) + require.NoError(t, err) + + jobId := gt.createTestJob(ctx) + t.Cleanup(func() { + gt.destroyJob(ctx, jobId) + destroyBundle(t, ctx, bundleRoot) + }) + + t.Setenv("BUNDLE_ROOT", bundleRoot) + c := internal.NewCobraTestRunner(t, "bundle", "deployment", "bind", "foo", fmt.Sprint(jobId)) + + // Simulate user aborting the bind. This is done by not providing any input to the prompt in non-interactive mode. + _, _, err = c.Run() + require.ErrorContains(t, err, "failed to bind the resource") + + err = deployBundle(t, ctx, bundleRoot) + require.NoError(t, err) + + w, err := databricks.NewWorkspaceClient() + require.NoError(t, err) + + // Check that job is not bound and not updated with config from bundle + job, err := w.Jobs.Get(ctx, jobs.GetJobRequest{ + JobId: jobId, + }) + require.NoError(t, err) + + require.NotEqual(t, job.Settings.Name, fmt.Sprintf("test-job-basic-%s", uniqueId)) + require.Contains(t, job.Settings.Tasks[0].NotebookTask.NotebookPath, "test") +} diff --git a/internal/helpers.go b/internal/helpers.go index 22e38e2113..6377ae07e7 100644 --- a/internal/helpers.go +++ b/internal/helpers.go @@ -131,6 +131,14 @@ func (t *cobraTestRunner) WaitForTextPrinted(text string, timeout time.Duration) }, timeout, 50*time.Millisecond) } +func (t *cobraTestRunner) WaitForOutput(text string, timeout time.Duration) { + require.Eventually(t.T, func() bool { + currentStdout := t.stdout.String() + currentErrout := t.stderr.String() + return strings.Contains(currentStdout, text) || strings.Contains(currentErrout, text) + }, timeout, 50*time.Millisecond) +} + func (t *cobraTestRunner) WithStdin() { reader, writer := io.Pipe() t.stdinR = reader From 48228065ea7303a1e09dca05bc99efecdd7560f0 Mon Sep 17 00:00:00 2001 From: Andrew Nester Date: Wed, 14 Feb 2024 11:50:31 +0100 Subject: [PATCH 11/16] use -target flag --- bundle/deploy/terraform/import.go | 10 +++++++--- 1 file changed, 7 insertions(+), 3 
deletions(-) diff --git a/bundle/deploy/terraform/import.go b/bundle/deploy/terraform/import.go index 5c3149f404..2379473952 100644 --- a/bundle/deploy/terraform/import.go +++ b/bundle/deploy/terraform/import.go @@ -53,7 +53,8 @@ func (m *importResource) Apply(ctx context.Context, b *bundle.Bundle) error { tmpState := filepath.Join(os.TempDir(), TerraformStateFileName) - err = tf.Import(ctx, fmt.Sprintf("%s.%s", m.opts.ResourceType, m.opts.ResourceKey), m.opts.ResourceId, tfexec.StateOut(tmpState)) + importAddress := fmt.Sprintf("%s.%s", m.opts.ResourceType, m.opts.ResourceKey) + err = tf.Import(ctx, importAddress, m.opts.ResourceId, tfexec.StateOut(tmpState)) if err != nil { return fmt.Errorf("terraform import: %w", err) } @@ -62,7 +63,7 @@ func (m *importResource) Apply(ctx context.Context, b *bundle.Bundle) error { tf.SetStdout(buf) //lint:ignore SA1019 We use legacy -state flag for now to plan the import changes based on temporary state file - changed, err := tf.Plan(ctx, tfexec.State(tmpState)) + changed, err := tf.Plan(ctx, tfexec.State(tmpState), tfexec.Target(importAddress)) if err != nil { return fmt.Errorf("terraform plan: %w", err) } @@ -70,7 +71,10 @@ func (m *importResource) Apply(ctx context.Context, b *bundle.Bundle) error { defer os.Remove(tmpState) if changed && !m.opts.AutoApprove { - cmdio.LogString(ctx, buf.String()) + output := buf.String() + // Remove output starting from Warning until end of output + output = output[:bytes.Index([]byte(output), []byte("Warning:"))] + cmdio.LogString(ctx, output) ans, err := cmdio.AskYesOrNo(ctx, "Confirm import changes? 
Changes will be remotely applied only after running 'bundle deploy'.") if err != nil { return err From 1962a68f0a6647aa3acbee752180f6cc89362d48 Mon Sep 17 00:00:00 2001 From: Andrew Nester Date: Wed, 14 Feb 2024 13:59:24 +0100 Subject: [PATCH 12/16] use os.MkdirTemp --- bundle/deploy/terraform/import.go | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/bundle/deploy/terraform/import.go b/bundle/deploy/terraform/import.go index 2379473952..3b59fc9ae8 100644 --- a/bundle/deploy/terraform/import.go +++ b/bundle/deploy/terraform/import.go @@ -50,8 +50,11 @@ func (m *importResource) Apply(ctx context.Context, b *bundle.Bundle) error { if err != nil { return fmt.Errorf("terraform init: %w", err) } - - tmpState := filepath.Join(os.TempDir(), TerraformStateFileName) + tmpDir, err := os.MkdirTemp("", "state-*") + if err != nil { + return fmt.Errorf("terraform init: %w", err) + } + tmpState := filepath.Join(tmpDir, TerraformStateFileName) importAddress := fmt.Sprintf("%s.%s", m.opts.ResourceType, m.opts.ResourceKey) err = tf.Import(ctx, importAddress, m.opts.ResourceId, tfexec.StateOut(tmpState)) @@ -68,7 +71,7 @@ func (m *importResource) Apply(ctx context.Context, b *bundle.Bundle) error { return fmt.Errorf("terraform plan: %w", err) } - defer os.Remove(tmpState) + defer os.RemoveAll(tmpDir) if changed && !m.opts.AutoApprove { output := buf.String() From 04071d6565642c8e2bd651ea8c80f8a4dba14532 Mon Sep 17 00:00:00 2001 From: Andrew Nester Date: Wed, 14 Feb 2024 16:57:07 +0100 Subject: [PATCH 13/16] always write state file --- bundle/deploy/terraform/import.go | 10 ---------- bundle/phases/bind.go | 4 ++++ 2 files changed, 4 insertions(+), 10 deletions(-) diff --git a/bundle/deploy/terraform/import.go b/bundle/deploy/terraform/import.go index 3b59fc9ae8..5fc436f201 100644 --- a/bundle/deploy/terraform/import.go +++ b/bundle/deploy/terraform/import.go @@ -31,16 +31,6 @@ func (m *importResource) Apply(ctx context.Context, b *bundle.Bundle) error { 
return err } - // If the bundle.tf.json file does not exist, write it. - // This is necessary because the import operation requires the resource to be defined in the Terraform configuration. - _, err = os.Stat(filepath.Join(dir, "bundle.tf.json")) - if err != nil { - err = bundle.Apply(ctx, b, bundle.Seq(Interpolate(), Write())) - if err != nil { - return fmt.Errorf("terraform write: %w", err) - } - } - tf := b.Terraform if tf == nil { return fmt.Errorf("terraform not initialized") diff --git a/bundle/phases/bind.go b/bundle/phases/bind.go index 94b7f2d436..b2e92d6e2b 100644 --- a/bundle/phases/bind.go +++ b/bundle/phases/bind.go @@ -14,6 +14,8 @@ func Bind(opts *terraform.BindOptions) bundle.Mutator { bundle.Defer( bundle.Seq( terraform.StatePull(), + terraform.Interpolate(), + terraform.Write(), terraform.Import(opts), terraform.StatePush(), ), @@ -31,6 +33,8 @@ func Unbind(resourceType string, resourceKey string) bundle.Mutator { bundle.Defer( bundle.Seq( terraform.StatePull(), + terraform.Interpolate(), + terraform.Write(), terraform.Unbind(resourceType, resourceKey), terraform.StatePush(), ), From 6cbdde2542aa0ac84e35181c40e4c4b28852143c Mon Sep 17 00:00:00 2001 From: Andrew Nester Date: Wed, 14 Feb 2024 17:27:57 +0100 Subject: [PATCH 14/16] added test for generate + bind --- internal/bundle/bind_resource_test.go | 61 ++++++++++++++++++- .../template/databricks.yml.tmpl | 4 +- 2 files changed, 62 insertions(+), 3 deletions(-) diff --git a/internal/bundle/bind_resource_test.go b/internal/bundle/bind_resource_test.go index eb2f566d19..7584f29b68 100644 --- a/internal/bundle/bind_resource_test.go +++ b/internal/bundle/bind_resource_test.go @@ -77,7 +77,6 @@ func TestAccBindJobToExistingJob(t *testing.T) { require.NoError(t, err) require.Equal(t, job.Settings.Name, fmt.Sprintf("test-job-basic-%s", uniqueId)) require.Contains(t, job.Settings.Tasks[0].SparkPythonTask.PythonFile, "hello_world.py") - } func TestAccAbortBind(t *testing.T) { @@ -124,3 +123,63 @@ func 
TestAccAbortBind(t *testing.T) { require.NotEqual(t, job.Settings.Name, fmt.Sprintf("test-job-basic-%s", uniqueId)) require.Contains(t, job.Settings.Tasks[0].NotebookTask.NotebookPath, "test") } + +func TestAccGenerateAndBind(t *testing.T) { + env := internal.GetEnvOrSkipTest(t, "CLOUD_ENV") + t.Log(env) + + ctx, wt := acc.WorkspaceTest(t) + gt := &generateJobTest{T: t, w: wt.W} + + uniqueId := uuid.New().String() + bundleRoot, err := initTestTemplate(t, ctx, "with_includes", map[string]any{ + "unique_id": uniqueId, + }) + require.NoError(t, err) + + w, err := databricks.NewWorkspaceClient() + require.NoError(t, err) + + jobId := gt.createTestJob(ctx) + t.Cleanup(func() { + _, err = w.Jobs.Get(ctx, jobs.GetJobRequest{ + JobId: jobId, + }) + if err == nil { + gt.destroyJob(ctx, jobId) + } + }) + + t.Setenv("BUNDLE_ROOT", bundleRoot) + c := internal.NewCobraTestRunnerWithContext(t, ctx, "bundle", "generate", "job", + "--key", "test_job_key", + "--existing-job-id", fmt.Sprint(jobId), + "--config-dir", filepath.Join(bundleRoot, "resources"), + "--source-dir", filepath.Join(bundleRoot, "src")) + _, _, err = c.Run() + require.NoError(t, err) + + _, err = os.Stat(filepath.Join(bundleRoot, "src", "test.py")) + require.NoError(t, err) + + matches, err := filepath.Glob(filepath.Join(bundleRoot, "resources", "test_job_key.yml")) + require.NoError(t, err) + + require.Len(t, matches, 1) + + c = internal.NewCobraTestRunner(t, "bundle", "deployment", "bind", "test_job_key", fmt.Sprint(jobId), "--auto-approve") + _, _, err = c.Run() + require.NoError(t, err) + + err = deployBundle(t, ctx, bundleRoot) + require.NoError(t, err) + + err = destroyBundle(t, ctx, bundleRoot) + require.NoError(t, err) + + // Check that job is unbound and exists after bundle is destroyed + _, err = w.Jobs.Get(ctx, jobs.GetJobRequest{ + JobId: jobId, + }) + require.ErrorContains(t, err, "does not exist.") +} diff --git a/internal/bundle/bundles/with_includes/template/databricks.yml.tmpl 
b/internal/bundle/bundles/with_includes/template/databricks.yml.tmpl index 5d17e0fdab..85d31ce3ec 100644 --- a/internal/bundle/bundles/with_includes/template/databricks.yml.tmpl +++ b/internal/bundle/bundles/with_includes/template/databricks.yml.tmpl @@ -4,5 +4,5 @@ bundle: workspace: root_path: "~/.bundle/{{.unique_id}}" -includes: - - resources/*yml +include: - - resources/*.yml From 022ebc4422ee9e47cd5bd7ad69fb349cfe83b5de Mon Sep 17 00:00:00 2001 From: Andrew Nester Date: Wed, 14 Feb 2024 17:38:25 +0100 Subject: [PATCH 15/16] fix comment --- internal/bundle/bind_resource_test.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/internal/bundle/bind_resource_test.go b/internal/bundle/bind_resource_test.go index 7584f29b68..d44ad2c316 100644 --- a/internal/bundle/bind_resource_test.go +++ b/internal/bundle/bind_resource_test.go @@ -177,7 +177,7 @@ func TestAccGenerateAndBind(t *testing.T) { err = destroyBundle(t, ctx, bundleRoot) require.NoError(t, err) - // Check that job is unbound and exists after bundle is destroyed + // Check that job is bound and does not exist after bundle is destroyed _, err = w.Jobs.Get(ctx, jobs.GetJobRequest{ JobId: jobId, }) From 7f8e3b4bff6872a4653657c69bd7c457ead3a235 Mon Sep 17 00:00:00 2001 From: Andrew Nester Date: Wed, 14 Feb 2024 18:14:10 +0100 Subject: [PATCH 16/16] fix pipeline test --- internal/bundle/generate_pipeline_test.go | 18 +++++++++++++----- 1 file changed, 13 insertions(+), 5 deletions(-) diff --git a/internal/bundle/generate_pipeline_test.go b/internal/bundle/generate_pipeline_test.go index 0005e29fa1..b8a1ac8493 100644 --- a/internal/bundle/generate_pipeline_test.go +++ b/internal/bundle/generate_pipeline_test.go @@ -28,7 +28,7 @@ func TestAccGenerateFromExistingPipelineAndDeploy(t *testing.T) { }) require.NoError(t, err) - pipelineId := gt.createTestPipeline(ctx) + pipelineId, name := gt.createTestPipeline(ctx) t.Cleanup(func() { gt.destroyPipeline(ctx, pipelineId) }) @@ -52,9 +52,16 @@ func
TestAccGenerateFromExistingPipelineAndDeploy(t *testing.T) { require.Len(t, matches, 1) // check the content of generated yaml - data, err := os.ReadFile(matches[0]) + fileName := matches[0] + data, err := os.ReadFile(fileName) require.NoError(t, err) generatedYaml := string(data) + + // Replace pipeline name + generatedYaml = strings.ReplaceAll(generatedYaml, name, internal.RandomName("copy-generated-pipeline-")) + err = os.WriteFile(fileName, []byte(generatedYaml), 0644) + require.NoError(t, err) + require.Contains(t, generatedYaml, "libraries:") require.Contains(t, generatedYaml, "- notebook:") require.Contains(t, generatedYaml, fmt.Sprintf("path: %s", filepath.Join("..", "src", "notebook.py"))) @@ -73,7 +80,7 @@ type generatePipelineTest struct { w *databricks.WorkspaceClient } -func (gt *generatePipelineTest) createTestPipeline(ctx context.Context) string { +func (gt *generatePipelineTest) createTestPipeline(ctx context.Context) (string, string) { t := gt.T w := gt.w @@ -87,8 +94,9 @@ func (gt *generatePipelineTest) createTestPipeline(ctx context.Context) string { err = f.Write(ctx, "test.py", strings.NewReader("print('Hello!')")) require.NoError(t, err) + name := internal.RandomName("generated-pipeline-") resp, err := w.Pipelines.Create(ctx, pipelines.CreatePipeline{ - Name: internal.RandomName("generated-pipeline-"), + Name: name, Libraries: []pipelines.PipelineLibrary{ { Notebook: &pipelines.NotebookLibrary{ @@ -104,7 +112,7 @@ func (gt *generatePipelineTest) createTestPipeline(ctx context.Context) string { }) require.NoError(t, err) - return resp.PipelineId + return resp.PipelineId, name } func (gt *generatePipelineTest) destroyPipeline(ctx context.Context, pipelineId string) {