From 4bedb54d8f19f7d00127ce37532fb31c32340305 Mon Sep 17 00:00:00 2001 From: Shreyas Goenka Date: Thu, 18 Jul 2024 18:01:45 +0200 Subject: [PATCH 01/75] rename Location -> Locations. Also add bundle.config.GetLocations function and modify all diagnostics paths to be relative to the bundle root path --- .../mutator/python/python_diagnostics.go | 10 +- .../mutator/python/python_diagnostics_test.go | 2 +- .../mutator/python/python_mutator_test.go | 2 +- bundle/config/mutator/run_as.go | 8 +- bundle/config/root.go | 8 + bundle/config/validate/files_to_sync.go | 8 +- .../validate/job_cluster_key_defined.go | 8 +- .../config/validate/validate_sync_patterns.go | 8 +- bundle/render/render_text_output.go | 19 +- bundle/render/render_text_output_test.go | 12 +- .../sync_include_exclude_no_matches_test.go | 6 +- libs/diag/diagnostic.go | 4 +- libs/dyn/convert/normalize.go | 56 ++--- libs/dyn/convert/normalize_test.go | 208 +++++++++--------- 14 files changed, 187 insertions(+), 172 deletions(-) diff --git a/bundle/config/mutator/python/python_diagnostics.go b/bundle/config/mutator/python/python_diagnostics.go index b8efc9ef73..ce50e38cdc 100644 --- a/bundle/config/mutator/python/python_diagnostics.go +++ b/bundle/config/mutator/python/python_diagnostics.go @@ -56,11 +56,11 @@ func parsePythonDiagnostics(input io.Reader) (diag.Diagnostics, error) { } diag := diag.Diagnostic{ - Severity: severity, - Summary: parsedLine.Summary, - Detail: parsedLine.Detail, - Location: convertPythonLocation(parsedLine.Location), - Path: path, + Severity: severity, + Summary: parsedLine.Summary, + Detail: parsedLine.Detail, + Locations: convertPythonLocation(parsedLine.Location), + Path: path, } diags = diags.Append(diag) diff --git a/bundle/config/mutator/python/python_diagnostics_test.go b/bundle/config/mutator/python/python_diagnostics_test.go index 7b66e2537b..3c895ad99c 100644 --- a/bundle/config/mutator/python/python_diagnostics_test.go +++ 
b/bundle/config/mutator/python/python_diagnostics_test.go @@ -39,7 +39,7 @@ func TestParsePythonDiagnostics(t *testing.T) { { Severity: diag.Error, Summary: "error summary", - Location: dyn.Location{ + Locations: dyn.Location{ File: "src/examples/file.py", Line: 1, Column: 2, diff --git a/bundle/config/mutator/python/python_mutator_test.go b/bundle/config/mutator/python/python_mutator_test.go index 588589831b..e13234447d 100644 --- a/bundle/config/mutator/python/python_mutator_test.go +++ b/bundle/config/mutator/python/python_mutator_test.go @@ -101,7 +101,7 @@ func TestPythonMutator_load(t *testing.T) { File: "src/examples/file.py", Line: 10, Column: 5, - }, diags[0].Location) + }, diags[0].Locations) } func TestPythonMutator_load_disallowed(t *testing.T) { diff --git a/bundle/config/mutator/run_as.go b/bundle/config/mutator/run_as.go index d344a988ae..e757964b05 100644 --- a/bundle/config/mutator/run_as.go +++ b/bundle/config/mutator/run_as.go @@ -178,10 +178,10 @@ func (m *setRunAs) Apply(_ context.Context, b *bundle.Bundle) diag.Diagnostics { setRunAsForJobs(b) return diag.Diagnostics{ { - Severity: diag.Warning, - Summary: "You are using the legacy mode of run_as. The support for this mode is experimental and might be removed in a future release of the CLI. In order to run the DLT pipelines in your DAB as the run_as user this mode changes the owners of the pipelines to the run_as identity, which requires the user deploying the bundle to be a workspace admin, and also a Metastore admin if the pipeline target is in UC.", - Path: dyn.MustPathFromString("experimental.use_legacy_run_as"), - Location: b.Config.GetLocation("experimental.use_legacy_run_as"), + Severity: diag.Warning, + Summary: "You are using the legacy mode of run_as. The support for this mode is experimental and might be removed in a future release of the CLI. 
In order to run the DLT pipelines in your DAB as the run_as user this mode changes the owners of the pipelines to the run_as identity, which requires the user deploying the bundle to be a workspace admin, and also a Metastore admin if the pipeline target is in UC.", + Path: dyn.MustPathFromString("experimental.use_legacy_run_as"), + Locations: b.Config.GetLocation("experimental.use_legacy_run_as"), }, } } diff --git a/bundle/config/root.go b/bundle/config/root.go index 594a9105f6..0c1f710b4e 100644 --- a/bundle/config/root.go +++ b/bundle/config/root.go @@ -524,6 +524,14 @@ func (r Root) GetLocation(path string) dyn.Location { return v.Location() } +func (r Root) GetLocations(path string) []dyn.Location { + v, err := dyn.Get(r.value, path) + if err != nil { + return []dyn.Location{} + } + return v.Locations() +} + // Value returns the dynamic configuration value of the root object. This value // is the source of truth and is kept in sync with values in the typed configuration. func (r Root) Value() dyn.Value { diff --git a/bundle/config/validate/files_to_sync.go b/bundle/config/validate/files_to_sync.go index d53e382432..4ad58d2613 100644 --- a/bundle/config/validate/files_to_sync.go +++ b/bundle/config/validate/files_to_sync.go @@ -43,10 +43,10 @@ func (v *filesToSync) Apply(ctx context.Context, rb bundle.ReadOnlyBundle) diag. 
} else { loc := location{path: "sync.exclude", rb: rb} diags = diags.Append(diag.Diagnostic{ - Severity: diag.Warning, - Summary: "There are no files to sync, please check your .gitignore and sync.exclude configuration", - Location: loc.Location(), - Path: loc.Path(), + Severity: diag.Warning, + Summary: "There are no files to sync, please check your .gitignore and sync.exclude configuration", + Locations: loc.Location(), + Path: loc.Path(), }) } diff --git a/bundle/config/validate/job_cluster_key_defined.go b/bundle/config/validate/job_cluster_key_defined.go index 37ed3f417e..d4f2ba782c 100644 --- a/bundle/config/validate/job_cluster_key_defined.go +++ b/bundle/config/validate/job_cluster_key_defined.go @@ -39,10 +39,10 @@ func (v *jobClusterKeyDefined) Apply(ctx context.Context, rb bundle.ReadOnlyBund } diags = diags.Append(diag.Diagnostic{ - Severity: diag.Warning, - Summary: fmt.Sprintf("job_cluster_key %s is not defined", task.JobClusterKey), - Location: loc.Location(), - Path: loc.Path(), + Severity: diag.Warning, + Summary: fmt.Sprintf("job_cluster_key %s is not defined", task.JobClusterKey), + Locations: loc.Location(), + Path: loc.Path(), }) } } diff --git a/bundle/config/validate/validate_sync_patterns.go b/bundle/config/validate/validate_sync_patterns.go index a04c10776c..758ccec0e2 100644 --- a/bundle/config/validate/validate_sync_patterns.go +++ b/bundle/config/validate/validate_sync_patterns.go @@ -64,10 +64,10 @@ func checkPatterns(patterns []string, path string, rb bundle.ReadOnlyBundle) (di loc := location{path: fmt.Sprintf("%s[%d]", path, index), rb: rb} mu.Lock() diags = diags.Append(diag.Diagnostic{ - Severity: diag.Warning, - Summary: fmt.Sprintf("Pattern %s does not match any files", p), - Location: loc.Location(), - Path: loc.Path(), + Severity: diag.Warning, + Summary: fmt.Sprintf("Pattern %s does not match any files", p), + Locations: loc.Location(), + Path: loc.Path(), }) mu.Unlock() } diff --git a/bundle/render/render_text_output.go 
b/bundle/render/render_text_output.go index 439ae61323..68876dc783 100644 --- a/bundle/render/render_text_output.go +++ b/bundle/render/render_text_output.go @@ -127,6 +127,7 @@ func renderSummaryTemplate(out io.Writer, b *bundle.Bundle, diags diag.Diagnosti return err } +// TODO: Write tests when multiple locations are rendered. func renderDiagnostics(out io.Writer, b *bundle.Bundle, diags diag.Diagnostics) error { errorT := template.Must(template.New("error").Funcs(renderFuncMap).Parse(errorTemplate)) warningT := template.Must(template.New("warning").Funcs(renderFuncMap).Parse(warningTemplate)) @@ -141,12 +142,18 @@ func renderDiagnostics(out io.Writer, b *bundle.Bundle, diags diag.Diagnostics) t = warningT } - // Make file relative to bundle root - if d.Location.File != "" && b != nil { - out, err := filepath.Rel(b.RootPath, d.Location.File) - // if we can't relativize the path, just use path as-is - if err == nil { - d.Location.File = out + for i := range d.Locations { + if b == nil { + break + } + + // Make location relative to bundle root + if d.Locations[i].File != "" { + out, err := filepath.Rel(b.RootPath, d.Locations[i].File) + // if we can't relativize the path, just use path as-is + if err == nil { + d.Locations[i].File = out + } } } diff --git a/bundle/render/render_text_output_test.go b/bundle/render/render_text_output_test.go index b7aec88648..470d007390 100644 --- a/bundle/render/render_text_output_test.go +++ b/bundle/render/render_text_output_test.go @@ -91,7 +91,7 @@ func TestRenderTextOutput(t *testing.T) { Severity: diag.Error, Summary: "error (1)", Detail: "detail (1)", - Location: dyn.Location{ + Locations: dyn.Location{ File: "foo.py", Line: 1, Column: 1, @@ -101,7 +101,7 @@ func TestRenderTextOutput(t *testing.T) { Severity: diag.Error, Summary: "error (2)", Detail: "detail (2)", - Location: dyn.Location{ + Locations: dyn.Location{ File: "foo.py", Line: 2, Column: 1, @@ -111,7 +111,7 @@ func TestRenderTextOutput(t *testing.T) { Severity: 
diag.Warning, Summary: "warning (3)", Detail: "detail (3)", - Location: dyn.Location{ + Locations: dyn.Location{ File: "foo.py", Line: 3, Column: 1, @@ -177,7 +177,7 @@ func TestRenderTextOutput(t *testing.T) { Severity: diag.Error, Summary: "error (1)", Detail: "detail (1)", - Location: dyn.Location{ + Locations: dyn.Location{ File: "foo.py", Line: 1, Column: 1, @@ -187,7 +187,7 @@ func TestRenderTextOutput(t *testing.T) { Severity: diag.Warning, Summary: "warning (2)", Detail: "detail (2)", - Location: dyn.Location{ + Locations: dyn.Location{ File: "foo.py", Line: 3, Column: 1, @@ -252,7 +252,7 @@ func TestRenderDiagnostics(t *testing.T) { Severity: diag.Error, Summary: "failed to load xxx", Detail: "'name' is required", - Location: dyn.Location{ + Locations: dyn.Location{ File: "foo.yaml", Line: 1, Column: 2, diff --git a/bundle/tests/sync_include_exclude_no_matches_test.go b/bundle/tests/sync_include_exclude_no_matches_test.go index 94cedbaa62..d01de84c06 100644 --- a/bundle/tests/sync_include_exclude_no_matches_test.go +++ b/bundle/tests/sync_include_exclude_no_matches_test.go @@ -21,9 +21,9 @@ func TestSyncIncludeExcludeNoMatchesTest(t *testing.T) { require.Equal(t, diags[0].Severity, diag.Warning) require.Equal(t, diags[0].Summary, "Pattern dist does not match any files") - require.Equal(t, diags[0].Location.File, filepath.Join("sync", "override", "databricks.yml")) - require.Equal(t, diags[0].Location.Line, 17) - require.Equal(t, diags[0].Location.Column, 11) + require.Equal(t, diags[0].Locations.File, filepath.Join("sync", "override", "databricks.yml")) + require.Equal(t, diags[0].Locations.Line, 17) + require.Equal(t, diags[0].Locations.Column, 11) require.Equal(t, diags[0].Path.String(), "sync.exclude[0]") summaries := []string{ diff --git a/libs/diag/diagnostic.go b/libs/diag/diagnostic.go index 6215275512..062ca7e552 100644 --- a/libs/diag/diagnostic.go +++ b/libs/diag/diagnostic.go @@ -17,9 +17,9 @@ type Diagnostic struct { // This may be multiple 
lines and may be nil. Detail string - // Location is a source code location associated with the diagnostic message. + // Locations is a source code location associated with the diagnostic message. // It may be zero if there is no associated location. - Location dyn.Location + Locations []dyn.Location // Path is a path to the value in a configuration tree that the diagnostic is associated with. // It may be nil if there is no associated path. diff --git a/libs/dyn/convert/normalize.go b/libs/dyn/convert/normalize.go index 246c97eaf9..fea0e57d6a 100644 --- a/libs/dyn/convert/normalize.go +++ b/libs/dyn/convert/normalize.go @@ -65,19 +65,19 @@ func (n normalizeOptions) normalizeType(typ reflect.Type, src dyn.Value, seen [] func nullWarning(expected dyn.Kind, src dyn.Value, path dyn.Path) diag.Diagnostic { return diag.Diagnostic{ - Severity: diag.Warning, - Summary: fmt.Sprintf("expected a %s value, found null", expected), - Location: src.Location(), - Path: path, + Severity: diag.Warning, + Summary: fmt.Sprintf("expected a %s value, found null", expected), + Locations: src.Location(), + Path: path, } } func typeMismatch(expected dyn.Kind, src dyn.Value, path dyn.Path) diag.Diagnostic { return diag.Diagnostic{ - Severity: diag.Warning, - Summary: fmt.Sprintf("expected %s, found %s", expected, src.Kind()), - Location: src.Location(), - Path: path, + Severity: diag.Warning, + Summary: fmt.Sprintf("expected %s, found %s", expected, src.Kind()), + Locations: src.Location(), + Path: path, } } @@ -96,10 +96,10 @@ func (n normalizeOptions) normalizeStruct(typ reflect.Type, src dyn.Value, seen if !ok { if !pv.IsAnchor() { diags = diags.Append(diag.Diagnostic{ - Severity: diag.Warning, - Summary: fmt.Sprintf("unknown field: %s", pk.MustString()), - Location: pk.Location(), - Path: path, + Severity: diag.Warning, + Summary: fmt.Sprintf("unknown field: %s", pk.MustString()), + Locations: pk.Location(), + Path: path, }) } continue @@ -320,10 +320,10 @@ func (n normalizeOptions) 
normalizeInt(typ reflect.Type, src dyn.Value, path dyn out = int64(src.MustFloat()) if src.MustFloat() != float64(out) { return dyn.InvalidValue, diags.Append(diag.Diagnostic{ - Severity: diag.Warning, - Summary: fmt.Sprintf(`cannot accurately represent "%g" as integer due to precision loss`, src.MustFloat()), - Location: src.Location(), - Path: path, + Severity: diag.Warning, + Summary: fmt.Sprintf(`cannot accurately represent "%g" as integer due to precision loss`, src.MustFloat()), + Locations: src.Location(), + Path: path, }) } case dyn.KindString: @@ -336,10 +336,10 @@ func (n normalizeOptions) normalizeInt(typ reflect.Type, src dyn.Value, path dyn } return dyn.InvalidValue, diags.Append(diag.Diagnostic{ - Severity: diag.Warning, - Summary: fmt.Sprintf("cannot parse %q as an integer", src.MustString()), - Location: src.Location(), - Path: path, + Severity: diag.Warning, + Summary: fmt.Sprintf("cannot parse %q as an integer", src.MustString()), + Locations: src.Location(), + Path: path, }) } case dyn.KindNil: @@ -363,10 +363,10 @@ func (n normalizeOptions) normalizeFloat(typ reflect.Type, src dyn.Value, path d out = float64(src.MustInt()) if src.MustInt() != int64(out) { return dyn.InvalidValue, diags.Append(diag.Diagnostic{ - Severity: diag.Warning, - Summary: fmt.Sprintf(`cannot accurately represent "%d" as floating point number due to precision loss`, src.MustInt()), - Location: src.Location(), - Path: path, + Severity: diag.Warning, + Summary: fmt.Sprintf(`cannot accurately represent "%d" as floating point number due to precision loss`, src.MustInt()), + Locations: src.Location(), + Path: path, }) } case dyn.KindString: @@ -379,10 +379,10 @@ func (n normalizeOptions) normalizeFloat(typ reflect.Type, src dyn.Value, path d } return dyn.InvalidValue, diags.Append(diag.Diagnostic{ - Severity: diag.Warning, - Summary: fmt.Sprintf("cannot parse %q as a floating point number", src.MustString()), - Location: src.Location(), - Path: path, + Severity: diag.Warning, + 
Summary: fmt.Sprintf("cannot parse %q as a floating point number", src.MustString()), + Locations: src.Location(), + Path: path, }) } case dyn.KindNil: diff --git a/libs/dyn/convert/normalize_test.go b/libs/dyn/convert/normalize_test.go index 452ed4eb1d..cf3a1935a1 100644 --- a/libs/dyn/convert/normalize_test.go +++ b/libs/dyn/convert/normalize_test.go @@ -40,10 +40,10 @@ func TestNormalizeStructElementDiagnostic(t *testing.T) { vout, err := Normalize(typ, vin) assert.Len(t, err, 1) assert.Equal(t, diag.Diagnostic{ - Severity: diag.Warning, - Summary: `expected string, found map`, - Location: dyn.Location{}, - Path: dyn.NewPath(dyn.Key("bar")), + Severity: diag.Warning, + Summary: `expected string, found map`, + Locations: dyn.Location{}, + Path: dyn.NewPath(dyn.Key("bar")), }, err[0]) // Elements that encounter an error during normalization are dropped. @@ -66,10 +66,10 @@ func TestNormalizeStructUnknownField(t *testing.T) { vout, err := Normalize(typ, vin) assert.Len(t, err, 1) assert.Equal(t, diag.Diagnostic{ - Severity: diag.Warning, - Summary: `unknown field: bar`, - Location: vin.Get("foo").Location(), - Path: dyn.EmptyPath, + Severity: diag.Warning, + Summary: `unknown field: bar`, + Locations: vin.Get("foo").Location(), + Path: dyn.EmptyPath, }, err[0]) // The field that can be mapped to the struct field is retained. 
@@ -100,10 +100,10 @@ func TestNormalizeStructError(t *testing.T) { _, err := Normalize(typ, vin) assert.Len(t, err, 1) assert.Equal(t, diag.Diagnostic{ - Severity: diag.Warning, - Summary: `expected map, found string`, - Location: vin.Get("foo").Location(), - Path: dyn.EmptyPath, + Severity: diag.Warning, + Summary: `expected map, found string`, + Locations: vin.Get("foo").Location(), + Path: dyn.EmptyPath, }, err[0]) } @@ -245,10 +245,10 @@ func TestNormalizeStructRandomStringError(t *testing.T) { _, err := Normalize(typ, vin) assert.Len(t, err, 1) assert.Equal(t, diag.Diagnostic{ - Severity: diag.Warning, - Summary: `expected map, found string`, - Location: vin.Location(), - Path: dyn.EmptyPath, + Severity: diag.Warning, + Summary: `expected map, found string`, + Locations: vin.Location(), + Path: dyn.EmptyPath, }, err[0]) } @@ -262,10 +262,10 @@ func TestNormalizeStructIntError(t *testing.T) { _, err := Normalize(typ, vin) assert.Len(t, err, 1) assert.Equal(t, diag.Diagnostic{ - Severity: diag.Warning, - Summary: `expected map, found int`, - Location: vin.Location(), - Path: dyn.EmptyPath, + Severity: diag.Warning, + Summary: `expected map, found int`, + Locations: vin.Location(), + Path: dyn.EmptyPath, }, err[0]) } @@ -291,10 +291,10 @@ func TestNormalizeMapElementDiagnostic(t *testing.T) { vout, err := Normalize(typ, vin) assert.Len(t, err, 1) assert.Equal(t, diag.Diagnostic{ - Severity: diag.Warning, - Summary: `expected string, found map`, - Location: dyn.Location{}, - Path: dyn.NewPath(dyn.Key("bar")), + Severity: diag.Warning, + Summary: `expected string, found map`, + Locations: dyn.Location{}, + Path: dyn.NewPath(dyn.Key("bar")), }, err[0]) // Elements that encounter an error during normalization are dropped. 
@@ -317,10 +317,10 @@ func TestNormalizeMapError(t *testing.T) { _, err := Normalize(typ, vin) assert.Len(t, err, 1) assert.Equal(t, diag.Diagnostic{ - Severity: diag.Warning, - Summary: `expected map, found string`, - Location: vin.Location(), - Path: dyn.EmptyPath, + Severity: diag.Warning, + Summary: `expected map, found string`, + Locations: vin.Location(), + Path: dyn.EmptyPath, }, err[0]) } @@ -372,10 +372,10 @@ func TestNormalizeMapRandomStringError(t *testing.T) { _, err := Normalize(typ, vin) assert.Len(t, err, 1) assert.Equal(t, diag.Diagnostic{ - Severity: diag.Warning, - Summary: `expected map, found string`, - Location: vin.Location(), - Path: dyn.EmptyPath, + Severity: diag.Warning, + Summary: `expected map, found string`, + Locations: vin.Location(), + Path: dyn.EmptyPath, }, err[0]) } @@ -385,10 +385,10 @@ func TestNormalizeMapIntError(t *testing.T) { _, err := Normalize(typ, vin) assert.Len(t, err, 1) assert.Equal(t, diag.Diagnostic{ - Severity: diag.Warning, - Summary: `expected map, found int`, - Location: vin.Location(), - Path: dyn.EmptyPath, + Severity: diag.Warning, + Summary: `expected map, found int`, + Locations: vin.Location(), + Path: dyn.EmptyPath, }, err[0]) } @@ -415,10 +415,10 @@ func TestNormalizeSliceElementDiagnostic(t *testing.T) { vout, err := Normalize(typ, vin) assert.Len(t, err, 1) assert.Equal(t, diag.Diagnostic{ - Severity: diag.Warning, - Summary: `expected string, found map`, - Location: dyn.Location{}, - Path: dyn.NewPath(dyn.Index(2)), + Severity: diag.Warning, + Summary: `expected string, found map`, + Locations: dyn.Location{}, + Path: dyn.NewPath(dyn.Index(2)), }, err[0]) // Elements that encounter an error during normalization are dropped. 
@@ -439,10 +439,10 @@ func TestNormalizeSliceError(t *testing.T) { _, err := Normalize(typ, vin) assert.Len(t, err, 1) assert.Equal(t, diag.Diagnostic{ - Severity: diag.Warning, - Summary: `expected sequence, found string`, - Location: vin.Location(), - Path: dyn.EmptyPath, + Severity: diag.Warning, + Summary: `expected sequence, found string`, + Locations: vin.Location(), + Path: dyn.EmptyPath, }, err[0]) } @@ -494,10 +494,10 @@ func TestNormalizeSliceRandomStringError(t *testing.T) { _, err := Normalize(typ, vin) assert.Len(t, err, 1) assert.Equal(t, diag.Diagnostic{ - Severity: diag.Warning, - Summary: `expected sequence, found string`, - Location: vin.Location(), - Path: dyn.EmptyPath, + Severity: diag.Warning, + Summary: `expected sequence, found string`, + Locations: vin.Location(), + Path: dyn.EmptyPath, }, err[0]) } @@ -507,10 +507,10 @@ func TestNormalizeSliceIntError(t *testing.T) { _, err := Normalize(typ, vin) assert.Len(t, err, 1) assert.Equal(t, diag.Diagnostic{ - Severity: diag.Warning, - Summary: `expected sequence, found int`, - Location: vin.Location(), - Path: dyn.EmptyPath, + Severity: diag.Warning, + Summary: `expected sequence, found int`, + Locations: vin.Location(), + Path: dyn.EmptyPath, }, err[0]) } @@ -528,10 +528,10 @@ func TestNormalizeStringNil(t *testing.T) { _, err := Normalize(&typ, vin) assert.Len(t, err, 1) assert.Equal(t, diag.Diagnostic{ - Severity: diag.Warning, - Summary: `expected a string value, found null`, - Location: vin.Location(), - Path: dyn.EmptyPath, + Severity: diag.Warning, + Summary: `expected a string value, found null`, + Locations: vin.Location(), + Path: dyn.EmptyPath, }, err[0]) } @@ -565,10 +565,10 @@ func TestNormalizeStringError(t *testing.T) { _, err := Normalize(&typ, vin) assert.Len(t, err, 1) assert.Equal(t, diag.Diagnostic{ - Severity: diag.Warning, - Summary: `expected string, found map`, - Location: dyn.Location{}, - Path: dyn.EmptyPath, + Severity: diag.Warning, + Summary: `expected string, found 
map`, + Locations: dyn.Location{}, + Path: dyn.EmptyPath, }, err[0]) } @@ -586,10 +586,10 @@ func TestNormalizeBoolNil(t *testing.T) { _, err := Normalize(&typ, vin) assert.Len(t, err, 1) assert.Equal(t, diag.Diagnostic{ - Severity: diag.Warning, - Summary: `expected a bool value, found null`, - Location: vin.Location(), - Path: dyn.EmptyPath, + Severity: diag.Warning, + Summary: `expected a bool value, found null`, + Locations: vin.Location(), + Path: dyn.EmptyPath, }, err[0]) } @@ -628,10 +628,10 @@ func TestNormalizeBoolFromStringError(t *testing.T) { _, err := Normalize(&typ, vin) assert.Len(t, err, 1) assert.Equal(t, diag.Diagnostic{ - Severity: diag.Warning, - Summary: `expected bool, found string`, - Location: vin.Location(), - Path: dyn.EmptyPath, + Severity: diag.Warning, + Summary: `expected bool, found string`, + Locations: vin.Location(), + Path: dyn.EmptyPath, }, err[0]) } @@ -641,10 +641,10 @@ func TestNormalizeBoolError(t *testing.T) { _, err := Normalize(&typ, vin) assert.Len(t, err, 1) assert.Equal(t, diag.Diagnostic{ - Severity: diag.Warning, - Summary: `expected bool, found map`, - Location: dyn.Location{}, - Path: dyn.EmptyPath, + Severity: diag.Warning, + Summary: `expected bool, found map`, + Locations: dyn.Location{}, + Path: dyn.EmptyPath, }, err[0]) } @@ -662,10 +662,10 @@ func TestNormalizeIntNil(t *testing.T) { _, err := Normalize(&typ, vin) assert.Len(t, err, 1) assert.Equal(t, diag.Diagnostic{ - Severity: diag.Warning, - Summary: `expected a int value, found null`, - Location: vin.Location(), - Path: dyn.EmptyPath, + Severity: diag.Warning, + Summary: `expected a int value, found null`, + Locations: vin.Location(), + Path: dyn.EmptyPath, }, err[0]) } @@ -683,10 +683,10 @@ func TestNormalizeIntFromFloatError(t *testing.T) { _, err := Normalize(&typ, vin) assert.Len(t, err, 1) assert.Equal(t, diag.Diagnostic{ - Severity: diag.Warning, - Summary: `cannot accurately represent "1.5" as integer due to precision loss`, - Location: 
vin.Location(), - Path: dyn.EmptyPath, + Severity: diag.Warning, + Summary: `cannot accurately represent "1.5" as integer due to precision loss`, + Locations: vin.Location(), + Path: dyn.EmptyPath, }, err[0]) } @@ -712,10 +712,10 @@ func TestNormalizeIntFromStringError(t *testing.T) { _, err := Normalize(&typ, vin) assert.Len(t, err, 1) assert.Equal(t, diag.Diagnostic{ - Severity: diag.Warning, - Summary: `cannot parse "abc" as an integer`, - Location: vin.Location(), - Path: dyn.EmptyPath, + Severity: diag.Warning, + Summary: `cannot parse "abc" as an integer`, + Locations: vin.Location(), + Path: dyn.EmptyPath, }, err[0]) } @@ -725,10 +725,10 @@ func TestNormalizeIntError(t *testing.T) { _, err := Normalize(&typ, vin) assert.Len(t, err, 1) assert.Equal(t, diag.Diagnostic{ - Severity: diag.Warning, - Summary: `expected int, found map`, - Location: dyn.Location{}, - Path: dyn.EmptyPath, + Severity: diag.Warning, + Summary: `expected int, found map`, + Locations: dyn.Location{}, + Path: dyn.EmptyPath, }, err[0]) } @@ -746,10 +746,10 @@ func TestNormalizeFloatNil(t *testing.T) { _, err := Normalize(&typ, vin) assert.Len(t, err, 1) assert.Equal(t, diag.Diagnostic{ - Severity: diag.Warning, - Summary: `expected a float value, found null`, - Location: vin.Location(), - Path: dyn.EmptyPath, + Severity: diag.Warning, + Summary: `expected a float value, found null`, + Locations: vin.Location(), + Path: dyn.EmptyPath, }, err[0]) } @@ -771,10 +771,10 @@ func TestNormalizeFloatFromIntError(t *testing.T) { _, err := Normalize(&typ, vin) assert.Len(t, err, 1) assert.Equal(t, diag.Diagnostic{ - Severity: diag.Warning, - Summary: `cannot accurately represent "9007199254740993" as floating point number due to precision loss`, - Location: vin.Location(), - Path: dyn.EmptyPath, + Severity: diag.Warning, + Summary: `cannot accurately represent "9007199254740993" as floating point number due to precision loss`, + Locations: vin.Location(), + Path: dyn.EmptyPath, }, err[0]) } @@ 
-800,10 +800,10 @@ func TestNormalizeFloatFromStringError(t *testing.T) { _, err := Normalize(&typ, vin) assert.Len(t, err, 1) assert.Equal(t, diag.Diagnostic{ - Severity: diag.Warning, - Summary: `cannot parse "abc" as a floating point number`, - Location: vin.Location(), - Path: dyn.EmptyPath, + Severity: diag.Warning, + Summary: `cannot parse "abc" as a floating point number`, + Locations: vin.Location(), + Path: dyn.EmptyPath, }, err[0]) } @@ -813,10 +813,10 @@ func TestNormalizeFloatError(t *testing.T) { _, err := Normalize(&typ, vin) assert.Len(t, err, 1) assert.Equal(t, diag.Diagnostic{ - Severity: diag.Warning, - Summary: `expected float, found map`, - Location: dyn.Location{}, - Path: dyn.EmptyPath, + Severity: diag.Warning, + Summary: `expected float, found map`, + Locations: dyn.Location{}, + Path: dyn.EmptyPath, }, err[0]) } From 7ab131fe7013aa68745a861033e2723e0184b7fc Mon Sep 17 00:00:00 2001 From: Shreyas Goenka Date: Thu, 18 Jul 2024 18:37:08 +0200 Subject: [PATCH 02/75] validate.Location() -> validate.Locations() --- bundle/config/validate/validate.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/bundle/config/validate/validate.go b/bundle/config/validate/validate.go index af7e984a11..ef6488862a 100644 --- a/bundle/config/validate/validate.go +++ b/bundle/config/validate/validate.go @@ -16,8 +16,8 @@ type location struct { rb bundle.ReadOnlyBundle } -func (l location) Location() dyn.Location { - return l.rb.Config().GetLocation(l.path) +func (l location) Locations() []dyn.Location { + return l.rb.Config().GetLocations(l.path) } func (l location) Path() dyn.Path { From 39b3eeab1ddeb5ddc99d7fa90622ae636ad4351d Mon Sep 17 00:00:00 2001 From: Shreyas Goenka Date: Thu, 18 Jul 2024 18:45:36 +0200 Subject: [PATCH 03/75] fix TestSyncIncludeExcludeNoMatchesTest --- bundle/tests/sync_include_exclude_no_matches_test.go | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git 
a/bundle/tests/sync_include_exclude_no_matches_test.go b/bundle/tests/sync_include_exclude_no_matches_test.go index d01de84c06..5f4fa47ce4 100644 --- a/bundle/tests/sync_include_exclude_no_matches_test.go +++ b/bundle/tests/sync_include_exclude_no_matches_test.go @@ -9,6 +9,7 @@ import ( "github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle/config/validate" "github.com/databricks/cli/libs/diag" + "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) @@ -21,11 +22,13 @@ func TestSyncIncludeExcludeNoMatchesTest(t *testing.T) { require.Equal(t, diags[0].Severity, diag.Warning) require.Equal(t, diags[0].Summary, "Pattern dist does not match any files") - require.Equal(t, diags[0].Locations.File, filepath.Join("sync", "override", "databricks.yml")) - require.Equal(t, diags[0].Locations.Line, 17) - require.Equal(t, diags[0].Locations.Column, 11) require.Equal(t, diags[0].Path.String(), "sync.exclude[0]") + assert.Len(t, diags[0].Locations, 1) + require.Equal(t, diags[0].Locations[0].File, filepath.Join("sync", "override", "databricks.yml")) + require.Equal(t, diags[0].Locations[0].Line, 17) + require.Equal(t, diags[0].Locations[0].Column, 11) + summaries := []string{ fmt.Sprintf("Pattern %s does not match any files", filepath.Join("src", "*")), fmt.Sprintf("Pattern %s does not match any files", filepath.Join("tests", "*")), From 14a200a39d9acc169121876ba3d209904481958f Mon Sep 17 00:00:00 2001 From: Shreyas Goenka Date: Thu, 18 Jul 2024 18:47:59 +0200 Subject: [PATCH 04/75] patch to transform location -> locations --- .../mutator/python/python_diagnostics.go | 2 +- .../mutator/python/python_diagnostics_test.go | 5 +- .../mutator/python/python_mutator_test.go | 6 +- bundle/config/mutator/run_as.go | 2 +- bundle/config/validate/files_to_sync.go | 2 +- .../validate/job_cluster_key_defined.go | 2 +- .../config/validate/validate_sync_patterns.go | 2 +- bundle/render/render_text_output_test.go | 65 +++++++------------ 
libs/dyn/convert/normalize.go | 14 ++-- libs/dyn/convert/normalize_test.go | 52 +++++++-------- 10 files changed, 65 insertions(+), 87 deletions(-) diff --git a/bundle/config/mutator/python/python_diagnostics.go b/bundle/config/mutator/python/python_diagnostics.go index ce50e38cdc..e9cb0c5668 100644 --- a/bundle/config/mutator/python/python_diagnostics.go +++ b/bundle/config/mutator/python/python_diagnostics.go @@ -59,7 +59,7 @@ func parsePythonDiagnostics(input io.Reader) (diag.Diagnostics, error) { Severity: severity, Summary: parsedLine.Summary, Detail: parsedLine.Detail, - Locations: convertPythonLocation(parsedLine.Location), + Locations: []dyn.Location{convertPythonLocation(parsedLine.Location)}, Path: path, } diff --git a/bundle/config/mutator/python/python_diagnostics_test.go b/bundle/config/mutator/python/python_diagnostics_test.go index 3c895ad99c..eee29bee7d 100644 --- a/bundle/config/mutator/python/python_diagnostics_test.go +++ b/bundle/config/mutator/python/python_diagnostics_test.go @@ -39,11 +39,10 @@ func TestParsePythonDiagnostics(t *testing.T) { { Severity: diag.Error, Summary: "error summary", - Locations: dyn.Location{ + Locations: []dyn.Location{{ File: "src/examples/file.py", Line: 1, - Column: 2, - }, + Column: 2}}, }, }, }, diff --git a/bundle/config/mutator/python/python_mutator_test.go b/bundle/config/mutator/python/python_mutator_test.go index e13234447d..dc4ed9322e 100644 --- a/bundle/config/mutator/python/python_mutator_test.go +++ b/bundle/config/mutator/python/python_mutator_test.go @@ -97,11 +97,11 @@ func TestPythonMutator_load(t *testing.T) { assert.Equal(t, 1, len(diags)) assert.Equal(t, "job doesn't have any tasks", diags[0].Summary) - assert.Equal(t, dyn.Location{ + assert.Equal(t, []dyn.Location{{ File: "src/examples/file.py", Line: 10, - Column: 5, - }, diags[0].Locations) + Column: 5}}, diags[0].Locations) + } func TestPythonMutator_load_disallowed(t *testing.T) { diff --git a/bundle/config/mutator/run_as.go 
b/bundle/config/mutator/run_as.go index e757964b05..168918d0db 100644 --- a/bundle/config/mutator/run_as.go +++ b/bundle/config/mutator/run_as.go @@ -181,7 +181,7 @@ func (m *setRunAs) Apply(_ context.Context, b *bundle.Bundle) diag.Diagnostics { Severity: diag.Warning, Summary: "You are using the legacy mode of run_as. The support for this mode is experimental and might be removed in a future release of the CLI. In order to run the DLT pipelines in your DAB as the run_as user this mode changes the owners of the pipelines to the run_as identity, which requires the user deploying the bundle to be a workspace admin, and also a Metastore admin if the pipeline target is in UC.", Path: dyn.MustPathFromString("experimental.use_legacy_run_as"), - Locations: b.Config.GetLocation("experimental.use_legacy_run_as"), + Locations: b.Config.GetLocations("experimental.use_legacy_run_as"), }, } } diff --git a/bundle/config/validate/files_to_sync.go b/bundle/config/validate/files_to_sync.go index 4ad58d2613..67b94090a3 100644 --- a/bundle/config/validate/files_to_sync.go +++ b/bundle/config/validate/files_to_sync.go @@ -45,7 +45,7 @@ func (v *filesToSync) Apply(ctx context.Context, rb bundle.ReadOnlyBundle) diag. 
diags = diags.Append(diag.Diagnostic{ Severity: diag.Warning, Summary: "There are no files to sync, please check your .gitignore and sync.exclude configuration", - Locations: loc.Location(), + Locations: loc.Locations(), Path: loc.Path(), }) } diff --git a/bundle/config/validate/job_cluster_key_defined.go b/bundle/config/validate/job_cluster_key_defined.go index d4f2ba782c..392259ade6 100644 --- a/bundle/config/validate/job_cluster_key_defined.go +++ b/bundle/config/validate/job_cluster_key_defined.go @@ -41,7 +41,7 @@ func (v *jobClusterKeyDefined) Apply(ctx context.Context, rb bundle.ReadOnlyBund diags = diags.Append(diag.Diagnostic{ Severity: diag.Warning, Summary: fmt.Sprintf("job_cluster_key %s is not defined", task.JobClusterKey), - Locations: loc.Location(), + Locations: loc.Locations(), Path: loc.Path(), }) } diff --git a/bundle/config/validate/validate_sync_patterns.go b/bundle/config/validate/validate_sync_patterns.go index 758ccec0e2..abcbce16a2 100644 --- a/bundle/config/validate/validate_sync_patterns.go +++ b/bundle/config/validate/validate_sync_patterns.go @@ -66,7 +66,7 @@ func checkPatterns(patterns []string, path string, rb bundle.ReadOnlyBundle) (di diags = diags.Append(diag.Diagnostic{ Severity: diag.Warning, Summary: fmt.Sprintf("Pattern %s does not match any files", p), - Locations: loc.Location(), + Locations: loc.Locations(), Path: loc.Path(), }) mu.Unlock() diff --git a/bundle/render/render_text_output_test.go b/bundle/render/render_text_output_test.go index 470d007390..8adf54161e 100644 --- a/bundle/render/render_text_output_test.go +++ b/bundle/render/render_text_output_test.go @@ -88,34 +88,22 @@ func TestRenderTextOutput(t *testing.T) { bundle: loadingBundle, diags: diag.Diagnostics{ diag.Diagnostic{ - Severity: diag.Error, - Summary: "error (1)", - Detail: "detail (1)", - Locations: dyn.Location{ - File: "foo.py", - Line: 1, - Column: 1, - }, + Severity: diag.Error, + Summary: "error (1)", + Detail: "detail (1)", + Locations: 
[]dyn.Location{{File: "foo.py", Line: 1, Column: 1}}, }, diag.Diagnostic{ - Severity: diag.Error, - Summary: "error (2)", - Detail: "detail (2)", - Locations: dyn.Location{ - File: "foo.py", - Line: 2, - Column: 1, - }, + Severity: diag.Error, + Summary: "error (2)", + Detail: "detail (2)", + Locations: []dyn.Location{{File: "foo.py", Line: 2, Column: 1}}, }, diag.Diagnostic{ - Severity: diag.Warning, - Summary: "warning (3)", - Detail: "detail (3)", - Locations: dyn.Location{ - File: "foo.py", - Line: 3, - Column: 1, - }, + Severity: diag.Warning, + Summary: "warning (3)", + Detail: "detail (3)", + Locations: []dyn.Location{{File: "foo.py", Line: 3, Column: 1}}, }, }, opts: RenderOptions{RenderSummaryTable: true}, @@ -174,24 +162,16 @@ func TestRenderTextOutput(t *testing.T) { bundle: nil, diags: diag.Diagnostics{ diag.Diagnostic{ - Severity: diag.Error, - Summary: "error (1)", - Detail: "detail (1)", - Locations: dyn.Location{ - File: "foo.py", - Line: 1, - Column: 1, - }, + Severity: diag.Error, + Summary: "error (1)", + Detail: "detail (1)", + Locations: []dyn.Location{{File: "foo.py", Line: 1, Column: 1}}, }, diag.Diagnostic{ - Severity: diag.Warning, - Summary: "warning (2)", - Detail: "detail (2)", - Locations: dyn.Location{ - File: "foo.py", - Line: 3, - Column: 1, - }, + Severity: diag.Warning, + Summary: "warning (2)", + Detail: "detail (2)", + Locations: []dyn.Location{{File: "foo.py", Line: 3, Column: 1}}, }, }, opts: RenderOptions{RenderSummaryTable: false}, @@ -252,11 +232,10 @@ func TestRenderDiagnostics(t *testing.T) { Severity: diag.Error, Summary: "failed to load xxx", Detail: "'name' is required", - Locations: dyn.Location{ + Locations: []dyn.Location{{ File: "foo.yaml", Line: 1, - Column: 2, - }, + Column: 2}}, }, }, expected: "Error: failed to load xxx\n" + diff --git a/libs/dyn/convert/normalize.go b/libs/dyn/convert/normalize.go index fea0e57d6a..df3c858b06 100644 --- a/libs/dyn/convert/normalize.go +++ b/libs/dyn/convert/normalize.go @@ 
-67,7 +67,7 @@ func nullWarning(expected dyn.Kind, src dyn.Value, path dyn.Path) diag.Diagnosti return diag.Diagnostic{ Severity: diag.Warning, Summary: fmt.Sprintf("expected a %s value, found null", expected), - Locations: src.Location(), + Locations: []dyn.Location{src.Location()}, Path: path, } } @@ -76,7 +76,7 @@ func typeMismatch(expected dyn.Kind, src dyn.Value, path dyn.Path) diag.Diagnost return diag.Diagnostic{ Severity: diag.Warning, Summary: fmt.Sprintf("expected %s, found %s", expected, src.Kind()), - Locations: src.Location(), + Locations: []dyn.Location{src.Location()}, Path: path, } } @@ -98,7 +98,7 @@ func (n normalizeOptions) normalizeStruct(typ reflect.Type, src dyn.Value, seen diags = diags.Append(diag.Diagnostic{ Severity: diag.Warning, Summary: fmt.Sprintf("unknown field: %s", pk.MustString()), - Locations: pk.Location(), + Locations: []dyn.Location{pk.Location()}, Path: path, }) } @@ -322,7 +322,7 @@ func (n normalizeOptions) normalizeInt(typ reflect.Type, src dyn.Value, path dyn return dyn.InvalidValue, diags.Append(diag.Diagnostic{ Severity: diag.Warning, Summary: fmt.Sprintf(`cannot accurately represent "%g" as integer due to precision loss`, src.MustFloat()), - Locations: src.Location(), + Locations: []dyn.Location{src.Location()}, Path: path, }) } @@ -338,7 +338,7 @@ func (n normalizeOptions) normalizeInt(typ reflect.Type, src dyn.Value, path dyn return dyn.InvalidValue, diags.Append(diag.Diagnostic{ Severity: diag.Warning, Summary: fmt.Sprintf("cannot parse %q as an integer", src.MustString()), - Locations: src.Location(), + Locations: []dyn.Location{src.Location()}, Path: path, }) } @@ -365,7 +365,7 @@ func (n normalizeOptions) normalizeFloat(typ reflect.Type, src dyn.Value, path d return dyn.InvalidValue, diags.Append(diag.Diagnostic{ Severity: diag.Warning, Summary: fmt.Sprintf(`cannot accurately represent "%d" as floating point number due to precision loss`, src.MustInt()), - Locations: src.Location(), + Locations: 
[]dyn.Location{src.Location()}, Path: path, }) } @@ -381,7 +381,7 @@ func (n normalizeOptions) normalizeFloat(typ reflect.Type, src dyn.Value, path d return dyn.InvalidValue, diags.Append(diag.Diagnostic{ Severity: diag.Warning, Summary: fmt.Sprintf("cannot parse %q as a floating point number", src.MustString()), - Locations: src.Location(), + Locations: []dyn.Location{src.Location()}, Path: path, }) } diff --git a/libs/dyn/convert/normalize_test.go b/libs/dyn/convert/normalize_test.go index cf3a1935a1..536bfa6d0a 100644 --- a/libs/dyn/convert/normalize_test.go +++ b/libs/dyn/convert/normalize_test.go @@ -42,7 +42,7 @@ func TestNormalizeStructElementDiagnostic(t *testing.T) { assert.Equal(t, diag.Diagnostic{ Severity: diag.Warning, Summary: `expected string, found map`, - Locations: dyn.Location{}, + Locations: []dyn.Location{{}}, Path: dyn.NewPath(dyn.Key("bar")), }, err[0]) @@ -68,7 +68,7 @@ func TestNormalizeStructUnknownField(t *testing.T) { assert.Equal(t, diag.Diagnostic{ Severity: diag.Warning, Summary: `unknown field: bar`, - Locations: vin.Get("foo").Location(), + Locations: []dyn.Location{vin.Get("foo").Location()}, Path: dyn.EmptyPath, }, err[0]) @@ -102,7 +102,7 @@ func TestNormalizeStructError(t *testing.T) { assert.Equal(t, diag.Diagnostic{ Severity: diag.Warning, Summary: `expected map, found string`, - Locations: vin.Get("foo").Location(), + Locations: []dyn.Location{vin.Get("foo").Location()}, Path: dyn.EmptyPath, }, err[0]) } @@ -247,7 +247,7 @@ func TestNormalizeStructRandomStringError(t *testing.T) { assert.Equal(t, diag.Diagnostic{ Severity: diag.Warning, Summary: `expected map, found string`, - Locations: vin.Location(), + Locations: []dyn.Location{vin.Location()}, Path: dyn.EmptyPath, }, err[0]) } @@ -264,7 +264,7 @@ func TestNormalizeStructIntError(t *testing.T) { assert.Equal(t, diag.Diagnostic{ Severity: diag.Warning, Summary: `expected map, found int`, - Locations: vin.Location(), + Locations: []dyn.Location{vin.Location()}, Path: 
dyn.EmptyPath, }, err[0]) } @@ -293,7 +293,7 @@ func TestNormalizeMapElementDiagnostic(t *testing.T) { assert.Equal(t, diag.Diagnostic{ Severity: diag.Warning, Summary: `expected string, found map`, - Locations: dyn.Location{}, + Locations: []dyn.Location{{}}, Path: dyn.NewPath(dyn.Key("bar")), }, err[0]) @@ -319,7 +319,7 @@ func TestNormalizeMapError(t *testing.T) { assert.Equal(t, diag.Diagnostic{ Severity: diag.Warning, Summary: `expected map, found string`, - Locations: vin.Location(), + Locations: []dyn.Location{vin.Location()}, Path: dyn.EmptyPath, }, err[0]) } @@ -374,7 +374,7 @@ func TestNormalizeMapRandomStringError(t *testing.T) { assert.Equal(t, diag.Diagnostic{ Severity: diag.Warning, Summary: `expected map, found string`, - Locations: vin.Location(), + Locations: []dyn.Location{vin.Location()}, Path: dyn.EmptyPath, }, err[0]) } @@ -387,7 +387,7 @@ func TestNormalizeMapIntError(t *testing.T) { assert.Equal(t, diag.Diagnostic{ Severity: diag.Warning, Summary: `expected map, found int`, - Locations: vin.Location(), + Locations: []dyn.Location{vin.Location()}, Path: dyn.EmptyPath, }, err[0]) } @@ -417,7 +417,7 @@ func TestNormalizeSliceElementDiagnostic(t *testing.T) { assert.Equal(t, diag.Diagnostic{ Severity: diag.Warning, Summary: `expected string, found map`, - Locations: dyn.Location{}, + Locations: []dyn.Location{{}}, Path: dyn.NewPath(dyn.Index(2)), }, err[0]) @@ -441,7 +441,7 @@ func TestNormalizeSliceError(t *testing.T) { assert.Equal(t, diag.Diagnostic{ Severity: diag.Warning, Summary: `expected sequence, found string`, - Locations: vin.Location(), + Locations: []dyn.Location{vin.Location()}, Path: dyn.EmptyPath, }, err[0]) } @@ -496,7 +496,7 @@ func TestNormalizeSliceRandomStringError(t *testing.T) { assert.Equal(t, diag.Diagnostic{ Severity: diag.Warning, Summary: `expected sequence, found string`, - Locations: vin.Location(), + Locations: []dyn.Location{vin.Location()}, Path: dyn.EmptyPath, }, err[0]) } @@ -509,7 +509,7 @@ func 
TestNormalizeSliceIntError(t *testing.T) { assert.Equal(t, diag.Diagnostic{ Severity: diag.Warning, Summary: `expected sequence, found int`, - Locations: vin.Location(), + Locations: []dyn.Location{vin.Location()}, Path: dyn.EmptyPath, }, err[0]) } @@ -530,7 +530,7 @@ func TestNormalizeStringNil(t *testing.T) { assert.Equal(t, diag.Diagnostic{ Severity: diag.Warning, Summary: `expected a string value, found null`, - Locations: vin.Location(), + Locations: []dyn.Location{vin.Location()}, Path: dyn.EmptyPath, }, err[0]) } @@ -567,7 +567,7 @@ func TestNormalizeStringError(t *testing.T) { assert.Equal(t, diag.Diagnostic{ Severity: diag.Warning, Summary: `expected string, found map`, - Locations: dyn.Location{}, + Locations: []dyn.Location{{}}, Path: dyn.EmptyPath, }, err[0]) } @@ -588,7 +588,7 @@ func TestNormalizeBoolNil(t *testing.T) { assert.Equal(t, diag.Diagnostic{ Severity: diag.Warning, Summary: `expected a bool value, found null`, - Locations: vin.Location(), + Locations: []dyn.Location{vin.Location()}, Path: dyn.EmptyPath, }, err[0]) } @@ -630,7 +630,7 @@ func TestNormalizeBoolFromStringError(t *testing.T) { assert.Equal(t, diag.Diagnostic{ Severity: diag.Warning, Summary: `expected bool, found string`, - Locations: vin.Location(), + Locations: []dyn.Location{vin.Location()}, Path: dyn.EmptyPath, }, err[0]) } @@ -643,7 +643,7 @@ func TestNormalizeBoolError(t *testing.T) { assert.Equal(t, diag.Diagnostic{ Severity: diag.Warning, Summary: `expected bool, found map`, - Locations: dyn.Location{}, + Locations: []dyn.Location{{}}, Path: dyn.EmptyPath, }, err[0]) } @@ -664,7 +664,7 @@ func TestNormalizeIntNil(t *testing.T) { assert.Equal(t, diag.Diagnostic{ Severity: diag.Warning, Summary: `expected a int value, found null`, - Locations: vin.Location(), + Locations: []dyn.Location{vin.Location()}, Path: dyn.EmptyPath, }, err[0]) } @@ -685,7 +685,7 @@ func TestNormalizeIntFromFloatError(t *testing.T) { assert.Equal(t, diag.Diagnostic{ Severity: diag.Warning, Summary: 
`cannot accurately represent "1.5" as integer due to precision loss`, - Locations: vin.Location(), + Locations: []dyn.Location{vin.Location()}, Path: dyn.EmptyPath, }, err[0]) } @@ -714,7 +714,7 @@ func TestNormalizeIntFromStringError(t *testing.T) { assert.Equal(t, diag.Diagnostic{ Severity: diag.Warning, Summary: `cannot parse "abc" as an integer`, - Locations: vin.Location(), + Locations: []dyn.Location{vin.Location()}, Path: dyn.EmptyPath, }, err[0]) } @@ -727,7 +727,7 @@ func TestNormalizeIntError(t *testing.T) { assert.Equal(t, diag.Diagnostic{ Severity: diag.Warning, Summary: `expected int, found map`, - Locations: dyn.Location{}, + Locations: []dyn.Location{{}}, Path: dyn.EmptyPath, }, err[0]) } @@ -748,7 +748,7 @@ func TestNormalizeFloatNil(t *testing.T) { assert.Equal(t, diag.Diagnostic{ Severity: diag.Warning, Summary: `expected a float value, found null`, - Locations: vin.Location(), + Locations: []dyn.Location{vin.Location()}, Path: dyn.EmptyPath, }, err[0]) } @@ -773,7 +773,7 @@ func TestNormalizeFloatFromIntError(t *testing.T) { assert.Equal(t, diag.Diagnostic{ Severity: diag.Warning, Summary: `cannot accurately represent "9007199254740993" as floating point number due to precision loss`, - Locations: vin.Location(), + Locations: []dyn.Location{vin.Location()}, Path: dyn.EmptyPath, }, err[0]) } @@ -802,7 +802,7 @@ func TestNormalizeFloatFromStringError(t *testing.T) { assert.Equal(t, diag.Diagnostic{ Severity: diag.Warning, Summary: `cannot parse "abc" as a floating point number`, - Locations: vin.Location(), + Locations: []dyn.Location{vin.Location()}, Path: dyn.EmptyPath, }, err[0]) } @@ -815,7 +815,7 @@ func TestNormalizeFloatError(t *testing.T) { assert.Equal(t, diag.Diagnostic{ Severity: diag.Warning, Summary: `expected float, found map`, - Locations: dyn.Location{}, + Locations: []dyn.Location{{}}, Path: dyn.EmptyPath, }, err[0]) } From b380344608e814da3ab758c21017af75f6e397d5 Mon Sep 17 00:00:00 2001 From: Shreyas Goenka Date: Thu, 18 Jul 2024 
19:45:27 +0200 Subject: [PATCH 05/75] add support for printing multiple locations in validate errors and warnings --- bundle/config/validate/files_to_sync.go | 2 ++ bundle/render/render_text_output.go | 32 +++++++++++++++++++----- bundle/render/render_text_output_test.go | 26 +++++++++++++++++++ 3 files changed, 54 insertions(+), 6 deletions(-) diff --git a/bundle/config/validate/files_to_sync.go b/bundle/config/validate/files_to_sync.go index 67b94090a3..af34ac1f54 100644 --- a/bundle/config/validate/files_to_sync.go +++ b/bundle/config/validate/files_to_sync.go @@ -46,6 +46,8 @@ func (v *filesToSync) Apply(ctx context.Context, rb bundle.ReadOnlyBundle) diag. Severity: diag.Warning, Summary: "There are no files to sync, please check your .gitignore and sync.exclude configuration", Locations: loc.Locations(), + // TODO: Highlight in the PR that the semantics have been changed. Also that + // for array values, .Location or .Locations does not make a difference. Path: loc.Path(), }) } diff --git a/bundle/render/render_text_output.go b/bundle/render/render_text_output.go index 68876dc783..3ba221dafb 100644 --- a/bundle/render/render_text_output.go +++ b/bundle/render/render_text_output.go @@ -9,10 +9,33 @@ import ( "github.com/databricks/cli/bundle" "github.com/databricks/cli/libs/diag" + "github.com/databricks/cli/libs/dyn" "github.com/databricks/databricks-sdk-go/service/iam" "github.com/fatih/color" ) +func printLocations(locations []dyn.Location) string { + res := strings.Builder{} + + first := true + for _, loc := range locations { + if loc.File == "" { + continue + } + + res.WriteString("\n") + if first { + res.WriteString(" in ") + first = false + } else { + res.WriteString(" ") + } + + res.WriteString(loc.String()) + } + return res.String() +} + var renderFuncMap = template.FuncMap{ "red": color.RedString, "green": color.GreenString, @@ -26,15 +49,14 @@ var renderFuncMap = template.FuncMap{ "italic": func(format string, a ...interface{}) string { return 
color.New(color.Italic).Sprintf(format, a...) }, + "printLocations": printLocations, } const errorTemplate = `{{ "Error" | red }}: {{ .Summary }} {{- if .Path.String }} {{ "at " }}{{ .Path.String | green }} {{- end }} -{{- if .Location.File }} - {{ "in " }}{{ .Location.String | cyan }} -{{- end }} +{{- printLocations .Locations -}} {{- if .Detail }} {{ .Detail }} @@ -46,9 +68,7 @@ const warningTemplate = `{{ "Warning" | yellow }}: {{ .Summary }} {{- if .Path.String }} {{ "at " }}{{ .Path.String | green }} {{- end }} -{{- if .Location.File }} - {{ "in " }}{{ .Location.String | cyan }} -{{- end }} +{{- printLocations .Locations -}} {{- if .Detail }} {{ .Detail }} diff --git a/bundle/render/render_text_output_test.go b/bundle/render/render_text_output_test.go index 8adf54161e..81e2761995 100644 --- a/bundle/render/render_text_output_test.go +++ b/bundle/render/render_text_output_test.go @@ -242,6 +242,32 @@ func TestRenderDiagnostics(t *testing.T) { " in foo.yaml:1:2\n\n" + "'name' is required\n\n", }, + { + name: "error with multiple source locations", + diags: diag.Diagnostics{ + { + Severity: diag.Error, + Summary: "failed to load xxx", + Detail: "'name' is required", + Locations: []dyn.Location{ + { + File: "foo.yaml", + Line: 1, + Column: 2, + }, + { + File: "bar.yaml", + Line: 3, + Column: 4, + }, + }, + }, + }, + expected: "Error: failed to load xxx\n" + + " in foo.yaml:1:2\n" + + " bar.yaml:3:4\n\n" + + "'name' is required\n\n", + }, { name: "error with path", diags: diag.Diagnostics{ From 50b8fa57de0ba76b1c458330df9a62cccd988ab3 Mon Sep 17 00:00:00 2001 From: Shreyas Goenka Date: Thu, 18 Jul 2024 19:47:08 +0200 Subject: [PATCH 06/75] newlines in python_diagnostics_test.go --- .../config/mutator/python/python_diagnostics_test.go | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/bundle/config/mutator/python/python_diagnostics_test.go b/bundle/config/mutator/python/python_diagnostics_test.go index eee29bee7d..09d9f93bd2 100644 --- 
a/bundle/config/mutator/python/python_diagnostics_test.go +++ b/bundle/config/mutator/python/python_diagnostics_test.go @@ -39,10 +39,13 @@ func TestParsePythonDiagnostics(t *testing.T) { { Severity: diag.Error, Summary: "error summary", - Locations: []dyn.Location{{ - File: "src/examples/file.py", - Line: 1, - Column: 2}}, + Locations: []dyn.Location{ + { + File: "src/examples/file.py", + Line: 1, + Column: 2, + }, + }, }, }, }, From 4d1e2a04f6817b5f4e7de80c4b9547137583ec03 Mon Sep 17 00:00:00 2001 From: Shreyas Goenka Date: Thu, 18 Jul 2024 19:48:17 +0200 Subject: [PATCH 07/75] newlines in python_mutator_test.go --- bundle/config/mutator/python/python_mutator_test.go | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/bundle/config/mutator/python/python_mutator_test.go b/bundle/config/mutator/python/python_mutator_test.go index dc4ed9322e..fbe835f928 100644 --- a/bundle/config/mutator/python/python_mutator_test.go +++ b/bundle/config/mutator/python/python_mutator_test.go @@ -97,10 +97,13 @@ func TestPythonMutator_load(t *testing.T) { assert.Equal(t, 1, len(diags)) assert.Equal(t, "job doesn't have any tasks", diags[0].Summary) - assert.Equal(t, []dyn.Location{{ - File: "src/examples/file.py", - Line: 10, - Column: 5}}, diags[0].Locations) + assert.Equal(t, []dyn.Location{ + { + File: "src/examples/file.py", + Line: 10, + Column: 5, + }, + }, diags[0].Locations) } From 3b8fc61f99ebe87402c737c61f72d82aef1b6013 Mon Sep 17 00:00:00 2001 From: Shreyas Goenka Date: Thu, 18 Jul 2024 19:50:10 +0200 Subject: [PATCH 08/75] clean up --- bundle/config/validate/files_to_sync.go | 2 -- 1 file changed, 2 deletions(-) diff --git a/bundle/config/validate/files_to_sync.go b/bundle/config/validate/files_to_sync.go index af34ac1f54..67b94090a3 100644 --- a/bundle/config/validate/files_to_sync.go +++ b/bundle/config/validate/files_to_sync.go @@ -46,8 +46,6 @@ func (v *filesToSync) Apply(ctx context.Context, rb bundle.ReadOnlyBundle) diag. 
Severity: diag.Warning, Summary: "There are no files to sync, please check your .gitignore and sync.exclude configuration", Locations: loc.Locations(), - // TODO: Highlight in the PR that the semantics have been changed. Also that - // for array values, .Location or .Locations does not make a difference. Path: loc.Path(), }) } From 83558a658aadb153637df6cc9dcacf4e4de536e9 Mon Sep 17 00:00:00 2001 From: Shreyas Goenka Date: Thu, 18 Jul 2024 19:57:02 +0200 Subject: [PATCH 09/75] only select single location for undefined job_cluster_key --- bundle/config/validate/files_to_sync.go | 6 ++++-- bundle/config/validate/job_cluster_key_defined.go | 10 +++++++--- bundle/config/validate/validate.go | 4 ++++ 3 files changed, 15 insertions(+), 5 deletions(-) diff --git a/bundle/config/validate/files_to_sync.go b/bundle/config/validate/files_to_sync.go index 67b94090a3..ae6bfef1a9 100644 --- a/bundle/config/validate/files_to_sync.go +++ b/bundle/config/validate/files_to_sync.go @@ -43,8 +43,10 @@ func (v *filesToSync) Apply(ctx context.Context, rb bundle.ReadOnlyBundle) diag. } else { loc := location{path: "sync.exclude", rb: rb} diags = diags.Append(diag.Diagnostic{ - Severity: diag.Warning, - Summary: "There are no files to sync, please check your .gitignore and sync.exclude configuration", + Severity: diag.Warning, + Summary: "There are no files to sync, please check your .gitignore and sync.exclude configuration", + // Show all locations where sync.exclude is defined, since merging + // sync.exclude is additive. 
Locations: loc.Locations(), Path: loc.Path(), }) diff --git a/bundle/config/validate/job_cluster_key_defined.go b/bundle/config/validate/job_cluster_key_defined.go index 392259ade6..168303d83b 100644 --- a/bundle/config/validate/job_cluster_key_defined.go +++ b/bundle/config/validate/job_cluster_key_defined.go @@ -6,6 +6,7 @@ import ( "github.com/databricks/cli/bundle" "github.com/databricks/cli/libs/diag" + "github.com/databricks/cli/libs/dyn" ) func JobClusterKeyDefined() bundle.ReadOnlyMutator { @@ -39,9 +40,12 @@ func (v *jobClusterKeyDefined) Apply(ctx context.Context, rb bundle.ReadOnlyBund } diags = diags.Append(diag.Diagnostic{ - Severity: diag.Warning, - Summary: fmt.Sprintf("job_cluster_key %s is not defined", task.JobClusterKey), - Locations: loc.Locations(), + Severity: diag.Warning, + Summary: fmt.Sprintf("job_cluster_key %s is not defined", task.JobClusterKey), + // Show only the location where the job_cluster_key is defined. + // Other associated locations are not relevant since they are + // overridden during merging. 
+ Locations: []dyn.Location{loc.Location()}, Path: loc.Path(), }) } diff --git a/bundle/config/validate/validate.go b/bundle/config/validate/validate.go index ef6488862a..b4da0bc053 100644 --- a/bundle/config/validate/validate.go +++ b/bundle/config/validate/validate.go @@ -16,6 +16,10 @@ type location struct { rb bundle.ReadOnlyBundle } +func (l location) Location() dyn.Location { + return l.rb.Config().GetLocation(l.path) +} + func (l location) Locations() []dyn.Location { return l.rb.Config().GetLocations(l.path) } From d9b8902bce09d532326d50a5582c59f13a340a49 Mon Sep 17 00:00:00 2001 From: Shreyas Goenka Date: Thu, 18 Jul 2024 20:00:56 +0200 Subject: [PATCH 10/75] single location for validate_sync_patterns.go --- bundle/config/validate/validate_sync_patterns.go | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/bundle/config/validate/validate_sync_patterns.go b/bundle/config/validate/validate_sync_patterns.go index abcbce16a2..f3655ca949 100644 --- a/bundle/config/validate/validate_sync_patterns.go +++ b/bundle/config/validate/validate_sync_patterns.go @@ -7,6 +7,7 @@ import ( "github.com/databricks/cli/bundle" "github.com/databricks/cli/libs/diag" + "github.com/databricks/cli/libs/dyn" "github.com/databricks/cli/libs/fileset" "golang.org/x/sync/errgroup" ) @@ -66,7 +67,7 @@ func checkPatterns(patterns []string, path string, rb bundle.ReadOnlyBundle) (di diags = diags.Append(diag.Diagnostic{ Severity: diag.Warning, Summary: fmt.Sprintf("Pattern %s does not match any files", p), - Locations: loc.Locations(), + Locations: []dyn.Location{loc.Location()}, Path: loc.Path(), }) mu.Unlock() From 5b94edd1a683e1eb4c6b397ad85777351f742195 Mon Sep 17 00:00:00 2001 From: Shreyas Goenka Date: Fri, 19 Jul 2024 14:05:26 +0200 Subject: [PATCH 11/75] add cyan color to location --- bundle/render/render_text_output.go | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/bundle/render/render_text_output.go b/bundle/render/render_text_output.go index 
3ba221dafb..9389ecead2 100644 --- a/bundle/render/render_text_output.go +++ b/bundle/render/render_text_output.go @@ -31,7 +31,7 @@ func printLocations(locations []dyn.Location) string { res.WriteString(" ") } - res.WriteString(loc.String()) + res.WriteString(color.CyanString(loc.String())) } return res.String() } @@ -147,7 +147,6 @@ func renderSummaryTemplate(out io.Writer, b *bundle.Bundle, diags diag.Diagnosti return err } -// TODO: Write tests when multiple locations are rendered. func renderDiagnostics(out io.Writer, b *bundle.Bundle, diags diag.Diagnostics) error { errorT := template.Must(template.New("error").Funcs(renderFuncMap).Parse(errorTemplate)) warningT := template.Must(template.New("warning").Funcs(renderFuncMap).Parse(warningTemplate)) From 52c2d2e321f21ea8dc22fa06d45b6605316f5743 Mon Sep 17 00:00:00 2001 From: Shreyas Goenka Date: Fri, 19 Jul 2024 14:05:46 +0200 Subject: [PATCH 12/75] Fix test in python_diagnostics_test.go --- .../mutator/python/python_diagnostics_test.go | 24 +++++++++++-------- 1 file changed, 14 insertions(+), 10 deletions(-) diff --git a/bundle/config/mutator/python/python_diagnostics_test.go b/bundle/config/mutator/python/python_diagnostics_test.go index 09d9f93bd2..51f3a5fc7f 100644 --- a/bundle/config/mutator/python/python_diagnostics_test.go +++ b/bundle/config/mutator/python/python_diagnostics_test.go @@ -54,9 +54,10 @@ func TestParsePythonDiagnostics(t *testing.T) { input: `{"severity": "error", "summary": "error summary", "path": "resources.jobs.job0.name"}`, expected: diag.Diagnostics{ { - Severity: diag.Error, - Summary: "error summary", - Path: dyn.MustPathFromString("resources.jobs.job0.name"), + Severity: diag.Error, + Summary: "error summary", + Path: dyn.MustPathFromString("resources.jobs.job0.name"), + Locations: []dyn.Location{{}}, }, }, }, @@ -75,9 +76,10 @@ func TestParsePythonDiagnostics(t *testing.T) { input: `{"severity": "warning", "summary": "warning summary", "detail": "warning detail"}`, expected: 
diag.Diagnostics{ { - Severity: diag.Warning, - Summary: "warning summary", - Detail: "warning detail", + Severity: diag.Warning, + Summary: "warning summary", + Detail: "warning detail", + Locations: []dyn.Location{{}}, }, }, }, @@ -87,12 +89,14 @@ func TestParsePythonDiagnostics(t *testing.T) { `{"severity": "error", "summary": "error summary (2)"}`, expected: diag.Diagnostics{ { - Severity: diag.Error, - Summary: "error summary (1)", + Severity: diag.Error, + Summary: "error summary (1)", + Locations: []dyn.Location{{}}, }, { - Severity: diag.Error, - Summary: "error summary (2)", + Severity: diag.Error, + Summary: "error summary (2)", + Locations: []dyn.Location{{}}, }, }, }, From 6ed74d044ede9d696d802f6807bae55e7e7a3232 Mon Sep 17 00:00:00 2001 From: Shreyas Goenka Date: Fri, 19 Jul 2024 14:21:20 +0200 Subject: [PATCH 13/75] fix comment for Diagnostic.Locations --- libs/diag/diagnostic.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/libs/diag/diagnostic.go b/libs/diag/diagnostic.go index 062ca7e552..305089d228 100644 --- a/libs/diag/diagnostic.go +++ b/libs/diag/diagnostic.go @@ -17,8 +17,8 @@ type Diagnostic struct { // This may be multiple lines and may be nil. Detail string - // Locations is a source code location associated with the diagnostic message. - // It may be zero if there is no associated location. + // Locations are the source code locations associated with the diagnostic message. + // It may be empty if there are no associated locations. Locations []dyn.Location // Path is a path to the value in a configuration tree that the diagnostic is associated with. 
From 1202bcc6fc8b558173619b6d0b636bac187a1202 Mon Sep 17 00:00:00 2001 From: Shreyas Goenka Date: Fri, 19 Jul 2024 14:41:34 +0200 Subject: [PATCH 14/75] include multiple locations for unknown field warning --- libs/dyn/convert/normalize.go | 3 ++- libs/dyn/convert/normalize_test.go | 28 +++++++++++++++++++--------- 2 files changed, 21 insertions(+), 10 deletions(-) diff --git a/libs/dyn/convert/normalize.go b/libs/dyn/convert/normalize.go index df3c858b06..b62c106c15 100644 --- a/libs/dyn/convert/normalize.go +++ b/libs/dyn/convert/normalize.go @@ -98,7 +98,8 @@ func (n normalizeOptions) normalizeStruct(typ reflect.Type, src dyn.Value, seen diags = diags.Append(diag.Diagnostic{ Severity: diag.Warning, Summary: fmt.Sprintf("unknown field: %s", pk.MustString()), - Locations: []dyn.Location{pk.Location()}, + // Show all locations the unknown field is defined at. + Locations: pk.Locations(), Path: path, }) } diff --git a/libs/dyn/convert/normalize_test.go b/libs/dyn/convert/normalize_test.go index 536bfa6d0a..df9a1a9a53 100644 --- a/libs/dyn/convert/normalize_test.go +++ b/libs/dyn/convert/normalize_test.go @@ -58,23 +58,33 @@ func TestNormalizeStructUnknownField(t *testing.T) { } var typ Tmp - vin := dyn.V(map[string]dyn.Value{ - "foo": dyn.V("bar"), - "bar": dyn.V("baz"), - }) + + m := dyn.NewMapping() + m.Set(dyn.V("foo"), dyn.V("val-foo")) + // Set the unknown field, with location information. + m.Set(dyn.NewValue("bar", []dyn.Location{ + {File: "hello.yaml", Line: 1, Column: 1}, + {File: "world.yaml", Line: 2, Column: 2}, + }), dyn.V("var-bar")) + + vin := dyn.V(m) vout, err := Normalize(typ, vin) assert.Len(t, err, 1) assert.Equal(t, diag.Diagnostic{ - Severity: diag.Warning, - Summary: `unknown field: bar`, - Locations: []dyn.Location{vin.Get("foo").Location()}, - Path: dyn.EmptyPath, + Severity: diag.Warning, + Summary: `unknown field: bar`, + // Assert location of the unknown field is included in the diagnostic. 
+ Locations: []dyn.Location{ + {File: "hello.yaml", Line: 1, Column: 1}, + {File: "world.yaml", Line: 2, Column: 2}, + }, + Path: dyn.EmptyPath, }, err[0]) // The field that can be mapped to the struct field is retained. assert.Equal(t, map[string]any{ - "foo": "bar", + "foo": "val-foo", }, vout.AsAny()) } From 16072f6c2d11451e5dce9b50c670703e3e81bb64 Mon Sep 17 00:00:00 2001 From: Shreyas Goenka Date: Fri, 19 Jul 2024 14:47:28 +0200 Subject: [PATCH 15/75] fmt --- libs/dyn/convert/normalize.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/libs/dyn/convert/normalize.go b/libs/dyn/convert/normalize.go index b62c106c15..bf5756e7f1 100644 --- a/libs/dyn/convert/normalize.go +++ b/libs/dyn/convert/normalize.go @@ -96,8 +96,8 @@ func (n normalizeOptions) normalizeStruct(typ reflect.Type, src dyn.Value, seen if !ok { if !pv.IsAnchor() { diags = diags.Append(diag.Diagnostic{ - Severity: diag.Warning, - Summary: fmt.Sprintf("unknown field: %s", pk.MustString()), + Severity: diag.Warning, + Summary: fmt.Sprintf("unknown field: %s", pk.MustString()), // Show all locations the unknown field is defined at. 
Locations: pk.Locations(), Path: path, From 6c24df8b104c4fb051713903d82090a0bab07feb Mon Sep 17 00:00:00 2001 From: Shreyas Goenka Date: Fri, 19 Jul 2024 17:14:09 +0200 Subject: [PATCH 16/75] Error on duplicate resource keys after YAML files have been loaded --- bundle/config/resources.go | 120 ------------------ bundle/config/resources_test.go | 120 ------------------ bundle/config/root.go | 11 -- bundle/config/root_test.go | 16 --- bundle/config/validate/pre_initialize.go | 28 ++++ .../config/validate/unique_resource_keys.go | 69 ++++++++++ bundle/phases/initialize.go | 2 + .../databricks.yml | 2 +- .../resources.yml | 0 .../databricks.yml | 3 + .../resources.yml | 2 +- .../databricks.yml | 0 .../resources1.yml | 0 .../resources2.yml | 0 .../databricks.yml | 7 +- .../databricks.yml | 0 bundle/tests/validate_test.go | 46 +++++++ 17 files changed, 156 insertions(+), 270 deletions(-) create mode 100644 bundle/config/validate/pre_initialize.go create mode 100644 bundle/config/validate/unique_resource_keys.go rename bundle/tests/{conflicting_resource_ids/one_subconfiguration => validate/duplicate_resource_name_in_subconfiguration}/databricks.yml (84%) rename bundle/{config/testdata => tests/validate}/duplicate_resource_name_in_subconfiguration/resources.yml (100%) rename bundle/{config/testdata/duplicate_resource_name_in_subconfiguration => tests/validate/duplicate_resource_name_in_subconfiguration_job_and_job}/databricks.yml (76%) rename bundle/tests/{conflicting_resource_ids/one_subconfiguration => validate/duplicate_resource_name_in_subconfiguration_job_and_job}/resources.yml (77%) rename bundle/tests/{conflicting_resource_ids/two_subconfigurations => validate/duplicate_resource_names_in_different_subconfiguations}/databricks.yml (100%) rename bundle/tests/{conflicting_resource_ids/two_subconfigurations => validate/duplicate_resource_names_in_different_subconfiguations}/resources1.yml (100%) rename bundle/tests/{conflicting_resource_ids/two_subconfigurations => 
validate/duplicate_resource_names_in_different_subconfiguations}/resources2.yml (100%) rename bundle/tests/{conflicting_resource_ids/no_subconfigurations => validate/duplicate_resource_names_in_root_job_and_experiment}/databricks.yml (53%) rename bundle/{config/testdata/duplicate_resource_names_in_root => tests/validate/duplicate_resource_names_in_root_job_and_pipeline}/databricks.yml (100%) create mode 100644 bundle/tests/validate_test.go diff --git a/bundle/config/resources.go b/bundle/config/resources.go index f70052ec02..062e38ed51 100644 --- a/bundle/config/resources.go +++ b/bundle/config/resources.go @@ -20,126 +20,6 @@ type Resources struct { QualityMonitors map[string]*resources.QualityMonitor `json:"quality_monitors,omitempty"` } -type UniqueResourceIdTracker struct { - Type map[string]string - ConfigPath map[string]string -} - -// verifies merging is safe by checking no duplicate identifiers exist -func (r *Resources) VerifySafeMerge(other *Resources) error { - rootTracker, err := r.VerifyUniqueResourceIdentifiers() - if err != nil { - return err - } - otherTracker, err := other.VerifyUniqueResourceIdentifiers() - if err != nil { - return err - } - for k := range otherTracker.Type { - if _, ok := rootTracker.Type[k]; ok { - return fmt.Errorf("multiple resources named %s (%s at %s, %s at %s)", - k, - rootTracker.Type[k], - rootTracker.ConfigPath[k], - otherTracker.Type[k], - otherTracker.ConfigPath[k], - ) - } - } - return nil -} - -// This function verifies there are no duplicate names used for the resource definations -func (r *Resources) VerifyUniqueResourceIdentifiers() (*UniqueResourceIdTracker, error) { - tracker := &UniqueResourceIdTracker{ - Type: make(map[string]string), - ConfigPath: make(map[string]string), - } - for k := range r.Jobs { - tracker.Type[k] = "job" - tracker.ConfigPath[k] = r.Jobs[k].ConfigFilePath - } - for k := range r.Pipelines { - if _, ok := tracker.Type[k]; ok { - return tracker, fmt.Errorf("multiple resources named %s (%s 
at %s, %s at %s)", - k, - tracker.Type[k], - tracker.ConfigPath[k], - "pipeline", - r.Pipelines[k].ConfigFilePath, - ) - } - tracker.Type[k] = "pipeline" - tracker.ConfigPath[k] = r.Pipelines[k].ConfigFilePath - } - for k := range r.Models { - if _, ok := tracker.Type[k]; ok { - return tracker, fmt.Errorf("multiple resources named %s (%s at %s, %s at %s)", - k, - tracker.Type[k], - tracker.ConfigPath[k], - "mlflow_model", - r.Models[k].ConfigFilePath, - ) - } - tracker.Type[k] = "mlflow_model" - tracker.ConfigPath[k] = r.Models[k].ConfigFilePath - } - for k := range r.Experiments { - if _, ok := tracker.Type[k]; ok { - return tracker, fmt.Errorf("multiple resources named %s (%s at %s, %s at %s)", - k, - tracker.Type[k], - tracker.ConfigPath[k], - "mlflow_experiment", - r.Experiments[k].ConfigFilePath, - ) - } - tracker.Type[k] = "mlflow_experiment" - tracker.ConfigPath[k] = r.Experiments[k].ConfigFilePath - } - for k := range r.ModelServingEndpoints { - if _, ok := tracker.Type[k]; ok { - return tracker, fmt.Errorf("multiple resources named %s (%s at %s, %s at %s)", - k, - tracker.Type[k], - tracker.ConfigPath[k], - "model_serving_endpoint", - r.ModelServingEndpoints[k].ConfigFilePath, - ) - } - tracker.Type[k] = "model_serving_endpoint" - tracker.ConfigPath[k] = r.ModelServingEndpoints[k].ConfigFilePath - } - for k := range r.RegisteredModels { - if _, ok := tracker.Type[k]; ok { - return tracker, fmt.Errorf("multiple resources named %s (%s at %s, %s at %s)", - k, - tracker.Type[k], - tracker.ConfigPath[k], - "registered_model", - r.RegisteredModels[k].ConfigFilePath, - ) - } - tracker.Type[k] = "registered_model" - tracker.ConfigPath[k] = r.RegisteredModels[k].ConfigFilePath - } - for k := range r.QualityMonitors { - if _, ok := tracker.Type[k]; ok { - return tracker, fmt.Errorf("multiple resources named %s (%s at %s, %s at %s)", - k, - tracker.Type[k], - tracker.ConfigPath[k], - "quality_monitor", - r.QualityMonitors[k].ConfigFilePath, - ) - } - tracker.Type[k] 
= "quality_monitor" - tracker.ConfigPath[k] = r.QualityMonitors[k].ConfigFilePath - } - return tracker, nil -} - type resource struct { resource ConfigResource resource_type string diff --git a/bundle/config/resources_test.go b/bundle/config/resources_test.go index 7415029b13..6860d73daa 100644 --- a/bundle/config/resources_test.go +++ b/bundle/config/resources_test.go @@ -5,129 +5,9 @@ import ( "reflect" "testing" - "github.com/databricks/cli/bundle/config/paths" - "github.com/databricks/cli/bundle/config/resources" "github.com/stretchr/testify/assert" ) -func TestVerifyUniqueResourceIdentifiers(t *testing.T) { - r := Resources{ - Jobs: map[string]*resources.Job{ - "foo": { - Paths: paths.Paths{ - ConfigFilePath: "foo.yml", - }, - }, - }, - Models: map[string]*resources.MlflowModel{ - "bar": { - Paths: paths.Paths{ - ConfigFilePath: "bar.yml", - }, - }, - }, - Experiments: map[string]*resources.MlflowExperiment{ - "foo": { - Paths: paths.Paths{ - ConfigFilePath: "foo2.yml", - }, - }, - }, - } - _, err := r.VerifyUniqueResourceIdentifiers() - assert.ErrorContains(t, err, "multiple resources named foo (job at foo.yml, mlflow_experiment at foo2.yml)") -} - -func TestVerifySafeMerge(t *testing.T) { - r := Resources{ - Jobs: map[string]*resources.Job{ - "foo": { - Paths: paths.Paths{ - ConfigFilePath: "foo.yml", - }, - }, - }, - Models: map[string]*resources.MlflowModel{ - "bar": { - Paths: paths.Paths{ - ConfigFilePath: "bar.yml", - }, - }, - }, - } - other := Resources{ - Pipelines: map[string]*resources.Pipeline{ - "foo": { - Paths: paths.Paths{ - ConfigFilePath: "foo2.yml", - }, - }, - }, - } - err := r.VerifySafeMerge(&other) - assert.ErrorContains(t, err, "multiple resources named foo (job at foo.yml, pipeline at foo2.yml)") -} - -func TestVerifySafeMergeForSameResourceType(t *testing.T) { - r := Resources{ - Jobs: map[string]*resources.Job{ - "foo": { - Paths: paths.Paths{ - ConfigFilePath: "foo.yml", - }, - }, - }, - Models: map[string]*resources.MlflowModel{ - 
"bar": { - Paths: paths.Paths{ - ConfigFilePath: "bar.yml", - }, - }, - }, - } - other := Resources{ - Jobs: map[string]*resources.Job{ - "foo": { - Paths: paths.Paths{ - ConfigFilePath: "foo2.yml", - }, - }, - }, - } - err := r.VerifySafeMerge(&other) - assert.ErrorContains(t, err, "multiple resources named foo (job at foo.yml, job at foo2.yml)") -} - -func TestVerifySafeMergeForRegisteredModels(t *testing.T) { - r := Resources{ - Jobs: map[string]*resources.Job{ - "foo": { - Paths: paths.Paths{ - ConfigFilePath: "foo.yml", - }, - }, - }, - RegisteredModels: map[string]*resources.RegisteredModel{ - "bar": { - Paths: paths.Paths{ - ConfigFilePath: "bar.yml", - }, - }, - }, - } - other := Resources{ - RegisteredModels: map[string]*resources.RegisteredModel{ - "bar": { - Paths: paths.Paths{ - ConfigFilePath: "bar2.yml", - }, - }, - }, - } - err := r.VerifySafeMerge(&other) - assert.ErrorContains(t, err, "multiple resources named bar (registered_model at bar.yml, registered_model at bar2.yml)") -} - // This test ensures that all resources have a custom marshaller and unmarshaller. // This is required because DABs resources map to Databricks APIs, and they do so // by embedding the corresponding Go SDK structs. 
diff --git a/bundle/config/root.go b/bundle/config/root.go index 594a9105f6..de0460c882 100644 --- a/bundle/config/root.go +++ b/bundle/config/root.go @@ -100,11 +100,6 @@ func LoadFromBytes(path string, raw []byte) (*Root, diag.Diagnostics) { if err != nil { return nil, diag.Errorf("failed to load %s: %v", path, err) } - - _, err = r.Resources.VerifyUniqueResourceIdentifiers() - if err != nil { - diags = diags.Extend(diag.FromErr(err)) - } return &r, diags } @@ -281,12 +276,6 @@ func (r *Root) InitializeVariables(vars []string) error { } func (r *Root) Merge(other *Root) error { - // Check for safe merge, protecting against duplicate resource identifiers - err := r.Resources.VerifySafeMerge(&other.Resources) - if err != nil { - return err - } - // Merge dynamic configuration values. return r.Mutate(func(root dyn.Value) (dyn.Value, error) { return merge.Merge(root, other.value) diff --git a/bundle/config/root_test.go b/bundle/config/root_test.go index aed670d6cd..c95e6e86cd 100644 --- a/bundle/config/root_test.go +++ b/bundle/config/root_test.go @@ -30,22 +30,6 @@ func TestRootLoad(t *testing.T) { assert.Equal(t, "basic", root.Bundle.Name) } -func TestDuplicateIdOnLoadReturnsError(t *testing.T) { - _, diags := Load("./testdata/duplicate_resource_names_in_root/databricks.yml") - assert.ErrorContains(t, diags.Error(), "multiple resources named foo (job at ./testdata/duplicate_resource_names_in_root/databricks.yml, pipeline at ./testdata/duplicate_resource_names_in_root/databricks.yml)") -} - -func TestDuplicateIdOnMergeReturnsError(t *testing.T) { - root, diags := Load("./testdata/duplicate_resource_name_in_subconfiguration/databricks.yml") - require.NoError(t, diags.Error()) - - other, diags := Load("./testdata/duplicate_resource_name_in_subconfiguration/resources.yml") - require.NoError(t, diags.Error()) - - err := root.Merge(other) - assert.ErrorContains(t, err, "multiple resources named foo (job at 
./testdata/duplicate_resource_name_in_subconfiguration/databricks.yml, pipeline at ./testdata/duplicate_resource_name_in_subconfiguration/resources.yml)") -} - func TestInitializeVariables(t *testing.T) { fooDefault := "abc" root := &Root{ diff --git a/bundle/config/validate/pre_initialize.go b/bundle/config/validate/pre_initialize.go new file mode 100644 index 0000000000..d18050b818 --- /dev/null +++ b/bundle/config/validate/pre_initialize.go @@ -0,0 +1,28 @@ +package validate + +import ( + "context" + + "github.com/databricks/cli/bundle" + "github.com/databricks/cli/libs/diag" +) + +type preInitialize struct{} + +// Apply implements bundle.Mutator. +func (v *preInitialize) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { + return bundle.ApplyReadOnly(ctx, bundle.ReadOnly(b), bundle.Parallel( + UniqueResourceKeys(), + )) +} + +// Name implements bundle.Mutator. +func (v *preInitialize) Name() string { + return "validate:pre_initialize" +} + +// Validations to perform before initialization of the bundle. These validations +// are thus applied for most bundle commands. 
+func PreInitialize() bundle.Mutator { + return &preInitialize{} +} diff --git a/bundle/config/validate/unique_resource_keys.go b/bundle/config/validate/unique_resource_keys.go new file mode 100644 index 0000000000..63b75daed0 --- /dev/null +++ b/bundle/config/validate/unique_resource_keys.go @@ -0,0 +1,69 @@ +package validate + +import ( + "context" + "fmt" + + "github.com/databricks/cli/bundle" + "github.com/databricks/cli/libs/diag" + "github.com/databricks/cli/libs/dyn" +) + +func UniqueResourceKeys() bundle.ReadOnlyMutator { + return &uniqueResourceKeys{} +} + +// TODO: Might need to enforce sorted walk on dyn.Walk +type uniqueResourceKeys struct{} + +func (m *uniqueResourceKeys) Name() string { + return "validate:unique_resource_keys" +} + +func (m *uniqueResourceKeys) Apply(ctx context.Context, rb bundle.ReadOnlyBundle) diag.Diagnostics { + diags := diag.Diagnostics{} + + type resourceInfo struct { + p dyn.Path + l dyn.Location + } + + seenResources := make(map[string]resourceInfo) + _, err := dyn.Walk(rb.Config().Value().Get("resources"), func(p dyn.Path, v dyn.Value) (dyn.Value, error) { + // The path is expected to be of length 2, and of the form .. + // Eg: jobs.my_job, pipelines.my_pipeline, etc. + if len(p) < 2 { + return v, nil + } + if len(p) > 2 { + return v, dyn.ErrSkip + } + + if len(v.Locations()) == 0 { + + // key for the source. Eg: "my_job" for jobs.my_job. + key := p[1].Key() + info, ok := seenResources[key] + + for _, l := range v.Locations() { + info, ok := seenResources[p[1].Key()] + if !ok { + seenResources[p[1].Key()] = resourceInfo{p, l} + continue + } + + diags = append(diags, diag.Diagnostic{ + Severity: diag.Error, + Summary: fmt.Sprintf("multiple resources have been defined with the same key: %s (%s at %s, %s at %s)", p[1].Key(), p, l, info.p, info.l), + Location: l, + Path: p, + }) + } + return v, nil + }) + if err != nil { + diags = append(diags, diag.FromErr(err)...) 
+ } + + return diags +} diff --git a/bundle/phases/initialize.go b/bundle/phases/initialize.go index a32de2c561..a6eab67715 100644 --- a/bundle/phases/initialize.go +++ b/bundle/phases/initialize.go @@ -5,6 +5,7 @@ import ( "github.com/databricks/cli/bundle/config" "github.com/databricks/cli/bundle/config/mutator" pythonmutator "github.com/databricks/cli/bundle/config/mutator/python" + "github.com/databricks/cli/bundle/config/validate" "github.com/databricks/cli/bundle/deploy/metadata" "github.com/databricks/cli/bundle/deploy/terraform" "github.com/databricks/cli/bundle/permissions" @@ -19,6 +20,7 @@ func Initialize() bundle.Mutator { return newPhase( "initialize", []bundle.Mutator{ + validate.PreInitialize(), mutator.RewriteSyncPaths(), mutator.MergeJobClusters(), mutator.MergeJobTasks(), diff --git a/bundle/tests/conflicting_resource_ids/one_subconfiguration/databricks.yml b/bundle/tests/validate/duplicate_resource_name_in_subconfiguration/databricks.yml similarity index 84% rename from bundle/tests/conflicting_resource_ids/one_subconfiguration/databricks.yml rename to bundle/tests/validate/duplicate_resource_name_in_subconfiguration/databricks.yml index ea4dec2e1e..5bec674839 100644 --- a/bundle/tests/conflicting_resource_ids/one_subconfiguration/databricks.yml +++ b/bundle/tests/validate/duplicate_resource_name_in_subconfiguration/databricks.yml @@ -5,7 +5,7 @@ workspace: profile: test include: - - "*.yml" + - ./resources.yml resources: jobs: diff --git a/bundle/config/testdata/duplicate_resource_name_in_subconfiguration/resources.yml b/bundle/tests/validate/duplicate_resource_name_in_subconfiguration/resources.yml similarity index 100% rename from bundle/config/testdata/duplicate_resource_name_in_subconfiguration/resources.yml rename to bundle/tests/validate/duplicate_resource_name_in_subconfiguration/resources.yml diff --git a/bundle/config/testdata/duplicate_resource_name_in_subconfiguration/databricks.yml 
b/bundle/tests/validate/duplicate_resource_name_in_subconfiguration_job_and_job/databricks.yml similarity index 76% rename from bundle/config/testdata/duplicate_resource_name_in_subconfiguration/databricks.yml rename to bundle/tests/validate/duplicate_resource_name_in_subconfiguration_job_and_job/databricks.yml index a816029204..5bec674839 100644 --- a/bundle/config/testdata/duplicate_resource_name_in_subconfiguration/databricks.yml +++ b/bundle/tests/validate/duplicate_resource_name_in_subconfiguration_job_and_job/databricks.yml @@ -4,6 +4,9 @@ bundle: workspace: profile: test +include: + - ./resources.yml + resources: jobs: foo: diff --git a/bundle/tests/conflicting_resource_ids/one_subconfiguration/resources.yml b/bundle/tests/validate/duplicate_resource_name_in_subconfiguration_job_and_job/resources.yml similarity index 77% rename from bundle/tests/conflicting_resource_ids/one_subconfiguration/resources.yml rename to bundle/tests/validate/duplicate_resource_name_in_subconfiguration_job_and_job/resources.yml index c3dcb6e2fe..f644bc848b 100644 --- a/bundle/tests/conflicting_resource_ids/one_subconfiguration/resources.yml +++ b/bundle/tests/validate/duplicate_resource_name_in_subconfiguration_job_and_job/resources.yml @@ -1,4 +1,4 @@ resources: - pipelines: + jobs: foo: name: pipeline foo diff --git a/bundle/tests/conflicting_resource_ids/two_subconfigurations/databricks.yml b/bundle/tests/validate/duplicate_resource_names_in_different_subconfiguations/databricks.yml similarity index 100% rename from bundle/tests/conflicting_resource_ids/two_subconfigurations/databricks.yml rename to bundle/tests/validate/duplicate_resource_names_in_different_subconfiguations/databricks.yml diff --git a/bundle/tests/conflicting_resource_ids/two_subconfigurations/resources1.yml b/bundle/tests/validate/duplicate_resource_names_in_different_subconfiguations/resources1.yml similarity index 100% rename from bundle/tests/conflicting_resource_ids/two_subconfigurations/resources1.yml 
rename to bundle/tests/validate/duplicate_resource_names_in_different_subconfiguations/resources1.yml diff --git a/bundle/tests/conflicting_resource_ids/two_subconfigurations/resources2.yml b/bundle/tests/validate/duplicate_resource_names_in_different_subconfiguations/resources2.yml similarity index 100% rename from bundle/tests/conflicting_resource_ids/two_subconfigurations/resources2.yml rename to bundle/tests/validate/duplicate_resource_names_in_different_subconfiguations/resources2.yml diff --git a/bundle/tests/conflicting_resource_ids/no_subconfigurations/databricks.yml b/bundle/tests/validate/duplicate_resource_names_in_root_job_and_experiment/databricks.yml similarity index 53% rename from bundle/tests/conflicting_resource_ids/no_subconfigurations/databricks.yml rename to bundle/tests/validate/duplicate_resource_names_in_root_job_and_experiment/databricks.yml index 1e9aa10b1f..d286f10496 100644 --- a/bundle/tests/conflicting_resource_ids/no_subconfigurations/databricks.yml +++ b/bundle/tests/validate/duplicate_resource_names_in_root_job_and_experiment/databricks.yml @@ -8,6 +8,11 @@ resources: jobs: foo: name: job foo + bar: + name: job bar pipelines: + baz: + name: pipeline baz + experiments: foo: - name: pipeline foo + name: experiment foo diff --git a/bundle/config/testdata/duplicate_resource_names_in_root/databricks.yml b/bundle/tests/validate/duplicate_resource_names_in_root_job_and_pipeline/databricks.yml similarity index 100% rename from bundle/config/testdata/duplicate_resource_names_in_root/databricks.yml rename to bundle/tests/validate/duplicate_resource_names_in_root_job_and_pipeline/databricks.yml diff --git a/bundle/tests/validate_test.go b/bundle/tests/validate_test.go new file mode 100644 index 0000000000..f657ff8694 --- /dev/null +++ b/bundle/tests/validate_test.go @@ -0,0 +1,46 @@ +package config_tests + +import ( + "context" + "testing" + + "github.com/databricks/cli/bundle" + "github.com/databricks/cli/bundle/config/validate" + 
"github.com/stretchr/testify/assert" +) + +func TestValidateUniqueResourceIdentifiers(t *testing.T) { + tcases := []struct { + name string + errorMsg string + }{ + // { + // name: "duplicate_resource_names_in_root_job_and_pipeline", + // errorMsg: "multiple resources named foo (jobs.foo at validate/duplicate_resource_names_in_root_job_and_pipeline/databricks.yml:10:7, pipelines.foo at validate/duplicate_resource_names_in_root_job_and_pipeline/databricks.yml:13:7)", + // }, + // { + // name: "duplicate_resource_names_in_root_job_and_experiment", + // errorMsg: "multiple resources named foo (experiments.foo at validate/duplicate_resource_names_in_root_job_and_experiment/databricks.yml:18:7, jobs.foo at validate/duplicate_resource_names_in_root_job_and_experiment/databricks.yml:10:7)", + // }, + // { + // name: "duplicate_resource_name_in_subconfiguration", + // errorMsg: "multiple resources named foo (jobs.foo at validate/duplicate_resource_name_in_subconfiguration/databricks.yml:13:7, pipelines.foo at validate/duplicate_resource_name_in_subconfiguration/resources.yml:4:7)", + // }, + { + name: "duplicate_resource_name_in_subconfiguration_job_and_job", + errorMsg: "multiple resources have been defined with the same key: foo (jobs.foo at validate/duplicate_resource_name_in_subconfiguration_job_and_job/resources.yml:4:7, jobs.foo at validate/duplicate_resource_name_in_subconfiguration_job_and_job/databricks.yml:13:7)", + }, + { + name: "duplicate_resource_names_in_different_subconfiguations", + errorMsg: "multiple resources named foo (jobs.foo at validate/duplicate_resource_names_in_different_subconfiguations/resources1.yml:4:7, pipelines.foo at validate/duplicate_resource_names_in_different_subconfiguations/resources2.yml:4:7)", + }, + } + + for _, tc := range tcases { + t.Run(tc.name, func(t *testing.T) { + b := load(t, "./validate/"+tc.name) + diags := bundle.ApplyReadOnly(context.Background(), bundle.ReadOnly(b), validate.UniqueResourceKeys()) + 
assert.ErrorContains(t, diags.Error(), tc.errorMsg) + }) + } +} From 0d76a4267868ae5bf255237b36966815526b23d9 Mon Sep 17 00:00:00 2001 From: Shreyas Goenka Date: Mon, 22 Jul 2024 20:05:48 +0200 Subject: [PATCH 17/75] Associate multiple paths with a diagnostic --- .../mutator/python/python_diagnostics.go | 2 +- .../mutator/python/python_diagnostics_test.go | 2 +- bundle/config/mutator/run_as.go | 2 +- bundle/config/validate/files_to_sync.go | 3 +- .../validate/job_cluster_key_defined.go | 3 +- .../config/validate/validate_sync_patterns.go | 3 +- bundle/render/render_text_output_test.go | 2 +- .../sync_include_exclude_no_matches_test.go | 2 +- libs/diag/diagnostic.go | 6 +-- libs/dyn/convert/normalize.go | 14 ++--- libs/dyn/convert/normalize_test.go | 52 +++++++++---------- 11 files changed, 47 insertions(+), 44 deletions(-) diff --git a/bundle/config/mutator/python/python_diagnostics.go b/bundle/config/mutator/python/python_diagnostics.go index b8efc9ef73..f5fc365c19 100644 --- a/bundle/config/mutator/python/python_diagnostics.go +++ b/bundle/config/mutator/python/python_diagnostics.go @@ -60,7 +60,7 @@ func parsePythonDiagnostics(input io.Reader) (diag.Diagnostics, error) { Summary: parsedLine.Summary, Detail: parsedLine.Detail, Location: convertPythonLocation(parsedLine.Location), - Path: path, + Paths: []dyn.Path{path}, } diags = diags.Append(diag) diff --git a/bundle/config/mutator/python/python_diagnostics_test.go b/bundle/config/mutator/python/python_diagnostics_test.go index 7b66e2537b..dc937ca953 100644 --- a/bundle/config/mutator/python/python_diagnostics_test.go +++ b/bundle/config/mutator/python/python_diagnostics_test.go @@ -54,7 +54,7 @@ func TestParsePythonDiagnostics(t *testing.T) { { Severity: diag.Error, Summary: "error summary", - Path: dyn.MustPathFromString("resources.jobs.job0.name"), + Paths: []dyn.Path{dyn.MustPathFromString("resources.jobs.job0.name")}, }, }, }, diff --git a/bundle/config/mutator/run_as.go b/bundle/config/mutator/run_as.go 
index d344a988ae..6b47a6fe8a 100644 --- a/bundle/config/mutator/run_as.go +++ b/bundle/config/mutator/run_as.go @@ -180,7 +180,7 @@ func (m *setRunAs) Apply(_ context.Context, b *bundle.Bundle) diag.Diagnostics { { Severity: diag.Warning, Summary: "You are using the legacy mode of run_as. The support for this mode is experimental and might be removed in a future release of the CLI. In order to run the DLT pipelines in your DAB as the run_as user this mode changes the owners of the pipelines to the run_as identity, which requires the user deploying the bundle to be a workspace admin, and also a Metastore admin if the pipeline target is in UC.", - Path: dyn.MustPathFromString("experimental.use_legacy_run_as"), + Paths: []dyn.Path{dyn.MustPathFromString("experimental.use_legacy_run_as")}, Location: b.Config.GetLocation("experimental.use_legacy_run_as"), }, } diff --git a/bundle/config/validate/files_to_sync.go b/bundle/config/validate/files_to_sync.go index d53e382432..9652f27a77 100644 --- a/bundle/config/validate/files_to_sync.go +++ b/bundle/config/validate/files_to_sync.go @@ -6,6 +6,7 @@ import ( "github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle/deploy/files" "github.com/databricks/cli/libs/diag" + "github.com/databricks/cli/libs/dyn" ) func FilesToSync() bundle.ReadOnlyMutator { @@ -46,7 +47,7 @@ func (v *filesToSync) Apply(ctx context.Context, rb bundle.ReadOnlyBundle) diag. 
Severity: diag.Warning, Summary: "There are no files to sync, please check your .gitignore and sync.exclude configuration", Location: loc.Location(), - Path: loc.Path(), + Paths: []dyn.Path{loc.Path()}, }) } diff --git a/bundle/config/validate/job_cluster_key_defined.go b/bundle/config/validate/job_cluster_key_defined.go index 37ed3f417e..65571f3524 100644 --- a/bundle/config/validate/job_cluster_key_defined.go +++ b/bundle/config/validate/job_cluster_key_defined.go @@ -6,6 +6,7 @@ import ( "github.com/databricks/cli/bundle" "github.com/databricks/cli/libs/diag" + "github.com/databricks/cli/libs/dyn" ) func JobClusterKeyDefined() bundle.ReadOnlyMutator { @@ -42,7 +43,7 @@ func (v *jobClusterKeyDefined) Apply(ctx context.Context, rb bundle.ReadOnlyBund Severity: diag.Warning, Summary: fmt.Sprintf("job_cluster_key %s is not defined", task.JobClusterKey), Location: loc.Location(), - Path: loc.Path(), + Paths: []dyn.Path{loc.Path()}, }) } } diff --git a/bundle/config/validate/validate_sync_patterns.go b/bundle/config/validate/validate_sync_patterns.go index a04c10776c..65b598a152 100644 --- a/bundle/config/validate/validate_sync_patterns.go +++ b/bundle/config/validate/validate_sync_patterns.go @@ -7,6 +7,7 @@ import ( "github.com/databricks/cli/bundle" "github.com/databricks/cli/libs/diag" + "github.com/databricks/cli/libs/dyn" "github.com/databricks/cli/libs/fileset" "golang.org/x/sync/errgroup" ) @@ -67,7 +68,7 @@ func checkPatterns(patterns []string, path string, rb bundle.ReadOnlyBundle) (di Severity: diag.Warning, Summary: fmt.Sprintf("Pattern %s does not match any files", p), Location: loc.Location(), - Path: loc.Path(), + Paths: []dyn.Path{loc.Path()}, }) mu.Unlock() } diff --git a/bundle/render/render_text_output_test.go b/bundle/render/render_text_output_test.go index b7aec88648..ccdb6410dd 100644 --- a/bundle/render/render_text_output_test.go +++ b/bundle/render/render_text_output_test.go @@ -270,7 +270,7 @@ func TestRenderDiagnostics(t *testing.T) { 
Severity: diag.Error, Detail: "'name' is required", Summary: "failed to load xxx", - Path: dyn.MustPathFromString("resources.jobs.xxx"), + Paths: []dyn.Path{dyn.MustPathFromString("resources.jobs.xxx")}, }, }, expected: "Error: failed to load xxx\n" + diff --git a/bundle/tests/sync_include_exclude_no_matches_test.go b/bundle/tests/sync_include_exclude_no_matches_test.go index 94cedbaa62..59eadc1195 100644 --- a/bundle/tests/sync_include_exclude_no_matches_test.go +++ b/bundle/tests/sync_include_exclude_no_matches_test.go @@ -24,7 +24,7 @@ func TestSyncIncludeExcludeNoMatchesTest(t *testing.T) { require.Equal(t, diags[0].Location.File, filepath.Join("sync", "override", "databricks.yml")) require.Equal(t, diags[0].Location.Line, 17) require.Equal(t, diags[0].Location.Column, 11) - require.Equal(t, diags[0].Path.String(), "sync.exclude[0]") + require.Equal(t, diags[0].Paths[0].String(), "sync.exclude[0]") summaries := []string{ fmt.Sprintf("Pattern %s does not match any files", filepath.Join("src", "*")), diff --git a/libs/diag/diagnostic.go b/libs/diag/diagnostic.go index 6215275512..e73914fb2d 100644 --- a/libs/diag/diagnostic.go +++ b/libs/diag/diagnostic.go @@ -21,9 +21,9 @@ type Diagnostic struct { // It may be zero if there is no associated location. Location dyn.Location - // Path is a path to the value in a configuration tree that the diagnostic is associated with. - // It may be nil if there is no associated path. - Path dyn.Path + // Paths are paths to the values in the configuration tree that the diagnostic is associated with. + // It may be nil if there are no associated paths. + Paths []dyn.Path } // Errorf creates a new error diagnostic. 
diff --git a/libs/dyn/convert/normalize.go b/libs/dyn/convert/normalize.go index 246c97eaf9..0f8b13bf03 100644 --- a/libs/dyn/convert/normalize.go +++ b/libs/dyn/convert/normalize.go @@ -68,7 +68,7 @@ func nullWarning(expected dyn.Kind, src dyn.Value, path dyn.Path) diag.Diagnosti Severity: diag.Warning, Summary: fmt.Sprintf("expected a %s value, found null", expected), Location: src.Location(), - Path: path, + Paths: []dyn.Path{path}, } } @@ -77,7 +77,7 @@ func typeMismatch(expected dyn.Kind, src dyn.Value, path dyn.Path) diag.Diagnost Severity: diag.Warning, Summary: fmt.Sprintf("expected %s, found %s", expected, src.Kind()), Location: src.Location(), - Path: path, + Paths: []dyn.Path{path}, } } @@ -99,7 +99,7 @@ func (n normalizeOptions) normalizeStruct(typ reflect.Type, src dyn.Value, seen Severity: diag.Warning, Summary: fmt.Sprintf("unknown field: %s", pk.MustString()), Location: pk.Location(), - Path: path, + Paths: []dyn.Path{path}, }) } continue @@ -323,7 +323,7 @@ func (n normalizeOptions) normalizeInt(typ reflect.Type, src dyn.Value, path dyn Severity: diag.Warning, Summary: fmt.Sprintf(`cannot accurately represent "%g" as integer due to precision loss`, src.MustFloat()), Location: src.Location(), - Path: path, + Paths: []dyn.Path{path}, }) } case dyn.KindString: @@ -339,7 +339,7 @@ func (n normalizeOptions) normalizeInt(typ reflect.Type, src dyn.Value, path dyn Severity: diag.Warning, Summary: fmt.Sprintf("cannot parse %q as an integer", src.MustString()), Location: src.Location(), - Path: path, + Paths: []dyn.Path{path}, }) } case dyn.KindNil: @@ -366,7 +366,7 @@ func (n normalizeOptions) normalizeFloat(typ reflect.Type, src dyn.Value, path d Severity: diag.Warning, Summary: fmt.Sprintf(`cannot accurately represent "%d" as floating point number due to precision loss`, src.MustInt()), Location: src.Location(), - Path: path, + Paths: []dyn.Path{path}, }) } case dyn.KindString: @@ -382,7 +382,7 @@ func (n normalizeOptions) normalizeFloat(typ reflect.Type, 
src dyn.Value, path d Severity: diag.Warning, Summary: fmt.Sprintf("cannot parse %q as a floating point number", src.MustString()), Location: src.Location(), - Path: path, + Paths: []dyn.Path{path}, }) } case dyn.KindNil: diff --git a/libs/dyn/convert/normalize_test.go b/libs/dyn/convert/normalize_test.go index 452ed4eb1d..85c45de562 100644 --- a/libs/dyn/convert/normalize_test.go +++ b/libs/dyn/convert/normalize_test.go @@ -43,7 +43,7 @@ func TestNormalizeStructElementDiagnostic(t *testing.T) { Severity: diag.Warning, Summary: `expected string, found map`, Location: dyn.Location{}, - Path: dyn.NewPath(dyn.Key("bar")), + Paths: []dyn.Path{dyn.NewPath(dyn.Key("bar"))}, }, err[0]) // Elements that encounter an error during normalization are dropped. @@ -69,7 +69,7 @@ func TestNormalizeStructUnknownField(t *testing.T) { Severity: diag.Warning, Summary: `unknown field: bar`, Location: vin.Get("foo").Location(), - Path: dyn.EmptyPath, + Paths: []dyn.Path{dyn.EmptyPath}, }, err[0]) // The field that can be mapped to the struct field is retained. 
@@ -103,7 +103,7 @@ func TestNormalizeStructError(t *testing.T) { Severity: diag.Warning, Summary: `expected map, found string`, Location: vin.Get("foo").Location(), - Path: dyn.EmptyPath, + Paths: []dyn.Path{dyn.EmptyPath}, }, err[0]) } @@ -248,7 +248,7 @@ func TestNormalizeStructRandomStringError(t *testing.T) { Severity: diag.Warning, Summary: `expected map, found string`, Location: vin.Location(), - Path: dyn.EmptyPath, + Paths: []dyn.Path{dyn.EmptyPath}, }, err[0]) } @@ -265,7 +265,7 @@ func TestNormalizeStructIntError(t *testing.T) { Severity: diag.Warning, Summary: `expected map, found int`, Location: vin.Location(), - Path: dyn.EmptyPath, + Paths: []dyn.Path{dyn.EmptyPath}, }, err[0]) } @@ -294,7 +294,7 @@ func TestNormalizeMapElementDiagnostic(t *testing.T) { Severity: diag.Warning, Summary: `expected string, found map`, Location: dyn.Location{}, - Path: dyn.NewPath(dyn.Key("bar")), + Paths: []dyn.Path{dyn.NewPath(dyn.Key("bar"))}, }, err[0]) // Elements that encounter an error during normalization are dropped. 
@@ -320,7 +320,7 @@ func TestNormalizeMapError(t *testing.T) { Severity: diag.Warning, Summary: `expected map, found string`, Location: vin.Location(), - Path: dyn.EmptyPath, + Paths: []dyn.Path{dyn.EmptyPath}, }, err[0]) } @@ -375,7 +375,7 @@ func TestNormalizeMapRandomStringError(t *testing.T) { Severity: diag.Warning, Summary: `expected map, found string`, Location: vin.Location(), - Path: dyn.EmptyPath, + Paths: []dyn.Path{dyn.EmptyPath}, }, err[0]) } @@ -388,7 +388,7 @@ func TestNormalizeMapIntError(t *testing.T) { Severity: diag.Warning, Summary: `expected map, found int`, Location: vin.Location(), - Path: dyn.EmptyPath, + Paths: []dyn.Path{dyn.EmptyPath}, }, err[0]) } @@ -418,7 +418,7 @@ func TestNormalizeSliceElementDiagnostic(t *testing.T) { Severity: diag.Warning, Summary: `expected string, found map`, Location: dyn.Location{}, - Path: dyn.NewPath(dyn.Index(2)), + Paths: []dyn.Path{dyn.NewPath(dyn.Index(2))}, }, err[0]) // Elements that encounter an error during normalization are dropped. 
@@ -442,7 +442,7 @@ func TestNormalizeSliceError(t *testing.T) { Severity: diag.Warning, Summary: `expected sequence, found string`, Location: vin.Location(), - Path: dyn.EmptyPath, + Paths: []dyn.Path{dyn.EmptyPath}, }, err[0]) } @@ -497,7 +497,7 @@ func TestNormalizeSliceRandomStringError(t *testing.T) { Severity: diag.Warning, Summary: `expected sequence, found string`, Location: vin.Location(), - Path: dyn.EmptyPath, + Paths: []dyn.Path{dyn.EmptyPath}, }, err[0]) } @@ -510,7 +510,7 @@ func TestNormalizeSliceIntError(t *testing.T) { Severity: diag.Warning, Summary: `expected sequence, found int`, Location: vin.Location(), - Path: dyn.EmptyPath, + Paths: []dyn.Path{dyn.EmptyPath}, }, err[0]) } @@ -531,7 +531,7 @@ func TestNormalizeStringNil(t *testing.T) { Severity: diag.Warning, Summary: `expected a string value, found null`, Location: vin.Location(), - Path: dyn.EmptyPath, + Paths: []dyn.Path{dyn.EmptyPath}, }, err[0]) } @@ -568,7 +568,7 @@ func TestNormalizeStringError(t *testing.T) { Severity: diag.Warning, Summary: `expected string, found map`, Location: dyn.Location{}, - Path: dyn.EmptyPath, + Paths: []dyn.Path{dyn.EmptyPath}, }, err[0]) } @@ -589,7 +589,7 @@ func TestNormalizeBoolNil(t *testing.T) { Severity: diag.Warning, Summary: `expected a bool value, found null`, Location: vin.Location(), - Path: dyn.EmptyPath, + Paths: []dyn.Path{dyn.EmptyPath}, }, err[0]) } @@ -631,7 +631,7 @@ func TestNormalizeBoolFromStringError(t *testing.T) { Severity: diag.Warning, Summary: `expected bool, found string`, Location: vin.Location(), - Path: dyn.EmptyPath, + Paths: []dyn.Path{dyn.EmptyPath}, }, err[0]) } @@ -644,7 +644,7 @@ func TestNormalizeBoolError(t *testing.T) { Severity: diag.Warning, Summary: `expected bool, found map`, Location: dyn.Location{}, - Path: dyn.EmptyPath, + Paths: []dyn.Path{dyn.EmptyPath}, }, err[0]) } @@ -665,7 +665,7 @@ func TestNormalizeIntNil(t *testing.T) { Severity: diag.Warning, Summary: `expected a int value, found null`, Location: 
vin.Location(), - Path: dyn.EmptyPath, + Paths: []dyn.Path{dyn.EmptyPath}, }, err[0]) } @@ -686,7 +686,7 @@ func TestNormalizeIntFromFloatError(t *testing.T) { Severity: diag.Warning, Summary: `cannot accurately represent "1.5" as integer due to precision loss`, Location: vin.Location(), - Path: dyn.EmptyPath, + Paths: []dyn.Path{dyn.EmptyPath}, }, err[0]) } @@ -715,7 +715,7 @@ func TestNormalizeIntFromStringError(t *testing.T) { Severity: diag.Warning, Summary: `cannot parse "abc" as an integer`, Location: vin.Location(), - Path: dyn.EmptyPath, + Paths: []dyn.Path{dyn.EmptyPath}, }, err[0]) } @@ -728,7 +728,7 @@ func TestNormalizeIntError(t *testing.T) { Severity: diag.Warning, Summary: `expected int, found map`, Location: dyn.Location{}, - Path: dyn.EmptyPath, + Paths: []dyn.Path{dyn.EmptyPath}, }, err[0]) } @@ -749,7 +749,7 @@ func TestNormalizeFloatNil(t *testing.T) { Severity: diag.Warning, Summary: `expected a float value, found null`, Location: vin.Location(), - Path: dyn.EmptyPath, + Paths: []dyn.Path{dyn.EmptyPath}, }, err[0]) } @@ -774,7 +774,7 @@ func TestNormalizeFloatFromIntError(t *testing.T) { Severity: diag.Warning, Summary: `cannot accurately represent "9007199254740993" as floating point number due to precision loss`, Location: vin.Location(), - Path: dyn.EmptyPath, + Paths: []dyn.Path{dyn.EmptyPath}, }, err[0]) } @@ -803,7 +803,7 @@ func TestNormalizeFloatFromStringError(t *testing.T) { Severity: diag.Warning, Summary: `cannot parse "abc" as a floating point number`, Location: vin.Location(), - Path: dyn.EmptyPath, + Paths: []dyn.Path{dyn.EmptyPath}, }, err[0]) } @@ -816,7 +816,7 @@ func TestNormalizeFloatError(t *testing.T) { Severity: diag.Warning, Summary: `expected float, found map`, Location: dyn.Location{}, - Path: dyn.EmptyPath, + Paths: []dyn.Path{dyn.EmptyPath}, }, err[0]) } From 14cd225f02b98287ba198f7198b0567f3d216b68 Mon Sep 17 00:00:00 2001 From: Shreyas Goenka Date: Mon, 22 Jul 2024 20:12:41 +0200 Subject: [PATCH 18/75] add tests 
and modify rendering --- bundle/render/render_text_output.go | 28 +++++++++++++++++++----- bundle/render/render_text_output_test.go | 21 ++++++++++++++++++ 2 files changed, 43 insertions(+), 6 deletions(-) diff --git a/bundle/render/render_text_output.go b/bundle/render/render_text_output.go index 439ae61323..e32b9afd97 100644 --- a/bundle/render/render_text_output.go +++ b/bundle/render/render_text_output.go @@ -9,10 +9,29 @@ import ( "github.com/databricks/cli/bundle" "github.com/databricks/cli/libs/diag" + "github.com/databricks/cli/libs/dyn" "github.com/databricks/databricks-sdk-go/service/iam" "github.com/fatih/color" ) +func printPaths(paths []dyn.Path) string { + res := strings.Builder{} + + first := true + for _, p := range paths { + res.WriteString("\n") + if first { + res.WriteString(" at ") + first = false + } else { + res.WriteString(" ") + } + + res.WriteString(color.GreenString(p.String())) + } + return res.String() +} + var renderFuncMap = template.FuncMap{ "red": color.RedString, "green": color.GreenString, @@ -26,12 +45,11 @@ var renderFuncMap = template.FuncMap{ "italic": func(format string, a ...interface{}) string { return color.New(color.Italic).Sprintf(format, a...) 
}, + "printPaths": printPaths, } const errorTemplate = `{{ "Error" | red }}: {{ .Summary }} -{{- if .Path.String }} - {{ "at " }}{{ .Path.String | green }} -{{- end }} +{{- printPaths .Paths -}} {{- if .Location.File }} {{ "in " }}{{ .Location.String | cyan }} {{- end }} @@ -43,9 +61,7 @@ const errorTemplate = `{{ "Error" | red }}: {{ .Summary }} ` const warningTemplate = `{{ "Warning" | yellow }}: {{ .Summary }} -{{- if .Path.String }} - {{ "at " }}{{ .Path.String | green }} -{{- end }} +{{- printPaths .Paths -}} {{- if .Location.File }} {{ "in " }}{{ .Location.String | cyan }} {{- end }} diff --git a/bundle/render/render_text_output_test.go b/bundle/render/render_text_output_test.go index ccdb6410dd..77471e922f 100644 --- a/bundle/render/render_text_output_test.go +++ b/bundle/render/render_text_output_test.go @@ -278,6 +278,27 @@ func TestRenderDiagnostics(t *testing.T) { "\n" + "'name' is required\n\n", }, + { + name: "error with multiple paths", + diags: diag.Diagnostics{ + { + Severity: diag.Error, + Detail: "'name' is required", + Summary: "failed to load xxx", + Paths: []dyn.Path{ + dyn.MustPathFromString("resources.jobs.xxx"), + dyn.MustPathFromString("resources.jobs.yyy"), + dyn.MustPathFromString("resources.jobs.zzz"), + }, + }, + }, + expected: "Error: failed to load xxx\n" + + " at resources.jobs.xxx\n" + + " resources.jobs.yyy\n" + + " resources.jobs.zzz\n" + + "\n" + + "'name' is required\n\n", + }, } for _, tc := range testCases { From a5507600709ad68be1ef7a3709c92947e602af9b Mon Sep 17 00:00:00 2001 From: Shreyas Goenka Date: Tue, 23 Jul 2024 11:04:59 +0200 Subject: [PATCH 19/75] fix unit tests --- bundle/config/mutator/python/python_diagnostics.go | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/bundle/config/mutator/python/python_diagnostics.go b/bundle/config/mutator/python/python_diagnostics.go index f5fc365c19..5d66fed6c7 100644 --- a/bundle/config/mutator/python/python_diagnostics.go +++ 
b/bundle/config/mutator/python/python_diagnostics.go @@ -54,13 +54,17 @@ func parsePythonDiagnostics(input io.Reader) (diag.Diagnostics, error) { if err != nil { return nil, fmt.Errorf("failed to parse path: %s", err) } + var paths []dyn.Path + if path != nil { + paths = []dyn.Path{path} + } diag := diag.Diagnostic{ Severity: severity, Summary: parsedLine.Summary, Detail: parsedLine.Detail, Location: convertPythonLocation(parsedLine.Location), - Paths: []dyn.Path{path}, + Paths: paths, } diags = diags.Append(diag) From 21781fe45c1f8c1c90582f20364c21494f52c332 Mon Sep 17 00:00:00 2001 From: Shreyas Goenka Date: Mon, 22 Jul 2024 18:03:23 +0200 Subject: [PATCH 20/75] wip --- .../config/validate/unique_resource_keys.go | 56 ++++++++++++------- bundle/tests/validate_test.go | 8 +-- 2 files changed, 41 insertions(+), 23 deletions(-) diff --git a/bundle/config/validate/unique_resource_keys.go b/bundle/config/validate/unique_resource_keys.go index 63b75daed0..62b1686f52 100644 --- a/bundle/config/validate/unique_resource_keys.go +++ b/bundle/config/validate/unique_resource_keys.go @@ -14,21 +14,31 @@ func UniqueResourceKeys() bundle.ReadOnlyMutator { } // TODO: Might need to enforce sorted walk on dyn.Walk +// TODO: return multiple locations with diagnostics. Can be a followup. 
type uniqueResourceKeys struct{} func (m *uniqueResourceKeys) Name() string { return "validate:unique_resource_keys" } +func conflictingResourceKeysErr(key string, p1 dyn.Path, l1 dyn.Location, p2 dyn.Path, l2 dyn.Location) diag.Diagnostic { + return diag.Diagnostic{ + Severity: diag.Error, + Summary: fmt.Sprintf("multiple resources have been defined with the same key: %s (%s at %s, %s at %s)", key, p1, l1, p2, l2), + Location: l1, + } +} + func (m *uniqueResourceKeys) Apply(ctx context.Context, rb bundle.ReadOnlyBundle) diag.Diagnostics { diags := diag.Diagnostics{} - type resourceInfo struct { + type r struct { p dyn.Path l dyn.Location } + seenResource := make(map[string]r) - seenResources := make(map[string]resourceInfo) + seenResources := make(map[string]dyn.Location) _, err := dyn.Walk(rb.Config().Value().Get("resources"), func(p dyn.Path, v dyn.Value) (dyn.Value, error) { // The path is expected to be of length 2, and of the form .. // Eg: jobs.my_job, pipelines.my_pipeline, etc. @@ -39,26 +49,34 @@ func (m *uniqueResourceKeys) Apply(ctx context.Context, rb bundle.ReadOnlyBundle return v, dyn.ErrSkip } - if len(v.Locations()) == 0 { + // Each resource should be completely defined in a single YAML file. We + // do not allow users to split the definition of a single resource across + // multiple files. + // Users can use simple / complex variables to modularize their configuration. + if locations := v.Locations(); len(locations) >= 2 { + diags = append(diags, conflictingResourceKeysErr(p[1].Key(), p, locations[0], p, locations[1])) + } - // key for the source. Eg: "my_job" for jobs.my_job. 
- key := p[1].Key() - info, ok := seenResources[key] + // l, ok := seenResources[p[1].Key()] + // if ok { + // diags = append(diags, conflictingResourceKeysErr(p[1].Key(), p, l, p, v.Locations()[0])) + // } else { + // seenResources[p[1].Key()] = v.Locations()[0] + // } - for _, l := range v.Locations() { - info, ok := seenResources[p[1].Key()] - if !ok { - seenResources[p[1].Key()] = resourceInfo{p, l} - continue - } + // // key for the source. Eg: "my_job" for jobs.my_job. + // key := p[1].Key() + // info, ok := seenResources[key] - diags = append(diags, diag.Diagnostic{ - Severity: diag.Error, - Summary: fmt.Sprintf("multiple resources have been defined with the same key: %s (%s at %s, %s at %s)", p[1].Key(), p, l, info.p, info.l), - Location: l, - Path: p, - }) - } + // for _, l := range v.Locations() { + // info, ok := seenResources[p[1].Key()] + // if !ok { + // seenResources[p[1].Key()] = resourceInfo{p, l} + // continue + // } + + // diags = append(diags) + // } return v, nil }) if err != nil { diff --git a/bundle/tests/validate_test.go b/bundle/tests/validate_test.go index f657ff8694..f5da1024dd 100644 --- a/bundle/tests/validate_test.go +++ b/bundle/tests/validate_test.go @@ -30,10 +30,10 @@ func TestValidateUniqueResourceIdentifiers(t *testing.T) { name: "duplicate_resource_name_in_subconfiguration_job_and_job", errorMsg: "multiple resources have been defined with the same key: foo (jobs.foo at validate/duplicate_resource_name_in_subconfiguration_job_and_job/resources.yml:4:7, jobs.foo at validate/duplicate_resource_name_in_subconfiguration_job_and_job/databricks.yml:13:7)", }, - { - name: "duplicate_resource_names_in_different_subconfiguations", - errorMsg: "multiple resources named foo (jobs.foo at validate/duplicate_resource_names_in_different_subconfiguations/resources1.yml:4:7, pipelines.foo at validate/duplicate_resource_names_in_different_subconfiguations/resources2.yml:4:7)", - }, + // { + // name: 
"duplicate_resource_names_in_different_subconfiguations", + // errorMsg: "multiple resources named foo (jobs.foo at validate/duplicate_resource_names_in_different_subconfiguations/resources1.yml:4:7, pipelines.foo at validate/duplicate_resource_names_in_different_subconfiguations/resources2.yml:4:7)", + // }, } for _, tc := range tcases { From a48909598bf9f9b2ba540e87c1e450076b608480 Mon Sep 17 00:00:00 2001 From: Shreyas Goenka Date: Tue, 23 Jul 2024 15:15:46 +0200 Subject: [PATCH 21/75] first version --- .../config/validate/unique_resource_keys.go | 69 ++++++++----------- 1 file changed, 27 insertions(+), 42 deletions(-) diff --git a/bundle/config/validate/unique_resource_keys.go b/bundle/config/validate/unique_resource_keys.go index 62b1686f52..ff9eede057 100644 --- a/bundle/config/validate/unique_resource_keys.go +++ b/bundle/config/validate/unique_resource_keys.go @@ -14,31 +14,19 @@ func UniqueResourceKeys() bundle.ReadOnlyMutator { } // TODO: Might need to enforce sorted walk on dyn.Walk -// TODO: return multiple locations with diagnostics. Can be a followup. type uniqueResourceKeys struct{} func (m *uniqueResourceKeys) Name() string { return "validate:unique_resource_keys" } -func conflictingResourceKeysErr(key string, p1 dyn.Path, l1 dyn.Location, p2 dyn.Path, l2 dyn.Location) diag.Diagnostic { - return diag.Diagnostic{ - Severity: diag.Error, - Summary: fmt.Sprintf("multiple resources have been defined with the same key: %s (%s at %s, %s at %s)", key, p1, l1, p2, l2), - Location: l1, - } -} - func (m *uniqueResourceKeys) Apply(ctx context.Context, rb bundle.ReadOnlyBundle) diag.Diagnostics { diags := diag.Diagnostics{} - type r struct { - p dyn.Path - l dyn.Location - } - seenResource := make(map[string]r) + // Map of resource key to the paths and locations the resource is defined at. 
+ paths := map[string][]dyn.Path{} + locations := map[string][]dyn.Location{} - seenResources := make(map[string]dyn.Location) _, err := dyn.Walk(rb.Config().Value().Get("resources"), func(p dyn.Path, v dyn.Value) (dyn.Value, error) { // The path is expected to be of length 2, and of the form .. // Eg: jobs.my_job, pipelines.my_pipeline, etc. @@ -49,39 +37,36 @@ func (m *uniqueResourceKeys) Apply(ctx context.Context, rb bundle.ReadOnlyBundle return v, dyn.ErrSkip } - // Each resource should be completely defined in a single YAML file. We - // do not allow users to split the definition of a single resource across - // multiple files. - // Users can use simple / complex variables to modularize their configuration. - if locations := v.Locations(); len(locations) >= 2 { - diags = append(diags, conflictingResourceKeysErr(p[1].Key(), p, locations[0], p, locations[1])) - } - - // l, ok := seenResources[p[1].Key()] - // if ok { - // diags = append(diags, conflictingResourceKeysErr(p[1].Key(), p, l, p, v.Locations()[0])) - // } else { - // seenResources[p[1].Key()] = v.Locations()[0] - // } + // The key for the resource. Eg: "my_job" for jobs.my_job. + k := p[1].Key() - // // key for the source. Eg: "my_job" for jobs.my_job. - // key := p[1].Key() - // info, ok := seenResources[key] - - // for _, l := range v.Locations() { - // info, ok := seenResources[p[1].Key()] - // if !ok { - // seenResources[p[1].Key()] = resourceInfo{p, l} - // continue - // } - - // diags = append(diags) - // } + paths[k] = append(paths[k], p) + locations[k] = append(locations[k], v.Locations()...) return v, nil }) if err != nil { diags = append(diags, diag.FromErr(err)...) } + for k, ps := range paths { + if len(ps) <= 1 { + continue + } + + // TODO: What happens on target overrides? Ensure they do not misbehave. + // 1. What was the previous behaviour for target overrides? + // 2. What if a completely new resource with a conflicting key is defined + // in a target override. 
+ // + // If there are multiple resources with the same key, report an error. + // NOTE: This includes if the same resource is defined in multiple files as + // TODO: continue this comment. + diags = append(diags, diag.Diagnostic{ + Severity: diag.Error, + Summary: fmt.Sprintf("multiple resources have been defined with the same key: %s", k), + Locations: locations[k], + Paths: ps, + }) + } return diags } From a1f1f5c21d0596f09dcfeebafd2b38227723dcf0 Mon Sep 17 00:00:00 2001 From: Shreyas Goenka Date: Tue, 23 Jul 2024 19:13:03 +0200 Subject: [PATCH 22/75] inline code to render paths in template --- bundle/render/render_text_output.go | 28 ++++++---------------------- 1 file changed, 6 insertions(+), 22 deletions(-) diff --git a/bundle/render/render_text_output.go b/bundle/render/render_text_output.go index e32b9afd97..43870f1358 100644 --- a/bundle/render/render_text_output.go +++ b/bundle/render/render_text_output.go @@ -9,29 +9,10 @@ import ( "github.com/databricks/cli/bundle" "github.com/databricks/cli/libs/diag" - "github.com/databricks/cli/libs/dyn" "github.com/databricks/databricks-sdk-go/service/iam" "github.com/fatih/color" ) -func printPaths(paths []dyn.Path) string { - res := strings.Builder{} - - first := true - for _, p := range paths { - res.WriteString("\n") - if first { - res.WriteString(" at ") - first = false - } else { - res.WriteString(" ") - } - - res.WriteString(color.GreenString(p.String())) - } - return res.String() -} - var renderFuncMap = template.FuncMap{ "red": color.RedString, "green": color.GreenString, @@ -45,11 +26,12 @@ var renderFuncMap = template.FuncMap{ "italic": func(format string, a ...interface{}) string { return color.New(color.Italic).Sprintf(format, a...) 
}, - "printPaths": printPaths, } const errorTemplate = `{{ "Error" | red }}: {{ .Summary }} -{{- printPaths .Paths -}} +{{- range $index, $element := .Paths }} + {{ if eq $index 0 }}at {{else}} {{ end}}{{ $element.String | green }} +{{- end }} {{- if .Location.File }} {{ "in " }}{{ .Location.String | cyan }} {{- end }} @@ -61,7 +43,9 @@ const errorTemplate = `{{ "Error" | red }}: {{ .Summary }} ` const warningTemplate = `{{ "Warning" | yellow }}: {{ .Summary }} -{{- printPaths .Paths -}} +{{- range $index, $element := .Paths }} + {{ if eq $index 0 }}at {{else}} {{ end}}{{ $element.String | green }} +{{- end }} {{- if .Location.File }} {{ "in " }}{{ .Location.String | cyan }} {{- end }} From 24338e250276e81addac7871f73e19de956bfcee Mon Sep 17 00:00:00 2001 From: Shreyas Goenka Date: Wed, 24 Jul 2024 13:22:53 +0200 Subject: [PATCH 23/75] fmt --- .../mutator/python/python_diagnostics.go | 2 +- bundle/config/mutator/run_as.go | 2 +- bundle/config/validate/files_to_sync.go | 2 +- .../validate/job_cluster_key_defined.go | 2 +- .../config/validate/validate_sync_patterns.go | 2 +- libs/dyn/convert/normalize.go | 14 ++--- libs/dyn/convert/normalize_test.go | 60 +++++++++---------- 7 files changed, 42 insertions(+), 42 deletions(-) diff --git a/bundle/config/mutator/python/python_diagnostics.go b/bundle/config/mutator/python/python_diagnostics.go index ac59e6f1f1..12822065bb 100644 --- a/bundle/config/mutator/python/python_diagnostics.go +++ b/bundle/config/mutator/python/python_diagnostics.go @@ -70,7 +70,7 @@ func parsePythonDiagnostics(input io.Reader) (diag.Diagnostics, error) { Summary: parsedLine.Summary, Detail: parsedLine.Detail, Locations: locations, - Paths: paths, + Paths: paths, } diags = diags.Append(diag) diff --git a/bundle/config/mutator/run_as.go b/bundle/config/mutator/run_as.go index 8ad27b87c9..423bc38e2d 100644 --- a/bundle/config/mutator/run_as.go +++ b/bundle/config/mutator/run_as.go @@ -180,7 +180,7 @@ func (m *setRunAs) Apply(_ context.Context, b 
*bundle.Bundle) diag.Diagnostics { { Severity: diag.Warning, Summary: "You are using the legacy mode of run_as. The support for this mode is experimental and might be removed in a future release of the CLI. In order to run the DLT pipelines in your DAB as the run_as user this mode changes the owners of the pipelines to the run_as identity, which requires the user deploying the bundle to be a workspace admin, and also a Metastore admin if the pipeline target is in UC.", - Paths: []dyn.Path{dyn.MustPathFromString("experimental.use_legacy_run_as")}, + Paths: []dyn.Path{dyn.MustPathFromString("experimental.use_legacy_run_as")}, Locations: b.Config.GetLocations("experimental.use_legacy_run_as"), }, } diff --git a/bundle/config/validate/files_to_sync.go b/bundle/config/validate/files_to_sync.go index ad28f31d59..7cdad772ac 100644 --- a/bundle/config/validate/files_to_sync.go +++ b/bundle/config/validate/files_to_sync.go @@ -49,7 +49,7 @@ func (v *filesToSync) Apply(ctx context.Context, rb bundle.ReadOnlyBundle) diag. // Show all locations where sync.exclude is defined, since merging // sync.exclude is additive. Locations: loc.Locations(), - Paths: []dyn.Path{loc.Path()}, + Paths: []dyn.Path{loc.Path()}, }) } diff --git a/bundle/config/validate/job_cluster_key_defined.go b/bundle/config/validate/job_cluster_key_defined.go index b3e2cc36f8..368c3edb13 100644 --- a/bundle/config/validate/job_cluster_key_defined.go +++ b/bundle/config/validate/job_cluster_key_defined.go @@ -46,7 +46,7 @@ func (v *jobClusterKeyDefined) Apply(ctx context.Context, rb bundle.ReadOnlyBund // Other associated locations are not relevant since they are // overridden during merging. 
Locations: []dyn.Location{loc.Location()}, - Paths: []dyn.Path{loc.Path()}, + Paths: []dyn.Path{loc.Path()}, }) } } diff --git a/bundle/config/validate/validate_sync_patterns.go b/bundle/config/validate/validate_sync_patterns.go index 628cd132ce..573077b66e 100644 --- a/bundle/config/validate/validate_sync_patterns.go +++ b/bundle/config/validate/validate_sync_patterns.go @@ -68,7 +68,7 @@ func checkPatterns(patterns []string, path string, rb bundle.ReadOnlyBundle) (di Severity: diag.Warning, Summary: fmt.Sprintf("Pattern %s does not match any files", p), Locations: []dyn.Location{loc.Location()}, - Paths: []dyn.Path{loc.Path()}, + Paths: []dyn.Path{loc.Path()}, }) mu.Unlock() } diff --git a/libs/dyn/convert/normalize.go b/libs/dyn/convert/normalize.go index b62766bf80..c80a914f14 100644 --- a/libs/dyn/convert/normalize.go +++ b/libs/dyn/convert/normalize.go @@ -68,7 +68,7 @@ func nullWarning(expected dyn.Kind, src dyn.Value, path dyn.Path) diag.Diagnosti Severity: diag.Warning, Summary: fmt.Sprintf("expected a %s value, found null", expected), Locations: []dyn.Location{src.Location()}, - Paths: []dyn.Path{path}, + Paths: []dyn.Path{path}, } } @@ -77,7 +77,7 @@ func typeMismatch(expected dyn.Kind, src dyn.Value, path dyn.Path) diag.Diagnost Severity: diag.Warning, Summary: fmt.Sprintf("expected %s, found %s", expected, src.Kind()), Locations: []dyn.Location{src.Location()}, - Paths: []dyn.Path{path}, + Paths: []dyn.Path{path}, } } @@ -100,7 +100,7 @@ func (n normalizeOptions) normalizeStruct(typ reflect.Type, src dyn.Value, seen Summary: fmt.Sprintf("unknown field: %s", pk.MustString()), // Show all locations the unknown field is defined at. 
Locations: pk.Locations(), - Paths: []dyn.Path{path}, + Paths: []dyn.Path{path}, }) } continue @@ -324,7 +324,7 @@ func (n normalizeOptions) normalizeInt(typ reflect.Type, src dyn.Value, path dyn Severity: diag.Warning, Summary: fmt.Sprintf(`cannot accurately represent "%g" as integer due to precision loss`, src.MustFloat()), Locations: []dyn.Location{src.Location()}, - Paths: []dyn.Path{path}, + Paths: []dyn.Path{path}, }) } case dyn.KindString: @@ -340,7 +340,7 @@ func (n normalizeOptions) normalizeInt(typ reflect.Type, src dyn.Value, path dyn Severity: diag.Warning, Summary: fmt.Sprintf("cannot parse %q as an integer", src.MustString()), Locations: []dyn.Location{src.Location()}, - Paths: []dyn.Path{path}, + Paths: []dyn.Path{path}, }) } case dyn.KindNil: @@ -367,7 +367,7 @@ func (n normalizeOptions) normalizeFloat(typ reflect.Type, src dyn.Value, path d Severity: diag.Warning, Summary: fmt.Sprintf(`cannot accurately represent "%d" as floating point number due to precision loss`, src.MustInt()), Locations: []dyn.Location{src.Location()}, - Paths: []dyn.Path{path}, + Paths: []dyn.Path{path}, }) } case dyn.KindString: @@ -383,7 +383,7 @@ func (n normalizeOptions) normalizeFloat(typ reflect.Type, src dyn.Value, path d Severity: diag.Warning, Summary: fmt.Sprintf("cannot parse %q as a floating point number", src.MustString()), Locations: []dyn.Location{src.Location()}, - Paths: []dyn.Path{path}, + Paths: []dyn.Path{path}, }) } case dyn.KindNil: diff --git a/libs/dyn/convert/normalize_test.go b/libs/dyn/convert/normalize_test.go index b75dffd2ab..c2256615e9 100644 --- a/libs/dyn/convert/normalize_test.go +++ b/libs/dyn/convert/normalize_test.go @@ -40,10 +40,10 @@ func TestNormalizeStructElementDiagnostic(t *testing.T) { vout, err := Normalize(typ, vin) assert.Len(t, err, 1) assert.Equal(t, diag.Diagnostic{ - Severity: diag.Warning, - Summary: `expected string, found map`, + Severity: diag.Warning, + Summary: `expected string, found map`, Locations: 
[]dyn.Location{{}}, - Paths: []dyn.Path{dyn.NewPath(dyn.Key("bar"))}, + Paths: []dyn.Path{dyn.NewPath(dyn.Key("bar"))}, }, err[0]) // Elements that encounter an error during normalization are dropped. @@ -79,7 +79,7 @@ func TestNormalizeStructUnknownField(t *testing.T) { {File: "hello.yaml", Line: 1, Column: 1}, {File: "world.yaml", Line: 2, Column: 2}, }, - Paths: []dyn.Path{dyn.EmptyPath}, + Paths: []dyn.Path{dyn.EmptyPath}, }, err[0]) // The field that can be mapped to the struct field is retained. @@ -113,7 +113,7 @@ func TestNormalizeStructError(t *testing.T) { Severity: diag.Warning, Summary: `expected map, found string`, Locations: []dyn.Location{vin.Get("foo").Location()}, - Paths: []dyn.Path{dyn.EmptyPath}, + Paths: []dyn.Path{dyn.EmptyPath}, }, err[0]) } @@ -258,7 +258,7 @@ func TestNormalizeStructRandomStringError(t *testing.T) { Severity: diag.Warning, Summary: `expected map, found string`, Locations: []dyn.Location{vin.Location()}, - Paths: []dyn.Path{dyn.EmptyPath}, + Paths: []dyn.Path{dyn.EmptyPath}, }, err[0]) } @@ -275,7 +275,7 @@ func TestNormalizeStructIntError(t *testing.T) { Severity: diag.Warning, Summary: `expected map, found int`, Locations: []dyn.Location{vin.Location()}, - Paths: []dyn.Path{dyn.EmptyPath}, + Paths: []dyn.Path{dyn.EmptyPath}, }, err[0]) } @@ -304,7 +304,7 @@ func TestNormalizeMapElementDiagnostic(t *testing.T) { Severity: diag.Warning, Summary: `expected string, found map`, Locations: []dyn.Location{{}}, - Paths: []dyn.Path{dyn.NewPath(dyn.Key("bar"))}, + Paths: []dyn.Path{dyn.NewPath(dyn.Key("bar"))}, }, err[0]) // Elements that encounter an error during normalization are dropped. 
@@ -330,7 +330,7 @@ func TestNormalizeMapError(t *testing.T) { Severity: diag.Warning, Summary: `expected map, found string`, Locations: []dyn.Location{vin.Location()}, - Paths: []dyn.Path{dyn.EmptyPath}, + Paths: []dyn.Path{dyn.EmptyPath}, }, err[0]) } @@ -385,7 +385,7 @@ func TestNormalizeMapRandomStringError(t *testing.T) { Severity: diag.Warning, Summary: `expected map, found string`, Locations: []dyn.Location{vin.Location()}, - Paths: []dyn.Path{dyn.EmptyPath}, + Paths: []dyn.Path{dyn.EmptyPath}, }, err[0]) } @@ -398,7 +398,7 @@ func TestNormalizeMapIntError(t *testing.T) { Severity: diag.Warning, Summary: `expected map, found int`, Locations: []dyn.Location{vin.Location()}, - Paths: []dyn.Path{dyn.EmptyPath}, + Paths: []dyn.Path{dyn.EmptyPath}, }, err[0]) } @@ -425,10 +425,10 @@ func TestNormalizeSliceElementDiagnostic(t *testing.T) { vout, err := Normalize(typ, vin) assert.Len(t, err, 1) assert.Equal(t, diag.Diagnostic{ - Severity: diag.Warning, - Summary: `expected string, found map`, + Severity: diag.Warning, + Summary: `expected string, found map`, Locations: []dyn.Location{{}}, - Paths: []dyn.Path{dyn.NewPath(dyn.Index(2))}, + Paths: []dyn.Path{dyn.NewPath(dyn.Index(2))}, }, err[0]) // Elements that encounter an error during normalization are dropped. 
@@ -452,7 +452,7 @@ func TestNormalizeSliceError(t *testing.T) { Severity: diag.Warning, Summary: `expected sequence, found string`, Locations: []dyn.Location{vin.Location()}, - Paths: []dyn.Path{dyn.EmptyPath}, + Paths: []dyn.Path{dyn.EmptyPath}, }, err[0]) } @@ -507,7 +507,7 @@ func TestNormalizeSliceRandomStringError(t *testing.T) { Severity: diag.Warning, Summary: `expected sequence, found string`, Locations: []dyn.Location{vin.Location()}, - Paths: []dyn.Path{dyn.EmptyPath}, + Paths: []dyn.Path{dyn.EmptyPath}, }, err[0]) } @@ -520,7 +520,7 @@ func TestNormalizeSliceIntError(t *testing.T) { Severity: diag.Warning, Summary: `expected sequence, found int`, Locations: []dyn.Location{vin.Location()}, - Paths: []dyn.Path{dyn.EmptyPath}, + Paths: []dyn.Path{dyn.EmptyPath}, }, err[0]) } @@ -541,7 +541,7 @@ func TestNormalizeStringNil(t *testing.T) { Severity: diag.Warning, Summary: `expected a string value, found null`, Locations: []dyn.Location{vin.Location()}, - Paths: []dyn.Path{dyn.EmptyPath}, + Paths: []dyn.Path{dyn.EmptyPath}, }, err[0]) } @@ -578,7 +578,7 @@ func TestNormalizeStringError(t *testing.T) { Severity: diag.Warning, Summary: `expected string, found map`, Locations: []dyn.Location{{}}, - Paths: []dyn.Path{dyn.EmptyPath}, + Paths: []dyn.Path{dyn.EmptyPath}, }, err[0]) } @@ -599,7 +599,7 @@ func TestNormalizeBoolNil(t *testing.T) { Severity: diag.Warning, Summary: `expected a bool value, found null`, Locations: []dyn.Location{vin.Location()}, - Paths: []dyn.Path{dyn.EmptyPath}, + Paths: []dyn.Path{dyn.EmptyPath}, }, err[0]) } @@ -641,7 +641,7 @@ func TestNormalizeBoolFromStringError(t *testing.T) { Severity: diag.Warning, Summary: `expected bool, found string`, Locations: []dyn.Location{vin.Location()}, - Paths: []dyn.Path{dyn.EmptyPath}, + Paths: []dyn.Path{dyn.EmptyPath}, }, err[0]) } @@ -654,7 +654,7 @@ func TestNormalizeBoolError(t *testing.T) { Severity: diag.Warning, Summary: `expected bool, found map`, Locations: []dyn.Location{{}}, - Paths: 
[]dyn.Path{dyn.EmptyPath}, + Paths: []dyn.Path{dyn.EmptyPath}, }, err[0]) } @@ -675,7 +675,7 @@ func TestNormalizeIntNil(t *testing.T) { Severity: diag.Warning, Summary: `expected a int value, found null`, Locations: []dyn.Location{vin.Location()}, - Paths: []dyn.Path{dyn.EmptyPath}, + Paths: []dyn.Path{dyn.EmptyPath}, }, err[0]) } @@ -696,7 +696,7 @@ func TestNormalizeIntFromFloatError(t *testing.T) { Severity: diag.Warning, Summary: `cannot accurately represent "1.5" as integer due to precision loss`, Locations: []dyn.Location{vin.Location()}, - Paths: []dyn.Path{dyn.EmptyPath}, + Paths: []dyn.Path{dyn.EmptyPath}, }, err[0]) } @@ -725,7 +725,7 @@ func TestNormalizeIntFromStringError(t *testing.T) { Severity: diag.Warning, Summary: `cannot parse "abc" as an integer`, Locations: []dyn.Location{vin.Location()}, - Paths: []dyn.Path{dyn.EmptyPath}, + Paths: []dyn.Path{dyn.EmptyPath}, }, err[0]) } @@ -738,7 +738,7 @@ func TestNormalizeIntError(t *testing.T) { Severity: diag.Warning, Summary: `expected int, found map`, Locations: []dyn.Location{{}}, - Paths: []dyn.Path{dyn.EmptyPath}, + Paths: []dyn.Path{dyn.EmptyPath}, }, err[0]) } @@ -759,7 +759,7 @@ func TestNormalizeFloatNil(t *testing.T) { Severity: diag.Warning, Summary: `expected a float value, found null`, Locations: []dyn.Location{vin.Location()}, - Paths: []dyn.Path{dyn.EmptyPath}, + Paths: []dyn.Path{dyn.EmptyPath}, }, err[0]) } @@ -784,7 +784,7 @@ func TestNormalizeFloatFromIntError(t *testing.T) { Severity: diag.Warning, Summary: `cannot accurately represent "9007199254740993" as floating point number due to precision loss`, Locations: []dyn.Location{vin.Location()}, - Paths: []dyn.Path{dyn.EmptyPath}, + Paths: []dyn.Path{dyn.EmptyPath}, }, err[0]) } @@ -813,7 +813,7 @@ func TestNormalizeFloatFromStringError(t *testing.T) { Severity: diag.Warning, Summary: `cannot parse "abc" as a floating point number`, Locations: []dyn.Location{vin.Location()}, - Paths: []dyn.Path{dyn.EmptyPath}, + Paths: 
[]dyn.Path{dyn.EmptyPath}, }, err[0]) } @@ -826,7 +826,7 @@ func TestNormalizeFloatError(t *testing.T) { Severity: diag.Warning, Summary: `expected float, found map`, Locations: []dyn.Location{{}}, - Paths: []dyn.Path{dyn.EmptyPath}, + Paths: []dyn.Path{dyn.EmptyPath}, }, err[0]) } From be5361c3d823809c85273ea6a16b4cd92076e723 Mon Sep 17 00:00:00 2001 From: Shreyas Goenka Date: Wed, 24 Jul 2024 15:29:03 +0200 Subject: [PATCH 24/75] enforce sorted mapping walks by default, and get the tests ready --- bundle/config/validate/pre_initialize.go | 28 ----- .../config/validate/unique_resource_keys.go | 60 +++++----- bundle/phases/initialize.go | 2 - bundle/tests/validate_test.go | 110 ++++++++++++++---- libs/dyn/mapping.go | 11 +- libs/dyn/walk.go | 4 +- 6 files changed, 127 insertions(+), 88 deletions(-) delete mode 100644 bundle/config/validate/pre_initialize.go diff --git a/bundle/config/validate/pre_initialize.go b/bundle/config/validate/pre_initialize.go deleted file mode 100644 index d18050b818..0000000000 --- a/bundle/config/validate/pre_initialize.go +++ /dev/null @@ -1,28 +0,0 @@ -package validate - -import ( - "context" - - "github.com/databricks/cli/bundle" - "github.com/databricks/cli/libs/diag" -) - -type preInitialize struct{} - -// Apply implements bundle.Mutator. -func (v *preInitialize) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { - return bundle.ApplyReadOnly(ctx, bundle.ReadOnly(b), bundle.Parallel( - UniqueResourceKeys(), - )) -} - -// Name implements bundle.Mutator. -func (v *preInitialize) Name() string { - return "validate:pre_initialize" -} - -// Validations to perform before initialization of the bundle. These validations -// are thus applied for most bundle commands. 
-func PreInitialize() bundle.Mutator { - return &preInitialize{} -} diff --git a/bundle/config/validate/unique_resource_keys.go b/bundle/config/validate/unique_resource_keys.go index ff9eede057..71e2d8d922 100644 --- a/bundle/config/validate/unique_resource_keys.go +++ b/bundle/config/validate/unique_resource_keys.go @@ -3,70 +3,64 @@ package validate import ( "context" "fmt" + "slices" "github.com/databricks/cli/bundle" "github.com/databricks/cli/libs/diag" "github.com/databricks/cli/libs/dyn" ) -func UniqueResourceKeys() bundle.ReadOnlyMutator { +func UniqueResourceKeys() bundle.Mutator { return &uniqueResourceKeys{} } -// TODO: Might need to enforce sorted walk on dyn.Walk +// TODO: Comment why this mutator needs to be run before target overrides. type uniqueResourceKeys struct{} func (m *uniqueResourceKeys) Name() string { return "validate:unique_resource_keys" } -func (m *uniqueResourceKeys) Apply(ctx context.Context, rb bundle.ReadOnlyBundle) diag.Diagnostics { +func (m *uniqueResourceKeys) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { diags := diag.Diagnostics{} - // Map of resource key to the paths and locations the resource is defined at. - paths := map[string][]dyn.Path{} - locations := map[string][]dyn.Location{} + // Map of resource key to the paths and locations the resource is defined at. + pathsByKey := map[string][]dyn.Path{} + locationsByKey := map[string][]dyn.Location{} - _, err := dyn.Walk(rb.Config().Value().Get("resources"), func(p dyn.Path, v dyn.Value) (dyn.Value, error) { - // The path is expected to be of length 2, and of the form .. - // Eg: jobs.my_job, pipelines.my_pipeline, etc. - if len(p) < 2 { - return v, nil - } - if len(p) > 2 { - return v, dyn.ErrSkip - } + // Gather the paths and locations of all resources. + // TODO: confirm MapByPattern behaves as I expect it to. 
+ _, err := dyn.MapByPattern( + b.Config.Value().Get("resources"), + dyn.NewPattern(dyn.AnyKey(), dyn.AnyKey()), + func(p dyn.Path, v dyn.Value) (dyn.Value, error) { + // The key for the resource. Eg: "my_job" for jobs.my_job. + k := p[1].Key() - // The key for the resource. Eg: "my_job" for jobs.my_job. - k := p[1].Key() + // dyn.Path under the hood is a slice. So, we need to clone it. + pathsByKey[k] = append(pathsByKey[k], slices.Clone(p)) - paths[k] = append(paths[k], p) - locations[k] = append(locations[k], v.Locations()...) - return v, nil - }) + locationsByKey[k] = append(locationsByKey[k], v.Locations()...) + return v, nil + }, + ) if err != nil { - diags = append(diags, diag.FromErr(err)...) + return diag.FromErr(err) } - for k, ps := range paths { - if len(ps) <= 1 { + for k, locations := range locationsByKey { + if len(locations) <= 1 { continue } - // TODO: What happens on target overrides? Ensure they do not misbehave. - // 1. What was the previous behaviour for target overrides? - // 2. What if a completely new resource with a conflicting key is defined - // in a target override. - // // If there are multiple resources with the same key, report an error. - // NOTE: This includes if the same resource is defined in multiple files as - // TODO: continue this comment. 
diags = append(diags, diag.Diagnostic{ Severity: diag.Error, Summary: fmt.Sprintf("multiple resources have been defined with the same key: %s", k), - Locations: locations[k], - Paths: ps, + Locations: locations, + Paths: pathsByKey[k], }) } + return diags } diff --git a/bundle/phases/initialize.go b/bundle/phases/initialize.go index a6eab67715..a32de2c561 100644 --- a/bundle/phases/initialize.go +++ b/bundle/phases/initialize.go @@ -5,7 +5,6 @@ import ( "github.com/databricks/cli/bundle/config" "github.com/databricks/cli/bundle/config/mutator" pythonmutator "github.com/databricks/cli/bundle/config/mutator/python" - "github.com/databricks/cli/bundle/config/validate" "github.com/databricks/cli/bundle/deploy/metadata" "github.com/databricks/cli/bundle/deploy/terraform" "github.com/databricks/cli/bundle/permissions" @@ -20,7 +19,6 @@ func Initialize() bundle.Mutator { return newPhase( "initialize", []bundle.Mutator{ - validate.PreInitialize(), mutator.RewriteSyncPaths(), mutator.MergeJobClusters(), mutator.MergeJobTasks(), diff --git a/bundle/tests/validate_test.go b/bundle/tests/validate_test.go index f5da1024dd..f4bb4477a4 100644 --- a/bundle/tests/validate_test.go +++ b/bundle/tests/validate_test.go @@ -6,41 +6,107 @@ import ( "github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle/config/validate" + "github.com/databricks/cli/libs/diag" + "github.com/databricks/cli/libs/dyn" "github.com/stretchr/testify/assert" ) func TestValidateUniqueResourceIdentifiers(t *testing.T) { tcases := []struct { - name string - errorMsg string + name string + diagnostics diag.Diagnostics }{ - // { - // name: "duplicate_resource_names_in_root_job_and_pipeline", - // errorMsg: "multiple resources named foo (jobs.foo at validate/duplicate_resource_names_in_root_job_and_pipeline/databricks.yml:10:7, pipelines.foo at validate/duplicate_resource_names_in_root_job_and_pipeline/databricks.yml:13:7)", - // }, - // { - // name: "duplicate_resource_names_in_root_job_and_experiment", 
- // errorMsg: "multiple resources named foo (experiments.foo at validate/duplicate_resource_names_in_root_job_and_experiment/databricks.yml:18:7, jobs.foo at validate/duplicate_resource_names_in_root_job_and_experiment/databricks.yml:10:7)", - // }, - // { - // name: "duplicate_resource_name_in_subconfiguration", - // errorMsg: "multiple resources named foo (jobs.foo at validate/duplicate_resource_name_in_subconfiguration/databricks.yml:13:7, pipelines.foo at validate/duplicate_resource_name_in_subconfiguration/resources.yml:4:7)", - // }, { - name: "duplicate_resource_name_in_subconfiguration_job_and_job", - errorMsg: "multiple resources have been defined with the same key: foo (jobs.foo at validate/duplicate_resource_name_in_subconfiguration_job_and_job/resources.yml:4:7, jobs.foo at validate/duplicate_resource_name_in_subconfiguration_job_and_job/databricks.yml:13:7)", + name: "duplicate_resource_names_in_root_job_and_pipeline", + diagnostics: diag.Diagnostics{ + { + Severity: diag.Error, + Summary: "multiple resources have been defined with the same key: foo", + Locations: []dyn.Location{ + {File: "validate/duplicate_resource_names_in_root_job_and_pipeline/databricks.yml", Line: 10, Column: 7}, + {File: "validate/duplicate_resource_names_in_root_job_and_pipeline/databricks.yml", Line: 13, Column: 7}, + }, + Paths: []dyn.Path{ + dyn.MustPathFromString("jobs.foo"), + dyn.MustPathFromString("pipelines.foo"), + }, + }, + }, + }, + { + name: "duplicate_resource_names_in_root_job_and_experiment", + diagnostics: diag.Diagnostics{ + { + Severity: diag.Error, + Summary: "multiple resources have been defined with the same key: foo", + Locations: []dyn.Location{ + {File: "validate/duplicate_resource_names_in_root_job_and_experiment/databricks.yml", Line: 18, Column: 7}, + {File: "validate/duplicate_resource_names_in_root_job_and_experiment/databricks.yml", Line: 10, Column: 7}, + }, + Paths: []dyn.Path{ + dyn.MustPathFromString("experiments.foo"), + 
dyn.MustPathFromString("jobs.foo"), + }, + }, + }, + }, + { + name: "duplicate_resource_name_in_subconfiguration", + diagnostics: diag.Diagnostics{ + { + Severity: diag.Error, + Summary: "multiple resources have been defined with the same key: foo", + Locations: []dyn.Location{ + {File: "validate/duplicate_resource_name_in_subconfiguration/databricks.yml", Line: 13, Column: 7}, + {File: "validate/duplicate_resource_name_in_subconfiguration/resources.yml", Line: 4, Column: 7}, + }, + Paths: []dyn.Path{ + dyn.MustPathFromString("jobs.foo"), + dyn.MustPathFromString("pipelines.foo"), + }, + }, + }, + }, + { + name: "duplicate_resource_name_in_subconfiguration_job_and_job", + diagnostics: diag.Diagnostics{ + { + Severity: diag.Error, + Summary: "multiple resources have been defined with the same key: foo", + Locations: []dyn.Location{ + {File: "validate/duplicate_resource_name_in_subconfiguration_job_and_job/databricks.yml", Line: 13, Column: 7}, + {File: "validate/duplicate_resource_name_in_subconfiguration_job_and_job/resources.yml", Line: 4, Column: 7}, + }, + Paths: []dyn.Path{ + dyn.MustPathFromString("jobs.foo"), + }, + }, + }, + }, + { + name: "duplicate_resource_names_in_different_subconfiguations", + diagnostics: diag.Diagnostics{ + { + Severity: diag.Error, + Summary: "multiple resources have been defined with the same key: foo", + Locations: []dyn.Location{ + {File: "validate/duplicate_resource_names_in_different_subconfiguations/resources1.yml", Line: 4, Column: 7}, + {File: "validate/duplicate_resource_names_in_different_subconfiguations/resources2.yml", Line: 4, Column: 7}, + }, + Paths: []dyn.Path{ + dyn.MustPathFromString("jobs.foo"), + dyn.MustPathFromString("pipelines.foo"), + }, + }, + }, }, - // { - // name: "duplicate_resource_names_in_different_subconfiguations", - // errorMsg: "multiple resources named foo (jobs.foo at validate/duplicate_resource_names_in_different_subconfiguations/resources1.yml:4:7, pipelines.foo at 
validate/duplicate_resource_names_in_different_subconfiguations/resources2.yml:4:7)", - // }, } for _, tc := range tcases { t.Run(tc.name, func(t *testing.T) { b := load(t, "./validate/"+tc.name) - diags := bundle.ApplyReadOnly(context.Background(), bundle.ReadOnly(b), validate.UniqueResourceKeys()) - assert.ErrorContains(t, diags.Error(), tc.errorMsg) + diags := bundle.Apply(context.Background(), b, validate.UniqueResourceKeys()) + assert.Equal(t, tc.diagnostics, diags) }) } } diff --git a/libs/dyn/mapping.go b/libs/dyn/mapping.go index 668f57ecc4..551571d18a 100644 --- a/libs/dyn/mapping.go +++ b/libs/dyn/mapping.go @@ -4,6 +4,7 @@ import ( "fmt" "maps" "slices" + "sort" ) // Pair represents a single key-value pair in a Mapping. @@ -46,9 +47,15 @@ func newMappingFromGoMap(vin map[string]Value) Mapping { return m } -// Pairs returns all the key-value pairs in the Mapping. +// Pairs returns all the key-value pairs in the Mapping. The pairs are sorted by +// their key in lexicographic order. func (m Mapping) Pairs() []Pair { - return m.pairs + pairs := make([]Pair, len(m.pairs)) + copy(pairs, m.pairs) + sort.Slice(pairs, func(i, j int) bool { + return pairs[i].Key.MustString() < pairs[j].Key.MustString() + }) + return pairs } // Len returns the number of key-value pairs in the Mapping. diff --git a/libs/dyn/walk.go b/libs/dyn/walk.go index c51a11e22c..ad651d5cd7 100644 --- a/libs/dyn/walk.go +++ b/libs/dyn/walk.go @@ -1,6 +1,8 @@ package dyn -import "errors" +import ( + "errors" +) // WalkValueFunc is the type of the function called by Walk to traverse the configuration tree. 
type WalkValueFunc func(p Path, v Value) (Value, error) From ef2b64d207f3cc7cced1f9c7d82c720eff275f4f Mon Sep 17 00:00:00 2001 From: Shreyas Goenka Date: Wed, 24 Jul 2024 15:49:03 +0200 Subject: [PATCH 25/75] add comment for the validation mutator --- bundle/config/mutator/mutator.go | 5 +++++ bundle/config/validate/unique_resource_keys.go | 15 +++++++++++++-- 2 files changed, 18 insertions(+), 2 deletions(-) diff --git a/bundle/config/mutator/mutator.go b/bundle/config/mutator/mutator.go index 52f85eeb8e..0458beff44 100644 --- a/bundle/config/mutator/mutator.go +++ b/bundle/config/mutator/mutator.go @@ -5,6 +5,7 @@ import ( "github.com/databricks/cli/bundle/config" "github.com/databricks/cli/bundle/config/loader" pythonmutator "github.com/databricks/cli/bundle/config/mutator/python" + "github.com/databricks/cli/bundle/config/validate" "github.com/databricks/cli/bundle/scripts" ) @@ -26,5 +27,9 @@ func DefaultMutators() []bundle.Mutator { DefineDefaultTarget(), LoadGitDetails(), pythonmutator.PythonMutator(pythonmutator.PythonMutatorPhaseLoad), + + // Note: This mutator must run before the target overrides are merged. + // See the mutator for more details. + validate.UniqueResourceKeys(), } } diff --git a/bundle/config/validate/unique_resource_keys.go b/bundle/config/validate/unique_resource_keys.go index 71e2d8d922..1e1b1eb4e3 100644 --- a/bundle/config/validate/unique_resource_keys.go +++ b/bundle/config/validate/unique_resource_keys.go @@ -10,11 +10,23 @@ import ( "github.com/databricks/cli/libs/dyn" ) +// This mutator validates that: +// +// 1. Each resource key is unique across different resource types. No two resources +// of the same type can have the same key. This is because commands like "bundle run" +// rely on the resource key to identify the resource to run. +// Eg: jobs.foo and pipelines.foo are not allowed simultaneously. +// +// 2. Each resource definition is contained within a single file, and is not spread +// across multiple files. 
Note: This is not applicable to resource configuration +// defined in a target override. That is why this mutator MUST run before the target +// overrides are merged. +// +// TODO: Ensure adequate test coverage. func UniqueResourceKeys() bundle.Mutator { return &uniqueResourceKeys{} } -// TODO: Comment why this mutator needs to be run before target overrides. type uniqueResourceKeys struct{} func (m *uniqueResourceKeys) Name() string { @@ -29,7 +41,6 @@ func (m *uniqueResourceKeys) Apply(ctx context.Context, b *bundle.Bundle) diag.D locationsByKey := map[string][]dyn.Location{} // Gather the paths and locations of all resources. - // TODO: confirm MapByPattern behaves as I expect it to. _, err := dyn.MapByPattern( b.Config.Value().Get("resources"), dyn.NewPattern(dyn.AnyKey(), dyn.AnyKey()), From 76ed78d6182aa6677e93fe9f5ab01db1e58487b4 Mon Sep 17 00:00:00 2001 From: Shreyas Goenka Date: Wed, 24 Jul 2024 18:18:24 +0200 Subject: [PATCH 26/75] coverage for the multiple resources case --- .../config/validate/unique_resource_keys.go | 2 -- .../databricks.yml | 13 ++++++++ .../resources1.yml | 8 +++++ .../resources2.yml | 8 +++++ bundle/tests/validate_test.go | 32 +++++++++++++++++-- 5 files changed, 58 insertions(+), 5 deletions(-) create mode 100644 bundle/tests/validate/duplicate_resource_name_in_multiple_locations/databricks.yml create mode 100644 bundle/tests/validate/duplicate_resource_name_in_multiple_locations/resources1.yml create mode 100644 bundle/tests/validate/duplicate_resource_name_in_multiple_locations/resources2.yml diff --git a/bundle/config/validate/unique_resource_keys.go b/bundle/config/validate/unique_resource_keys.go index 1e1b1eb4e3..03b4ebe264 100644 --- a/bundle/config/validate/unique_resource_keys.go +++ b/bundle/config/validate/unique_resource_keys.go @@ -21,8 +21,6 @@ import ( // across multiple files. Note: This is not applicable to resource configuration // defined in a target override. 
That is why this mutator MUST run before the target // overrides are merged. -// -// TODO: Ensure adequate test coverage. func UniqueResourceKeys() bundle.Mutator { return &uniqueResourceKeys{} } diff --git a/bundle/tests/validate/duplicate_resource_name_in_multiple_locations/databricks.yml b/bundle/tests/validate/duplicate_resource_name_in_multiple_locations/databricks.yml new file mode 100644 index 0000000000..ebb1f90053 --- /dev/null +++ b/bundle/tests/validate/duplicate_resource_name_in_multiple_locations/databricks.yml @@ -0,0 +1,13 @@ +bundle: + name: test + +workspace: + profile: test + +include: + - ./*.yml + +resources: + jobs: + foo: + name: job foo 1 diff --git a/bundle/tests/validate/duplicate_resource_name_in_multiple_locations/resources1.yml b/bundle/tests/validate/duplicate_resource_name_in_multiple_locations/resources1.yml new file mode 100644 index 0000000000..deb81caa1c --- /dev/null +++ b/bundle/tests/validate/duplicate_resource_name_in_multiple_locations/resources1.yml @@ -0,0 +1,8 @@ +resources: + jobs: + foo: + name: job foo 2 + + pipelines: + foo: + name: pipeline foo diff --git a/bundle/tests/validate/duplicate_resource_name_in_multiple_locations/resources2.yml b/bundle/tests/validate/duplicate_resource_name_in_multiple_locations/resources2.yml new file mode 100644 index 0000000000..4e0a342b30 --- /dev/null +++ b/bundle/tests/validate/duplicate_resource_name_in_multiple_locations/resources2.yml @@ -0,0 +1,8 @@ +resources: + jobs: + foo: + name: job foo 3 + + experiments: + foo: + name: experiment foo diff --git a/bundle/tests/validate_test.go b/bundle/tests/validate_test.go index f4bb4477a4..a00827b639 100644 --- a/bundle/tests/validate_test.go +++ b/bundle/tests/validate_test.go @@ -5,10 +5,11 @@ import ( "testing" "github.com/databricks/cli/bundle" - "github.com/databricks/cli/bundle/config/validate" + "github.com/databricks/cli/bundle/phases" "github.com/databricks/cli/libs/diag" "github.com/databricks/cli/libs/dyn" 
"github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" ) func TestValidateUniqueResourceIdentifiers(t *testing.T) { @@ -100,12 +101,37 @@ func TestValidateUniqueResourceIdentifiers(t *testing.T) { }, }, }, + { + name: "duplicate_resource_name_in_multiple_locations", + diagnostics: diag.Diagnostics{ + { + Severity: diag.Error, + Summary: "multiple resources have been defined with the same key: foo", + Locations: []dyn.Location{ + {File: "validate/duplicate_resource_name_in_multiple_locations/resources2.yml", Line: 8, Column: 7}, + {File: "validate/duplicate_resource_name_in_multiple_locations/databricks.yml", Line: 13, Column: 7}, + {File: "validate/duplicate_resource_name_in_multiple_locations/resources1.yml", Line: 4, Column: 7}, + {File: "validate/duplicate_resource_name_in_multiple_locations/resources2.yml", Line: 4, Column: 7}, + {File: "validate/duplicate_resource_name_in_multiple_locations/resources1.yml", Line: 8, Column: 7}, + }, + Paths: []dyn.Path{ + dyn.MustPathFromString("experiments.foo"), + dyn.MustPathFromString("jobs.foo"), + dyn.MustPathFromString("pipelines.foo"), + }, + }, + }, + }, } for _, tc := range tcases { t.Run(tc.name, func(t *testing.T) { - b := load(t, "./validate/"+tc.name) - diags := bundle.Apply(context.Background(), b, validate.UniqueResourceKeys()) + ctx := context.Background() + b, err := bundle.Load(ctx, "./validate/"+tc.name) + require.NoError(t, err) + + // The UniqueResourceKeys mutator is run as part of the Load phase. 
+ diags := bundle.Apply(ctx, b, phases.Load()) assert.Equal(t, tc.diagnostics, diags) }) } From 4f0aaf07c86b58a2b145ca689ed28ef07d3f0602 Mon Sep 17 00:00:00 2001 From: Shreyas Goenka Date: Thu, 25 Jul 2024 14:14:35 +0200 Subject: [PATCH 27/75] fmt --- .../mutator/python/python_diagnostics_test.go | 6 +- bundle/config/mutator/run_as.go | 6 +- libs/dyn/convert/normalize_test.go | 60 +++++++++---------- 3 files changed, 36 insertions(+), 36 deletions(-) diff --git a/bundle/config/mutator/python/python_diagnostics_test.go b/bundle/config/mutator/python/python_diagnostics_test.go index 1358ed412a..2eb6380c1a 100644 --- a/bundle/config/mutator/python/python_diagnostics_test.go +++ b/bundle/config/mutator/python/python_diagnostics_test.go @@ -54,9 +54,9 @@ func TestParsePythonDiagnostics(t *testing.T) { input: `{"severity": "error", "summary": "error summary", "path": "resources.jobs.job0.name"}`, expected: diag.Diagnostics{ { - Severity: diag.Error, - Summary: "error summary", - Paths: []dyn.Path{dyn.MustPathFromString("resources.jobs.job0.name")}, + Severity: diag.Error, + Summary: "error summary", + Paths: []dyn.Path{dyn.MustPathFromString("resources.jobs.job0.name")}, Locations: []dyn.Location{{}}, }, }, diff --git a/bundle/config/mutator/run_as.go b/bundle/config/mutator/run_as.go index a37bd52379..423bc38e2d 100644 --- a/bundle/config/mutator/run_as.go +++ b/bundle/config/mutator/run_as.go @@ -178,9 +178,9 @@ func (m *setRunAs) Apply(_ context.Context, b *bundle.Bundle) diag.Diagnostics { setRunAsForJobs(b) return diag.Diagnostics{ { - Severity: diag.Warning, - Summary: "You are using the legacy mode of run_as. The support for this mode is experimental and might be removed in a future release of the CLI. 
In order to run the DLT pipelines in your DAB as the run_as user this mode changes the owners of the pipelines to the run_as identity, which requires the user deploying the bundle to be a workspace admin, and also a Metastore admin if the pipeline target is in UC.", - Paths: []dyn.Path{dyn.MustPathFromString("experimental.use_legacy_run_as")}, + Severity: diag.Warning, + Summary: "You are using the legacy mode of run_as. The support for this mode is experimental and might be removed in a future release of the CLI. In order to run the DLT pipelines in your DAB as the run_as user this mode changes the owners of the pipelines to the run_as identity, which requires the user deploying the bundle to be a workspace admin, and also a Metastore admin if the pipeline target is in UC.", + Paths: []dyn.Path{dyn.MustPathFromString("experimental.use_legacy_run_as")}, Locations: b.Config.GetLocations("experimental.use_legacy_run_as"), }, } diff --git a/libs/dyn/convert/normalize_test.go b/libs/dyn/convert/normalize_test.go index b75dffd2ab..c2256615e9 100644 --- a/libs/dyn/convert/normalize_test.go +++ b/libs/dyn/convert/normalize_test.go @@ -40,10 +40,10 @@ func TestNormalizeStructElementDiagnostic(t *testing.T) { vout, err := Normalize(typ, vin) assert.Len(t, err, 1) assert.Equal(t, diag.Diagnostic{ - Severity: diag.Warning, - Summary: `expected string, found map`, + Severity: diag.Warning, + Summary: `expected string, found map`, Locations: []dyn.Location{{}}, - Paths: []dyn.Path{dyn.NewPath(dyn.Key("bar"))}, + Paths: []dyn.Path{dyn.NewPath(dyn.Key("bar"))}, }, err[0]) // Elements that encounter an error during normalization are dropped. @@ -79,7 +79,7 @@ func TestNormalizeStructUnknownField(t *testing.T) { {File: "hello.yaml", Line: 1, Column: 1}, {File: "world.yaml", Line: 2, Column: 2}, }, - Paths: []dyn.Path{dyn.EmptyPath}, + Paths: []dyn.Path{dyn.EmptyPath}, }, err[0]) // The field that can be mapped to the struct field is retained. 
@@ -113,7 +113,7 @@ func TestNormalizeStructError(t *testing.T) { Severity: diag.Warning, Summary: `expected map, found string`, Locations: []dyn.Location{vin.Get("foo").Location()}, - Paths: []dyn.Path{dyn.EmptyPath}, + Paths: []dyn.Path{dyn.EmptyPath}, }, err[0]) } @@ -258,7 +258,7 @@ func TestNormalizeStructRandomStringError(t *testing.T) { Severity: diag.Warning, Summary: `expected map, found string`, Locations: []dyn.Location{vin.Location()}, - Paths: []dyn.Path{dyn.EmptyPath}, + Paths: []dyn.Path{dyn.EmptyPath}, }, err[0]) } @@ -275,7 +275,7 @@ func TestNormalizeStructIntError(t *testing.T) { Severity: diag.Warning, Summary: `expected map, found int`, Locations: []dyn.Location{vin.Location()}, - Paths: []dyn.Path{dyn.EmptyPath}, + Paths: []dyn.Path{dyn.EmptyPath}, }, err[0]) } @@ -304,7 +304,7 @@ func TestNormalizeMapElementDiagnostic(t *testing.T) { Severity: diag.Warning, Summary: `expected string, found map`, Locations: []dyn.Location{{}}, - Paths: []dyn.Path{dyn.NewPath(dyn.Key("bar"))}, + Paths: []dyn.Path{dyn.NewPath(dyn.Key("bar"))}, }, err[0]) // Elements that encounter an error during normalization are dropped. 
@@ -330,7 +330,7 @@ func TestNormalizeMapError(t *testing.T) { Severity: diag.Warning, Summary: `expected map, found string`, Locations: []dyn.Location{vin.Location()}, - Paths: []dyn.Path{dyn.EmptyPath}, + Paths: []dyn.Path{dyn.EmptyPath}, }, err[0]) } @@ -385,7 +385,7 @@ func TestNormalizeMapRandomStringError(t *testing.T) { Severity: diag.Warning, Summary: `expected map, found string`, Locations: []dyn.Location{vin.Location()}, - Paths: []dyn.Path{dyn.EmptyPath}, + Paths: []dyn.Path{dyn.EmptyPath}, }, err[0]) } @@ -398,7 +398,7 @@ func TestNormalizeMapIntError(t *testing.T) { Severity: diag.Warning, Summary: `expected map, found int`, Locations: []dyn.Location{vin.Location()}, - Paths: []dyn.Path{dyn.EmptyPath}, + Paths: []dyn.Path{dyn.EmptyPath}, }, err[0]) } @@ -425,10 +425,10 @@ func TestNormalizeSliceElementDiagnostic(t *testing.T) { vout, err := Normalize(typ, vin) assert.Len(t, err, 1) assert.Equal(t, diag.Diagnostic{ - Severity: diag.Warning, - Summary: `expected string, found map`, + Severity: diag.Warning, + Summary: `expected string, found map`, Locations: []dyn.Location{{}}, - Paths: []dyn.Path{dyn.NewPath(dyn.Index(2))}, + Paths: []dyn.Path{dyn.NewPath(dyn.Index(2))}, }, err[0]) // Elements that encounter an error during normalization are dropped. 
@@ -452,7 +452,7 @@ func TestNormalizeSliceError(t *testing.T) { Severity: diag.Warning, Summary: `expected sequence, found string`, Locations: []dyn.Location{vin.Location()}, - Paths: []dyn.Path{dyn.EmptyPath}, + Paths: []dyn.Path{dyn.EmptyPath}, }, err[0]) } @@ -507,7 +507,7 @@ func TestNormalizeSliceRandomStringError(t *testing.T) { Severity: diag.Warning, Summary: `expected sequence, found string`, Locations: []dyn.Location{vin.Location()}, - Paths: []dyn.Path{dyn.EmptyPath}, + Paths: []dyn.Path{dyn.EmptyPath}, }, err[0]) } @@ -520,7 +520,7 @@ func TestNormalizeSliceIntError(t *testing.T) { Severity: diag.Warning, Summary: `expected sequence, found int`, Locations: []dyn.Location{vin.Location()}, - Paths: []dyn.Path{dyn.EmptyPath}, + Paths: []dyn.Path{dyn.EmptyPath}, }, err[0]) } @@ -541,7 +541,7 @@ func TestNormalizeStringNil(t *testing.T) { Severity: diag.Warning, Summary: `expected a string value, found null`, Locations: []dyn.Location{vin.Location()}, - Paths: []dyn.Path{dyn.EmptyPath}, + Paths: []dyn.Path{dyn.EmptyPath}, }, err[0]) } @@ -578,7 +578,7 @@ func TestNormalizeStringError(t *testing.T) { Severity: diag.Warning, Summary: `expected string, found map`, Locations: []dyn.Location{{}}, - Paths: []dyn.Path{dyn.EmptyPath}, + Paths: []dyn.Path{dyn.EmptyPath}, }, err[0]) } @@ -599,7 +599,7 @@ func TestNormalizeBoolNil(t *testing.T) { Severity: diag.Warning, Summary: `expected a bool value, found null`, Locations: []dyn.Location{vin.Location()}, - Paths: []dyn.Path{dyn.EmptyPath}, + Paths: []dyn.Path{dyn.EmptyPath}, }, err[0]) } @@ -641,7 +641,7 @@ func TestNormalizeBoolFromStringError(t *testing.T) { Severity: diag.Warning, Summary: `expected bool, found string`, Locations: []dyn.Location{vin.Location()}, - Paths: []dyn.Path{dyn.EmptyPath}, + Paths: []dyn.Path{dyn.EmptyPath}, }, err[0]) } @@ -654,7 +654,7 @@ func TestNormalizeBoolError(t *testing.T) { Severity: diag.Warning, Summary: `expected bool, found map`, Locations: []dyn.Location{{}}, - Paths: 
[]dyn.Path{dyn.EmptyPath}, + Paths: []dyn.Path{dyn.EmptyPath}, }, err[0]) } @@ -675,7 +675,7 @@ func TestNormalizeIntNil(t *testing.T) { Severity: diag.Warning, Summary: `expected a int value, found null`, Locations: []dyn.Location{vin.Location()}, - Paths: []dyn.Path{dyn.EmptyPath}, + Paths: []dyn.Path{dyn.EmptyPath}, }, err[0]) } @@ -696,7 +696,7 @@ func TestNormalizeIntFromFloatError(t *testing.T) { Severity: diag.Warning, Summary: `cannot accurately represent "1.5" as integer due to precision loss`, Locations: []dyn.Location{vin.Location()}, - Paths: []dyn.Path{dyn.EmptyPath}, + Paths: []dyn.Path{dyn.EmptyPath}, }, err[0]) } @@ -725,7 +725,7 @@ func TestNormalizeIntFromStringError(t *testing.T) { Severity: diag.Warning, Summary: `cannot parse "abc" as an integer`, Locations: []dyn.Location{vin.Location()}, - Paths: []dyn.Path{dyn.EmptyPath}, + Paths: []dyn.Path{dyn.EmptyPath}, }, err[0]) } @@ -738,7 +738,7 @@ func TestNormalizeIntError(t *testing.T) { Severity: diag.Warning, Summary: `expected int, found map`, Locations: []dyn.Location{{}}, - Paths: []dyn.Path{dyn.EmptyPath}, + Paths: []dyn.Path{dyn.EmptyPath}, }, err[0]) } @@ -759,7 +759,7 @@ func TestNormalizeFloatNil(t *testing.T) { Severity: diag.Warning, Summary: `expected a float value, found null`, Locations: []dyn.Location{vin.Location()}, - Paths: []dyn.Path{dyn.EmptyPath}, + Paths: []dyn.Path{dyn.EmptyPath}, }, err[0]) } @@ -784,7 +784,7 @@ func TestNormalizeFloatFromIntError(t *testing.T) { Severity: diag.Warning, Summary: `cannot accurately represent "9007199254740993" as floating point number due to precision loss`, Locations: []dyn.Location{vin.Location()}, - Paths: []dyn.Path{dyn.EmptyPath}, + Paths: []dyn.Path{dyn.EmptyPath}, }, err[0]) } @@ -813,7 +813,7 @@ func TestNormalizeFloatFromStringError(t *testing.T) { Severity: diag.Warning, Summary: `cannot parse "abc" as a floating point number`, Locations: []dyn.Location{vin.Location()}, - Paths: []dyn.Path{dyn.EmptyPath}, + Paths: 
[]dyn.Path{dyn.EmptyPath}, }, err[0]) } @@ -826,7 +826,7 @@ func TestNormalizeFloatError(t *testing.T) { Severity: diag.Warning, Summary: `expected float, found map`, Locations: []dyn.Location{{}}, - Paths: []dyn.Path{dyn.EmptyPath}, + Paths: []dyn.Path{dyn.EmptyPath}, }, err[0]) } From b4c937943e7738e79a0bc33f8faf451631cb0006 Mon Sep 17 00:00:00 2001 From: Shreyas Goenka Date: Thu, 25 Jul 2024 14:19:13 +0200 Subject: [PATCH 28/75] cleanup --- bundle/config/mutator/python/python_diagnostics_test.go | 4 ---- 1 file changed, 4 deletions(-) diff --git a/bundle/config/mutator/python/python_diagnostics_test.go b/bundle/config/mutator/python/python_diagnostics_test.go index 2eb6380c1a..0e9b411999 100644 --- a/bundle/config/mutator/python/python_diagnostics_test.go +++ b/bundle/config/mutator/python/python_diagnostics_test.go @@ -57,7 +57,6 @@ func TestParsePythonDiagnostics(t *testing.T) { Severity: diag.Error, Summary: "error summary", Paths: []dyn.Path{dyn.MustPathFromString("resources.jobs.job0.name")}, - Locations: []dyn.Location{{}}, }, }, }, @@ -79,7 +78,6 @@ func TestParsePythonDiagnostics(t *testing.T) { Severity: diag.Warning, Summary: "warning summary", Detail: "warning detail", - Locations: []dyn.Location{{}}, }, }, }, @@ -91,12 +89,10 @@ func TestParsePythonDiagnostics(t *testing.T) { { Severity: diag.Error, Summary: "error summary (1)", - Locations: []dyn.Location{{}}, }, { Severity: diag.Error, Summary: "error summary (2)", - Locations: []dyn.Location{{}}, }, }, }, From ee68e78a1571c2b05adc1b9f9c154f6351aaaadf Mon Sep 17 00:00:00 2001 From: Shreyas Goenka Date: Thu, 25 Jul 2024 14:19:47 +0200 Subject: [PATCH 29/75] - --- .../mutator/python/python_diagnostics_test.go | 20 +++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/bundle/config/mutator/python/python_diagnostics_test.go b/bundle/config/mutator/python/python_diagnostics_test.go index 0e9b411999..b73b0f73cd 100644 --- 
a/bundle/config/mutator/python/python_diagnostics_test.go +++ b/bundle/config/mutator/python/python_diagnostics_test.go @@ -54,9 +54,9 @@ func TestParsePythonDiagnostics(t *testing.T) { input: `{"severity": "error", "summary": "error summary", "path": "resources.jobs.job0.name"}`, expected: diag.Diagnostics{ { - Severity: diag.Error, - Summary: "error summary", - Paths: []dyn.Path{dyn.MustPathFromString("resources.jobs.job0.name")}, + Severity: diag.Error, + Summary: "error summary", + Paths: []dyn.Path{dyn.MustPathFromString("resources.jobs.job0.name")}, }, }, }, @@ -75,9 +75,9 @@ func TestParsePythonDiagnostics(t *testing.T) { input: `{"severity": "warning", "summary": "warning summary", "detail": "warning detail"}`, expected: diag.Diagnostics{ { - Severity: diag.Warning, - Summary: "warning summary", - Detail: "warning detail", + Severity: diag.Warning, + Summary: "warning summary", + Detail: "warning detail", }, }, }, @@ -87,12 +87,12 @@ func TestParsePythonDiagnostics(t *testing.T) { `{"severity": "error", "summary": "error summary (2)"}`, expected: diag.Diagnostics{ { - Severity: diag.Error, - Summary: "error summary (1)", + Severity: diag.Error, + Summary: "error summary (1)", }, { - Severity: diag.Error, - Summary: "error summary (2)", + Severity: diag.Error, + Summary: "error summary (2)", }, }, }, From 0c2238bd0fe2659102aa9f4491848cef669622b6 Mon Sep 17 00:00:00 2001 From: Shreyas Goenka Date: Thu, 25 Jul 2024 17:52:58 +0200 Subject: [PATCH 30/75] nit --- libs/dyn/walk.go | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/libs/dyn/walk.go b/libs/dyn/walk.go index ad651d5cd7..c51a11e22c 100644 --- a/libs/dyn/walk.go +++ b/libs/dyn/walk.go @@ -1,8 +1,6 @@ package dyn -import ( - "errors" -) +import "errors" // WalkValueFunc is the type of the function called by Walk to traverse the configuration tree. 
type WalkValueFunc func(p Path, v Value) (Value, error) From 04b270b02aeb430f520d64b178c9f794860a0ae0 Mon Sep 17 00:00:00 2001 From: Shreyas Goenka Date: Fri, 26 Jul 2024 13:32:11 +0200 Subject: [PATCH 31/75] remove unnecessary unit tests --- bundle/tests/conflicting_resource_ids_test.go | 42 ------------------- 1 file changed, 42 deletions(-) delete mode 100644 bundle/tests/conflicting_resource_ids_test.go diff --git a/bundle/tests/conflicting_resource_ids_test.go b/bundle/tests/conflicting_resource_ids_test.go deleted file mode 100644 index e7f0aa28f2..0000000000 --- a/bundle/tests/conflicting_resource_ids_test.go +++ /dev/null @@ -1,42 +0,0 @@ -package config_tests - -import ( - "context" - "fmt" - "path/filepath" - "testing" - - "github.com/databricks/cli/bundle" - "github.com/databricks/cli/bundle/phases" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -func TestConflictingResourceIdsNoSubconfig(t *testing.T) { - ctx := context.Background() - b, err := bundle.Load(ctx, "./conflicting_resource_ids/no_subconfigurations") - require.NoError(t, err) - diags := bundle.Apply(ctx, b, phases.Load()) - bundleConfigPath := filepath.FromSlash("conflicting_resource_ids/no_subconfigurations/databricks.yml") - assert.ErrorContains(t, diags.Error(), fmt.Sprintf("multiple resources named foo (job at %s, pipeline at %s)", bundleConfigPath, bundleConfigPath)) -} - -func TestConflictingResourceIdsOneSubconfig(t *testing.T) { - ctx := context.Background() - b, err := bundle.Load(ctx, "./conflicting_resource_ids/one_subconfiguration") - require.NoError(t, err) - diags := bundle.Apply(ctx, b, phases.Load()) - bundleConfigPath := filepath.FromSlash("conflicting_resource_ids/one_subconfiguration/databricks.yml") - resourcesConfigPath := filepath.FromSlash("conflicting_resource_ids/one_subconfiguration/resources.yml") - assert.ErrorContains(t, diags.Error(), fmt.Sprintf("multiple resources named foo (job at %s, pipeline at %s)", bundleConfigPath, 
resourcesConfigPath)) -} - -func TestConflictingResourceIdsTwoSubconfigs(t *testing.T) { - ctx := context.Background() - b, err := bundle.Load(ctx, "./conflicting_resource_ids/two_subconfigurations") - require.NoError(t, err) - diags := bundle.Apply(ctx, b, phases.Load()) - resources1ConfigPath := filepath.FromSlash("conflicting_resource_ids/two_subconfigurations/resources1.yml") - resources2ConfigPath := filepath.FromSlash("conflicting_resource_ids/two_subconfigurations/resources2.yml") - assert.ErrorContains(t, diags.Error(), fmt.Sprintf("multiple resources named foo (job at %s, pipeline at %s)", resources1ConfigPath, resources2ConfigPath)) -} From 29cad8c8e945598e400afd3d5a6ec291525e8e17 Mon Sep 17 00:00:00 2001 From: Shreyas Goenka Date: Fri, 26 Jul 2024 16:11:26 +0200 Subject: [PATCH 32/75] fix basic test --- bundle/config/validate/unique_resource_keys.go | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/bundle/config/validate/unique_resource_keys.go b/bundle/config/validate/unique_resource_keys.go index 03b4ebe264..32365f5900 100644 --- a/bundle/config/validate/unique_resource_keys.go +++ b/bundle/config/validate/unique_resource_keys.go @@ -38,9 +38,16 @@ func (m *uniqueResourceKeys) Apply(ctx context.Context, b *bundle.Bundle) diag.D pathsByKey := map[string][]dyn.Path{} locationsByKey := map[string][]dyn.Location{} + rv := b.Config.Value().Get("resources") + + // return early if no resources are defined or the resources block is empty. + if rv.Kind() == dyn.KindInvalid || rv.Kind() == dyn.KindNil { + return diags + } + // Gather the paths and locations of all resources. _, err := dyn.MapByPattern( - b.Config.Value().Get("resources"), + rv, dyn.NewPattern(dyn.AnyKey(), dyn.AnyKey()), func(p dyn.Path, v dyn.Value) (dyn.Value, error) { // The key for the resource. Eg: "my_job" for jobs.my_job. 
From f1ffa59de81e6539674f6d39c92c039830104223 Mon Sep 17 00:00:00 2001 From: Shreyas Goenka Date: Fri, 26 Jul 2024 16:23:32 +0200 Subject: [PATCH 33/75] some cleanup --- bundle/config/validate/unique_resource_keys.go | 2 +- .../resources.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/bundle/config/validate/unique_resource_keys.go b/bundle/config/validate/unique_resource_keys.go index 32365f5900..742ec12f60 100644 --- a/bundle/config/validate/unique_resource_keys.go +++ b/bundle/config/validate/unique_resource_keys.go @@ -34,7 +34,7 @@ func (m *uniqueResourceKeys) Name() string { func (m *uniqueResourceKeys) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { diags := diag.Diagnostics{} - // Map of resource key to the pathsByKey and locations the resource is defined at. + // Maps of resource key to the paths and locations the resource is defined at. pathsByKey := map[string][]dyn.Path{} locationsByKey := map[string][]dyn.Location{} diff --git a/bundle/tests/validate/duplicate_resource_name_in_subconfiguration_job_and_job/resources.yml b/bundle/tests/validate/duplicate_resource_name_in_subconfiguration_job_and_job/resources.yml index f644bc848b..83fb75735c 100644 --- a/bundle/tests/validate/duplicate_resource_name_in_subconfiguration_job_and_job/resources.yml +++ b/bundle/tests/validate/duplicate_resource_name_in_subconfiguration_job_and_job/resources.yml @@ -1,4 +1,4 @@ resources: jobs: foo: - name: pipeline foo + name: job foo 2 From 8298f7dffdd240071585a973cbf2dbc54dde6f4d Mon Sep 17 00:00:00 2001 From: Shreyas Goenka Date: Fri, 26 Jul 2024 16:31:41 +0200 Subject: [PATCH 34/75] fix windows tests --- bundle/tests/validate_test.go | 31 ++++++++++++++++--------------- 1 file changed, 16 insertions(+), 15 deletions(-) diff --git a/bundle/tests/validate_test.go b/bundle/tests/validate_test.go index a00827b639..be78a25f67 100644 --- a/bundle/tests/validate_test.go +++ b/bundle/tests/validate_test.go @@ -2,6 +2,7 @@ package 
config_tests import ( "context" + "path/filepath" "testing" "github.com/databricks/cli/bundle" @@ -24,8 +25,8 @@ func TestValidateUniqueResourceIdentifiers(t *testing.T) { Severity: diag.Error, Summary: "multiple resources have been defined with the same key: foo", Locations: []dyn.Location{ - {File: "validate/duplicate_resource_names_in_root_job_and_pipeline/databricks.yml", Line: 10, Column: 7}, - {File: "validate/duplicate_resource_names_in_root_job_and_pipeline/databricks.yml", Line: 13, Column: 7}, + {File: filepath.FromSlash("validate/duplicate_resource_names_in_root_job_and_pipeline/databricks.yml"), Line: 10, Column: 7}, + {File: filepath.FromSlash("validate/duplicate_resource_names_in_root_job_and_pipeline/databricks.yml"), Line: 13, Column: 7}, }, Paths: []dyn.Path{ dyn.MustPathFromString("jobs.foo"), @@ -41,8 +42,8 @@ func TestValidateUniqueResourceIdentifiers(t *testing.T) { Severity: diag.Error, Summary: "multiple resources have been defined with the same key: foo", Locations: []dyn.Location{ - {File: "validate/duplicate_resource_names_in_root_job_and_experiment/databricks.yml", Line: 18, Column: 7}, - {File: "validate/duplicate_resource_names_in_root_job_and_experiment/databricks.yml", Line: 10, Column: 7}, + {File: filepath.FromSlash("validate/duplicate_resource_names_in_root_job_and_experiment/databricks.yml"), Line: 18, Column: 7}, + {File: filepath.FromSlash("validate/duplicate_resource_names_in_root_job_and_experiment/databricks.yml"), Line: 10, Column: 7}, }, Paths: []dyn.Path{ dyn.MustPathFromString("experiments.foo"), @@ -58,8 +59,8 @@ func TestValidateUniqueResourceIdentifiers(t *testing.T) { Severity: diag.Error, Summary: "multiple resources have been defined with the same key: foo", Locations: []dyn.Location{ - {File: "validate/duplicate_resource_name_in_subconfiguration/databricks.yml", Line: 13, Column: 7}, - {File: "validate/duplicate_resource_name_in_subconfiguration/resources.yml", Line: 4, Column: 7}, + {File: 
filepath.FromSlash("validate/duplicate_resource_name_in_subconfiguration/databricks.yml"), Line: 13, Column: 7}, + {File: filepath.FromSlash("validate/duplicate_resource_name_in_subconfiguration/resources.yml"), Line: 4, Column: 7}, }, Paths: []dyn.Path{ dyn.MustPathFromString("jobs.foo"), @@ -75,8 +76,8 @@ func TestValidateUniqueResourceIdentifiers(t *testing.T) { Severity: diag.Error, Summary: "multiple resources have been defined with the same key: foo", Locations: []dyn.Location{ - {File: "validate/duplicate_resource_name_in_subconfiguration_job_and_job/databricks.yml", Line: 13, Column: 7}, - {File: "validate/duplicate_resource_name_in_subconfiguration_job_and_job/resources.yml", Line: 4, Column: 7}, + {File: filepath.FromSlash("validate/duplicate_resource_name_in_subconfiguration_job_and_job/databricks.yml"), Line: 13, Column: 7}, + {File: filepath.FromSlash("validate/duplicate_resource_name_in_subconfiguration_job_and_job/resources.yml"), Line: 4, Column: 7}, }, Paths: []dyn.Path{ dyn.MustPathFromString("jobs.foo"), @@ -91,8 +92,8 @@ func TestValidateUniqueResourceIdentifiers(t *testing.T) { Severity: diag.Error, Summary: "multiple resources have been defined with the same key: foo", Locations: []dyn.Location{ - {File: "validate/duplicate_resource_names_in_different_subconfiguations/resources1.yml", Line: 4, Column: 7}, - {File: "validate/duplicate_resource_names_in_different_subconfiguations/resources2.yml", Line: 4, Column: 7}, + {File: filepath.FromSlash("validate/duplicate_resource_names_in_different_subconfiguations/resources1.yml"), Line: 4, Column: 7}, + {File: filepath.FromSlash("validate/duplicate_resource_names_in_different_subconfiguations/resources2.yml"), Line: 4, Column: 7}, }, Paths: []dyn.Path{ dyn.MustPathFromString("jobs.foo"), @@ -108,11 +109,11 @@ func TestValidateUniqueResourceIdentifiers(t *testing.T) { Severity: diag.Error, Summary: "multiple resources have been defined with the same key: foo", Locations: []dyn.Location{ - {File: 
"validate/duplicate_resource_name_in_multiple_locations/resources2.yml", Line: 8, Column: 7}, - {File: "validate/duplicate_resource_name_in_multiple_locations/databricks.yml", Line: 13, Column: 7}, - {File: "validate/duplicate_resource_name_in_multiple_locations/resources1.yml", Line: 4, Column: 7}, - {File: "validate/duplicate_resource_name_in_multiple_locations/resources2.yml", Line: 4, Column: 7}, - {File: "validate/duplicate_resource_name_in_multiple_locations/resources1.yml", Line: 8, Column: 7}, + {File: filepath.FromSlash("validate/duplicate_resource_name_in_multiple_locations/resources2.yml"), Line: 8, Column: 7}, + {File: filepath.FromSlash("validate/duplicate_resource_name_in_multiple_locations/databricks.yml"), Line: 13, Column: 7}, + {File: filepath.FromSlash("validate/duplicate_resource_name_in_multiple_locations/resources1.yml"), Line: 4, Column: 7}, + {File: filepath.FromSlash("validate/duplicate_resource_name_in_multiple_locations/resources2.yml"), Line: 4, Column: 7}, + {File: filepath.FromSlash("validate/duplicate_resource_name_in_multiple_locations/resources1.yml"), Line: 8, Column: 7}, }, Paths: []dyn.Path{ dyn.MustPathFromString("experiments.foo"), From 5e687c29f2f09db8961d38a949b3835f9d2b98b7 Mon Sep 17 00:00:00 2001 From: Shreyas Goenka Date: Thu, 2 May 2024 18:39:01 +0200 Subject: [PATCH 35/75] getting started scaffold --- bundle/config/resources.go | 1 + bundle/config/resources/schema.go | 20 ++++++++++++++++++++ 2 files changed, 21 insertions(+) create mode 100644 bundle/config/resources/schema.go diff --git a/bundle/config/resources.go b/bundle/config/resources.go index 062e38ed51..6c7a927f26 100644 --- a/bundle/config/resources.go +++ b/bundle/config/resources.go @@ -18,6 +18,7 @@ type Resources struct { ModelServingEndpoints map[string]*resources.ModelServingEndpoint `json:"model_serving_endpoints,omitempty"` RegisteredModels map[string]*resources.RegisteredModel `json:"registered_models,omitempty"` QualityMonitors 
map[string]*resources.QualityMonitor `json:"quality_monitors,omitempty"` + Schemas map[string]*resources.Schema `json:"schemas,omitempty"` } type resource struct { diff --git a/bundle/config/resources/schema.go b/bundle/config/resources/schema.go new file mode 100644 index 0000000000..6d688add66 --- /dev/null +++ b/bundle/config/resources/schema.go @@ -0,0 +1,20 @@ +package resources + +import ( + "github.com/databricks/databricks-sdk-go/service/catalog" +) + +type Schema struct { + // List of grants to apply on this schema. + Grants []Grant `json:"grants,omitempty"` + + // This represents the id which is the full name of the schema + // (catalog_name.schema_name) that can be used + // as a reference in other resources. This value is returned by terraform. + // TODO: verify the accuracy of this comment, it just might be the schema name + ID string `json:"id,omitempty" bundle:"readonly"` + + *catalog.CreateSchema + + ModifiedStatus ModifiedStatus `json:"modified_status,omitempty" bundle:"internal"` +} From 1b5c6a7fbb44ed7e2aed197ab6376d0872f4581f Mon Sep 17 00:00:00 2001 From: Shreyas Goenka Date: Thu, 2 May 2024 20:01:17 +0200 Subject: [PATCH 36/75] added more conversion --- bundle/config/mutator/run_as_test.go | 2 ++ bundle/deploy/terraform/convert.go | 30 ++++++++++++++++++++++++++++ 2 files changed, 32 insertions(+) diff --git a/bundle/config/mutator/run_as_test.go b/bundle/config/mutator/run_as_test.go index 67bf7bcc2a..e6cef9ba45 100644 --- a/bundle/config/mutator/run_as_test.go +++ b/bundle/config/mutator/run_as_test.go @@ -39,6 +39,7 @@ func allResourceTypes(t *testing.T) []string { "pipelines", "quality_monitors", "registered_models", + "schemas", }, resourceTypes, ) @@ -136,6 +137,7 @@ func TestRunAsErrorForUnsupportedResources(t *testing.T) { "models", "registered_models", "experiments", + "schemas", } base := config.Root{ diff --git a/bundle/deploy/terraform/convert.go b/bundle/deploy/terraform/convert.go index a6ec04d9a2..6b38c9c684 100644 --- 
a/bundle/deploy/terraform/convert.go +++ b/bundle/deploy/terraform/convert.go @@ -229,6 +229,20 @@ func BundleToTerraform(config *config.Root) *schema.Root { tfroot.Resource.QualityMonitor[k] = &dst } + for k, src := range config.Resources.Schemas { + noResources = false + var dst schema.ResourceSchema + conv(src, &dst) + tfroot.Resource.Schema[k] = &dst + + // Configure permissions for this resource. + if rp := convGrants(src.Grants); rp != nil { + // TODO: test that this works to allocate grants. + rp.Schema = fmt.Sprintf("${databricks_schema.%s.id}", k) + tfroot.Resource.Grants["schema_"+k] = rp + } + } + // We explicitly set "resource" to nil to omit it from a JSON encoding. // This is required because the terraform CLI requires >= 1 resources defined // if the "resource" property is used in a .tf.json file. @@ -382,6 +396,16 @@ func TerraformToBundle(state *resourcesState, config *config.Root) error { } cur.ID = instance.Attributes.ID config.Resources.QualityMonitors[resource.Name] = cur + case "databricks_schema": + if config.Resources.Schemas == nil { + config.Resources.Schemas = make(map[string]*resources.Schema) + } + cur := config.Resources.Schemas[resource.Name] + if cur == nil { + cur = &resources.Schema{ModifiedStatus: resources.ModifiedStatusDeleted} + } + cur.ID = instance.Attributes.ID + config.Resources.Schemas[resource.Name] = cur case "databricks_permissions": case "databricks_grants": // Ignore; no need to pull these back into the configuration. @@ -426,6 +450,12 @@ func TerraformToBundle(state *resourcesState, config *config.Root) error { src.ModifiedStatus = resources.ModifiedStatusCreated } } + // TODO: Add test for this. 
+ for _, src := range config.Resources.Schemas { + if src.ModifiedStatus == "" && src.ID == "" { + src.ModifiedStatus = resources.ModifiedStatusCreated + } + } return nil } From abd892405fc6f3b6f8af2be5e2471e5a258dec29 Mon Sep 17 00:00:00 2001 From: Shreyas Goenka Date: Fri, 3 May 2024 16:08:09 +0200 Subject: [PATCH 37/75] add conversion for schemas --- bundle/deploy/terraform/convert.go | 6 +- bundle/deploy/terraform/convert_test.go | 57 ++++++++++++++++ .../deploy/terraform/tfdyn/convert_schema.go | 45 +++++++++++++ .../terraform/tfdyn/convert_schema_test.go | 66 +++++++++++++++++++ 4 files changed, 172 insertions(+), 2 deletions(-) create mode 100644 bundle/deploy/terraform/tfdyn/convert_schema.go create mode 100644 bundle/deploy/terraform/tfdyn/convert_schema_test.go diff --git a/bundle/deploy/terraform/convert.go b/bundle/deploy/terraform/convert.go index 6b38c9c684..54cf789914 100644 --- a/bundle/deploy/terraform/convert.go +++ b/bundle/deploy/terraform/convert.go @@ -66,8 +66,10 @@ func convGrants(acl []resources.Grant) *schema.ResourceGrants { // BundleToTerraform converts resources in a bundle configuration // to the equivalent Terraform JSON representation. // -// NOTE: THIS IS CURRENTLY A HACK. WE NEED A BETTER WAY TO -// CONVERT TO/FROM TERRAFORM COMPATIBLE FORMAT. +// Note: This function is an older implementation of the conversion logic. It is +// no longer used in any code paths and is kept around to be used in tests. +// New resources do not need to modify this function, and can instead use the +// new tfdyn package to define the conversion logic.
func BundleToTerraform(config *config.Root) *schema.Root { tfroot := schema.NewRoot() tfroot.Provider = schema.NewProviders() diff --git a/bundle/deploy/terraform/convert_test.go b/bundle/deploy/terraform/convert_test.go index 7ea4485388..e4ef6114a9 100644 --- a/bundle/deploy/terraform/convert_test.go +++ b/bundle/deploy/terraform/convert_test.go @@ -655,6 +655,14 @@ func TestTerraformToBundleEmptyLocalResources(t *testing.T) { {Attributes: stateInstanceAttributes{ID: "1"}}, }, }, + { + Type: "databricks_schema", + Mode: "managed", + Name: "test_schema", + Instances: []stateResourceInstance{ + {Attributes: stateInstanceAttributes{ID: "1"}}, + }, + }, }, } err := TerraformToBundle(&tfState, &config) @@ -681,6 +689,9 @@ func TestTerraformToBundleEmptyLocalResources(t *testing.T) { assert.Equal(t, "1", config.Resources.QualityMonitors["test_monitor"].ID) assert.Equal(t, resources.ModifiedStatusDeleted, config.Resources.QualityMonitors["test_monitor"].ModifiedStatus) + assert.Equal(t, "1", config.Resources.Schemas["test_schema"].ID) + assert.Equal(t, resources.ModifiedStatusDeleted, config.Resources.Schemas["test_schema"].ModifiedStatus) + AssertFullResourceCoverage(t, &config) } @@ -736,6 +747,13 @@ func TestTerraformToBundleEmptyRemoteResources(t *testing.T) { }, }, }, + Schemas: map[string]*resources.Schema{ + "test_schema": { + CreateSchema: &catalog.CreateSchema{ + Name: "test_schema", + }, + }, + }, }, } var tfState = resourcesState{ @@ -765,6 +783,9 @@ func TestTerraformToBundleEmptyRemoteResources(t *testing.T) { assert.Equal(t, "", config.Resources.QualityMonitors["test_monitor"].ID) assert.Equal(t, resources.ModifiedStatusCreated, config.Resources.QualityMonitors["test_monitor"].ModifiedStatus) + assert.Equal(t, "", config.Resources.Schemas["test_schema"].ID) + assert.Equal(t, resources.ModifiedStatusCreated, config.Resources.Schemas["test_schema"].ModifiedStatus) + AssertFullResourceCoverage(t, &config) } @@ -855,6 +876,18 @@ func 
TestTerraformToBundleModifiedResources(t *testing.T) { }, }, }, + Schemas: map[string]*resources.Schema{ + "test_schema": { + CreateSchema: &catalog.CreateSchema{ + Name: "test_schema", + }, + }, + "test_schema_new": { + CreateSchema: &catalog.CreateSchema{ + Name: "test_schema_new", + }, + }, + }, }, } var tfState = resourcesState{ @@ -971,6 +1004,22 @@ func TestTerraformToBundleModifiedResources(t *testing.T) { {Attributes: stateInstanceAttributes{ID: "test_monitor_old"}}, }, }, + { + Type: "databricks_schema", + Mode: "managed", + Name: "test_schema", + Instances: []stateResourceInstance{ + {Attributes: stateInstanceAttributes{ID: "1"}}, + }, + }, + { + Type: "databricks_schema", + Mode: "managed", + Name: "test_schema_old", + Instances: []stateResourceInstance{ + {Attributes: stateInstanceAttributes{ID: "2"}}, + }, + }, }, } err := TerraformToBundle(&tfState, &config) @@ -1024,6 +1073,14 @@ func TestTerraformToBundleModifiedResources(t *testing.T) { assert.Equal(t, resources.ModifiedStatusDeleted, config.Resources.QualityMonitors["test_monitor_old"].ModifiedStatus) assert.Equal(t, "", config.Resources.QualityMonitors["test_monitor_new"].ID) assert.Equal(t, resources.ModifiedStatusCreated, config.Resources.QualityMonitors["test_monitor_new"].ModifiedStatus) + + assert.Equal(t, "1", config.Resources.Schemas["test_schema"].ID) + assert.Equal(t, "", config.Resources.Schemas["test_schema"].ModifiedStatus) + assert.Equal(t, "2", config.Resources.Schemas["test_schema_old"].ID) + assert.Equal(t, resources.ModifiedStatusDeleted, config.Resources.Schemas["test_schema_old"].ModifiedStatus) + assert.Equal(t, "", config.Resources.Schemas["test_schema_new"].ID) + assert.Equal(t, resources.ModifiedStatusCreated, config.Resources.Schemas["test_schema_new"].ModifiedStatus) + AssertFullResourceCoverage(t, &config) } diff --git a/bundle/deploy/terraform/tfdyn/convert_schema.go b/bundle/deploy/terraform/tfdyn/convert_schema.go new file mode 100644 index 0000000000..0b6d904152 --- 
/dev/null +++ b/bundle/deploy/terraform/tfdyn/convert_schema.go @@ -0,0 +1,45 @@ +package tfdyn + +import ( + "context" + "fmt" + + "github.com/databricks/cli/bundle/internal/tf/schema" + "github.com/databricks/cli/libs/dyn" + "github.com/databricks/cli/libs/dyn/convert" + "github.com/databricks/cli/libs/log" +) + +func convertSchemaResource(ctx context.Context, vin dyn.Value) (dyn.Value, error) { + // Normalize the output value to the target schema. + vout, diags := convert.Normalize(schema.ResourceSchema{}, vin) + for _, diag := range diags { + log.Debugf(ctx, "schema normalization diagnostic: %s", diag.Summary) + } + + return vout, nil +} + +type schemaConverter struct{} + +func (schemaConverter) Convert(ctx context.Context, key string, vin dyn.Value, out *schema.Resources) error { + vout, err := convertSchemaResource(ctx, vin) + if err != nil { + return err + } + + // Add the converted resource to the output. + out.Schema[key] = vout.AsAny() + + // Configure grants for this resource. 
+ if grants := convertGrantsResource(ctx, vin); grants != nil { + grants.Schema = fmt.Sprintf("${databricks_schema.%s.id}", key) + out.Grants["schema_"+key] = grants + } + + return nil +} + +func init() { + registerConverter("schemas", schemaConverter{}) +} diff --git a/bundle/deploy/terraform/tfdyn/convert_schema_test.go b/bundle/deploy/terraform/tfdyn/convert_schema_test.go new file mode 100644 index 0000000000..b1ba62cfbf --- /dev/null +++ b/bundle/deploy/terraform/tfdyn/convert_schema_test.go @@ -0,0 +1,66 @@ +package tfdyn + +import ( + "context" + "testing" + + "github.com/databricks/cli/bundle/config/resources" + "github.com/databricks/cli/bundle/internal/tf/schema" + "github.com/databricks/cli/libs/dyn" + "github.com/databricks/cli/libs/dyn/convert" + "github.com/databricks/databricks-sdk-go/service/catalog" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestConvertSchema(t *testing.T) { + var src = resources.Schema{ + CreateSchema: &catalog.CreateSchema{ + Name: "name", + CatalogName: "catalog", + Comment: "comment", + Properties: map[string]string{ + "k1": "v1", + "k2": "v2", + }, + StorageRoot: "root", + }, + Grants: []resources.Grant{ + { + Privileges: []string{"EXECUTE"}, + Principal: "jack@gmail.com", + }, + }, + } + + vin, err := convert.FromTyped(src, dyn.NilValue) + require.NoError(t, err) + + ctx := context.Background() + out := schema.NewResources() + err = schemaConverter{}.Convert(ctx, "my_schema", vin, out) + require.NoError(t, err) + + // Assert equality on the schema + assert.Equal(t, map[string]any{ + "name": "name", + "catalog_name": "catalog", + "comment": "comment", + "properties": map[string]any{ + "k1": "v1", + "k2": "v2", + }, + "storage_root": "root", + }, out.Schema["my_schema"]) + + // Assert equality on the grants + assert.Equal(t, &schema.ResourceGrants{ + Schema: "${databricks_schema.my_schema.id}", + Grant: []schema.ResourceGrantsGrant{ + { + Privileges: []string{"EXECUTE"}, + 
Principal: "jack@gmail.com", + }, + }, + }, out.Grants["schema_my_schema"]) +} From c61a96989edfe608f5cf08918e7df1f8335f0c84 Mon Sep 17 00:00:00 2001 From: Shreyas Goenka Date: Fri, 3 May 2024 18:43:50 +0200 Subject: [PATCH 38/75] added integration test --- bundle/deploy/terraform/interpolate.go | 2 + bundle/deploy/terraform/interpolate_test.go | 2 + internal/acc/workspace.go | 28 +++++++++ .../uc_schema/databricks_template_schema.json | 8 +++ .../uc_schema/template/databricks.yml.tmpl | 22 +++++++ .../bundle/bundles/uc_schema/template/nb.sql | 2 + internal/bundle/deploy_test.go | 57 +++++++++++++++++++ 7 files changed, 121 insertions(+) create mode 100644 internal/bundle/bundles/uc_schema/databricks_template_schema.json create mode 100644 internal/bundle/bundles/uc_schema/template/databricks.yml.tmpl create mode 100644 internal/bundle/bundles/uc_schema/template/nb.sql create mode 100644 internal/bundle/deploy_test.go diff --git a/bundle/deploy/terraform/interpolate.go b/bundle/deploy/terraform/interpolate.go index 608f1c7957..faa098e1cc 100644 --- a/bundle/deploy/terraform/interpolate.go +++ b/bundle/deploy/terraform/interpolate.go @@ -56,6 +56,8 @@ func (m *interpolateMutator) Apply(ctx context.Context, b *bundle.Bundle) diag.D path = dyn.NewPath(dyn.Key("databricks_registered_model")).Append(path[2:]...) case dyn.Key("quality_monitors"): path = dyn.NewPath(dyn.Key("databricks_quality_monitor")).Append(path[2:]...) + case dyn.Key("schemas"): + path = dyn.NewPath(dyn.Key("databricks_schema")).Append(path[2:]...) default: // Trigger "key not found" for unknown resource types. 
return dyn.GetByPath(root, path) diff --git a/bundle/deploy/terraform/interpolate_test.go b/bundle/deploy/terraform/interpolate_test.go index 9af4a1443c..5ceb243bcd 100644 --- a/bundle/deploy/terraform/interpolate_test.go +++ b/bundle/deploy/terraform/interpolate_test.go @@ -30,6 +30,7 @@ func TestInterpolate(t *testing.T) { "other_experiment": "${resources.experiments.other_experiment.id}", "other_model_serving": "${resources.model_serving_endpoints.other_model_serving.id}", "other_registered_model": "${resources.registered_models.other_registered_model.id}", + "other_schema": "${resources.schemas.other_schema.id}", }, Tasks: []jobs.Task{ { @@ -65,6 +66,7 @@ func TestInterpolate(t *testing.T) { assert.Equal(t, "${databricks_mlflow_experiment.other_experiment.id}", j.Tags["other_experiment"]) assert.Equal(t, "${databricks_model_serving.other_model_serving.id}", j.Tags["other_model_serving"]) assert.Equal(t, "${databricks_registered_model.other_registered_model.id}", j.Tags["other_registered_model"]) + assert.Equal(t, "${databricks_schema.other_schema.id}", j.Tags["other_schema"]) m := b.Config.Resources.Models["my_model"] assert.Equal(t, "my_model", m.Model.Name) diff --git a/internal/acc/workspace.go b/internal/acc/workspace.go index 8944e199f9..39374f229e 100644 --- a/internal/acc/workspace.go +++ b/internal/acc/workspace.go @@ -2,6 +2,7 @@ package acc import ( "context" + "os" "testing" "github.com/databricks/databricks-sdk-go" @@ -38,6 +39,33 @@ func WorkspaceTest(t *testing.T) (context.Context, *WorkspaceT) { return wt.ctx, wt } +// Run the workspace test only on UC workspaces. 
+func UcWorkspaceTest(t *testing.T) (context.Context, *WorkspaceT) { + loadDebugEnvIfRunFromIDE(t, "workspace") + + t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV")) + + if os.Getenv("TEST_METASTORE_ID") == "" { + t.Skipf("Skipping on non-UC workspaces") + } + if os.Getenv("DATABRICKS_ACCOUNT_ID") != "" { + t.Skipf("Skipping on accounts") + } + + w, err := databricks.NewWorkspaceClient() + require.NoError(t, err) + + wt := &WorkspaceT{ + T: t, + + W: w, + + ctx: context.Background(), + } + + return wt.ctx, wt +} + func (t *WorkspaceT) TestClusterID() string { clusterID := GetEnvOrSkipTest(t.T, "TEST_BRICKS_CLUSTER_ID") err := t.W.Clusters.EnsureClusterIsRunning(t.ctx, clusterID) diff --git a/internal/bundle/bundles/uc_schema/databricks_template_schema.json b/internal/bundle/bundles/uc_schema/databricks_template_schema.json new file mode 100644 index 0000000000..762f4470c2 --- /dev/null +++ b/internal/bundle/bundles/uc_schema/databricks_template_schema.json @@ -0,0 +1,8 @@ +{ + "properties": { + "unique_id": { + "type": "string", + "description": "Unique ID for the schema and pipeline names" + } + } +} diff --git a/internal/bundle/bundles/uc_schema/template/databricks.yml.tmpl b/internal/bundle/bundles/uc_schema/template/databricks.yml.tmpl new file mode 100644 index 0000000000..4383fb5bb1 --- /dev/null +++ b/internal/bundle/bundles/uc_schema/template/databricks.yml.tmpl @@ -0,0 +1,22 @@ +bundle: + name: "bundle-playground" + +resources: + pipelines: + foo: + name: test-pipeline-{{.unique_id}} + libraries: + - notebook: + path: ./nb.sql + development: true + catalog: main + target: ${resources.schemas.bar.id} + + schemas: + bar: + name: test-schema-{{.unique_id}} + catalog_name: main + comment: This schema was created from DABs + + # TODO: Add grants here, and test that. + # TODO: test for development mode etc? 
diff --git a/internal/bundle/bundles/uc_schema/template/nb.sql b/internal/bundle/bundles/uc_schema/template/nb.sql new file mode 100644 index 0000000000..199ff50788 --- /dev/null +++ b/internal/bundle/bundles/uc_schema/template/nb.sql @@ -0,0 +1,2 @@ +-- Databricks notebook source +select 1 diff --git a/internal/bundle/deploy_test.go b/internal/bundle/deploy_test.go new file mode 100644 index 0000000000..60e70d6131 --- /dev/null +++ b/internal/bundle/deploy_test.go @@ -0,0 +1,57 @@ +package bundle + +import ( + "errors" + "testing" + + "github.com/databricks/cli/internal/acc" + "github.com/databricks/databricks-sdk-go/apierr" + "github.com/google/uuid" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestAccBundleDeployUcSchema(t *testing.T) { + ctx, wt := acc.UcWorkspaceTest(t) + w := wt.W + + uniqueId := uuid.New().String() + bundleRoot, err := initTestTemplate(t, ctx, "uc_schema", map[string]any{ + "unique_id": uniqueId, + }) + require.NoError(t, err) + + err = deployBundle(t, ctx, bundleRoot) + require.NoError(t, err) + + t.Cleanup(func() {a + destroyBundle(t, ctx, bundleRoot) + }) + + // Assert the schema is created + // TODO: Skip this test on non-uc workspaces. Need a new filter function for it? 
+ schemaName := "main.test-schema-" + uniqueId + schema, err := w.Schemas.GetByFullName(ctx, schemaName) + require.NoError(t, err) + assert.Equal(t, schema.FullName, schemaName) + assert.Equal(t, schema.Comment, "This schema was created from DABs") + + // Assert the pipeline is created, and it uses the specified schema + pipelineName := "test-pipeline-" + uniqueId + pipeline, err := w.Pipelines.GetByName(ctx, pipelineName) + require.NoError(t, err) + assert.Equal(t, pipeline.Name, pipelineName) + id := pipeline.PipelineId + + // Assert the pipeline uses the schema + i, err := w.Pipelines.GetByPipelineId(ctx, id) + require.NoError(t, err) + assert.Equal(t, i.Spec.Catalog, "main") + assert.Equal(t, i.Spec.Target, "test-schema-"+uniqueId) + + // Assert the schema is deleted + _, err = w.Schemas.GetByFullName(ctx, schemaName) + apiErr := &apierr.APIError{} + assert.True(t, errors.As(err, &apiErr)) + assert.Equal(t, "SCHEMA_DOES_NOT_EXIST", apiErr.ErrorCode) +} From 5f8d6cda4871a74d38d0d20b6765f2e708bff45f Mon Sep 17 00:00:00 2001 From: Shreyas Goenka Date: Mon, 6 May 2024 11:18:47 +0200 Subject: [PATCH 39/75] lint --- internal/bundle/deploy_test.go | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/internal/bundle/deploy_test.go b/internal/bundle/deploy_test.go index 60e70d6131..bec480c012 100644 --- a/internal/bundle/deploy_test.go +++ b/internal/bundle/deploy_test.go @@ -24,12 +24,11 @@ func TestAccBundleDeployUcSchema(t *testing.T) { err = deployBundle(t, ctx, bundleRoot) require.NoError(t, err) - t.Cleanup(func() {a + t.Cleanup(func() { destroyBundle(t, ctx, bundleRoot) }) // Assert the schema is created - // TODO: Skip this test on non-uc workspaces. Need a new filter function for it? 
schemaName := "main.test-schema-" + uniqueId schema, err := w.Schemas.GetByFullName(ctx, schemaName) require.NoError(t, err) From 782f903c5da77c876b9a967f045d647509301571 Mon Sep 17 00:00:00 2001 From: Shreyas Goenka Date: Mon, 6 May 2024 13:48:36 +0200 Subject: [PATCH 40/75] - --- bundle/deploy/terraform/convert.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bundle/deploy/terraform/convert.go b/bundle/deploy/terraform/convert.go index 54cf789914..66ce326123 100644 --- a/bundle/deploy/terraform/convert.go +++ b/bundle/deploy/terraform/convert.go @@ -69,7 +69,7 @@ func convGrants(acl []resources.Grant) *schema.ResourceGrants { // Note: This function is an older implementation of the conversion logic. It is // no longer used in any code paths and is kept around to be used in tests. // New resources do not need to modify this functions, and can instead use the -// new tfdyn package to define the conversion logic. +// tfdyn package to define the conversion logic. func BundleToTerraform(config *config.Root) *schema.Root { tfroot := schema.NewRoot() tfroot.Provider = schema.NewProviders() From 86d5a334abb705f160fcfacc834f0e51480dcada Mon Sep 17 00:00:00 2001 From: Shreyas Goenka Date: Mon, 6 May 2024 15:11:06 +0200 Subject: [PATCH 41/75] fix failing test --- bundle/config/mutator/process_target_mode.go | 7 +++++++ bundle/config/mutator/process_target_mode_test.go | 7 +++++++ bundle/deploy/terraform/tfdyn/convert_schema_test.go | 8 ++++++++ 3 files changed, 22 insertions(+) diff --git a/bundle/config/mutator/process_target_mode.go b/bundle/config/mutator/process_target_mode.go index b50716fd67..104e69825f 100644 --- a/bundle/config/mutator/process_target_mode.go +++ b/bundle/config/mutator/process_target_mode.go @@ -111,6 +111,13 @@ func transformDevelopmentMode(ctx context.Context, b *bundle.Bundle) diag.Diagno r.QualityMonitors[i].Schedule = nil } } + // TODO: test this manually + for i := range r.Schemas { + prefix = "dev_" + 
b.Config.Workspace.CurrentUser.ShortName + "_" + r.Schemas[i].Name = prefix + r.Schemas[i].Name + // TODO: Do schemas really not support tags? + // (schemas don't yet support tags) + } return nil } diff --git a/bundle/config/mutator/process_target_mode_test.go b/bundle/config/mutator/process_target_mode_test.go index 03da64e77f..0cb1bf096c 100644 --- a/bundle/config/mutator/process_target_mode_test.go +++ b/bundle/config/mutator/process_target_mode_test.go @@ -113,6 +113,10 @@ func mockBundle(mode config.Mode) *bundle.Bundle { }, }, }, + } + { + Schemas: map[string]*resources.Schema{ + "schema1": {CreateSchema: &catalog.CreateSchema{Name: "schema1"}}, }, }, }, @@ -167,6 +171,9 @@ func TestProcessTargetModeDevelopment(t *testing.T) { assert.Equal(t, "qualityMonitor1", b.Config.Resources.QualityMonitors["qualityMonitor1"].TableName) assert.Nil(t, b.Config.Resources.QualityMonitors["qualityMonitor2"].Schedule) assert.Equal(t, catalog.MonitorCronSchedulePauseStatusUnpaused, b.Config.Resources.QualityMonitors["qualityMonitor3"].Schedule.PauseStatus) + + // Schema 1 + assert.Equal(t, "dev_lennart_schema1", b.Config.Resources.Schemas["schema1"].Name) } func TestProcessTargetModeDevelopmentTagNormalizationForAws(t *testing.T) { diff --git a/bundle/deploy/terraform/tfdyn/convert_schema_test.go b/bundle/deploy/terraform/tfdyn/convert_schema_test.go index b1ba62cfbf..14281fa330 100644 --- a/bundle/deploy/terraform/tfdyn/convert_schema_test.go +++ b/bundle/deploy/terraform/tfdyn/convert_schema_test.go @@ -30,6 +30,10 @@ func TestConvertSchema(t *testing.T) { Privileges: []string{"EXECUTE"}, Principal: "jack@gmail.com", }, + { + Privileges: []string{"RUN"}, + Principal: "jane@gmail.com", + }, }, } @@ -61,6 +65,10 @@ func TestConvertSchema(t *testing.T) { Privileges: []string{"EXECUTE"}, Principal: "jack@gmail.com", }, + { + Privileges: []string{"RUN"}, + Principal: "jane@gmail.com", + }, }, }, out.Grants["schema_my_schema"]) } From 3b9cb988cffc4544badddb5eaccda05c57f334d0 Mon 
Sep 17 00:00:00 2001 From: Shreyas Goenka Date: Mon, 6 May 2024 15:26:53 +0200 Subject: [PATCH 42/75] remove todo --- bundle/config/mutator/process_target_mode.go | 1 - 1 file changed, 1 deletion(-) diff --git a/bundle/config/mutator/process_target_mode.go b/bundle/config/mutator/process_target_mode.go index 104e69825f..2cde68d9b3 100644 --- a/bundle/config/mutator/process_target_mode.go +++ b/bundle/config/mutator/process_target_mode.go @@ -115,7 +115,6 @@ func transformDevelopmentMode(ctx context.Context, b *bundle.Bundle) diag.Diagno for i := range r.Schemas { prefix = "dev_" + b.Config.Workspace.CurrentUser.ShortName + "_" r.Schemas[i].Name = prefix + r.Schemas[i].Name - // TODO: Do schemas really not support tags? // (schemas don't yet support tags) } From cbd018c5447a8bdcb161fc91ed8eca782ba59276 Mon Sep 17 00:00:00 2001 From: Shreyas Goenka Date: Mon, 6 May 2024 16:16:58 +0200 Subject: [PATCH 43/75] cleanup comments --- bundle/config/mutator/process_target_mode.go | 1 + bundle/config/resources/schema.go | 6 ++---- bundle/deploy/terraform/convert.go | 6 +++--- 3 files changed, 6 insertions(+), 7 deletions(-) diff --git a/bundle/config/mutator/process_target_mode.go b/bundle/config/mutator/process_target_mode.go index 2cde68d9b3..d47d271690 100644 --- a/bundle/config/mutator/process_target_mode.go +++ b/bundle/config/mutator/process_target_mode.go @@ -111,6 +111,7 @@ func transformDevelopmentMode(ctx context.Context, b *bundle.Bundle) diag.Diagno r.QualityMonitors[i].Schedule = nil } } + // TODO: test this manually for i := range r.Schemas { prefix = "dev_" + b.Config.Workspace.CurrentUser.ShortName + "_" diff --git a/bundle/config/resources/schema.go b/bundle/config/resources/schema.go index 6d688add66..7c541cb2b9 100644 --- a/bundle/config/resources/schema.go +++ b/bundle/config/resources/schema.go @@ -8,10 +8,8 @@ type Schema struct { // List of grants to apply on this schema. 
Grants []Grant `json:"grants,omitempty"` - // This represents the id which is the full name of the schema - // (catalog_name.schema_name) that can be used - // as a reference in other resources. This value is returned by terraform. - // TODO: verify the accuracy of this comment, it just might be the schema name + // Full name of the schema (catalog_name.schema_name). This value is read from + // the terraform state after deployment succeeds. ID string `json:"id,omitempty" bundle:"readonly"` *catalog.CreateSchema diff --git a/bundle/deploy/terraform/convert.go b/bundle/deploy/terraform/convert.go index 66ce326123..131de656d5 100644 --- a/bundle/deploy/terraform/convert.go +++ b/bundle/deploy/terraform/convert.go @@ -67,9 +67,9 @@ func convGrants(acl []resources.Grant) *schema.ResourceGrants { // to the equivalent Terraform JSON representation. // // Note: This function is an older implementation of the conversion logic. It is -// no longer used in any code paths and is kept around to be used in tests. -// New resources do not need to modify this functions, and can instead use the -// tfdyn package to define the conversion logic. +// no longer used in any code paths. It is kept around to be used in tests. +// New resources do not need to modify this function and can instead can define +// the conversion login in the tfdyn package. 
func BundleToTerraform(config *config.Root) *schema.Root { tfroot := schema.NewRoot() tfroot.Provider = schema.NewProviders() From c8d6ff95006833ec4b2b61670df040bd18c0a5e9 Mon Sep 17 00:00:00 2001 From: Shreyas Goenka Date: Tue, 7 May 2024 11:41:11 +0200 Subject: [PATCH 44/75] fix schema --- bundle/config/resources/schema.go | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/bundle/config/resources/schema.go b/bundle/config/resources/schema.go index 7c541cb2b9..7ab00495a8 100644 --- a/bundle/config/resources/schema.go +++ b/bundle/config/resources/schema.go @@ -1,6 +1,7 @@ package resources import ( + "github.com/databricks/databricks-sdk-go/marshal" "github.com/databricks/databricks-sdk-go/service/catalog" ) @@ -16,3 +17,11 @@ type Schema struct { ModifiedStatus ModifiedStatus `json:"modified_status,omitempty" bundle:"internal"` } + +func (s *Schema) UnmarshalJSON(b []byte) error { + return marshal.Unmarshal(b, s) +} + +func (s Schema) MarshalJSON() ([]byte, error) { + return marshal.Marshal(s) +} From 4cf193b35ed73d813cb84d613290465d28e426f1 Mon Sep 17 00:00:00 2001 From: Shreyas Goenka Date: Tue, 7 May 2024 12:53:27 +0200 Subject: [PATCH 45/75] cleanup todo --- internal/bundle/bundles/uc_schema/template/databricks.yml.tmpl | 3 --- 1 file changed, 3 deletions(-) diff --git a/internal/bundle/bundles/uc_schema/template/databricks.yml.tmpl b/internal/bundle/bundles/uc_schema/template/databricks.yml.tmpl index 4383fb5bb1..be36e91a61 100644 --- a/internal/bundle/bundles/uc_schema/template/databricks.yml.tmpl +++ b/internal/bundle/bundles/uc_schema/template/databricks.yml.tmpl @@ -17,6 +17,3 @@ resources: name: test-schema-{{.unique_id}} catalog_name: main comment: This schema was created from DABs - - # TODO: Add grants here, and test that. - # TODO: test for development mode etc? 
From 7faca0803e580239dbf6c6bd2c2d4a994ad36bce Mon Sep 17 00:00:00 2001 From: Shreyas Goenka Date: Tue, 7 May 2024 13:20:28 +0200 Subject: [PATCH 46/75] - --- internal/bundle/deploy_test.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/internal/bundle/deploy_test.go b/internal/bundle/deploy_test.go index bec480c012..161ecc8b91 100644 --- a/internal/bundle/deploy_test.go +++ b/internal/bundle/deploy_test.go @@ -35,7 +35,7 @@ func TestAccBundleDeployUcSchema(t *testing.T) { assert.Equal(t, schema.FullName, schemaName) assert.Equal(t, schema.Comment, "This schema was created from DABs") - // Assert the pipeline is created, and it uses the specified schema + // Assert the pipeline is created pipelineName := "test-pipeline-" + uniqueId pipeline, err := w.Pipelines.GetByName(ctx, pipelineName) require.NoError(t, err) From 8735a8a32e67381a98ba7d54480f6ed1f5cf8682 Mon Sep 17 00:00:00 2001 From: Shreyas Goenka Date: Tue, 23 Jul 2024 16:44:42 +0200 Subject: [PATCH 47/75] add paths to schema --- bundle/config/resources/schema.go | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/bundle/config/resources/schema.go b/bundle/config/resources/schema.go index 7ab00495a8..fe060bc5c1 100644 --- a/bundle/config/resources/schema.go +++ b/bundle/config/resources/schema.go @@ -1,6 +1,7 @@ package resources import ( + "github.com/databricks/cli/bundle/config/paths" "github.com/databricks/databricks-sdk-go/marshal" "github.com/databricks/databricks-sdk-go/service/catalog" ) @@ -15,6 +16,10 @@ type Schema struct { *catalog.CreateSchema + // Path to config file where the resource is defined. All bundle resources + // include this for interpolation purposes. 
+ paths.Paths + ModifiedStatus ModifiedStatus `json:"modified_status,omitempty" bundle:"internal"` } From 0c02ed10e13a567875c746ead3a47a6224741a3c Mon Sep 17 00:00:00 2001 From: Shreyas Goenka Date: Mon, 8 Jul 2024 19:39:59 +0200 Subject: [PATCH 48/75] Return early in bundle destroy if no deployment exists --- bundle/destroy/assert_root_path_exists.go | 38 +++++++++++++++++++++++ bundle/phases/destroy.go | 1 + bundle/seq.go | 17 +++++++++- libs/diag/diagnostic.go | 21 +++++++++++++ 4 files changed, 76 insertions(+), 1 deletion(-) create mode 100644 bundle/destroy/assert_root_path_exists.go diff --git a/bundle/destroy/assert_root_path_exists.go b/bundle/destroy/assert_root_path_exists.go new file mode 100644 index 0000000000..60307a2e56 --- /dev/null +++ b/bundle/destroy/assert_root_path_exists.go @@ -0,0 +1,38 @@ +package destroy + +import ( + "context" + "errors" + "net/http" + + "github.com/databricks/cli/bundle" + "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/cli/libs/diag" + "github.com/databricks/cli/libs/log" + "github.com/databricks/databricks-sdk-go/apierr" +) + +type assertRootPathExists struct{} + +func AssertRootPathExists() bundle.Mutator { + return &assertRootPathExists{} +} + +func (m *assertRootPathExists) Name() string { + return "destroy:assert_root_path_exists" +} + +func (m *assertRootPathExists) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { + w := b.WorkspaceClient() + _, err := w.Workspace.GetStatusByPath(ctx, b.Config.Workspace.RootPath) + + if err != nil { + var aerr *apierr.APIError + if errors.As(err, &aerr) && aerr.StatusCode == http.StatusNotFound { + log.Debugf(ctx, "No active deployment found. %s does not exist. 
Skipping destroy.", b.Config.Workspace.RootPath) + cmdio.LogString(ctx, "No active deployment found to destroy!") + return bundle.DiagnosticBreakSequence + } + } + return nil +} diff --git a/bundle/phases/destroy.go b/bundle/phases/destroy.go index bd99af789b..269cc5ed37 100644 --- a/bundle/phases/destroy.go +++ b/bundle/phases/destroy.go @@ -89,6 +89,7 @@ func Destroy() bundle.Mutator { ) destroyMutator := bundle.Seq( + destroy.AssertRootPathExists(), lock.Acquire(), bundle.Defer( bundle.Seq( diff --git a/bundle/seq.go b/bundle/seq.go index c1260a3f08..968bca6075 100644 --- a/bundle/seq.go +++ b/bundle/seq.go @@ -2,10 +2,15 @@ package bundle import ( "context" + "errors" "github.com/databricks/cli/libs/diag" ) +// Control signal error that can be used to break out of a sequence of mutators. +var ErrorBreakSequence = errors.New("break sequence") +var DiagnosticBreakSequence = diag.FromErr(ErrorBreakSequence) + type seqMutator struct { mutators []Mutator } @@ -17,8 +22,17 @@ func (s *seqMutator) Name() string { func (s *seqMutator) Apply(ctx context.Context, b *Bundle) diag.Diagnostics { var diags diag.Diagnostics for _, m := range s.mutators { - diags = diags.Extend(Apply(ctx, b, m)) + nd := Apply(ctx, b, m) + + // Break out of the sequence. Filter the ErrorBreakSequence error so that + // it does not show up to the user. 
+ if nd.ContainsError(ErrorBreakSequence) { + diags.Extend(nd.FilterError(ErrorBreakSequence)) + break + } + if diags.HasError() { + diags.Extend(nd) break } } @@ -28,3 +42,4 @@ func (s *seqMutator) Apply(ctx context.Context, b *Bundle) diag.Diagnostics { func Seq(ms ...Mutator) Mutator { return &seqMutator{mutators: ms} } + diff --git a/libs/diag/diagnostic.go b/libs/diag/diagnostic.go index 93334c067a..2657282b2c 100644 --- a/libs/diag/diagnostic.go +++ b/libs/diag/diagnostic.go @@ -112,3 +112,24 @@ func (ds Diagnostics) Filter(severity Severity) Diagnostics { } return out } + +// Returns true if the diagnostics contain the specified error +func (ds Diagnostics) ContainsError(err error) bool { + for _, d := range ds { + if d.Severity == Error && d.Summary == err.Error() { + return true + } + } + return false +} + +// Filter returns a new list of diagnostics that do not contain the specified error +func (ds Diagnostics) FilterError(err error) Diagnostics { + var out Diagnostics + for _, d := range ds { + if d.Severity != Error || d.Summary != err.Error() { + out = append(out, d) + } + } + return out +} From 6060989d989290c8845c9f430e977ce9218b46d8 Mon Sep 17 00:00:00 2001 From: Shreyas Goenka Date: Mon, 8 Jul 2024 19:52:23 +0200 Subject: [PATCH 49/75] - --- bundle/destroy/assert_root_path_exists.go | 2 +- bundle/seq.go | 3 +- libs/diag/diagnostic.go | 2 +- libs/diag/diagnostic_test.go | 89 +++++++++++++++++++++++ 4 files changed, 92 insertions(+), 4 deletions(-) create mode 100644 libs/diag/diagnostic_test.go diff --git a/bundle/destroy/assert_root_path_exists.go b/bundle/destroy/assert_root_path_exists.go index 60307a2e56..63b51469ae 100644 --- a/bundle/destroy/assert_root_path_exists.go +++ b/bundle/destroy/assert_root_path_exists.go @@ -29,7 +29,7 @@ func (m *assertRootPathExists) Apply(ctx context.Context, b *bundle.Bundle) diag if err != nil { var aerr *apierr.APIError if errors.As(err, &aerr) && aerr.StatusCode == http.StatusNotFound { - log.Debugf(ctx, "No 
active deployment found. %s does not exist. Skipping destroy.", b.Config.Workspace.RootPath) + log.Infof(ctx, "No active deployment found. %s does not exist. Skipping destroy.", b.Config.Workspace.RootPath) cmdio.LogString(ctx, "No active deployment found to destroy!") return bundle.DiagnosticBreakSequence } diff --git a/bundle/seq.go b/bundle/seq.go index 968bca6075..d2c67f9ecf 100644 --- a/bundle/seq.go +++ b/bundle/seq.go @@ -27,7 +27,7 @@ func (s *seqMutator) Apply(ctx context.Context, b *Bundle) diag.Diagnostics { // Break out of the sequence. Filter the ErrorBreakSequence error so that // it does not show up to the user. if nd.ContainsError(ErrorBreakSequence) { - diags.Extend(nd.FilterError(ErrorBreakSequence)) + diags.Extend(nd.RemoveError(ErrorBreakSequence)) break } @@ -42,4 +42,3 @@ func (s *seqMutator) Apply(ctx context.Context, b *Bundle) diag.Diagnostics { func Seq(ms ...Mutator) Mutator { return &seqMutator{mutators: ms} } - diff --git a/libs/diag/diagnostic.go b/libs/diag/diagnostic.go index 2657282b2c..44e5822df6 100644 --- a/libs/diag/diagnostic.go +++ b/libs/diag/diagnostic.go @@ -124,7 +124,7 @@ func (ds Diagnostics) ContainsError(err error) bool { } // Filter returns a new list of diagnostics that do not contain the specified error -func (ds Diagnostics) FilterError(err error) Diagnostics { +func (ds Diagnostics) RemoveError(err error) Diagnostics { var out Diagnostics for _, d := range ds { if d.Severity != Error || d.Summary != err.Error() { diff --git a/libs/diag/diagnostic_test.go b/libs/diag/diagnostic_test.go new file mode 100644 index 0000000000..6c79c47931 --- /dev/null +++ b/libs/diag/diagnostic_test.go @@ -0,0 +1,89 @@ +package diag + +import ( + "errors" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestDiagnosticContainsError(t *testing.T) { + diags := Diagnostics{ + { + Severity: Error, + Summary: "error 1", + }, + { + Severity: Error, + Summary: "error 2", + }, + { + Severity: Warning, + Summary: "warning 1", + 
}, + } + + assert.True(t, diags.ContainsError(errors.New("error 1"))) + assert.True(t, diags.ContainsError(errors.New("error 2"))) + assert.False(t, diags.ContainsError(errors.New("error 3"))) +} + +func TestDiagnosticRemoveError(t *testing.T) { + diags := Diagnostics{ + { + Severity: Error, + Summary: "error 1", + }, + { + Severity: Error, + Summary: "error 2", + }, + { + Severity: Warning, + Summary: "warning 1", + }, + } + + filtered := diags.RemoveError(errors.New("error 1")) + assert.Len(t, filtered, 2) + assert.Equal(t, Diagnostics{ + { + Severity: Error, + Summary: "error 2", + }, + { + Severity: Warning, + Summary: "warning 1", + }, + }, filtered) + + filtered = diags.RemoveError(errors.New("error 2")) + assert.Len(t, filtered, 2) + assert.Equal(t, Diagnostics{ + { + Severity: Error, + Summary: "error 1", + }, + { + Severity: Warning, + Summary: "warning 1", + }, + }, filtered) + + filtered = diags.RemoveError(errors.New("warning 1")) + assert.Len(t, filtered, 3) + assert.Equal(t, Diagnostics{ + { + Severity: Error, + Summary: "error 1", + }, + { + Severity: Error, + Summary: "error 2", + }, + { + Severity: Warning, + Summary: "warning 1", + }, + }, filtered) +} From 0b26cbc9ddb29562ca988bc4b01381350d2e5dcb Mon Sep 17 00:00:00 2001 From: Shreyas Goenka Date: Mon, 8 Jul 2024 19:58:00 +0200 Subject: [PATCH 50/75] add test for the mutator --- .../destroy/assert_root_path_exists_test.go | 34 +++++++++++++++++++ 1 file changed, 34 insertions(+) create mode 100644 bundle/destroy/assert_root_path_exists_test.go diff --git a/bundle/destroy/assert_root_path_exists_test.go b/bundle/destroy/assert_root_path_exists_test.go new file mode 100644 index 0000000000..ee2dec5572 --- /dev/null +++ b/bundle/destroy/assert_root_path_exists_test.go @@ -0,0 +1,34 @@ +package destroy + +import ( + "context" + "testing" + + "github.com/databricks/cli/bundle" + "github.com/databricks/cli/bundle/config" + "github.com/databricks/databricks-sdk-go/apierr" + 
"github.com/databricks/databricks-sdk-go/experimental/mocks" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/mock" +) + +func TestAssertRootPathExists(t *testing.T) { + b := &bundle.Bundle{ + Config: config.Root{ + Workspace: config.Workspace{ + RootPath: "/path/to/root", + }, + }, + } + + m := mocks.NewMockWorkspaceClient(t) + b.SetWorkpaceClient(m.WorkspaceClient) + workspaceApi := m.GetMockWorkspaceAPI() + workspaceApi.EXPECT().GetStatusByPath(mock.Anything, "/path/to/root").Return(nil, &apierr.APIError{ + StatusCode: 404, + ErrorCode: "RESOURCE_DOES_NOT_EXIST", + }) + + diags := bundle.Apply(context.Background(), b, AssertRootPathExists()) + assert.Equal(t, bundle.DiagnosticBreakSequence, diags) +} From f57ec1eae1fdb28b3f750261f9e5e73277b32eb2 Mon Sep 17 00:00:00 2001 From: Shreyas Goenka Date: Tue, 9 Jul 2024 11:01:09 +0200 Subject: [PATCH 51/75] fix seq --- bundle/seq.go | 16 ++++++++++------ libs/diag/diagnostic.go | 1 + 2 files changed, 11 insertions(+), 6 deletions(-) diff --git a/bundle/seq.go b/bundle/seq.go index d2c67f9ecf..b217deb2cf 100644 --- a/bundle/seq.go +++ b/bundle/seq.go @@ -8,6 +8,7 @@ import ( ) // Control signal error that can be used to break out of a sequence of mutators. +// TODO: Are better names possible? var ErrorBreakSequence = errors.New("break sequence") var DiagnosticBreakSequence = diag.FromErr(ErrorBreakSequence) @@ -23,16 +24,19 @@ func (s *seqMutator) Apply(ctx context.Context, b *Bundle) diag.Diagnostics { var diags diag.Diagnostics for _, m := range s.mutators { nd := Apply(ctx, b, m) + hasError := nd.HasError() - // Break out of the sequence. Filter the ErrorBreakSequence error so that - // it does not show up to the user. + // Remove the ErrorBreakSequence error from the diagnostics. It's a control + // signal and should not be shown to the user. 
if nd.ContainsError(ErrorBreakSequence) { - diags.Extend(nd.RemoveError(ErrorBreakSequence)) - break + nd.RemoveError(ErrorBreakSequence) } - if diags.HasError() { - diags.Extend(nd) + // Extend the diagnostics with the diagnostics from the current mutator. + diags = diags.Extend(nd) + + // Break out of the sequence if there is an error. + if hasError { break } } diff --git a/libs/diag/diagnostic.go b/libs/diag/diagnostic.go index 44e5822df6..b338957c01 100644 --- a/libs/diag/diagnostic.go +++ b/libs/diag/diagnostic.go @@ -124,6 +124,7 @@ func (ds Diagnostics) ContainsError(err error) bool { } // Filter returns a new list of diagnostics that do not contain the specified error +// Rename this to filter back again? func (ds Diagnostics) RemoveError(err error) Diagnostics { var out Diagnostics for _, d := range ds { From 6da6e0cb4bfb73fb7bf6d3332a5bd6f258519f69 Mon Sep 17 00:00:00 2001 From: Shreyas Goenka Date: Tue, 9 Jul 2024 11:07:11 +0200 Subject: [PATCH 52/75] some cleanup --- bundle/destroy/assert_root_path_exists.go | 2 +- bundle/destroy/assert_root_path_exists_test.go | 2 +- bundle/seq.go | 15 ++++++++------- libs/diag/diagnostic.go | 1 - 4 files changed, 10 insertions(+), 10 deletions(-) diff --git a/bundle/destroy/assert_root_path_exists.go b/bundle/destroy/assert_root_path_exists.go index 63b51469ae..1eb44019ff 100644 --- a/bundle/destroy/assert_root_path_exists.go +++ b/bundle/destroy/assert_root_path_exists.go @@ -31,7 +31,7 @@ func (m *assertRootPathExists) Apply(ctx context.Context, b *bundle.Bundle) diag if errors.As(err, &aerr) && aerr.StatusCode == http.StatusNotFound { log.Infof(ctx, "No active deployment found. %s does not exist. 
Skipping destroy.", b.Config.Workspace.RootPath) cmdio.LogString(ctx, "No active deployment found to destroy!") - return bundle.DiagnosticBreakSequence + return bundle.DiagnosticSequenceBreak } } return nil diff --git a/bundle/destroy/assert_root_path_exists_test.go b/bundle/destroy/assert_root_path_exists_test.go index ee2dec5572..ae258dda10 100644 --- a/bundle/destroy/assert_root_path_exists_test.go +++ b/bundle/destroy/assert_root_path_exists_test.go @@ -30,5 +30,5 @@ func TestAssertRootPathExists(t *testing.T) { }) diags := bundle.Apply(context.Background(), b, AssertRootPathExists()) - assert.Equal(t, bundle.DiagnosticBreakSequence, diags) + assert.Equal(t, bundle.DiagnosticSequenceBreak, diags) } diff --git a/bundle/seq.go b/bundle/seq.go index b217deb2cf..e86f1d8505 100644 --- a/bundle/seq.go +++ b/bundle/seq.go @@ -7,10 +7,11 @@ import ( "github.com/databricks/cli/libs/diag" ) -// Control signal error that can be used to break out of a sequence of mutators. -// TODO: Are better names possible? -var ErrorBreakSequence = errors.New("break sequence") -var DiagnosticBreakSequence = diag.FromErr(ErrorBreakSequence) +// Control signal error that can be returned by a mutator to break out of a sequence. +var ErrorSequenceBreak = errors.New("break sequence") + +// Convenient diagnostic that wraps ErrorSequenceBreak. +var DiagnosticSequenceBreak = diag.FromErr(ErrorSequenceBreak) type seqMutator struct { mutators []Mutator @@ -26,10 +27,10 @@ func (s *seqMutator) Apply(ctx context.Context, b *Bundle) diag.Diagnostics { nd := Apply(ctx, b, m) hasError := nd.HasError() - // Remove the ErrorBreakSequence error from the diagnostics. It's a control + // Remove the ErrorSequenceBreak error from the diagnostics. It's a control // signal and should not be shown to the user. 
- if nd.ContainsError(ErrorBreakSequence) { - nd.RemoveError(ErrorBreakSequence) + if nd.ContainsError(ErrorSequenceBreak) { + nd.RemoveError(ErrorSequenceBreak) } // Extend the diagnostics with the diagnostics from the current mutator. diff --git a/libs/diag/diagnostic.go b/libs/diag/diagnostic.go index b338957c01..44e5822df6 100644 --- a/libs/diag/diagnostic.go +++ b/libs/diag/diagnostic.go @@ -124,7 +124,6 @@ func (ds Diagnostics) ContainsError(err error) bool { } // Filter returns a new list of diagnostics that do not contain the specified error -// Rename this to filter back again? func (ds Diagnostics) RemoveError(err error) Diagnostics { var out Diagnostics for _, d := range ds { From 028d9c4d275b48efb35c967c55da78870f85a7ea Mon Sep 17 00:00:00 2001 From: Shreyas Goenka Date: Tue, 9 Jul 2024 11:28:12 +0200 Subject: [PATCH 53/75] add tests for the break signal --- bundle/seq.go | 2 +- bundle/seq_test.go | 81 ++++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 82 insertions(+), 1 deletion(-) diff --git a/bundle/seq.go b/bundle/seq.go index e86f1d8505..adbca63d89 100644 --- a/bundle/seq.go +++ b/bundle/seq.go @@ -30,7 +30,7 @@ func (s *seqMutator) Apply(ctx context.Context, b *Bundle) diag.Diagnostics { // Remove the ErrorSequenceBreak error from the diagnostics. It's a control // signal and should not be shown to the user. if nd.ContainsError(ErrorSequenceBreak) { - nd.RemoveError(ErrorSequenceBreak) + nd = nd.RemoveError(ErrorSequenceBreak) } // Extend the diagnostics with the diagnostics from the current mutator. 
diff --git a/bundle/seq_test.go b/bundle/seq_test.go index 74f975ed8f..9eb5e8f64b 100644 --- a/bundle/seq_test.go +++ b/bundle/seq_test.go @@ -89,3 +89,84 @@ func TestSeqWithErrorInsideFinallyStage(t *testing.T) { assert.Equal(t, 1, errorMut.applyCalled) assert.Equal(t, 0, m3.applyCalled) } + +func TestSeqWithErrorSequenceBreak(t *testing.T) { + errorMut := &mutatorWithError{errorMsg: ErrorSequenceBreak.Error()} + m1 := &testMutator{} + m2 := &testMutator{} + m3 := &testMutator{} + seqMutator := Seq(m1, m2, errorMut, m3) + + b := &Bundle{} + diags := Apply(context.Background(), b, seqMutator) + assert.NoError(t, diags.Error()) + + assert.Equal(t, 1, m1.applyCalled) + assert.Equal(t, 1, m2.applyCalled) + assert.Equal(t, 1, errorMut.applyCalled) + + // m3 is not called because the error mutator returns a break control signal. + assert.Equal(t, 0, m3.applyCalled) +} + +func TestSeqWithErrorSequenceBreakInsideDeferFirst(t *testing.T) { + errorMut := &mutatorWithError{errorMsg: ErrorSequenceBreak.Error()} + m1 := &testMutator{} + m2 := &testMutator{} + m3 := &testMutator{} + seqMutator := Seq(m1, Defer(errorMut, m2), m3) + + b := &Bundle{} + diags := Apply(context.Background(), b, seqMutator) + assert.NoError(t, diags.Error()) + + assert.Equal(t, 1, m1.applyCalled) + assert.Equal(t, 1, errorMut.applyCalled) + + // m2 should still be called because it's inside a Defer + assert.Equal(t, 1, m2.applyCalled) + assert.Equal(t, 0, m3.applyCalled) +} + +func TestSeqWithErrorSequenceBreakInsideDeferSecond(t *testing.T) { + errorMut := &mutatorWithError{errorMsg: ErrorSequenceBreak.Error()} + m1 := &testMutator{} + m2 := &testMutator{} + m3 := &testMutator{} + seqMutator := Seq(m1, Defer(m2, errorMut), m3) + + b := &Bundle{} + diags := Apply(context.Background(), b, seqMutator) + assert.NoError(t, diags.Error()) + + assert.Equal(t, 1, m1.applyCalled) + assert.Equal(t, 1, m2.applyCalled) + assert.Equal(t, 1, errorMut.applyCalled) + + // m3 is not called because the defer mutator 
returns a break control signal. + assert.Equal(t, 0, m3.applyCalled) +} + +func TestSeqErrorSequenceBreakDoesNotBreakMultipleSequences(t *testing.T) { + errorMut := &mutatorWithError{errorMsg: ErrorSequenceBreak.Error()} + m1 := &testMutator{} + m2 := &testMutator{} + m3 := &testMutator{} + m4 := &testMutator{} + seqMutator := Seq(Seq(m1, errorMut, m2), Seq(m3, m4)) + + b := &Bundle{} + diags := Apply(context.Background(), b, seqMutator) + assert.NoError(t, diags.Error()) + + assert.Equal(t, 1, m1.applyCalled) + assert.Equal(t, 1, errorMut.applyCalled) + + // m2 is not applied because the error mutator returns a break control signal. + assert.Equal(t, 0, m2.applyCalled) + + // m3 and m4 are still applied because the break control signal error should + // only break the current sequence, not the top level one. + assert.Equal(t, 1, m3.applyCalled) + assert.Equal(t, 1, m4.applyCalled) +} From 25d8a0b33f9413d59f614afe9cb79b7e169f1d3e Mon Sep 17 00:00:00 2001 From: Shreyas Goenka Date: Tue, 9 Jul 2024 11:42:06 +0200 Subject: [PATCH 54/75] more coverage for assert root path --- bundle/destroy/assert_root_path_exists.go | 15 +++++++------ .../destroy/assert_root_path_exists_test.go | 21 ++++++++++++++++++- 2 files changed, 27 insertions(+), 9 deletions(-) diff --git a/bundle/destroy/assert_root_path_exists.go b/bundle/destroy/assert_root_path_exists.go index 1eb44019ff..3a81458c9b 100644 --- a/bundle/destroy/assert_root_path_exists.go +++ b/bundle/destroy/assert_root_path_exists.go @@ -3,6 +3,7 @@ package destroy import ( "context" "errors" + "fmt" "net/http" "github.com/databricks/cli/bundle" @@ -26,13 +27,11 @@ func (m *assertRootPathExists) Apply(ctx context.Context, b *bundle.Bundle) diag w := b.WorkspaceClient() _, err := w.Workspace.GetStatusByPath(ctx, b.Config.Workspace.RootPath) - if err != nil { - var aerr *apierr.APIError - if errors.As(err, &aerr) && aerr.StatusCode == http.StatusNotFound { - log.Infof(ctx, "No active deployment found. %s does not exist. 
Skipping destroy.", b.Config.Workspace.RootPath) - cmdio.LogString(ctx, "No active deployment found to destroy!") - return bundle.DiagnosticSequenceBreak - } + var aerr *apierr.APIError + if errors.As(err, &aerr) && aerr.StatusCode == http.StatusNotFound { + log.Infof(ctx, "No active deployment found. %s does not exist. Skipping destroy.", b.Config.Workspace.RootPath) + cmdio.LogString(ctx, "No active deployment found to destroy!") + return bundle.DiagnosticSequenceBreak } - return nil + return diag.FromErr(fmt.Errorf("cannot assert root path exists: %w", err)) } diff --git a/bundle/destroy/assert_root_path_exists_test.go b/bundle/destroy/assert_root_path_exists_test.go index ae258dda10..87013a2bc8 100644 --- a/bundle/destroy/assert_root_path_exists_test.go +++ b/bundle/destroy/assert_root_path_exists_test.go @@ -2,6 +2,7 @@ package destroy import ( "context" + "errors" "testing" "github.com/databricks/cli/bundle" @@ -30,5 +31,23 @@ func TestAssertRootPathExists(t *testing.T) { }) diags := bundle.Apply(context.Background(), b, AssertRootPathExists()) - assert.Equal(t, bundle.DiagnosticSequenceBreak, diags) + assert.Equal(t, bundle.ErrorSequenceBreak, diags.Error()) +} + +func TestAssertRootPathExistsIgnoresNon404Errors(t *testing.T) { + b := &bundle.Bundle{ + Config: config.Root{ + Workspace: config.Workspace{ + RootPath: "/path/to/root", + }, + }, + } + + m := mocks.NewMockWorkspaceClient(t) + b.SetWorkpaceClient(m.WorkspaceClient) + workspaceApi := m.GetMockWorkspaceAPI() + workspaceApi.EXPECT().GetStatusByPath(mock.Anything, "/path/to/root").Return(nil, errors.New("wsfs API failed")) + + diags := bundle.Apply(context.Background(), b, AssertRootPathExists()) + assert.EqualError(t, diags.Error(), "cannot assert root path exists: wsfs API failed") } From 76eea2027bf5db2e071f1999a68e1569f5adeeb5 Mon Sep 17 00:00:00 2001 From: Shreyas Goenka Date: Tue, 9 Jul 2024 15:36:53 +0200 Subject: [PATCH 55/75] use if mutator instead --- bundle/config/mutator/if.go | 41 
+++++++++ bundle/destroy/assert_root_path_exists.go | 37 -------- .../destroy/assert_root_path_exists_test.go | 53 ----------- bundle/phases/destroy.go | 1 - bundle/python/transform.go | 4 + bundle/seq.go | 23 +---- bundle/seq_test.go | 81 ----------------- libs/diag/diagnostic.go | 21 ----- libs/diag/diagnostic_test.go | 89 ------------------- 9 files changed, 47 insertions(+), 303 deletions(-) create mode 100644 bundle/config/mutator/if.go delete mode 100644 bundle/destroy/assert_root_path_exists.go delete mode 100644 bundle/destroy/assert_root_path_exists_test.go delete mode 100644 libs/diag/diagnostic_test.go diff --git a/bundle/config/mutator/if.go b/bundle/config/mutator/if.go new file mode 100644 index 0000000000..f4469efc1e --- /dev/null +++ b/bundle/config/mutator/if.go @@ -0,0 +1,41 @@ +package mutator + +import ( + "context" + + "github.com/databricks/cli/bundle" + "github.com/databricks/cli/libs/diag" +) + +type ifMutator struct { + condition func(context.Context, *bundle.Bundle) (bool, error) + onTrueMutator bundle.Mutator + onFalseMutator bundle.Mutator +} + +func If( + condition func(context.Context, *bundle.Bundle) (bool, error), + onTrueMutator bundle.Mutator, + onFalseMutator bundle.Mutator, +) bundle.Mutator { + return &ifMutator{ + condition, onTrueMutator, onFalseMutator, + } +} + +func (m *ifMutator) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { + v, err := m.condition(ctx, b) + if err != nil { + return diag.FromErr(err) + } + + if v { + return bundle.Apply(ctx, b, m.onTrueMutator) + } else { + return bundle.Apply(ctx, b, m.onFalseMutator) + } +} + +func (m *ifMutator) Name() string { + return "If" +} diff --git a/bundle/destroy/assert_root_path_exists.go b/bundle/destroy/assert_root_path_exists.go deleted file mode 100644 index 3a81458c9b..0000000000 --- a/bundle/destroy/assert_root_path_exists.go +++ /dev/null @@ -1,37 +0,0 @@ -package destroy - -import ( - "context" - "errors" - "fmt" - "net/http" - - 
"github.com/databricks/cli/bundle" - "github.com/databricks/cli/libs/cmdio" - "github.com/databricks/cli/libs/diag" - "github.com/databricks/cli/libs/log" - "github.com/databricks/databricks-sdk-go/apierr" -) - -type assertRootPathExists struct{} - -func AssertRootPathExists() bundle.Mutator { - return &assertRootPathExists{} -} - -func (m *assertRootPathExists) Name() string { - return "destroy:assert_root_path_exists" -} - -func (m *assertRootPathExists) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { - w := b.WorkspaceClient() - _, err := w.Workspace.GetStatusByPath(ctx, b.Config.Workspace.RootPath) - - var aerr *apierr.APIError - if errors.As(err, &aerr) && aerr.StatusCode == http.StatusNotFound { - log.Infof(ctx, "No active deployment found. %s does not exist. Skipping destroy.", b.Config.Workspace.RootPath) - cmdio.LogString(ctx, "No active deployment found to destroy!") - return bundle.DiagnosticSequenceBreak - } - return diag.FromErr(fmt.Errorf("cannot assert root path exists: %w", err)) -} diff --git a/bundle/destroy/assert_root_path_exists_test.go b/bundle/destroy/assert_root_path_exists_test.go deleted file mode 100644 index 87013a2bc8..0000000000 --- a/bundle/destroy/assert_root_path_exists_test.go +++ /dev/null @@ -1,53 +0,0 @@ -package destroy - -import ( - "context" - "errors" - "testing" - - "github.com/databricks/cli/bundle" - "github.com/databricks/cli/bundle/config" - "github.com/databricks/databricks-sdk-go/apierr" - "github.com/databricks/databricks-sdk-go/experimental/mocks" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/mock" -) - -func TestAssertRootPathExists(t *testing.T) { - b := &bundle.Bundle{ - Config: config.Root{ - Workspace: config.Workspace{ - RootPath: "/path/to/root", - }, - }, - } - - m := mocks.NewMockWorkspaceClient(t) - b.SetWorkpaceClient(m.WorkspaceClient) - workspaceApi := m.GetMockWorkspaceAPI() - workspaceApi.EXPECT().GetStatusByPath(mock.Anything, "/path/to/root").Return(nil, 
&apierr.APIError{ - StatusCode: 404, - ErrorCode: "RESOURCE_DOES_NOT_EXIST", - }) - - diags := bundle.Apply(context.Background(), b, AssertRootPathExists()) - assert.Equal(t, bundle.ErrorSequenceBreak, diags.Error()) -} - -func TestAssertRootPathExistsIgnoresNon404Errors(t *testing.T) { - b := &bundle.Bundle{ - Config: config.Root{ - Workspace: config.Workspace{ - RootPath: "/path/to/root", - }, - }, - } - - m := mocks.NewMockWorkspaceClient(t) - b.SetWorkpaceClient(m.WorkspaceClient) - workspaceApi := m.GetMockWorkspaceAPI() - workspaceApi.EXPECT().GetStatusByPath(mock.Anything, "/path/to/root").Return(nil, errors.New("wsfs API failed")) - - diags := bundle.Apply(context.Background(), b, AssertRootPathExists()) - assert.EqualError(t, diags.Error(), "cannot assert root path exists: wsfs API failed") -} diff --git a/bundle/phases/destroy.go b/bundle/phases/destroy.go index 269cc5ed37..bd99af789b 100644 --- a/bundle/phases/destroy.go +++ b/bundle/phases/destroy.go @@ -89,7 +89,6 @@ func Destroy() bundle.Mutator { ) destroyMutator := bundle.Seq( - destroy.AssertRootPathExists(), lock.Acquire(), bundle.Defer( bundle.Seq( diff --git a/bundle/python/transform.go b/bundle/python/transform.go index 9d3b1ab6a5..552bb8cf94 100644 --- a/bundle/python/transform.go +++ b/bundle/python/transform.go @@ -64,7 +64,11 @@ dbutils.notebook.exit(s) // which installs uploaded wheels using %pip and then calling corresponding // entry point. 
func TransformWheelTask() bundle.Mutator { +<<<<<<< HEAD return bundle.If( +======= + return mutator.If( +>>>>>>> 27b09ceb (use if mutator instead) func(_ context.Context, b *bundle.Bundle) (bool, error) { res := b.Config.Experimental != nil && b.Config.Experimental.PythonWheelWrapper return res, nil diff --git a/bundle/seq.go b/bundle/seq.go index adbca63d89..c1260a3f08 100644 --- a/bundle/seq.go +++ b/bundle/seq.go @@ -2,17 +2,10 @@ package bundle import ( "context" - "errors" "github.com/databricks/cli/libs/diag" ) -// Control signal error that can be returned by a mutator to break out of a sequence. -var ErrorSequenceBreak = errors.New("break sequence") - -// Convenient diagnostic that wraps ErrorSequenceBreak. -var DiagnosticSequenceBreak = diag.FromErr(ErrorSequenceBreak) - type seqMutator struct { mutators []Mutator } @@ -24,20 +17,8 @@ func (s *seqMutator) Name() string { func (s *seqMutator) Apply(ctx context.Context, b *Bundle) diag.Diagnostics { var diags diag.Diagnostics for _, m := range s.mutators { - nd := Apply(ctx, b, m) - hasError := nd.HasError() - - // Remove the ErrorSequenceBreak error from the diagnostics. It's a control - // signal and should not be shown to the user. - if nd.ContainsError(ErrorSequenceBreak) { - nd = nd.RemoveError(ErrorSequenceBreak) - } - - // Extend the diagnostics with the diagnostics from the current mutator. - diags = diags.Extend(nd) - - // Break out of the sequence if there is an error. 
- if hasError { + diags = diags.Extend(Apply(ctx, b, m)) + if diags.HasError() { break } } diff --git a/bundle/seq_test.go b/bundle/seq_test.go index 9eb5e8f64b..74f975ed8f 100644 --- a/bundle/seq_test.go +++ b/bundle/seq_test.go @@ -89,84 +89,3 @@ func TestSeqWithErrorInsideFinallyStage(t *testing.T) { assert.Equal(t, 1, errorMut.applyCalled) assert.Equal(t, 0, m3.applyCalled) } - -func TestSeqWithErrorSequenceBreak(t *testing.T) { - errorMut := &mutatorWithError{errorMsg: ErrorSequenceBreak.Error()} - m1 := &testMutator{} - m2 := &testMutator{} - m3 := &testMutator{} - seqMutator := Seq(m1, m2, errorMut, m3) - - b := &Bundle{} - diags := Apply(context.Background(), b, seqMutator) - assert.NoError(t, diags.Error()) - - assert.Equal(t, 1, m1.applyCalled) - assert.Equal(t, 1, m2.applyCalled) - assert.Equal(t, 1, errorMut.applyCalled) - - // m3 is not called because the error mutator returns a break control signal. - assert.Equal(t, 0, m3.applyCalled) -} - -func TestSeqWithErrorSequenceBreakInsideDeferFirst(t *testing.T) { - errorMut := &mutatorWithError{errorMsg: ErrorSequenceBreak.Error()} - m1 := &testMutator{} - m2 := &testMutator{} - m3 := &testMutator{} - seqMutator := Seq(m1, Defer(errorMut, m2), m3) - - b := &Bundle{} - diags := Apply(context.Background(), b, seqMutator) - assert.NoError(t, diags.Error()) - - assert.Equal(t, 1, m1.applyCalled) - assert.Equal(t, 1, errorMut.applyCalled) - - // m2 should still be called because it's inside a Defer - assert.Equal(t, 1, m2.applyCalled) - assert.Equal(t, 0, m3.applyCalled) -} - -func TestSeqWithErrorSequenceBreakInsideDeferSecond(t *testing.T) { - errorMut := &mutatorWithError{errorMsg: ErrorSequenceBreak.Error()} - m1 := &testMutator{} - m2 := &testMutator{} - m3 := &testMutator{} - seqMutator := Seq(m1, Defer(m2, errorMut), m3) - - b := &Bundle{} - diags := Apply(context.Background(), b, seqMutator) - assert.NoError(t, diags.Error()) - - assert.Equal(t, 1, m1.applyCalled) - assert.Equal(t, 1, m2.applyCalled) - 
assert.Equal(t, 1, errorMut.applyCalled) - - // m3 is not called because the defer mutator returns a break control signal. - assert.Equal(t, 0, m3.applyCalled) -} - -func TestSeqErrorSequenceBreakDoesNotBreakMultipleSequences(t *testing.T) { - errorMut := &mutatorWithError{errorMsg: ErrorSequenceBreak.Error()} - m1 := &testMutator{} - m2 := &testMutator{} - m3 := &testMutator{} - m4 := &testMutator{} - seqMutator := Seq(Seq(m1, errorMut, m2), Seq(m3, m4)) - - b := &Bundle{} - diags := Apply(context.Background(), b, seqMutator) - assert.NoError(t, diags.Error()) - - assert.Equal(t, 1, m1.applyCalled) - assert.Equal(t, 1, errorMut.applyCalled) - - // m2 is not applied because the error mutator returns a break control signal. - assert.Equal(t, 0, m2.applyCalled) - - // m3 and m4 are still applied because the break control signal error should - // only break the current sequence, not the top level one. - assert.Equal(t, 1, m3.applyCalled) - assert.Equal(t, 1, m4.applyCalled) -} diff --git a/libs/diag/diagnostic.go b/libs/diag/diagnostic.go index 44e5822df6..93334c067a 100644 --- a/libs/diag/diagnostic.go +++ b/libs/diag/diagnostic.go @@ -112,24 +112,3 @@ func (ds Diagnostics) Filter(severity Severity) Diagnostics { } return out } - -// Returns true if the diagnostics contain the specified error -func (ds Diagnostics) ContainsError(err error) bool { - for _, d := range ds { - if d.Severity == Error && d.Summary == err.Error() { - return true - } - } - return false -} - -// Filter returns a new list of diagnostics that do not contain the specified error -func (ds Diagnostics) RemoveError(err error) Diagnostics { - var out Diagnostics - for _, d := range ds { - if d.Severity != Error || d.Summary != err.Error() { - out = append(out, d) - } - } - return out -} diff --git a/libs/diag/diagnostic_test.go b/libs/diag/diagnostic_test.go deleted file mode 100644 index 6c79c47931..0000000000 --- a/libs/diag/diagnostic_test.go +++ /dev/null @@ -1,89 +0,0 @@ -package diag - -import 
( - "errors" - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestDiagnosticContainsError(t *testing.T) { - diags := Diagnostics{ - { - Severity: Error, - Summary: "error 1", - }, - { - Severity: Error, - Summary: "error 2", - }, - { - Severity: Warning, - Summary: "warning 1", - }, - } - - assert.True(t, diags.ContainsError(errors.New("error 1"))) - assert.True(t, diags.ContainsError(errors.New("error 2"))) - assert.False(t, diags.ContainsError(errors.New("error 3"))) -} - -func TestDiagnosticRemoveError(t *testing.T) { - diags := Diagnostics{ - { - Severity: Error, - Summary: "error 1", - }, - { - Severity: Error, - Summary: "error 2", - }, - { - Severity: Warning, - Summary: "warning 1", - }, - } - - filtered := diags.RemoveError(errors.New("error 1")) - assert.Len(t, filtered, 2) - assert.Equal(t, Diagnostics{ - { - Severity: Error, - Summary: "error 2", - }, - { - Severity: Warning, - Summary: "warning 1", - }, - }, filtered) - - filtered = diags.RemoveError(errors.New("error 2")) - assert.Len(t, filtered, 2) - assert.Equal(t, Diagnostics{ - { - Severity: Error, - Summary: "error 1", - }, - { - Severity: Warning, - Summary: "warning 1", - }, - }, filtered) - - filtered = diags.RemoveError(errors.New("warning 1")) - assert.Len(t, filtered, 3) - assert.Equal(t, Diagnostics{ - { - Severity: Error, - Summary: "error 1", - }, - { - Severity: Error, - Summary: "error 2", - }, - { - Severity: Warning, - Summary: "warning 1", - }, - }, filtered) -} From 1f872d9d4d9669bac58769e718baadbb7db74596 Mon Sep 17 00:00:00 2001 From: Shreyas Goenka Date: Tue, 9 Jul 2024 15:41:56 +0200 Subject: [PATCH 56/75] move if to bundle package --- bundle/config/mutator/if.go | 41 ------------------------------------- bundle/python/transform.go | 4 ---- 2 files changed, 45 deletions(-) delete mode 100644 bundle/config/mutator/if.go diff --git a/bundle/config/mutator/if.go b/bundle/config/mutator/if.go deleted file mode 100644 index f4469efc1e..0000000000 --- 
a/bundle/config/mutator/if.go +++ /dev/null @@ -1,41 +0,0 @@ -package mutator - -import ( - "context" - - "github.com/databricks/cli/bundle" - "github.com/databricks/cli/libs/diag" -) - -type ifMutator struct { - condition func(context.Context, *bundle.Bundle) (bool, error) - onTrueMutator bundle.Mutator - onFalseMutator bundle.Mutator -} - -func If( - condition func(context.Context, *bundle.Bundle) (bool, error), - onTrueMutator bundle.Mutator, - onFalseMutator bundle.Mutator, -) bundle.Mutator { - return &ifMutator{ - condition, onTrueMutator, onFalseMutator, - } -} - -func (m *ifMutator) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { - v, err := m.condition(ctx, b) - if err != nil { - return diag.FromErr(err) - } - - if v { - return bundle.Apply(ctx, b, m.onTrueMutator) - } else { - return bundle.Apply(ctx, b, m.onFalseMutator) - } -} - -func (m *ifMutator) Name() string { - return "If" -} diff --git a/bundle/python/transform.go b/bundle/python/transform.go index 552bb8cf94..9d3b1ab6a5 100644 --- a/bundle/python/transform.go +++ b/bundle/python/transform.go @@ -64,11 +64,7 @@ dbutils.notebook.exit(s) // which installs uploaded wheels using %pip and then calling corresponding // entry point. 
func TransformWheelTask() bundle.Mutator { -<<<<<<< HEAD return bundle.If( -======= - return mutator.If( ->>>>>>> 27b09ceb (use if mutator instead) func(_ context.Context, b *bundle.Bundle) (bool, error) { res := b.Config.Experimental != nil && b.Config.Experimental.PythonWheelWrapper return res, nil From 3e0b0d0d01b85d3ea5325cb2cf5d9b025ce6885e Mon Sep 17 00:00:00 2001 From: Shreyas Goenka Date: Tue, 9 Jul 2024 19:36:57 +0200 Subject: [PATCH 57/75] Move to a single prompt during `bundle destroy` --- bundle/bundle.go | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/bundle/bundle.go b/bundle/bundle.go index 032d98abc8..d5f6ba8a56 100644 --- a/bundle/bundle.go +++ b/bundle/bundle.go @@ -69,6 +69,11 @@ type Bundle struct { // files AutoApprove bool + // If true, we require user approval to deploy. This is + // TODO: On both destroy and deploy, error with suggesting `--auto-approve` + // if operating from a non-tty. + RequireApprovalForDeploy bool + // Tagging is used to normalize tag keys and values. // The implementation depends on the cloud being targeted. Tagging tags.Cloud From bc064f38b9c7271a93f03cd52206ba835a5e9b50 Mon Sep 17 00:00:00 2001 From: Shreyas Goenka Date: Tue, 9 Jul 2024 19:38:31 +0200 Subject: [PATCH 58/75] remove unnecessary flag --- bundle/bundle.go | 5 ----- 1 file changed, 5 deletions(-) diff --git a/bundle/bundle.go b/bundle/bundle.go index d5f6ba8a56..032d98abc8 100644 --- a/bundle/bundle.go +++ b/bundle/bundle.go @@ -69,11 +69,6 @@ type Bundle struct { // files AutoApprove bool - // If true, we require user approval to deploy. This is - // TODO: On both destroy and deploy, error with suggesting `--auto-approve` - // if operating from a non-tty. - RequireApprovalForDeploy bool - // Tagging is used to normalize tag keys and values. // The implementation depends on the cloud being targeted. 
Tagging tags.Cloud From f62b875ed787aafeb276e5a6d53105562d8b20aa Mon Sep 17 00:00:00 2001 From: Shreyas Goenka Date: Tue, 23 Jul 2024 18:14:21 +0200 Subject: [PATCH 59/75] wip adding support for prompting in UC schema creation --- bundle/phases/deploy.go | 114 ++++++++++++++++++++++++++++++++++++---- 1 file changed, 103 insertions(+), 11 deletions(-) diff --git a/bundle/phases/deploy.go b/bundle/phases/deploy.go index 46c3891895..70343f81d7 100644 --- a/bundle/phases/deploy.go +++ b/bundle/phases/deploy.go @@ -1,6 +1,9 @@ package phases import ( + "context" + "fmt" + "github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle/artifacts" "github.com/databricks/cli/bundle/config" @@ -14,10 +17,102 @@ import ( "github.com/databricks/cli/bundle/permissions" "github.com/databricks/cli/bundle/python" "github.com/databricks/cli/bundle/scripts" + "github.com/databricks/cli/libs/cmdio" + terraformlib "github.com/databricks/cli/libs/terraform" ) +func approvalForDeploy(ctx context.Context, b *bundle.Bundle) (bool, error) { + if b.AutoApprove { + return true, nil + } + + tf := b.Terraform + if tf == nil { + return false, fmt.Errorf("terraform not initialized") + } + + // read plan file + plan, err := tf.ShowPlanFile(ctx, b.Plan.Path) + if err != nil { + return false, err + } + + // TODO: Is schema recreation possible? Would that be destructive to the data? 
+ deleteOrRecreateSchema := make([]terraformlib.Action, 0) + for _, rc := range plan.ResourceChanges { + if rc.Change.Actions.Delete() && rc.Type == "databricks_schema" { + deleteOrRecreateSchema = append(deleteOrRecreateSchema, terraformlib.Action{ + Action: "delete", + ResourceType: rc.Type, + ResourceName: rc.Name, + }) + } + + if rc.Change.Actions.Replace() && rc.Type == "databricks_schema" { + deleteOrRecreateSchema = append(deleteOrRecreateSchema, terraformlib.Action{ + Action: "recreate", + ResourceType: rc.Type, + ResourceName: rc.Name, + }) + } + } + + // No need for approval if the plan does not include destroying or recreating + // any schema resources. + if len(deleteOrRecreateSchema) == 0 { + return true, nil + } + + // TODO: Return early with error here if the `--force-schema (or something)` delete + // flag is not specified. + for _, action := range deleteOrRecreateSchema { + cmdio.LogString(ctx, "The following UC schemas will be deleted or recreated. Any underlying data will be lost:") + + if len(deleteActions) > 0 { + cmdio.LogString(ctx, "") + cmdio.LogString(ctx, "The following resources will be deleted:") + for _, a := range deleteActions { + cmdio.Log(ctx, a) + } + } + + if len(recreateActions) > 0 { + cmdio.LogString(ctx, "") + cmdio.LogString(ctx, "The following resources will be recreated. Note that recreation can be lossy and may lead to lost metadata or data:") + for _, a := range recreateActions { + cmdio.Log(ctx, a) + } + cmdio.LogString(ctx, "") + } + + if !cmdio.IsPromptSupported(ctx) { + return false, fmt.Errorf("the deployment requires destructive actions, but current console does not support prompting. Please specify --auto-approve if you would like to skip prompts and proceed") + } + + cmdio.LogString(ctx, "") + approved, err := cmdio.AskYesOrNo(ctx, "Would you like to proceed?") + if err != nil { + return false, err + } + + return approved, nil +} + // The deploy phase deploys artifacts and resources. 
func Deploy() bundle.Mutator { + // Core mutators that CRUD resources and modify deployment state. These + // mutators need informed consent if they are potentially destructive. + deployCore := bundle.Defer( + terraform.Apply(), + bundle.Seq( + terraform.StatePush(), + terraform.Load(), + metadata.Compute(), + metadata.Upload(), + bundle.LogString("Deployment complete!"), + ), + ) + deployMutator := bundle.Seq( scripts.Execute(config.ScriptPreDeploy), lock.Acquire(), @@ -37,20 +132,17 @@ func Deploy() bundle.Mutator { terraform.Interpolate(), terraform.Write(), terraform.CheckRunningResource(), - bundle.Defer( - terraform.Apply(), - bundle.Seq( - terraform.StatePush(), - terraform.Load(), - metadata.Compute(), - metadata.Upload(), - ), + terraform.Plan(terraform.PlanGoal("deploy")), + terraform.Load(), + bundle.If( + approvalForDeploy, + deployCore, + bundle.LogString("Deployment cancelled!"), ), + lock.Release(lock.GoalDeploy), ), - lock.Release(lock.GoalDeploy), + scripts.Execute(config.ScriptPostDeploy), ), - scripts.Execute(config.ScriptPostDeploy), - bundle.LogString("Deployment complete!"), ) return newPhase( From 694a84a94c697e0978ad87f2591a04afcd30a7f9 Mon Sep 17 00:00:00 2001 From: Shreyas Goenka Date: Fri, 26 Jul 2024 16:55:41 +0200 Subject: [PATCH 60/75] fix build and dmt --- .../mutator/process_target_mode_test.go | 3 +- bundle/phases/deploy.go | 153 +++++++++--------- 2 files changed, 77 insertions(+), 79 deletions(-) diff --git a/bundle/config/mutator/process_target_mode_test.go b/bundle/config/mutator/process_target_mode_test.go index 0cb1bf096c..f0c8aee9ea 100644 --- a/bundle/config/mutator/process_target_mode_test.go +++ b/bundle/config/mutator/process_target_mode_test.go @@ -113,8 +113,7 @@ func mockBundle(mode config.Mode) *bundle.Bundle { }, }, }, - } - { + }, Schemas: map[string]*resources.Schema{ "schema1": {CreateSchema: &catalog.CreateSchema{Name: "schema1"}}, }, diff --git a/bundle/phases/deploy.go b/bundle/phases/deploy.go index 
70343f81d7..45be0411f3 100644 --- a/bundle/phases/deploy.go +++ b/bundle/phases/deploy.go @@ -2,7 +2,6 @@ package phases import ( "context" - "fmt" "github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle/artifacts" @@ -17,85 +16,85 @@ import ( "github.com/databricks/cli/bundle/permissions" "github.com/databricks/cli/bundle/python" "github.com/databricks/cli/bundle/scripts" - "github.com/databricks/cli/libs/cmdio" - terraformlib "github.com/databricks/cli/libs/terraform" ) func approvalForDeploy(ctx context.Context, b *bundle.Bundle) (bool, error) { - if b.AutoApprove { - return true, nil - } - - tf := b.Terraform - if tf == nil { - return false, fmt.Errorf("terraform not initialized") - } - - // read plan file - plan, err := tf.ShowPlanFile(ctx, b.Plan.Path) - if err != nil { - return false, err - } - - // TODO: Is schema recreation possible? Would that be destructive to the data? - deleteOrRecreateSchema := make([]terraformlib.Action, 0) - for _, rc := range plan.ResourceChanges { - if rc.Change.Actions.Delete() && rc.Type == "databricks_schema" { - deleteOrRecreateSchema = append(deleteOrRecreateSchema, terraformlib.Action{ - Action: "delete", - ResourceType: rc.Type, - ResourceName: rc.Name, - }) - } - - if rc.Change.Actions.Replace() && rc.Type == "databricks_schema" { - deleteOrRecreateSchema = append(deleteOrRecreateSchema, terraformlib.Action{ - Action: "recreate", - ResourceType: rc.Type, - ResourceName: rc.Name, - }) - } - } - - // No need for approval if the plan does not include destroying or recreating - // any schema resources. - if len(deleteOrRecreateSchema) == 0 { - return true, nil - } - - // TODO: Return early with error here if the `--force-schema (or something)` delete - // flag is not specified. - for _, action := range deleteOrRecreateSchema { - cmdio.LogString(ctx, "The following UC schemas will be deleted or recreated. 
Any underlying data will be lost:") - - if len(deleteActions) > 0 { - cmdio.LogString(ctx, "") - cmdio.LogString(ctx, "The following resources will be deleted:") - for _, a := range deleteActions { - cmdio.Log(ctx, a) - } - } - - if len(recreateActions) > 0 { - cmdio.LogString(ctx, "") - cmdio.LogString(ctx, "The following resources will be recreated. Note that recreation can be lossy and may lead to lost metadata or data:") - for _, a := range recreateActions { - cmdio.Log(ctx, a) - } - cmdio.LogString(ctx, "") - } - - if !cmdio.IsPromptSupported(ctx) { - return false, fmt.Errorf("the deployment requires destructive actions, but current console does not support prompting. Please specify --auto-approve if you would like to skip prompts and proceed") - } - - cmdio.LogString(ctx, "") - approved, err := cmdio.AskYesOrNo(ctx, "Would you like to proceed?") - if err != nil { - return false, err - } - - return approved, nil + // if b.AutoApprove { + // return true, nil + // } + + // tf := b.Terraform + // if tf == nil { + // return false, fmt.Errorf("terraform not initialized") + // } + + // // read plan file + // plan, err := tf.ShowPlanFile(ctx, b.Plan.Path) + // if err != nil { + // return false, err + // } + + // // TODO: Is schema recreation possible? Would that be destructive to the data? 
+ // deleteOrRecreateSchema := make([]terraformlib.Action, 0) + // for _, rc := range plan.ResourceChanges { + // if rc.Change.Actions.Delete() && rc.Type == "databricks_schema" { + // deleteOrRecreateSchema = append(deleteOrRecreateSchema, terraformlib.Action{ + // Action: "delete", + // ResourceType: rc.Type, + // ResourceName: rc.Name, + // }) + // } + + // if rc.Change.Actions.Replace() && rc.Type == "databricks_schema" { + // deleteOrRecreateSchema = append(deleteOrRecreateSchema, terraformlib.Action{ + // Action: "recreate", + // ResourceType: rc.Type, + // ResourceName: rc.Name, + // }) + // } + // } + + // // No need for approval if the plan does not include destroying or recreating + // // any schema resources. + // if len(deleteOrRecreateSchema) == 0 { + // return true, nil + // } + + // // TODO: Return early with error here if the `--force-schema (or something)` delete + // // flag is not specified. + // // for _, action := range deleteOrRecreateSchema { + // // cmdio.LogString(ctx, "The following UC schemas will be deleted or recreated. Any underlying data will be lost:") + // // } + + // if len(deleteActions) > 0 { + // cmdio.LogString(ctx, "") + // cmdio.LogString(ctx, "The following resources will be deleted:") + // for _, a := range deleteActions { + // cmdio.Log(ctx, a) + // } + // } + + // if len(recreateActions) > 0 { + // cmdio.LogString(ctx, "") + // cmdio.LogString(ctx, "The following resources will be recreated. Note that recreation can be lossy and may lead to lost metadata or data:") + // for _, a := range recreateActions { + // cmdio.Log(ctx, a) + // } + // cmdio.LogString(ctx, "") + // } + + // if !cmdio.IsPromptSupported(ctx) { + // return false, fmt.Errorf("the deployment requires destructive actions, but current console does not support prompting. 
Please specify --auto-approve if you would like to skip prompts and proceed") + // } + + // cmdio.LogString(ctx, "") + // approved, err := cmdio.AskYesOrNo(ctx, "Would you like to proceed?") + // if err != nil { + // return false, err + // } + + // return app0roved, nil + return true, nil } // The deploy phase deploys artifacts and resources. From 3127843cd82fd14679958a4c0ff0a8a7e3d40e78 Mon Sep 17 00:00:00 2001 From: Shreyas Goenka Date: Fri, 26 Jul 2024 17:45:09 +0200 Subject: [PATCH 61/75] cleanup todos --- bundle/config/mutator/process_target_mode.go | 1 - bundle/deploy/terraform/convert.go | 2 -- 2 files changed, 3 deletions(-) diff --git a/bundle/config/mutator/process_target_mode.go b/bundle/config/mutator/process_target_mode.go index d47d271690..fcefa27213 100644 --- a/bundle/config/mutator/process_target_mode.go +++ b/bundle/config/mutator/process_target_mode.go @@ -112,7 +112,6 @@ func transformDevelopmentMode(ctx context.Context, b *bundle.Bundle) diag.Diagno } } - // TODO: test this manually for i := range r.Schemas { prefix = "dev_" + b.Config.Workspace.CurrentUser.ShortName + "_" r.Schemas[i].Name = prefix + r.Schemas[i].Name diff --git a/bundle/deploy/terraform/convert.go b/bundle/deploy/terraform/convert.go index 131de656d5..edf1eb60c4 100644 --- a/bundle/deploy/terraform/convert.go +++ b/bundle/deploy/terraform/convert.go @@ -239,7 +239,6 @@ func BundleToTerraform(config *config.Root) *schema.Root { // Configure permissions for this resource. if rp := convGrants(src.Grants); rp != nil { - // TODO: test that this works to allocate grants. rp.Schema = fmt.Sprintf("${databricks_schema.%s.id}", k) tfroot.Resource.Grants["schema_"+k] = rp } @@ -452,7 +451,6 @@ func TerraformToBundle(state *resourcesState, config *config.Root) error { src.ModifiedStatus = resources.ModifiedStatusCreated } } - // TODO: Add test for this. 
for _, src := range config.Resources.Schemas { if src.ModifiedStatus == "" && src.ID == "" { src.ModifiedStatus = resources.ModifiedStatusCreated From 6c8390351b5042c91c72b7971cdac9a19ae5e0f3 Mon Sep 17 00:00:00 2001 From: Shreyas Goenka Date: Fri, 26 Jul 2024 17:46:08 +0200 Subject: [PATCH 62/75] remove paths from schema resources --- bundle/config/resources/schema.go | 5 ----- 1 file changed, 5 deletions(-) diff --git a/bundle/config/resources/schema.go b/bundle/config/resources/schema.go index fe060bc5c1..7ab00495a8 100644 --- a/bundle/config/resources/schema.go +++ b/bundle/config/resources/schema.go @@ -1,7 +1,6 @@ package resources import ( - "github.com/databricks/cli/bundle/config/paths" "github.com/databricks/databricks-sdk-go/marshal" "github.com/databricks/databricks-sdk-go/service/catalog" ) @@ -16,10 +15,6 @@ type Schema struct { *catalog.CreateSchema - // Path to config file where the resource is defined. All bundle resources - // include this for interpolation purposes. - paths.Paths - ModifiedStatus ModifiedStatus `json:"modified_status,omitempty" bundle:"internal"` } From eeafcb4dac961c68a334fff589380bdcc92b1ede Mon Sep 17 00:00:00 2001 From: Shreyas Goenka Date: Mon, 29 Jul 2024 15:55:24 +0200 Subject: [PATCH 63/75] cleanup --- bundle/config/validate/unique_resource_keys.go | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/bundle/config/validate/unique_resource_keys.go b/bundle/config/validate/unique_resource_keys.go index d9a941fae8..d6212b0acf 100644 --- a/bundle/config/validate/unique_resource_keys.go +++ b/bundle/config/validate/unique_resource_keys.go @@ -4,10 +4,7 @@ import ( "context" "fmt" "slices" -<<<<<<< HEAD -======= "sort" ->>>>>>> origin "github.com/databricks/cli/bundle" "github.com/databricks/cli/libs/diag" @@ -112,7 +109,7 @@ func (m *uniqueResourceKeys) Apply(ctx context.Context, b *bundle.Bundle) diag.D Summary: fmt.Sprintf("multiple resources have been defined with the same key: %s", k), Locations: 
v.locations, Paths: v.paths, -gs }) + }) } return diags From 52889fb588b8f7bc51cf0a4c8385cc1f60f675b6 Mon Sep 17 00:00:00 2001 From: Shreyas Goenka Date: Mon, 29 Jul 2024 17:17:22 +0200 Subject: [PATCH 64/75] modify schema tag comment --- bundle/config/mutator/process_target_mode.go | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/bundle/config/mutator/process_target_mode.go b/bundle/config/mutator/process_target_mode.go index fcefa27213..9db97907dd 100644 --- a/bundle/config/mutator/process_target_mode.go +++ b/bundle/config/mutator/process_target_mode.go @@ -115,7 +115,8 @@ func transformDevelopmentMode(ctx context.Context, b *bundle.Bundle) diag.Diagno for i := range r.Schemas { prefix = "dev_" + b.Config.Workspace.CurrentUser.ShortName + "_" r.Schemas[i].Name = prefix + r.Schemas[i].Name - // (schemas don't yet support tags) + // HTTP API for schemas doesn't yet support tags. It's only supported in + // the Databricks UI and via the SQL API. } return nil From a2b9a77453e8955d9ccf974d18579213bfa18115 Mon Sep 17 00:00:00 2001 From: Shreyas Goenka Date: Mon, 29 Jul 2024 18:51:07 +0200 Subject: [PATCH 65/75] prompting done (initial) --- bundle/deploy/terraform/convert.go | 5 + bundle/phases/deploy.go | 142 +++++++++++++---------------- 2 files changed, 70 insertions(+), 77 deletions(-) diff --git a/bundle/deploy/terraform/convert.go b/bundle/deploy/terraform/convert.go index edf1eb60c4..a3fc496a0c 100644 --- a/bundle/deploy/terraform/convert.go +++ b/bundle/deploy/terraform/convert.go @@ -237,6 +237,11 @@ func BundleToTerraform(config *config.Root) *schema.Root { conv(src, &dst) tfroot.Resource.Schema[k] = &dst + // We always set force destroy as it allows DABs to manage the lifecycle + // of the schema. It's the responsibility of the CLI to ensure the user + // is adequately warned when they try to delete a UC schema. + dst.ForceDestroy = true + // Configure permissions for this resource. 
if rp := convGrants(src.Grants); rp != nil { rp.Schema = fmt.Sprintf("${databricks_schema.%s.id}", k) diff --git a/bundle/phases/deploy.go b/bundle/phases/deploy.go index 45be0411f3..81665a0471 100644 --- a/bundle/phases/deploy.go +++ b/bundle/phases/deploy.go @@ -2,6 +2,7 @@ package phases import ( "context" + "fmt" "github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle/artifacts" @@ -16,85 +17,73 @@ import ( "github.com/databricks/cli/bundle/permissions" "github.com/databricks/cli/bundle/python" "github.com/databricks/cli/bundle/scripts" + "github.com/databricks/cli/libs/cmdio" + terraformlib "github.com/databricks/cli/libs/terraform" ) func approvalForDeploy(ctx context.Context, b *bundle.Bundle) (bool, error) { - // if b.AutoApprove { - // return true, nil - // } - - // tf := b.Terraform - // if tf == nil { - // return false, fmt.Errorf("terraform not initialized") - // } - - // // read plan file - // plan, err := tf.ShowPlanFile(ctx, b.Plan.Path) - // if err != nil { - // return false, err - // } - - // // TODO: Is schema recreation possible? Would that be destructive to the data? - // deleteOrRecreateSchema := make([]terraformlib.Action, 0) - // for _, rc := range plan.ResourceChanges { - // if rc.Change.Actions.Delete() && rc.Type == "databricks_schema" { - // deleteOrRecreateSchema = append(deleteOrRecreateSchema, terraformlib.Action{ - // Action: "delete", - // ResourceType: rc.Type, - // ResourceName: rc.Name, - // }) - // } - - // if rc.Change.Actions.Replace() && rc.Type == "databricks_schema" { - // deleteOrRecreateSchema = append(deleteOrRecreateSchema, terraformlib.Action{ - // Action: "recreate", - // ResourceType: rc.Type, - // ResourceName: rc.Name, - // }) - // } - // } - - // // No need for approval if the plan does not include destroying or recreating - // // any schema resources. 
- // if len(deleteOrRecreateSchema) == 0 { - // return true, nil - // } - - // // TODO: Return early with error here if the `--force-schema (or something)` delete - // // flag is not specified. - // // for _, action := range deleteOrRecreateSchema { - // // cmdio.LogString(ctx, "The following UC schemas will be deleted or recreated. Any underlying data will be lost:") - // // } - - // if len(deleteActions) > 0 { - // cmdio.LogString(ctx, "") - // cmdio.LogString(ctx, "The following resources will be deleted:") - // for _, a := range deleteActions { - // cmdio.Log(ctx, a) - // } - // } - - // if len(recreateActions) > 0 { - // cmdio.LogString(ctx, "") - // cmdio.LogString(ctx, "The following resources will be recreated. Note that recreation can be lossy and may lead to lost metadata or data:") - // for _, a := range recreateActions { - // cmdio.Log(ctx, a) - // } - // cmdio.LogString(ctx, "") - // } - - // if !cmdio.IsPromptSupported(ctx) { - // return false, fmt.Errorf("the deployment requires destructive actions, but current console does not support prompting. 
Please specify --auto-approve if you would like to skip prompts and proceed") - // } - - // cmdio.LogString(ctx, "") - // approved, err := cmdio.AskYesOrNo(ctx, "Would you like to proceed?") - // if err != nil { - // return false, err - // } - - // return app0roved, nil - return true, nil + if b.AutoApprove { + return true, nil + } + + tf := b.Terraform + if tf == nil { + return false, fmt.Errorf("terraform not initialized") + } + + // read plan file + plan, err := tf.ShowPlanFile(ctx, b.Plan.Path) + if err != nil { + return false, err + } + + actions := make([]terraformlib.Action, 0) + for _, rc := range plan.ResourceChanges { + var actionType terraformlib.ActionType + + switch { + case rc.Change.Actions.Delete(): + actionType = terraformlib.ActionTypeDelete + case rc.Change.Actions.Replace(): + actionType = terraformlib.ActionTypeRecreate + default: + // We don't need a prompt for non-destructive actions like creating + // or updating a schema. + continue + } + + actions = append(actions, terraformlib.Action{ + Action: actionType, + ResourceType: rc.Type, + ResourceName: rc.Name, + }) + } + + // No restricted actions planned. No need for approval. + if len(actions) == 0 { + return true, nil + } + + cmdio.LogString(ctx, "The following UC schemas will be deleted or recreated. Any underlying data may be lost:") + for _, action := range actions { + cmdio.Log(ctx, action) + } + + if b.AutoApprove { + return true, nil + } + + if !cmdio.IsPromptSupported(ctx) { + return false, fmt.Errorf("the deployment requires destructive actions, but current console does not support prompting. Please specify --auto-approve if you would like to skip prompts and proceed") + } + + cmdio.LogString(ctx, "") + approved, err := cmdio.AskYesOrNo(ctx, "Would you like to proceed?") + if err != nil { + return false, err + } + + return approved, nil } // The deploy phase deploys artifacts and resources. 
@@ -132,7 +121,6 @@ func Deploy() bundle.Mutator { terraform.Write(), terraform.CheckRunningResource(), terraform.Plan(terraform.PlanGoal("deploy")), - terraform.Load(), bundle.If( approvalForDeploy, deployCore, From fc031e7881543b1a1f468440dde1d41bbb9d5280 Mon Sep 17 00:00:00 2001 From: Shreyas Goenka Date: Mon, 29 Jul 2024 20:11:27 +0200 Subject: [PATCH 66/75] fix lock releases and setting force_destroy --- bundle/deploy/terraform/convert.go | 18 ------------------ .../deploy/terraform/tfdyn/convert_schema.go | 10 +++++++++- bundle/phases/deploy.go | 5 +++-- 3 files changed, 12 insertions(+), 21 deletions(-) diff --git a/bundle/deploy/terraform/convert.go b/bundle/deploy/terraform/convert.go index a3fc496a0c..f13c241cee 100644 --- a/bundle/deploy/terraform/convert.go +++ b/bundle/deploy/terraform/convert.go @@ -231,24 +231,6 @@ func BundleToTerraform(config *config.Root) *schema.Root { tfroot.Resource.QualityMonitor[k] = &dst } - for k, src := range config.Resources.Schemas { - noResources = false - var dst schema.ResourceSchema - conv(src, &dst) - tfroot.Resource.Schema[k] = &dst - - // We always set force destroy as it allows DABs to manage the lifecycle - // of the schema. It's the responsibility of the CLI to ensure the user - // is adequately warned when they try to delete a UC schema. - dst.ForceDestroy = true - - // Configure permissions for this resource. - if rp := convGrants(src.Grants); rp != nil { - rp.Schema = fmt.Sprintf("${databricks_schema.%s.id}", k) - tfroot.Resource.Grants["schema_"+k] = rp - } - } - // We explicitly set "resource" to nil to omit it from a JSON encoding. // This is required because the terraform CLI requires >= 1 resources defined // if the "resource" property is used in a .tf.json file. 
diff --git a/bundle/deploy/terraform/tfdyn/convert_schema.go b/bundle/deploy/terraform/tfdyn/convert_schema.go index 0b6d904152..b5e6a88c0d 100644 --- a/bundle/deploy/terraform/tfdyn/convert_schema.go +++ b/bundle/deploy/terraform/tfdyn/convert_schema.go @@ -12,11 +12,19 @@ import ( func convertSchemaResource(ctx context.Context, vin dyn.Value) (dyn.Value, error) { // Normalize the output value to the target schema. - vout, diags := convert.Normalize(schema.ResourceSchema{}, vin) + v, diags := convert.Normalize(schema.ResourceSchema{}, vin) for _, diag := range diags { log.Debugf(ctx, "schema normalization diagnostic: %s", diag.Summary) } + // We always set force destroy as it allows DABs to manage the lifecycle + // of the schema. It's the responsibility of the CLI to ensure the user + // is adequately warned when they try to delete a UC schema. + vout, err := dyn.SetByPath(v, dyn.MustPathFromString("force_destroy"), dyn.V(true)) + if err != nil { + return dyn.InvalidValue, err + } + return vout, nil } diff --git a/bundle/phases/deploy.go b/bundle/phases/deploy.go index 81665a0471..64090f4f66 100644 --- a/bundle/phases/deploy.go +++ b/bundle/phases/deploy.go @@ -126,10 +126,11 @@ func Deploy() bundle.Mutator { deployCore, bundle.LogString("Deployment cancelled!"), ), - lock.Release(lock.GoalDeploy), ), - scripts.Execute(config.ScriptPostDeploy), + // TODO: Ensure releasing locks and post deploy scripts work as expected. 
+ lock.Release(lock.GoalDeploy), ), + scripts.Execute(config.ScriptPostDeploy), ) return newPhase( From 5575ca8f1cb9bcca513df455e4b6921a18552356 Mon Sep 17 00:00:00 2001 From: Shreyas Goenka Date: Tue, 30 Jul 2024 12:41:00 +0200 Subject: [PATCH 67/75] fix TestConvertSchema --- bundle/deploy/terraform/tfdyn/convert_schema_test.go | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/bundle/deploy/terraform/tfdyn/convert_schema_test.go b/bundle/deploy/terraform/tfdyn/convert_schema_test.go index 14281fa330..2efbf3e430 100644 --- a/bundle/deploy/terraform/tfdyn/convert_schema_test.go +++ b/bundle/deploy/terraform/tfdyn/convert_schema_test.go @@ -54,7 +54,8 @@ func TestConvertSchema(t *testing.T) { "k1": "v1", "k2": "v2", }, - "storage_root": "root", + "force_destroy": true, + "storage_root": "root", }, out.Schema["my_schema"]) // Assert equality on the grants From aa5f4fa73e95fdd2f4438f0eefd59549c9faf8dc Mon Sep 17 00:00:00 2001 From: Shreyas Goenka Date: Tue, 30 Jul 2024 12:45:33 +0200 Subject: [PATCH 68/75] cleanup --- bundle/phases/deploy.go | 1 - 1 file changed, 1 deletion(-) diff --git a/bundle/phases/deploy.go b/bundle/phases/deploy.go index 64090f4f66..5570e7879e 100644 --- a/bundle/phases/deploy.go +++ b/bundle/phases/deploy.go @@ -127,7 +127,6 @@ func Deploy() bundle.Mutator { bundle.LogString("Deployment cancelled!"), ), ), - // TODO: Ensure releasing locks and post deploy scripts work as expected. 
lock.Release(lock.GoalDeploy), ), scripts.Execute(config.ScriptPostDeploy), From 9752431839cdd251383de09707e2be628c08543a Mon Sep 17 00:00:00 2001 From: Shreyas Goenka Date: Tue, 30 Jul 2024 15:22:03 +0200 Subject: [PATCH 69/75] support for auto-approve --- cmd/bundle/deploy.go | 5 +- .../uc_schema/template/databricks.yml.tmpl | 12 +-- .../uc_schema/template/schema.yml.tmpl | 13 ++++ internal/bundle/deploy_test.go | 73 +++++++++++++++---- internal/bundle/helpers.go | 2 +- 5 files changed, 84 insertions(+), 21 deletions(-) create mode 100644 internal/bundle/bundles/uc_schema/template/schema.yml.tmpl diff --git a/cmd/bundle/deploy.go b/cmd/bundle/deploy.go index 1232c8de51..1166875ab3 100644 --- a/cmd/bundle/deploy.go +++ b/cmd/bundle/deploy.go @@ -24,10 +24,12 @@ func newDeployCommand() *cobra.Command { var forceLock bool var failOnActiveRuns bool var computeID string + var autoApprove bool cmd.Flags().BoolVar(&force, "force", false, "Force-override Git branch validation.") cmd.Flags().BoolVar(&forceLock, "force-lock", false, "Force acquisition of deployment lock.") cmd.Flags().BoolVar(&failOnActiveRuns, "fail-on-active-runs", false, "Fail if there are running jobs or pipelines in the deployment.") cmd.Flags().StringVarP(&computeID, "compute-id", "c", "", "Override compute in the deployment with the given compute ID.") + cmd.Flags().BoolVar(&autoApprove, "auto-approve", false, "Skip interactive approvals that might be required for deployment.") cmd.RunE = func(cmd *cobra.Command, args []string) error { ctx := cmd.Context() @@ -37,10 +39,11 @@ func newDeployCommand() *cobra.Command { bundle.ApplyFunc(ctx, b, func(context.Context, *bundle.Bundle) diag.Diagnostics { b.Config.Bundle.Force = force b.Config.Bundle.Deployment.Lock.Force = forceLock + b.AutoApprove = autoApprove + if cmd.Flag("compute-id").Changed { b.Config.Bundle.ComputeID = computeID } - if cmd.Flag("fail-on-active-runs").Changed { b.Config.Bundle.Deployment.FailOnActiveRuns = failOnActiveRuns } diff 
--git a/internal/bundle/bundles/uc_schema/template/databricks.yml.tmpl b/internal/bundle/bundles/uc_schema/template/databricks.yml.tmpl index be36e91a61..961af25e86 100644 --- a/internal/bundle/bundles/uc_schema/template/databricks.yml.tmpl +++ b/internal/bundle/bundles/uc_schema/template/databricks.yml.tmpl @@ -10,10 +10,10 @@ resources: path: ./nb.sql development: true catalog: main - target: ${resources.schemas.bar.id} - schemas: - bar: - name: test-schema-{{.unique_id}} - catalog_name: main - comment: This schema was created from DABs +include: + - "*.yml" + +targets: + development: + default: true diff --git a/internal/bundle/bundles/uc_schema/template/schema.yml.tmpl b/internal/bundle/bundles/uc_schema/template/schema.yml.tmpl new file mode 100644 index 0000000000..50067036e6 --- /dev/null +++ b/internal/bundle/bundles/uc_schema/template/schema.yml.tmpl @@ -0,0 +1,13 @@ +resources: + schemas: + bar: + name: test-schema-{{.unique_id}} + catalog_name: main + comment: This schema was created from DABs + +targets: + development: + resources: + pipelines: + foo: + target: ${resources.schemas.bar.id} diff --git a/internal/bundle/deploy_test.go b/internal/bundle/deploy_test.go index 161ecc8b91..4301b8ed34 100644 --- a/internal/bundle/deploy_test.go +++ b/internal/bundle/deploy_test.go @@ -1,21 +1,26 @@ package bundle import ( + "context" "errors" + "fmt" + "io" + "os" + "path/filepath" + "strings" "testing" "github.com/databricks/cli/internal/acc" + "github.com/databricks/databricks-sdk-go" "github.com/databricks/databricks-sdk-go/apierr" + "github.com/databricks/databricks-sdk-go/service/catalog" + "github.com/databricks/databricks-sdk-go/service/files" "github.com/google/uuid" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) -func TestAccBundleDeployUcSchema(t *testing.T) { - ctx, wt := acc.UcWorkspaceTest(t) - w := wt.W - - uniqueId := uuid.New().String() +func setupUcSchemaBundle(t *testing.T, ctx context.Context, w 
*databricks.WorkspaceClient, uniqueId string) string { bundleRoot, err := initTestTemplate(t, ctx, "uc_schema", map[string]any{ "unique_id": uniqueId, }) @@ -29,27 +34,69 @@ func TestAccBundleDeployUcSchema(t *testing.T) { }) // Assert the schema is created - schemaName := "main.test-schema-" + uniqueId - schema, err := w.Schemas.GetByFullName(ctx, schemaName) + catalogName := "main" + schemaName := "test-schema-" + uniqueId + schema, err := w.Schemas.GetByFullName(ctx, strings.Join([]string{catalogName, schemaName}, ".")) require.NoError(t, err) - assert.Equal(t, schema.FullName, schemaName) - assert.Equal(t, schema.Comment, "This schema was created from DABs") + require.Equal(t, strings.Join([]string{catalogName, schemaName}, "."), schema.FullName) + require.Equal(t, "This schema was created from DABs", schema.Comment) // Assert the pipeline is created pipelineName := "test-pipeline-" + uniqueId pipeline, err := w.Pipelines.GetByName(ctx, pipelineName) require.NoError(t, err) - assert.Equal(t, pipeline.Name, pipelineName) + require.Equal(t, pipelineName, pipeline.Name) id := pipeline.PipelineId // Assert the pipeline uses the schema i, err := w.Pipelines.GetByPipelineId(ctx, id) require.NoError(t, err) - assert.Equal(t, i.Spec.Catalog, "main") - assert.Equal(t, i.Spec.Target, "test-schema-"+uniqueId) + require.Equal(t, catalogName, i.Spec.Catalog) + require.Equal(t, strings.Join([]string{catalogName, schemaName}, "."), i.Spec.Target) + + // Create a volume in the schema, and add a file to it. This ensure that the + // schema as some data in it and deletion will fail unless the generated + // terraform configuration has force_destroy set to true. 
+ volumeName := "test-volume-" + uniqueId + volume, err := w.Volumes.Create(ctx, catalog.CreateVolumeRequestContent{ + CatalogName: catalogName, + SchemaName: schemaName, + Name: volumeName, + VolumeType: catalog.VolumeTypeManaged, + }) + require.NoError(t, err) + require.Equal(t, volume.Name, volumeName) + + fileName := "test-file-" + uniqueId + err = w.Files.Upload(ctx, files.UploadRequest{ + Contents: io.NopCloser(strings.NewReader("Hello, world!")), + FilePath: fmt.Sprintf("/Volumes/%s/%s/%s/%s", catalogName, schemaName, volumeName, fileName), + }) + require.NoError(t, err) + + return bundleRoot +} + +func TestAccBundleDeployUcSchema(t *testing.T) { + ctx, wt := acc.UcWorkspaceTest(t) + w := wt.W + + uniqueId := uuid.New().String() + schemaName := "test-schema-" + uniqueId + catalogName := "main" + + bundleRoot := setupUcSchemaBundle(t, ctx, w, uniqueId) + + // Remove the UC schema from the resource configuration. + err := os.Remove(filepath.Join(bundleRoot, "schema.yml")) + require.NoError(t, err) + + // Redeploy the bundle + err = deployBundle(t, ctx, bundleRoot) + require.NoError(t, err) // Assert the schema is deleted - _, err = w.Schemas.GetByFullName(ctx, schemaName) + _, err = w.Schemas.GetByFullName(ctx, strings.Join([]string{catalogName, schemaName}, ".")) apiErr := &apierr.APIError{} assert.True(t, errors.As(err, &apiErr)) assert.Equal(t, "SCHEMA_DOES_NOT_EXIST", apiErr.ErrorCode) diff --git a/internal/bundle/helpers.go b/internal/bundle/helpers.go index c33c153313..1910a0148f 100644 --- a/internal/bundle/helpers.go +++ b/internal/bundle/helpers.go @@ -64,7 +64,7 @@ func validateBundle(t *testing.T, ctx context.Context, path string) ([]byte, err func deployBundle(t *testing.T, ctx context.Context, path string) error { t.Setenv("BUNDLE_ROOT", path) - c := internal.NewCobraTestRunnerWithContext(t, ctx, "bundle", "deploy", "--force-lock") + c := internal.NewCobraTestRunnerWithContext(t, ctx, "bundle", "deploy", "--force-lock", "--auto-approve") _, _, err 
:= c.Run() return err } From caf318d291fb915e72cfb9405826499dad51956d Mon Sep 17 00:00:00 2001 From: Shreyas Goenka Date: Tue, 30 Jul 2024 15:35:31 +0200 Subject: [PATCH 70/75] add test for non-tty error --- internal/bundle/deploy_test.go | 22 ++++++++++++++++++++++ 1 file changed, 22 insertions(+) diff --git a/internal/bundle/deploy_test.go b/internal/bundle/deploy_test.go index 4301b8ed34..c6820ea520 100644 --- a/internal/bundle/deploy_test.go +++ b/internal/bundle/deploy_test.go @@ -10,6 +10,8 @@ import ( "strings" "testing" + "github.com/databricks/cli/cmd/root" + "github.com/databricks/cli/internal" "github.com/databricks/cli/internal/acc" "github.com/databricks/databricks-sdk-go" "github.com/databricks/databricks-sdk-go/apierr" @@ -101,3 +103,23 @@ func TestAccBundleDeployUcSchema(t *testing.T) { assert.True(t, errors.As(err, &apiErr)) assert.Equal(t, "SCHEMA_DOES_NOT_EXIST", apiErr.ErrorCode) } + +func TestAccBundleDeployUcSchemaFailsWithoutAutoApprove(t *testing.T) { + ctx, wt := acc.UcWorkspaceTest(t) + w := wt.W + + uniqueId := uuid.New().String() + bundleRoot := setupUcSchemaBundle(t, ctx, w, uniqueId) + + // Remove the UC schema from the resource configuration. + err := os.Remove(filepath.Join(bundleRoot, "schema.yml")) + require.NoError(t, err) + + // Redeploy the bundle + t.Setenv("BUNDLE_ROOT", bundleRoot) + t.Setenv("TERM", "dumb") + c := internal.NewCobraTestRunnerWithContext(t, ctx, "bundle", "deploy", "--force-lock") + stdout, _, err := c.Run() + assert.EqualError(t, err, root.ErrAlreadyPrinted.Error()) + assert.Contains(t, stdout.String(), "the deployment requires destructive actions, but current console does not support prompting. 
Please specify --auto-approve if you would like to skip prompts and proceed") +} From 7969a7766e8f8449d58a3009e86ae6a9de606630 Mon Sep 17 00:00:00 2001 From: Shreyas Goenka Date: Tue, 30 Jul 2024 15:44:51 +0200 Subject: [PATCH 71/75] undo sorted pairs change --- internal/bundle/deploy_test.go | 4 ++-- libs/dyn/mapping.go | 8 +------- 2 files changed, 3 insertions(+), 9 deletions(-) diff --git a/internal/bundle/deploy_test.go b/internal/bundle/deploy_test.go index c6820ea520..3da885705d 100644 --- a/internal/bundle/deploy_test.go +++ b/internal/bundle/deploy_test.go @@ -56,8 +56,8 @@ func setupUcSchemaBundle(t *testing.T, ctx context.Context, w *databricks.Worksp require.Equal(t, catalogName, i.Spec.Catalog) require.Equal(t, strings.Join([]string{catalogName, schemaName}, "."), i.Spec.Target) - // Create a volume in the schema, and add a file to it. This ensure that the - // schema as some data in it and deletion will fail unless the generated + // Create a volume in the schema, and add a file to it. This ensures that the + // schema has some data in it and deletion will fail unless the generated // terraform configuration has force_destroy set to true. volumeName := "test-volume-" + uniqueId volume, err := w.Volumes.Create(ctx, catalog.CreateVolumeRequestContent{ diff --git a/libs/dyn/mapping.go b/libs/dyn/mapping.go index 551571d18a..f9f2d2e97e 100644 --- a/libs/dyn/mapping.go +++ b/libs/dyn/mapping.go @@ -4,7 +4,6 @@ import ( "fmt" "maps" "slices" - "sort" ) // Pair represents a single key-value pair in a Mapping. @@ -50,12 +49,7 @@ func newMappingFromGoMap(vin map[string]Value) Mapping { // Pairs returns all the key-value pairs in the Mapping. The pairs are sorted by // their key in lexicographic order. 
func (m Mapping) Pairs() []Pair { - pairs := make([]Pair, len(m.pairs)) - copy(pairs, m.pairs) - sort.Slice(pairs, func(i, j int) bool { - return pairs[i].Key.MustString() < pairs[j].Key.MustString() - }) - return pairs + return m.pairs } // Len returns the number of key-value pairs in the Mapping. From 89f3e304c8106ebb36cca1038142477d34863148 Mon Sep 17 00:00:00 2001 From: Shreyas Goenka Date: Wed, 31 Jul 2024 12:50:37 +0200 Subject: [PATCH 72/75] address comments --- bundle/phases/deploy.go | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/bundle/phases/deploy.go b/bundle/phases/deploy.go index 5570e7879e..52afebb0a7 100644 --- a/bundle/phases/deploy.go +++ b/bundle/phases/deploy.go @@ -22,9 +22,6 @@ import ( ) func approvalForDeploy(ctx context.Context, b *bundle.Bundle) (bool, error) { - if b.AutoApprove { - return true, nil - } tf := b.Terraform if tf == nil { @@ -39,6 +36,11 @@ func approvalForDeploy(ctx context.Context, b *bundle.Bundle) (bool, error) { actions := make([]terraformlib.Action, 0) for _, rc := range plan.ResourceChanges { + // We only care about destructive actions on UC schema resources. 
+ if rc.Type != "databricks_schema" { + continue + } + var actionType terraformlib.ActionType switch { From e3826e894aac6bf5e78f011af9bb4cd7738145c7 Mon Sep 17 00:00:00 2001 From: Shreyas Goenka Date: Wed, 31 Jul 2024 14:05:58 +0200 Subject: [PATCH 73/75] rename approvalForDeploy -> approvalForUcSchemaDelete --- bundle/phases/deploy.go | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/bundle/phases/deploy.go b/bundle/phases/deploy.go index 52afebb0a7..c68153f2df 100644 --- a/bundle/phases/deploy.go +++ b/bundle/phases/deploy.go @@ -21,8 +21,7 @@ import ( terraformlib "github.com/databricks/cli/libs/terraform" ) -func approvalForDeploy(ctx context.Context, b *bundle.Bundle) (bool, error) { - +func approvalForUcSchemaDelete(ctx context.Context, b *bundle.Bundle) (bool, error) { tf := b.Terraform if tf == nil { return false, fmt.Errorf("terraform not initialized") @@ -124,7 +123,7 @@ func Deploy() bundle.Mutator { terraform.CheckRunningResource(), terraform.Plan(terraform.PlanGoal("deploy")), bundle.If( - approvalForDeploy, + approvalForUcSchemaDelete, deployCore, bundle.LogString("Deployment cancelled!"), ), From 03d5f1daf32ab27b8dc12c8a82447697798e8451 Mon Sep 17 00:00:00 2001 From: Shreyas Goenka Date: Wed, 31 Jul 2024 14:18:55 +0200 Subject: [PATCH 74/75] Use precomputed terraform plan for `bundle deploy` --- bundle/deploy/terraform/apply.go | 21 ++++++++------ bundle/deploy/terraform/destroy.go | 46 ------------------------------ bundle/phases/deploy.go | 5 +++- bundle/phases/destroy.go | 2 +- 4 files changed, 17 insertions(+), 57 deletions(-) delete mode 100644 bundle/deploy/terraform/destroy.go diff --git a/bundle/deploy/terraform/apply.go b/bundle/deploy/terraform/apply.go index e4acda852f..e52d0ca8f1 100644 --- a/bundle/deploy/terraform/apply.go +++ b/bundle/deploy/terraform/apply.go @@ -4,7 +4,6 @@ import ( "context" "github.com/databricks/cli/bundle" - "github.com/databricks/cli/libs/cmdio" "github.com/databricks/cli/libs/diag" 
"github.com/databricks/cli/libs/log" "github.com/hashicorp/terraform-exec/tfexec" @@ -17,28 +16,32 @@ func (w *apply) Name() string { } func (w *apply) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { + // return early if plan is empty + if b.Plan.IsEmpty { + log.Debugf(ctx, "No changes in plan. Skipping terraform apply.") + return nil + } + tf := b.Terraform if tf == nil { return diag.Errorf("terraform not initialized") } - cmdio.LogString(ctx, "Deploying resources...") - - err := tf.Init(ctx, tfexec.Upgrade(true)) - if err != nil { - return diag.Errorf("terraform init: %v", err) + if b.Plan.Path == "" { + return diag.Errorf("no plan found") } - err = tf.Apply(ctx) + // Apply terraform according to the computed plan + err := tf.Apply(ctx, tfexec.DirOrPlan(b.Plan.Path)) if err != nil { return diag.Errorf("terraform apply: %v", err) } - log.Infof(ctx, "Resource deployment completed") + log.Infof(ctx, "terraform apply completed") return nil } -// Apply returns a [bundle.Mutator] that runs the equivalent of `terraform apply` +// Apply returns a [bundle.Mutator] that runs the equivalent of `terraform apply ./plan` // from the bundle's ephemeral working directory for Terraform. func Apply() bundle.Mutator { return &apply{} diff --git a/bundle/deploy/terraform/destroy.go b/bundle/deploy/terraform/destroy.go deleted file mode 100644 index 9c63a0b379..0000000000 --- a/bundle/deploy/terraform/destroy.go +++ /dev/null @@ -1,46 +0,0 @@ -package terraform - -import ( - "context" - - "github.com/databricks/cli/bundle" - "github.com/databricks/cli/libs/diag" - "github.com/databricks/cli/libs/log" - "github.com/hashicorp/terraform-exec/tfexec" -) - -type destroy struct{} - -func (w *destroy) Name() string { - return "terraform.Destroy" -} - -func (w *destroy) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { - // return early if plan is empty - if b.Plan.IsEmpty { - log.Debugf(ctx, "No resources to destroy in plan. 
Skipping destroy.") - return nil - } - - tf := b.Terraform - if tf == nil { - return diag.Errorf("terraform not initialized") - } - - if b.Plan.Path == "" { - return diag.Errorf("no plan found") - } - - // Apply terraform according to the computed destroy plan - err := tf.Apply(ctx, tfexec.DirOrPlan(b.Plan.Path)) - if err != nil { - return diag.Errorf("terraform destroy: %v", err) - } - return nil -} - -// Destroy returns a [bundle.Mutator] that runs the conceptual equivalent of -// `terraform destroy ./plan` from the bundle's ephemeral working directory for Terraform. -func Destroy() bundle.Mutator { - return &destroy{} -} diff --git a/bundle/phases/deploy.go b/bundle/phases/deploy.go index c68153f2df..6929f74baf 100644 --- a/bundle/phases/deploy.go +++ b/bundle/phases/deploy.go @@ -92,7 +92,10 @@ func Deploy() bundle.Mutator { // Core mutators that CRUD resources and modify deployment state. These // mutators need informed consent if they are potentially destructive. deployCore := bundle.Defer( - terraform.Apply(), + bundle.Seq( + bundle.LogString("Deploying resources..."), + terraform.Apply(), + ), bundle.Seq( terraform.StatePush(), terraform.Load(), diff --git a/bundle/phases/destroy.go b/bundle/phases/destroy.go index bd99af789b..01b2766708 100644 --- a/bundle/phases/destroy.go +++ b/bundle/phases/destroy.go @@ -82,7 +82,7 @@ func approvalForDestroy(ctx context.Context, b *bundle.Bundle) (bool, error) { func Destroy() bundle.Mutator { // Core destructive mutators for destroy. These require informed user consent. 
destroyCore := bundle.Seq( - terraform.Destroy(), + terraform.Apply(), terraform.StatePush(), files.Delete(), bundle.LogString("Destroy complete!"), From 223d2e89b78200b874dd92044602eb28da5dbb2d Mon Sep 17 00:00:00 2001 From: Shreyas Goenka Date: Wed, 31 Jul 2024 15:21:37 +0200 Subject: [PATCH 75/75] handle tf state being absent --- bundle/deploy/terraform/state_push.go | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/bundle/deploy/terraform/state_push.go b/bundle/deploy/terraform/state_push.go index b50983bd4b..6cdde13716 100644 --- a/bundle/deploy/terraform/state_push.go +++ b/bundle/deploy/terraform/state_push.go @@ -2,6 +2,8 @@ package terraform import ( "context" + "errors" + "io/fs" "os" "path/filepath" @@ -34,6 +36,12 @@ func (l *statePush) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostic // Expect the state file to live under dir. local, err := os.Open(filepath.Join(dir, TerraformStateFileName)) + if errors.Is(err, fs.ErrNotExist) { + // The state file can be absent if terraform apply is skipped because + // there are no changes to apply in the plan. + log.Debugf(ctx, "Local terraform state file does not exist.") + return nil + } if err != nil { return diag.FromErr(err) }