diff --git a/bundle/lsp/completion.go b/bundle/lsp/completion.go new file mode 100644 index 0000000000..d544c832c7 --- /dev/null +++ b/bundle/lsp/completion.go @@ -0,0 +1,407 @@ +package lsp + +import ( + "fmt" + "sort" + "strings" + + "github.com/databricks/cli/libs/dyn" +) + +const ( + completionKindVariable = 6 // LSP CompletionItemKind.Variable + completionKindField = 5 // LSP CompletionItemKind.Field + completionKindModule = 9 // LSP CompletionItemKind.Module + completionKindValue = 12 // LSP CompletionItemKind.Value +) + +// CompletionContext holds the parsed state of a partial interpolation at the cursor. +type CompletionContext struct { + // Start is the character offset of the `${` opening in the line. + Start int + // PartialPath is the text between `${` and the cursor (e.g., "var.clust"). + PartialPath string +} + +// FindCompletionContext locates a partial `${...` interpolation at the cursor position. +// Returns the context and true if the cursor is inside an incomplete interpolation. +func FindCompletionContext(lines []string, pos Position) (CompletionContext, bool) { + if pos.Line < 0 || pos.Line >= len(lines) { + return CompletionContext{}, false + } + + line := lines[pos.Line] + if pos.Character > len(line) { + return CompletionContext{}, false + } + + // Look backwards from cursor for the nearest unmatched "${". + textBefore := line[:pos.Character] + dollarBrace := strings.LastIndex(textBefore, "${") + if dollarBrace < 0 { + return CompletionContext{}, false + } + + // Make sure there's no closing "}" between "${" and cursor. + afterOpen := textBefore[dollarBrace+2:] + if strings.Contains(afterOpen, "}") { + return CompletionContext{}, false + } + + return CompletionContext{ + Start: dollarBrace, + PartialPath: afterOpen, + }, true +} + +// CompleteInterpolation returns completion items for a partial interpolation path. +// editRange is the range to replace (from after "${" to cursor position). 
+func CompleteInterpolation(tree dyn.Value, partial string, editRange *Range) []CompletionItem { + if !tree.IsValid() { + return nil + } + + // Rewrite "var" / "var." shorthand to "variables" / "variables." for tree lookup. + lookupPath := partial + isVarShorthand := strings.HasPrefix(partial, "var.") || partial == "var" + if partial == "var" { + lookupPath = "variables." + } else if isVarShorthand { + lookupPath = "variables." + strings.TrimPrefix(partial, "var.") + } + + // Check if the user is typing an index like "foo.bar[" or "foo.bar[0". + // In that case, navigate to "foo.bar" and suggest indices. + if basePath, ok := parsePartialIndex(lookupPath); ok { + return completeSequenceIndices(tree, basePath, partial, isVarShorthand, editRange) + } + + // Split into parent path and the prefix the user is currently typing. + parentPath, prefix := splitPartialPath(lookupPath) + + // Navigate to the parent node in the tree. + parent := tree + parentFound := true + if parentPath != "" { + p, err := dyn.NewPathFromString(parentPath) + if err != nil { + parentFound = false + } else { + parent, err = dyn.GetByPath(tree, p) + if err != nil { + parentFound = false + } + } + } + + // If parent is a sequence, suggest indexed access (e.g., path[0], path[1]). + if parentFound && parent.Kind() == dyn.KindSequence { + return completeSequenceIndices(tree, parentPath, partial, isVarShorthand, editRange) + } + + // Collect child keys from the parent map. + var items []CompletionItem + if parentFound && parent.Kind() == dyn.KindMap { + m, ok := parent.AsMap() + if ok { + for _, pair := range m.Pairs() { + key := pair.Key.MustString() + if prefix != "" && !strings.HasPrefix(key, prefix) { + continue + } + + child := pair.Value + + // Expand sequences inline: instead of showing "tasks" (list), + // show "tasks[0]", "tasks[1]", etc. directly. 
+ if child.Kind() == dyn.KindSequence { + seq, ok := child.AsSequence() + if ok { + basePath := buildDisplayPath(parentPath, key, isVarShorthand) + for i, elem := range seq { + indexedPath := fmt.Sprintf("%s[%d]", basePath, i) + // Use dot-separated filter text so VSCode's completion + // engine can match it (brackets confuse the fuzzy matcher). + filterPath := fmt.Sprintf("%s.%d", basePath, i) + kind, detail := classifyValue(elem) + item := CompletionItem{ + Label: indexedPath, + Kind: kind, + Detail: detail, + FilterText: filterPath, + } + applyTextEdit(&item, indexedPath, editRange) + items = append(items, item) + } + } + continue + } + + displayPath := buildDisplayPath(parentPath, key, isVarShorthand) + kind, detail := classifyValue(child) + + item := CompletionItem{ + Label: displayPath, + Kind: kind, + Detail: detail, + FilterText: displayPath, + } + applyTextEdit(&item, displayPath, editRange) + items = append(items, item) + } + } + } + + // Merge computed keys that match the partial path. + items = mergeComputedItems(items, partial, editRange) + + if len(items) == 0 { + return nil + } + + sort.Slice(items, func(i, j int) bool { + return items[i].Label < items[j].Label + }) + return items +} + +// mergeComputedItems appends computed completion items that don't duplicate existing tree items. +func mergeComputedItems(items []CompletionItem, partial string, editRange *Range) []CompletionItem { + computed := computedCompletions(partial, editRange) + existing := make(map[string]bool, len(items)) + for _, it := range items { + existing[it.Label] = true + } + for _, c := range computed { + if !existing[c.Label] { + items = append(items, c) + } + } + return items +} + +// completeSequenceIndices suggests [0], [1], ... for a sequence node. 
+func completeSequenceIndices(tree dyn.Value, seqPath, partial string, isVarShorthand bool, editRange *Range) []CompletionItem { + node := tree + if seqPath != "" { + p, err := dyn.NewPathFromString(seqPath) + if err != nil { + return nil + } + node, err = dyn.GetByPath(tree, p) + if err != nil { + return nil + } + } + + seq, ok := node.AsSequence() + if !ok { + return nil + } + + // Build the display prefix (rewrite variables back to var shorthand). + displayPrefix := seqPath + if isVarShorthand { + displayPrefix = rewriteToVarShorthand(seqPath) + } + + var items []CompletionItem + for i, elem := range seq { + displayPath := fmt.Sprintf("%s[%d]", displayPrefix, i) + // Use dot-separated filter text so VSCode's completion + // engine can match it (brackets confuse the fuzzy matcher). + filterPath := fmt.Sprintf("%s.%d", displayPrefix, i) + kind, detail := classifyValue(elem) + + item := CompletionItem{ + Label: displayPath, + Kind: kind, + Detail: detail, + FilterText: filterPath, + } + applyTextEdit(&item, displayPath, editRange) + items = append(items, item) + } + return items +} + +// parsePartialIndex checks if the path ends with a partial index like "foo.bar[" or "foo.bar[1". +// Returns the base path ("foo.bar") and true if so. +func parsePartialIndex(path string) (string, bool) { + bracketIdx := strings.LastIndex(path, "[") + if bracketIdx < 0 { + return "", false + } + + // Only match if "[" is the last bracket and there's no closing "]" after it. + after := path[bracketIdx:] + if strings.Contains(after, "]") { + return "", false + } + + return path[:bracketIdx], true +} + +// TopLevelCompletions returns completions for when the user just typed "${" with no path yet. +func TopLevelCompletions(tree dyn.Value, editRange *Range) []CompletionItem { + items := CompleteInterpolation(tree, "", editRange) + + // Add "var" shorthand if "variables" exists in the tree. 
+ vars := tree.Get("variables") + if vars.Kind() == dyn.KindMap { + item := CompletionItem{ + Label: "var", + Kind: completionKindVariable, + Detail: "variable shorthand", + FilterText: "var", + } + applyTextEdit(&item, "var", editRange) + items = append(items, item) + } + + sort.Slice(items, func(i, j int) bool { + return items[i].Label < items[j].Label + }) + return items +} + +// splitPartialPath splits "resources.jobs.my_j" into parent="resources.jobs" and prefix="my_j". +// If there's no dot, parent="" and prefix is the whole string. +// Handles paths with index suffixes like "foo.bar[0].baz" correctly. +func splitPartialPath(partial string) (parent, prefix string) { + idx := strings.LastIndex(partial, ".") + if idx < 0 { + return "", partial + } + return partial[:idx], partial[idx+1:] +} + +// buildDisplayPath constructs the full display path, handling var shorthand. +func buildDisplayPath(parentPath, key string, isVarShorthand bool) string { + if parentPath == "" { + return key + } + if isVarShorthand { + return rewriteToVarShorthand(parentPath) + "." + key + } + return parentPath + "." + key +} + +// rewriteToVarShorthand rewrites a "variables..." path back to "var..." for display. +func rewriteToVarShorthand(path string) string { + if path == "variables" { + return "var" + } + if strings.HasPrefix(path, "variables.") { + return "var." + strings.TrimPrefix(path, "variables.") + } + if strings.HasPrefix(path, "variables[") { + return "var" + strings.TrimPrefix(path, "variables") + } + return path +} + +// classifyValue returns the completion kind and detail string for a dyn.Value. 
+func classifyValue(v dyn.Value) (int, string) { + switch v.Kind() { + case dyn.KindMap: + return completionKindModule, "map" + case dyn.KindSequence: + return completionKindModule, "list" + case dyn.KindString: + if s, ok := v.AsString(); ok { + return completionKindValue, s + } + case dyn.KindBool: + if b, ok := v.AsBool(); ok { + if b { + return completionKindValue, "true" + } + return completionKindValue, "false" + } + case dyn.KindInt, dyn.KindFloat: + return completionKindValue, "number" + case dyn.KindInvalid, dyn.KindNil, dyn.KindTime: + // These kinds are not expected in bundle YAML but are handled + // for exhaustiveness. + } + return completionKindField, "" +} + +// computedKeys are keys that exist at runtime but are not present in YAML config files. +// Users can reference these in ${...} interpolation expressions. +var computedKeys = []string{ + "bundle.target", + "bundle.environment", + "bundle.git.branch", + "bundle.git.origin_url", + "bundle.git.commit", + "bundle.git.actual_branch", + "bundle.git.bundle_root_path", + "workspace.current_user.short_name", + "workspace.current_user.user_name", + "workspace.root_path", + "workspace.file_path", + "workspace.resource_path", + "workspace.artifact_path", + "workspace.state_path", +} + +// computedCompletions returns completion items for computed keys matching the partial path prefix. +func computedCompletions(partial string, editRange *Range) []CompletionItem { + var items []CompletionItem + for _, key := range computedKeys { + if partial != "" && !strings.HasPrefix(key, partial) { + continue + } + // Only show exact-depth children: if partial is "bundle.", show "bundle.target" + // but not "bundle.git.commit" (that requires "bundle.git." first). + suffix := strings.TrimPrefix(key, partial) + if dotIdx := strings.Index(suffix, "."); dotIdx >= 0 { + // The computed key has more depth; show the intermediate segment instead. + // e.g., for partial="bundle." and key="bundle.git.commit", show "bundle.git". 
+ intermediate := partial + suffix[:dotIdx] + // Check if we already added this intermediate. + found := false + for _, it := range items { + if it.Label == intermediate { + found = true + break + } + } + if !found { + item := CompletionItem{ + Label: intermediate, + Kind: completionKindModule, + Detail: "computed", + FilterText: intermediate, + } + applyTextEdit(&item, intermediate, editRange) + items = append(items, item) + } + continue + } + + item := CompletionItem{ + Label: key, + Kind: completionKindVariable, + Detail: "computed", + FilterText: key, + } + applyTextEdit(&item, key, editRange) + items = append(items, item) + } + return items +} + +// applyTextEdit sets either a TextEdit or InsertText on the completion item. +func applyTextEdit(item *CompletionItem, text string, editRange *Range) { + if editRange != nil { + item.TextEdit = &TextEdit{ + Range: *editRange, + NewText: text, + } + } else { + item.InsertText = text + } +} diff --git a/bundle/lsp/completion_test.go b/bundle/lsp/completion_test.go new file mode 100644 index 0000000000..574a3e567a --- /dev/null +++ b/bundle/lsp/completion_test.go @@ -0,0 +1,442 @@ +package lsp_test + +import ( + "strings" + "testing" + + "github.com/databricks/cli/bundle/lsp" + "github.com/databricks/cli/libs/dyn/yamlloader" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestFindCompletionContextAfterDollarBrace(t *testing.T) { + lines := []string{` name: "${"`} + // Cursor is right after "${" which starts at index 9 ($), so partial starts at 11. 
+ ctx, ok := lsp.FindCompletionContext(lines, lsp.Position{Line: 0, Character: 11}) + require.True(t, ok) + assert.Equal(t, 9, ctx.Start) + assert.Equal(t, "", ctx.PartialPath) +} + +func TestFindCompletionContextPartialPath(t *testing.T) { + lines := []string{` name: "${var.clust"`} + ctx, ok := lsp.FindCompletionContext(lines, lsp.Position{Line: 0, Character: 20}) + require.True(t, ok) + assert.Equal(t, "var.clust", ctx.PartialPath) +} + +func TestFindCompletionContextAfterDot(t *testing.T) { + // 0123456789012345678901234567 + lines := []string{` name: "${resources.jobs."`} + // "${" starts at 9, partial path starts at 11, cursor at 26 (after trailing dot) + ctx, ok := lsp.FindCompletionContext(lines, lsp.Position{Line: 0, Character: 26}) + require.True(t, ok) + assert.Equal(t, "resources.jobs.", ctx.PartialPath) +} + +func TestFindCompletionContextNotInInterpolation(t *testing.T) { + lines := []string{` name: "hello world"`} + _, ok := lsp.FindCompletionContext(lines, lsp.Position{Line: 0, Character: 15}) + assert.False(t, ok) +} + +func TestFindCompletionContextClosedBrace(t *testing.T) { + // 01234567890123456789 + lines := []string{` name: "${var.foo}"`} + // Cursor at 19, after the closing "}" + _, ok := lsp.FindCompletionContext(lines, lsp.Position{Line: 0, Character: 19}) + assert.False(t, ok) +} + +func TestCompleteInterpolationTopLevelKeys(t *testing.T) { + yaml := ` +bundle: + name: test +variables: + foo: + default: bar +resources: + jobs: + my_job: + name: hello +` + v, err := yamlloader.LoadYAML("test.yml", strings.NewReader(yaml)) + require.NoError(t, err) + + items := lsp.CompleteInterpolation(v, "", nil) + labels := extractLabels(items) + assert.Contains(t, labels, "bundle") + assert.Contains(t, labels, "variables") + assert.Contains(t, labels, "resources") +} + +func TestCompleteInterpolationVarShorthand(t *testing.T) { + yaml := ` +variables: + cluster_id: + default: "abc" + cluster_name: + default: "my-cluster" + warehouse_id: + default: "def" 
+` + v, err := yamlloader.LoadYAML("test.yml", strings.NewReader(yaml)) + require.NoError(t, err) + + items := lsp.CompleteInterpolation(v, "var.", nil) + labels := extractLabels(items) + assert.Contains(t, labels, "var.cluster_id") + assert.Contains(t, labels, "var.cluster_name") + assert.Contains(t, labels, "var.warehouse_id") +} + +func TestCompleteInterpolationVarShorthandWithPrefix(t *testing.T) { + yaml := ` +variables: + cluster_id: + default: "abc" + cluster_name: + default: "my-cluster" + warehouse_id: + default: "def" +` + v, err := yamlloader.LoadYAML("test.yml", strings.NewReader(yaml)) + require.NoError(t, err) + + items := lsp.CompleteInterpolation(v, "var.cluster", nil) + labels := extractLabels(items) + assert.Contains(t, labels, "var.cluster_id") + assert.Contains(t, labels, "var.cluster_name") + assert.NotContains(t, labels, "var.warehouse_id") +} + +func TestCompleteInterpolationVarWithoutTrailingDot(t *testing.T) { + yaml := ` +variables: + cluster_id: + default: "abc" + cluster_name: + default: "my-cluster" +` + v, err := yamlloader.LoadYAML("test.yml", strings.NewReader(yaml)) + require.NoError(t, err) + + // Typing "${var" (no trailing dot) should produce "var.cluster_id", not ".cluster_id". 
+ items := lsp.CompleteInterpolation(v, "var", nil) + labels := extractLabels(items) + assert.Contains(t, labels, "var.cluster_id") + assert.Contains(t, labels, "var.cluster_name") + assert.NotContains(t, labels, ".cluster_id") + assert.NotContains(t, labels, "variables") +} + +func TestCompleteInterpolationResourceJobs(t *testing.T) { + yaml := ` +resources: + jobs: + etl_job: + name: "ETL" + report_job: + name: "Report" + pipelines: + dlt: + name: "DLT" +` + v, err := yamlloader.LoadYAML("test.yml", strings.NewReader(yaml)) + require.NoError(t, err) + + items := lsp.CompleteInterpolation(v, "resources.jobs.", nil) + labels := extractLabels(items) + assert.Contains(t, labels, "resources.jobs.etl_job") + assert.Contains(t, labels, "resources.jobs.report_job") + assert.Len(t, items, 2) +} + +func TestCompleteInterpolationResourceTypes(t *testing.T) { + yaml := ` +resources: + jobs: + my_job: + name: "hello" + pipelines: + my_pipeline: + name: "world" +` + v, err := yamlloader.LoadYAML("test.yml", strings.NewReader(yaml)) + require.NoError(t, err) + + items := lsp.CompleteInterpolation(v, "resources.", nil) + labels := extractLabels(items) + assert.Contains(t, labels, "resources.jobs") + assert.Contains(t, labels, "resources.pipelines") +} + +func TestCompleteInterpolationNoMatch(t *testing.T) { + yaml := ` +bundle: + name: test +` + v, err := yamlloader.LoadYAML("test.yml", strings.NewReader(yaml)) + require.NoError(t, err) + + items := lsp.CompleteInterpolation(v, "nonexistent.", nil) + assert.Empty(t, items) +} + +func TestTopLevelCompletionsIncludesVarShorthand(t *testing.T) { + yaml := ` +bundle: + name: test +variables: + foo: + default: bar +` + v, err := yamlloader.LoadYAML("test.yml", strings.NewReader(yaml)) + require.NoError(t, err) + + items := lsp.TopLevelCompletions(v, nil) + labels := extractLabels(items) + assert.Contains(t, labels, "var") + assert.Contains(t, labels, "bundle") + assert.Contains(t, labels, "variables") +} + +func 
TestTopLevelCompletionsNoVarWithoutVariables(t *testing.T) { + yaml := ` +bundle: + name: test +` + v, err := yamlloader.LoadYAML("test.yml", strings.NewReader(yaml)) + require.NoError(t, err) + + items := lsp.TopLevelCompletions(v, nil) + labels := extractLabels(items) + assert.NotContains(t, labels, "var") + assert.Contains(t, labels, "bundle") +} + +func TestCompleteInterpolationSequenceExpandedInline(t *testing.T) { + yaml := ` +resources: + pipelines: + dlt: + name: "DLT" + libraries: + - notebook: + path: ./a.py + - notebook: + path: ./b.py +` + v, err := yamlloader.LoadYAML("test.yml", strings.NewReader(yaml)) + require.NoError(t, err) + + // Typing "${resources.pipelines.dlt." should expand libraries inline as [0], [1]. + items := lsp.CompleteInterpolation(v, "resources.pipelines.dlt.", nil) + labels := extractLabels(items) + assert.Contains(t, labels, "resources.pipelines.dlt.name") + assert.Contains(t, labels, "resources.pipelines.dlt.libraries[0]") + assert.Contains(t, labels, "resources.pipelines.dlt.libraries[1]") +} + +func TestCompleteInterpolationSequenceWithPartialIndex(t *testing.T) { + yaml := ` +resources: + pipelines: + dlt: + libraries: + - notebook: + path: ./a.py + - notebook: + path: ./b.py +` + v, err := yamlloader.LoadYAML("test.yml", strings.NewReader(yaml)) + require.NoError(t, err) + + // Typing "${resources.pipelines.dlt.libraries[" should still suggest indices. 
+ items := lsp.CompleteInterpolation(v, "resources.pipelines.dlt.libraries[", nil) + labels := extractLabels(items) + assert.Contains(t, labels, "resources.pipelines.dlt.libraries[0]") + assert.Contains(t, labels, "resources.pipelines.dlt.libraries[1]") + assert.Len(t, items, 2) +} + +func TestCompleteInterpolationAfterIndex(t *testing.T) { + yaml := ` +resources: + jobs: + my_job: + tasks: + - task_key: ingest + notebook_task: + notebook_path: ./a.py +` + v, err := yamlloader.LoadYAML("test.yml", strings.NewReader(yaml)) + require.NoError(t, err) + + // Typing "${resources.jobs.my_job.tasks[0]." should show keys of the first task. + items := lsp.CompleteInterpolation(v, "resources.jobs.my_job.tasks[0].", nil) + labels := extractLabels(items) + assert.Contains(t, labels, "resources.jobs.my_job.tasks[0].task_key") + assert.Contains(t, labels, "resources.jobs.my_job.tasks[0].notebook_task") +} + +func TestCompleteInterpolationComputedBundleKeys(t *testing.T) { + yaml := ` +bundle: + name: test + git: + origin_url: "https://example.com" +` + v, err := yamlloader.LoadYAML("test.yml", strings.NewReader(yaml)) + require.NoError(t, err) + + items := lsp.CompleteInterpolation(v, "bundle.", nil) + labels := extractLabels(items) + + // Tree-based keys. + assert.Contains(t, labels, "bundle.name") + assert.Contains(t, labels, "bundle.git") + + // Computed keys. + assert.Contains(t, labels, "bundle.target") + assert.Contains(t, labels, "bundle.environment") + + // bundle.git.commit should not appear at this depth; "bundle.git" is the intermediate. + assert.NotContains(t, labels, "bundle.git.commit") + + // Verify computed items have the right detail. 
+ for _, item := range items { + if item.Label == "bundle.target" { + assert.Equal(t, "computed", item.Detail) + assert.Equal(t, 6, item.Kind) // completionKindVariable + } + } +} + +func TestCompleteInterpolationComputedGitSubkeys(t *testing.T) { + yaml := ` +bundle: + name: test + git: + origin_url: "https://example.com" +` + v, err := yamlloader.LoadYAML("test.yml", strings.NewReader(yaml)) + require.NoError(t, err) + + items := lsp.CompleteInterpolation(v, "bundle.git.", nil) + labels := extractLabels(items) + + // Tree-based key. + assert.Contains(t, labels, "bundle.git.origin_url") + + // Computed keys at this depth. + assert.Contains(t, labels, "bundle.git.commit") + assert.Contains(t, labels, "bundle.git.actual_branch") + assert.Contains(t, labels, "bundle.git.bundle_root_path") +} + +func TestCompleteInterpolationComputedWorkspaceCurrentUser(t *testing.T) { + yaml := ` +bundle: + name: test +` + v, err := yamlloader.LoadYAML("test.yml", strings.NewReader(yaml)) + require.NoError(t, err) + + // workspace doesn't exist in the tree, but computed keys should still appear. + items := lsp.CompleteInterpolation(v, "workspace.current_user.", nil) + labels := extractLabels(items) + assert.Contains(t, labels, "workspace.current_user.short_name") + assert.Contains(t, labels, "workspace.current_user.user_name") +} + +func TestCompleteInterpolationComputedFilterByPrefix(t *testing.T) { + yaml := ` +bundle: + name: test +` + v, err := yamlloader.LoadYAML("test.yml", strings.NewReader(yaml)) + require.NoError(t, err) + + items := lsp.CompleteInterpolation(v, "bundle.t", nil) + labels := extractLabels(items) + + // "bundle.target" starts with "bundle.t". + assert.Contains(t, labels, "bundle.target") + + // "bundle.environment" and "bundle.git.*" do not start with "bundle.t". 
+ assert.NotContains(t, labels, "bundle.environment") + assert.NotContains(t, labels, "bundle.git.commit") + assert.NotContains(t, labels, "bundle.git") +} + +func TestCompleteInterpolationComputedTopLevel(t *testing.T) { + yaml := ` +bundle: + name: test +` + v, err := yamlloader.LoadYAML("test.yml", strings.NewReader(yaml)) + require.NoError(t, err) + + // At top level, "workspace" should appear as a computed intermediate. + items := lsp.CompleteInterpolation(v, "", nil) + labels := extractLabels(items) + assert.Contains(t, labels, "bundle") + assert.Contains(t, labels, "workspace") +} + +func TestCompleteInterpolationComputedNoDuplicates(t *testing.T) { + yaml := ` +bundle: + name: test + git: + origin_url: "https://example.com" +` + v, err := yamlloader.LoadYAML("test.yml", strings.NewReader(yaml)) + require.NoError(t, err) + + items := lsp.CompleteInterpolation(v, "bundle.", nil) + // "bundle.git" should appear only once even though it exists in the tree + // and would also be generated as a computed intermediate. + count := 0 + for _, item := range items { + if item.Label == "bundle.git" { + count++ + } + } + assert.Equal(t, 1, count) +} + +func TestFindCompletionContextInsideClosedInterpolation(t *testing.T) { + // 0123456789012345 + lines := []string{`${bundle.target}`} + // Cursor is at position 15, right before the closing "}". + ctx, ok := lsp.FindCompletionContext(lines, lsp.Position{Line: 0, Character: 15}) + require.True(t, ok) + assert.Equal(t, 0, ctx.Start) + assert.Equal(t, "bundle.target", ctx.PartialPath) +} + +func TestCompleteInterpolationExactComputedKey(t *testing.T) { + yaml := ` +bundle: + name: test +` + v, err := yamlloader.LoadYAML("test.yml", strings.NewReader(yaml)) + require.NoError(t, err) + + // When the user has typed "bundle.target" exactly, the completion should + // still return "bundle.target" as a match (exact prefix match). 
+ items := lsp.CompleteInterpolation(v, "bundle.target", nil) + labels := extractLabels(items) + assert.Contains(t, labels, "bundle.target") +} + +func extractLabels(items []lsp.CompletionItem) []string { + labels := make([]string, len(items)) + for i, item := range items { + labels[i] = item.Label + } + return labels +} diff --git a/bundle/lsp/definition.go b/bundle/lsp/definition.go new file mode 100644 index 0000000000..8f1806f771 --- /dev/null +++ b/bundle/lsp/definition.go @@ -0,0 +1,130 @@ +package lsp + +import ( + "fmt" + "regexp" + "strings" + + "github.com/databricks/cli/libs/dyn" +) + +// InterpolationRe matches ${...} interpolation expressions in strings. +// Copied from libs/dyn/dynvar/ref.go to avoid coupling LSP to dynvar internals. +var InterpolationRe = regexp.MustCompile( + fmt.Sprintf(`\$\{(%s(\.%s(\[[0-9]+\])*)*(\[[0-9]+\])*)\}`, + `[a-zA-Z]+([-_]*[a-zA-Z0-9]+)*`, + `[a-zA-Z]+([-_]*[a-zA-Z0-9]+)*`, + ), +) + +// InterpolationRef represents a ${...} reference found at a cursor position. +type InterpolationRef struct { + Path string // e.g., "resources.jobs.my_job.name" + Range Range // range of the full "${...}" token +} + +// FindInterpolationAtPosition finds the ${...} expression the cursor is inside. 
+func FindInterpolationAtPosition(lines []string, pos Position) (InterpolationRef, bool) { + if pos.Line < 0 || pos.Line >= len(lines) { + return InterpolationRef{}, false + } + + line := lines[pos.Line] + matches := InterpolationRe.FindAllStringSubmatchIndex(line, -1) + for _, m := range matches { + // m[0]:m[1] is the full match "${...}" + // m[2]:m[3] is the first capture group (the path inside ${}) + start := m[0] + end := m[1] + if pos.Character >= start && pos.Character < end { + path := line[m[2]:m[3]] + return InterpolationRef{ + Path: path, + Range: Range{ + Start: Position{Line: pos.Line, Character: start}, + End: Position{Line: pos.Line, Character: end}, + }, + }, true + } + } + return InterpolationRef{}, false +} + +// ResolveDefinition resolves a path string against the merged tree and returns its source location. +func ResolveDefinition(tree dyn.Value, pathStr string) (dyn.Location, bool) { + if !tree.IsValid() { + return dyn.Location{}, false + } + + // Handle var.X shorthand: rewrite to variables.X. + if strings.HasPrefix(pathStr, "var.") { + pathStr = "variables." + strings.TrimPrefix(pathStr, "var.") + } + + p, err := dyn.NewPathFromString(pathStr) + if err != nil { + return dyn.Location{}, false + } + + v, err := dyn.GetByPath(tree, p) + if err != nil { + return dyn.Location{}, false + } + + loc := v.Location() + if loc.File == "" { + return dyn.Location{}, false + } + return loc, true +} + +// InterpolationReference records a ${...} reference found in the merged tree. +type InterpolationReference struct { + Path string // dyn path where the reference was found + Location dyn.Location // source location of the string containing the reference + RefStr string // the full "${...}" expression +} + +// FindInterpolationReferences walks the merged tree to find all ${...} string values +// whose reference path starts with the given resource path prefix. 
+func FindInterpolationReferences(tree dyn.Value, resourcePath string) []InterpolationReference { + if !tree.IsValid() { + return nil + } + + var refs []InterpolationReference + dyn.Walk(tree, func(p dyn.Path, v dyn.Value) (dyn.Value, error) { //nolint:errcheck + s, ok := v.AsString() + if !ok { + return v, nil + } + + matches := InterpolationRe.FindAllStringSubmatch(s, -1) + for _, m := range matches { + refPath := m[1] + if refPath == resourcePath || strings.HasPrefix(refPath, resourcePath+".") { + refs = append(refs, InterpolationReference{ + Path: p.String(), + Location: v.Location(), + RefStr: m[0], + }) + } + } + return v, nil + }) + + return refs +} + +// DynLocationToLSPLocation converts a 1-based dyn.Location to a 0-based LSPLocation. +func DynLocationToLSPLocation(loc dyn.Location) LSPLocation { + line := max(loc.Line-1, 0) + col := max(loc.Column-1, 0) + return LSPLocation{ + URI: PathToURI(loc.File), + Range: Range{ + Start: Position{Line: line, Character: col}, + End: Position{Line: line, Character: col}, + }, + } +} diff --git a/bundle/lsp/definition_test.go b/bundle/lsp/definition_test.go new file mode 100644 index 0000000000..c55662b791 --- /dev/null +++ b/bundle/lsp/definition_test.go @@ -0,0 +1,142 @@ +package lsp_test + +import ( + "strings" + "testing" + + "github.com/databricks/cli/bundle/lsp" + "github.com/databricks/cli/libs/dyn" + "github.com/databricks/cli/libs/dyn/yamlloader" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestFindInterpolationAtPositionBasic(t *testing.T) { + lines := []string{` name: "${resources.jobs.my_job.name}"`} + // Cursor inside the interpolation. + ref, ok := lsp.FindInterpolationAtPosition(lines, lsp.Position{Line: 0, Character: 15}) + require.True(t, ok) + assert.Equal(t, "resources.jobs.my_job.name", ref.Path) +} + +func TestFindInterpolationAtPositionMultiple(t *testing.T) { + lines := []string{`value: "${a.b} and ${c.d}"`} + // Cursor on the second interpolation. 
+ ref, ok := lsp.FindInterpolationAtPosition(lines, lsp.Position{Line: 0, Character: 21}) + require.True(t, ok) + assert.Equal(t, "c.d", ref.Path) +} + +func TestFindInterpolationAtPositionOutside(t *testing.T) { + lines := []string{`value: "${a.b} plain text ${c.d}"`} + // Cursor on "plain text" between the two interpolations. + _, ok := lsp.FindInterpolationAtPosition(lines, lsp.Position{Line: 0, Character: 16}) + assert.False(t, ok) +} + +func TestFindInterpolationAtPositionAtDollar(t *testing.T) { + lines := []string{`name: "${var.foo}"`} + // Cursor on the "$" character. + idx := strings.Index(lines[0], "$") + ref, ok := lsp.FindInterpolationAtPosition(lines, lsp.Position{Line: 0, Character: idx}) + require.True(t, ok) + assert.Equal(t, "var.foo", ref.Path) +} + +func TestFindInterpolationAtPositionNone(t *testing.T) { + lines := []string{`name: "plain string"`} + _, ok := lsp.FindInterpolationAtPosition(lines, lsp.Position{Line: 0, Character: 10}) + assert.False(t, ok) +} + +func TestResolveDefinition(t *testing.T) { + yaml := ` +resources: + jobs: + my_job: + name: "ETL" +` + tree, err := yamlloader.LoadYAML("test.yml", strings.NewReader(yaml)) + require.NoError(t, err) + + loc, ok := lsp.ResolveDefinition(tree, "resources.jobs.my_job") + require.True(t, ok) + assert.Equal(t, "test.yml", loc.File) + assert.Positive(t, loc.Line) +} + +func TestResolveDefinitionVarShorthand(t *testing.T) { + yaml := ` +variables: + foo: + default: "bar" +` + tree, err := yamlloader.LoadYAML("test.yml", strings.NewReader(yaml)) + require.NoError(t, err) + + loc, ok := lsp.ResolveDefinition(tree, "var.foo") + require.True(t, ok) + assert.Equal(t, "test.yml", loc.File) +} + +func TestResolveDefinitionInvalid(t *testing.T) { + yaml := ` +resources: + jobs: + my_job: + name: "ETL" +` + tree, err := yamlloader.LoadYAML("test.yml", strings.NewReader(yaml)) + require.NoError(t, err) + + _, ok := lsp.ResolveDefinition(tree, "resources.jobs.nonexistent") + assert.False(t, ok) +} + +func 
TestFindInterpolationReferences(t *testing.T) { + yaml := ` +resources: + jobs: + my_job: + name: "ETL" + pipelines: + my_pipeline: + name: "${resources.jobs.my_job.name}" + settings: + target: "${resources.jobs.my_job.id}" +` + tree, err := yamlloader.LoadYAML("test.yml", strings.NewReader(yaml)) + require.NoError(t, err) + + refs := lsp.FindInterpolationReferences(tree, "resources.jobs.my_job") + require.Len(t, refs, 2) + assert.Contains(t, refs[0].RefStr, "resources.jobs.my_job") + assert.Contains(t, refs[1].RefStr, "resources.jobs.my_job") +} + +func TestFindInterpolationReferencesNoMatch(t *testing.T) { + yaml := ` +resources: + jobs: + my_job: + name: "${var.name}" +` + tree, err := yamlloader.LoadYAML("test.yml", strings.NewReader(yaml)) + require.NoError(t, err) + + refs := lsp.FindInterpolationReferences(tree, "resources.jobs.my_job") + assert.Empty(t, refs) +} + +func TestDynLocationToLSPLocation(t *testing.T) { + loc := dyn.Location{ + File: "/path/to/file.yml", + Line: 5, + Column: 10, + } + + lspLoc := lsp.DynLocationToLSPLocation(loc) + assert.Equal(t, "file:///path/to/file.yml", lspLoc.URI) + assert.Equal(t, 4, lspLoc.Range.Start.Line) + assert.Equal(t, 9, lspLoc.Range.Start.Character) +} diff --git a/bundle/lsp/diagnostics.go b/bundle/lsp/diagnostics.go new file mode 100644 index 0000000000..169dd890df --- /dev/null +++ b/bundle/lsp/diagnostics.go @@ -0,0 +1,80 @@ +package lsp + +import ( + "fmt" + "strings" + + "github.com/databricks/cli/libs/dyn" +) + +const diagnosticSource = "databricks-bundle-lsp" + +// computedPrefixes are path prefixes that are populated at deploy time and +// should not be flagged as unresolved references. Prefixes ending in "." +// match any sub-path; others require an exact match. 
+var computedPrefixes = []string{ + "bundle.target", + "bundle.environment", + "bundle.git.", + "workspace.current_user.", + "workspace.root_path", + "workspace.file_path", + "workspace.resource_path", + "workspace.artifact_path", + "workspace.state_path", +} + +// DiagnoseInterpolations checks all ${...} interpolation references in the document +// and returns diagnostics for references that cannot be resolved in the merged tree. +func DiagnoseInterpolations(lines []string, tree dyn.Value) []Diagnostic { + var diags []Diagnostic + for lineIdx, line := range lines { + matches := InterpolationRe.FindAllStringSubmatchIndex(line, -1) + for _, m := range matches { + // m[0]:m[1] is the full "${...}" match. + // m[2]:m[3] is the captured path inside ${}. + path := line[m[2]:m[3]] + + if isComputedPath(path) { + continue + } + + _, found := ResolveDefinition(tree, path) + if found { + continue + } + + diags = append(diags, Diagnostic{ + Range: Range{ + Start: Position{Line: lineIdx, Character: m[0]}, + End: Position{Line: lineIdx, Character: m[1]}, + }, + Severity: DiagnosticSeverityWarning, + Source: diagnosticSource, + Message: fmt.Sprintf("Cannot resolve reference %q", path), + }) + } + } + return diags +} + +// isComputedPath returns true if the path is known to be populated at deploy +// time and won't appear in the static merged tree. +func isComputedPath(path string) bool { + // var.* references are rewritten to variables.* by ResolveDefinition, + // so we only need to handle the other computed prefixes here. + for _, prefix := range computedPrefixes { + if strings.HasSuffix(prefix, ".") { + // Dot-terminated: match any sub-path. + if strings.HasPrefix(path, prefix) { + return true + } + } else { + // Exact match only. 
+ if path == prefix { + return true + } + } + } + return false +} diff --git a/bundle/lsp/diagnostics_test.go b/bundle/lsp/diagnostics_test.go new file mode 100644 index 0000000000..aa114ffd2e --- /dev/null +++ b/bundle/lsp/diagnostics_test.go @@ -0,0 +1,159 @@ +package lsp_test + +import ( + "strings" + "testing" + + "github.com/databricks/cli/bundle/lsp" + "github.com/databricks/cli/libs/dyn" + "github.com/databricks/cli/libs/dyn/yamlloader" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestDiagnoseInterpolationsValidReferences(t *testing.T) { + yaml := ` +resources: + jobs: + my_job: + name: "ETL" +variables: + env: + default: "dev" +` + tree, err := yamlloader.LoadYAML("test.yml", strings.NewReader(yaml)) + require.NoError(t, err) + + lines := []string{ + `name: "${resources.jobs.my_job.name}"`, + `env: "${var.env}"`, + } + + diags := lsp.DiagnoseInterpolations(lines, tree) + assert.Empty(t, diags) +} + +func TestDiagnoseInterpolationsUnresolvableVar(t *testing.T) { + yaml := ` +variables: + env: + default: "dev" +` + tree, err := yamlloader.LoadYAML("test.yml", strings.NewReader(yaml)) + require.NoError(t, err) + + lines := []string{`name: "${var.nonexistent}"`} + + diags := lsp.DiagnoseInterpolations(lines, tree) + require.Len(t, diags, 1) + assert.Equal(t, lsp.DiagnosticSeverityWarning, diags[0].Severity) + assert.Contains(t, diags[0].Message, "var.nonexistent") + assert.Equal(t, 0, diags[0].Range.Start.Line) +} + +func TestDiagnoseInterpolationsComputedKeysSkipped(t *testing.T) { + tree := dyn.NewValue(map[string]dyn.Value{}, []dyn.Location{}) + + lines := []string{ + `target: "${bundle.target}"`, + `env: "${bundle.environment}"`, + `user: "${workspace.current_user.short_name}"`, + `name: "${workspace.current_user.user_name}"`, + `commit: "${bundle.git.commit}"`, + `branch: "${bundle.git.actual_branch}"`, + } + + diags := lsp.DiagnoseInterpolations(lines, tree) + assert.Empty(t, diags) +} + +func 
TestDiagnoseInterpolationsMissingResource(t *testing.T) { + yaml := ` +resources: + jobs: + my_job: + name: "ETL" +` + tree, err := yamlloader.LoadYAML("test.yml", strings.NewReader(yaml)) + require.NoError(t, err) + + lines := []string{`ref: "${resources.jobs.missing}"`} + + diags := lsp.DiagnoseInterpolations(lines, tree) + require.Len(t, diags, 1) + assert.Equal(t, lsp.DiagnosticSeverityWarning, diags[0].Severity) + assert.Contains(t, diags[0].Message, "resources.jobs.missing") +} + +func TestDiagnoseInterpolationsMultipleOnSameLine(t *testing.T) { + yaml := ` +resources: + jobs: + my_job: + name: "ETL" +` + tree, err := yamlloader.LoadYAML("test.yml", strings.NewReader(yaml)) + require.NoError(t, err) + + lines := []string{`value: "${resources.jobs.my_job.name} ${resources.jobs.bad}"`} + + diags := lsp.DiagnoseInterpolations(lines, tree) + require.Len(t, diags, 1) + assert.Contains(t, diags[0].Message, "resources.jobs.bad") +} + +func TestDiagnoseInterpolationsEmptyTree(t *testing.T) { + lines := []string{`name: "${var.something}"`} + + diags := lsp.DiagnoseInterpolations(lines, dyn.InvalidValue) + require.Len(t, diags, 1) + assert.Contains(t, diags[0].Message, "var.something") +} + +func TestDiagnoseInterpolationsDiagnosticRange(t *testing.T) { + yaml := ` +variables: + env: + default: "dev" +` + tree, err := yamlloader.LoadYAML("test.yml", strings.NewReader(yaml)) + require.NoError(t, err) + + lines := []string{` name: "${var.missing_var}"`} + + diags := lsp.DiagnoseInterpolations(lines, tree) + require.Len(t, diags, 1) + + // The range should cover the "${var.missing_var}" text. 
+ start := strings.Index(lines[0], "${") + end := strings.Index(lines[0], "}") + 1 + assert.Equal(t, start, diags[0].Range.Start.Character) + assert.Equal(t, end, diags[0].Range.End.Character) + assert.Equal(t, "databricks-bundle-lsp", diags[0].Source) +} + +func TestDiagnoseInterpolationsTypoWithIndex(t *testing.T) { + yaml := ` +resources: + pipelines: + dlt_pipeline: + libraries: + - notebook: + path: ./a.py +` + tree, err := yamlloader.LoadYAML("test.yml", strings.NewReader(yaml)) + require.NoError(t, err) + + lines := []string{`ref: "${resources.pipelines.dlt_pipeline.librarids[0]}"`} + diags := lsp.DiagnoseInterpolations(lines, tree) + require.Len(t, diags, 1) + assert.Contains(t, diags[0].Message, "librarids") +} + +func TestDiagnoseInterpolationsNoInterpolations(t *testing.T) { + tree := dyn.NewValue(map[string]dyn.Value{}, []dyn.Location{}) + lines := []string{`name: "plain text"`, `value: 42`} + + diags := lsp.DiagnoseInterpolations(lines, tree) + assert.Empty(t, diags) +} diff --git a/bundle/lsp/documents.go b/bundle/lsp/documents.go new file mode 100644 index 0000000000..ab49dfc369 --- /dev/null +++ b/bundle/lsp/documents.go @@ -0,0 +1,116 @@ +package lsp + +import ( + "net/url" + "path/filepath" + "runtime" + "strings" + "sync" + + "github.com/databricks/cli/libs/dyn" + "github.com/databricks/cli/libs/dyn/yamlloader" +) + +// Document tracks the state of an open text document. +type Document struct { + URI string + Version int + Content string + Lines []string // split by newline for position lookup + Value dyn.Value // parsed YAML (may be invalid) +} + +// DocumentStore manages open text documents. +type DocumentStore struct { + mu sync.RWMutex + docs map[string]*Document +} + +// NewDocumentStore creates an empty document store. +func NewDocumentStore() *DocumentStore { + return &DocumentStore{docs: make(map[string]*Document)} +} + +// Open registers a newly opened document. 
+func (s *DocumentStore) Open(uri string, version int, content string) {
+	doc := &Document{
+		URI:     uri,
+		Version: version,
+		Content: content,
+		Lines:   strings.Split(content, "\n"),
+	}
+	// Parse before taking the lock: at this point the document is not yet
+	// visible to other callers, so no synchronization is needed.
+	doc.parse()
+	s.mu.Lock()
+	s.docs[uri] = doc
+	s.mu.Unlock()
+}
+
+// Change updates the content of an already-open document.
+// A change notification for a URI that was never opened is silently ignored.
+//
+// NOTE(review): the Document is mutated in place under the store lock, but Get
+// hands out the same *Document pointer without holding the lock afterwards, so
+// a concurrent reader could observe a partially updated document — confirm the
+// RPC server serializes handler calls, or switch to copy-on-replace here.
+func (s *DocumentStore) Change(uri string, version int, content string) {
+	s.mu.Lock()
+	doc, ok := s.docs[uri]
+	if ok {
+		doc.Version = version
+		doc.Content = content
+		doc.Lines = strings.Split(content, "\n")
+		doc.parse()
+	}
+	s.mu.Unlock()
+}
+
+// Close removes a document from the store.
+func (s *DocumentStore) Close(uri string) {
+	s.mu.Lock()
+	delete(s.docs, uri)
+	s.mu.Unlock()
+}
+
+// Get returns the document for the given URI, or nil if not found.
+func (s *DocumentStore) Get(uri string) *Document {
+	s.mu.RLock()
+	defer s.mu.RUnlock()
+	return s.docs[uri]
+}
+
+// AllURIs returns the URIs of all open documents, in no particular order
+// (map iteration order is randomized).
+func (s *DocumentStore) AllURIs() []string {
+	s.mu.RLock()
+	defer s.mu.RUnlock()
+	uris := make([]string, 0, len(s.docs))
+	for uri := range s.docs {
+		uris = append(uris, uri)
+	}
+	return uris
+}
+
+// parse refreshes doc.Value from doc.Content. On a YAML parse error the value
+// is set to dyn.InvalidValue so downstream features can detect "not parseable"
+// instead of silently operating on stale data.
+func (doc *Document) parse() {
+	path := URIToPath(doc.URI)
+	v, err := yamlloader.LoadYAML(path, strings.NewReader(doc.Content))
+	if err != nil {
+		doc.Value = dyn.InvalidValue
+		return
+	}
+	doc.Value = v
+}
+
+// URIToPath converts a file:// URI to a filesystem path.
+// url.Parse percent-decodes the path component, so escapes such as %20 come
+// back as literal characters. If the URI cannot be parsed at all, it is
+// returned unchanged.
+func URIToPath(uri string) string {
+	u, err := url.Parse(uri)
+	if err != nil {
+		return uri
+	}
+	p := u.Path
+	// On Windows, file URIs look like file:///C:/path
+	if runtime.GOOS == "windows" && len(p) > 0 && p[0] == '/' {
+		p = p[1:]
+	}
+	return p
+}
+
+// PathToURI converts a filesystem path to a file:// URI.
+func PathToURI(path string) string { + if runtime.GOOS == "windows" { + path = "/" + filepath.ToSlash(path) + } + return "file://" + path +} diff --git a/bundle/lsp/protocol.go b/bundle/lsp/protocol.go new file mode 100644 index 0000000000..848f89ff6c --- /dev/null +++ b/bundle/lsp/protocol.go @@ -0,0 +1,214 @@ +package lsp + +// InitializeParams holds the parameters sent by the client in the initialize request. +type InitializeParams struct { + ProcessID int `json:"processId"` + RootURI string `json:"rootUri"` + RootPath string `json:"rootPath"` +} + +// InitializeResult holds the response to the initialize request. +type InitializeResult struct { + Capabilities ServerCapabilities `json:"capabilities"` +} + +// ServerCapabilities describes the capabilities the server supports. +type ServerCapabilities struct { + TextDocumentSync *TextDocumentSyncOptions `json:"textDocumentSync,omitempty"` + HoverProvider bool `json:"hoverProvider,omitempty"` + DocumentLinkProvider *DocumentLinkOptions `json:"documentLinkProvider,omitempty"` + DefinitionProvider bool `json:"definitionProvider,omitempty"` + CompletionProvider *CompletionOptions `json:"completionProvider,omitempty"` +} + +// CompletionOptions describes options for the completion provider. +type CompletionOptions struct { + TriggerCharacters []string `json:"triggerCharacters,omitempty"` +} + +// CompletionParams holds the parameters for textDocument/completion. +type CompletionParams struct { + TextDocument TextDocumentIdentifier `json:"textDocument"` + Position Position `json:"position"` +} + +// CompletionItem represents a single completion suggestion. 
+type CompletionItem struct { + Label string `json:"label"` + Kind int `json:"kind,omitempty"` + Detail string `json:"detail,omitempty"` + Documentation string `json:"documentation,omitempty"` + FilterText string `json:"filterText,omitempty"` + InsertText string `json:"insertText,omitempty"` + TextEdit *TextEdit `json:"textEdit,omitempty"` +} + +// TextEdit represents a text edit to be applied on completion. +type TextEdit struct { + Range Range `json:"range"` + NewText string `json:"newText"` +} + +// CompletionList represents a collection of completion items. +type CompletionList struct { + IsIncomplete bool `json:"isIncomplete"` + Items []CompletionItem `json:"items"` +} + +// DefinitionParams holds the parameters for textDocument/definition. +type DefinitionParams struct { + TextDocument TextDocumentIdentifier `json:"textDocument"` + Position Position `json:"position"` +} + +// LSPLocation represents a location in a document (used for definition results). +type LSPLocation struct { + URI string `json:"uri"` + Range Range `json:"range"` +} + +// TextDocumentSyncOptions describes how text document syncing works. +type TextDocumentSyncOptions struct { + OpenClose bool `json:"openClose"` + Change int `json:"change"` // 1 = Full, 2 = Incremental +} + +// DocumentLinkOptions describes options for the document link provider. +type DocumentLinkOptions struct { + ResolveProvider bool `json:"resolveProvider"` +} + +// TextDocumentIdentifier identifies a text document by its URI. +type TextDocumentIdentifier struct { + URI string `json:"uri"` +} + +// TextDocumentItem represents an open text document. +type TextDocumentItem struct { + URI string `json:"uri"` + LanguageID string `json:"languageId"` + Version int `json:"version"` + Text string `json:"text"` +} + +// VersionedTextDocumentIdentifier identifies a specific version of a text document. 
+type VersionedTextDocumentIdentifier struct { + URI string `json:"uri"` + Version int `json:"version"` +} + +// TextDocumentContentChangeEvent describes a change to a text document. +type TextDocumentContentChangeEvent struct { + Text string `json:"text"` +} + +// DidOpenTextDocumentParams holds the parameters for textDocument/didOpen. +type DidOpenTextDocumentParams struct { + TextDocument TextDocumentItem `json:"textDocument"` +} + +// DidChangeTextDocumentParams holds the parameters for textDocument/didChange. +type DidChangeTextDocumentParams struct { + TextDocument VersionedTextDocumentIdentifier `json:"textDocument"` + ContentChanges []TextDocumentContentChangeEvent `json:"contentChanges"` +} + +// DidCloseTextDocumentParams holds the parameters for textDocument/didClose. +type DidCloseTextDocumentParams struct { + TextDocument TextDocumentIdentifier `json:"textDocument"` +} + +// Position represents a zero-based line and character offset. +type Position struct { + Line int `json:"line"` + Character int `json:"character"` +} + +// Range represents a span of text in a document. +type Range struct { + Start Position `json:"start"` + End Position `json:"end"` +} + +// DocumentLinkParams holds the parameters for textDocument/documentLink. +type DocumentLinkParams struct { + TextDocument TextDocumentIdentifier `json:"textDocument"` +} + +// DocumentLink represents a clickable link in a document. +type DocumentLink struct { + Range Range `json:"range"` + Target string `json:"target"` + Tooltip string `json:"tooltip,omitempty"` +} + +// HoverParams holds the parameters for textDocument/hover. +type HoverParams struct { + TextDocument TextDocumentIdentifier `json:"textDocument"` + Position Position `json:"position"` +} + +// Hover represents the result of a hover request. +type Hover struct { + Contents MarkupContent `json:"contents"` + Range *Range `json:"range,omitempty"` +} + +// MarkupContent represents marked-up text for display. 
+type MarkupContent struct { + Kind string `json:"kind"` // "plaintext" or "markdown" + Value string `json:"value"` +} + +// RegistrationParams holds registrations for client/registerCapability. +type RegistrationParams struct { + Registrations []Registration `json:"registrations"` +} + +// Registration describes a capability registration. +type Registration struct { + ID string `json:"id"` + Method string `json:"method"` + RegisterOptions any `json:"registerOptions,omitempty"` +} + +// DidChangeWatchedFilesRegistrationOptions describes options for file watchers. +type DidChangeWatchedFilesRegistrationOptions struct { + Watchers []FileSystemWatcher `json:"watchers"` +} + +// FileSystemWatcher describes a single file system watcher. +type FileSystemWatcher struct { + GlobPattern string `json:"globPattern"` + Kind int `json:"kind,omitempty"` // 1=Create, 2=Change, 4=Delete; 7=all +} + +// DidChangeWatchedFilesParams holds the notification parameters. +type DidChangeWatchedFilesParams struct { + Changes []FileEvent `json:"changes"` +} + +// FileEvent describes a file change event. +type FileEvent struct { + URI string `json:"uri"` + Type int `json:"type"` // 1=Created, 2=Changed, 3=Deleted +} + +// Diagnostic severity constants. +const ( + DiagnosticSeverityWarning = 2 +) + +// Diagnostic represents a diagnostic (error, warning, etc.) in a document. +type Diagnostic struct { + Range Range `json:"range"` + Severity int `json:"severity"` + Source string `json:"source,omitempty"` + Message string `json:"message"` +} + +// PublishDiagnosticsParams holds the parameters for textDocument/publishDiagnostics. 
+type PublishDiagnosticsParams struct {
+	URI         string       `json:"uri"`
+	Diagnostics []Diagnostic `json:"diagnostics"`
+}
diff --git a/bundle/lsp/resource_index.go b/bundle/lsp/resource_index.go
new file mode 100644
index 0000000000..0fd703f108
--- /dev/null
+++ b/bundle/lsp/resource_index.go
+package lsp
+
+import (
+	"fmt"
+
+	"github.com/databricks/cli/libs/dyn"
+)
+
+// ResourceEntry represents a resource definition found in YAML.
+type ResourceEntry struct {
+	Type     string // e.g., "jobs", "pipelines"
+	Key      string // e.g., "my_etl_job"
+	KeyRange Range  // position of the key in the YAML file
+	Path     string // e.g., "resources.jobs.my_etl_job"
+}
+
+// IndexResources walks a parsed YAML dyn.Value and finds all resource definitions
+// under "resources.<type>.<key>" (e.g. "resources.jobs.my_etl_job").
+// It returns nil when the document failed to parse, has no "resources" mapping,
+// or the mapping is empty.
+func IndexResources(doc *Document) []ResourceEntry {
+	if !doc.Value.IsValid() {
+		return nil
+	}
+
+	resources := doc.Value.Get("resources")
+	if resources.Kind() != dyn.KindMap {
+		return nil
+	}
+
+	m, ok := resources.AsMap()
+	if !ok {
+		return nil
+	}
+
+	var entries []ResourceEntry
+	for _, typePair := range m.Pairs() {
+		resourceType := typePair.Key.MustString()
+		typeVal := typePair.Value
+		// Skip malformed entries where a resource type maps to a non-map value.
+		if typeVal.Kind() != dyn.KindMap {
+			continue
+		}
+
+		typeMap, ok := typeVal.AsMap()
+		if !ok {
+			continue
+		}
+
+		for _, resPair := range typeMap.Pairs() {
+			key := resPair.Key.MustString()
+			keyLoc := resPair.Key.Location()
+
+			// dyn.Location uses 1-based line/column; LSP uses 0-based.
+			startLine := max(keyLoc.Line-1, 0)
+			startChar := max(keyLoc.Column-1, 0)
+
+			entries = append(entries, ResourceEntry{
+				Type: resourceType,
+				Key:  key,
+				KeyRange: Range{
+					Start: Position{Line: startLine, Character: startChar},
+					// The key spans exactly len(key) characters on one line.
+					End: Position{Line: startLine, Character: startChar + len(key)},
+				},
+				Path: fmt.Sprintf("resources.%s.%s", resourceType, key),
+			})
+		}
+	}
+
+	return entries
+}
diff --git a/bundle/lsp/server.go b/bundle/lsp/server.go
new file mode 100644
index 0000000000..0aabbe5f3d
--- /dev/null
+++ b/bundle/lsp/server.go
+package lsp
+
+import (
+	"context"
+	"fmt"
+	"os"
+	"path/filepath"
+	"sort"
+	"strings"
+	"sync"
+
+	"github.com/creachadair/jrpc2"
+	"github.com/creachadair/jrpc2/channel"
+	"github.com/creachadair/jrpc2/handler"
+	"github.com/databricks/cli/libs/dyn"
+	"github.com/databricks/cli/libs/dyn/merge"
+	"github.com/databricks/cli/libs/dyn/yamlloader"
+)
+
+// TargetState holds deployment state for a single target.
+type TargetState struct {
+	Host          string
+	ResourceState map[string]ResourceInfo
+}
+
+// Server is the DABs LSP server.
+type Server struct {
+	// mu guards the bundle-derived state (workspaceHost, resourceState,
+	// mergedTree, allTargetState); documents has its own internal lock.
+	mu             sync.RWMutex
+	documents      *DocumentStore
+	bundleRoot     string // filesystem root of the bundle, set during initialize
+	target         string
+	workspaceHost  string
+	resourceState  map[string]ResourceInfo // resource path -> deployed state for the active target
+	mergedTree     dyn.Value               // root config merged with all included files
+	allTargetState map[string]TargetState  // per-target deployment state
+	jrpcServer     *jrpc2.Server
+}
+
+// NewServer creates a new LSP server.
+func NewServer() *Server {
+	return &Server{
+		documents:      NewDocumentStore(),
+		resourceState:  make(map[string]ResourceInfo),
+		allTargetState: make(map[string]TargetState),
+	}
+}
+
+// Run starts the LSP server on stdin/stdout.
+func (s *Server) Run(ctx context.Context) error {
+	// Map LSP method names to their handlers.
+	mux := handler.Map{
+		"initialize":                      handler.New(s.handleInitialize),
+		"initialized":                     handler.New(s.handleInitialized),
+		"shutdown":                        handler.New(s.handleShutdown),
+		"exit":                            handler.New(s.handleExit),
+		"textDocument/didOpen":            handler.New(s.handleTextDocumentDidOpen),
+		"textDocument/didChange":          handler.New(s.handleTextDocumentDidChange),
+		"textDocument/didClose":           handler.New(s.handleTextDocumentDidClose),
+		"textDocument/documentLink":       handler.New(s.handleDocumentLink),
+		"textDocument/hover":              handler.New(s.handleHover),
+		"textDocument/definition":         handler.New(s.handleDefinition),
+		"textDocument/completion":         handler.New(s.handleCompletion),
+		"workspace/didChangeWatchedFiles": handler.New(s.handleDidChangeWatchedFiles),
+	}
+
+	// AllowPush lets the server send notifications/callbacks to the client
+	// (publishDiagnostics, client/registerCapability).
+	srv := jrpc2.NewServer(mux, &jrpc2.ServerOptions{
+		AllowPush: true,
+	})
+	s.jrpcServer = srv
+	ch := channel.LSP(os.Stdin, os.Stdout)
+	srv.Start(ch)
+	// Block until the client disconnects or the server is stopped.
+	return srv.Wait()
+}
+
+// handleInitialize records the bundle root from the client, loads bundle
+// config/state, and advertises the server's capabilities.
+func (s *Server) handleInitialize(_ context.Context, params InitializeParams) (InitializeResult, error) {
+	// Prefer rootUri; rootPath is the older, deprecated field.
+	if params.RootURI != "" {
+		s.bundleRoot = URIToPath(params.RootURI)
+	} else if params.RootPath != "" {
+		s.bundleRoot = params.RootPath
+	}
+
+	s.loadBundleInfo()
+
+	return InitializeResult{
+		Capabilities: ServerCapabilities{
+			TextDocumentSync: &TextDocumentSyncOptions{
+				OpenClose: true,
+				Change:    1, // Full sync: client resends the entire document on each change.
+			},
+			HoverProvider: true,
+			DocumentLinkProvider: &DocumentLinkOptions{
+				ResolveProvider: false,
+			},
+			DefinitionProvider: true,
+			CompletionProvider: &CompletionOptions{
+				TriggerCharacters: []string{".", "{"},
+			},
+		},
+	}, nil
+}
+
+func (s *Server) handleInitialized(ctx context.Context) error {
+	// Register file watchers for automatic reload.
+	// Run in a goroutine because Callback blocks waiting for the client response.
+	go s.registerFileWatchers(ctx)
+	return nil
+}
+
+// handleShutdown is a no-op acknowledgement; the process ends on "exit".
+func (s *Server) handleShutdown(_ context.Context) error {
+	return nil
+}
+
+// handleExit terminates the process immediately.
+// NOTE(review): the LSP spec says exit should use code 1 when no shutdown
+// request was received first — consider tracking shutdown state. The return
+// statement is unreachable; it only satisfies the handler signature.
+func (s *Server) handleExit(_ context.Context) error {
+	os.Exit(0)
+	return nil
+}
+
+// registerFileWatchers asks the client to watch for changes to bundle config and deployment state.
+func (s *Server) registerFileWatchers(ctx context.Context) {
+	if s.jrpcServer == nil {
+		return
+	}
+
+	const watchAll = 7 // Create | Change | Delete
+	params := RegistrationParams{
+		Registrations: []Registration{
+			{
+				ID:     "bundle-file-watcher",
+				Method: "workspace/didChangeWatchedFiles",
+				RegisterOptions: DidChangeWatchedFilesRegistrationOptions{
+					Watchers: []FileSystemWatcher{
+						{GlobPattern: "**/.databricks/bundle/*/resources.json", Kind: watchAll},
+						{GlobPattern: "**/*.yml", Kind: watchAll},
+						{GlobPattern: "**/*.yaml", Kind: watchAll},
+					},
+				},
+			},
+		},
+	}
+
+	// client/registerCapability is a request (expects response from client).
+	s.jrpcServer.Callback(ctx, "client/registerCapability", params) //nolint:errcheck
+}
+
+// handleDidChangeWatchedFiles reloads bundle info when watched files change.
+// The reload is unconditional: any matching file event (including every YAML
+// edit) triggers a full re-read of config and deployment state.
+func (s *Server) handleDidChangeWatchedFiles(ctx context.Context, _ DidChangeWatchedFilesParams) error {
+	s.loadBundleInfo()
+	// Re-diagnose all open documents since the merged tree may have changed.
+	for _, uri := range s.documents.AllURIs() {
+		s.publishDiagnostics(ctx, uri)
+	}
+	return nil
+}
+
+// handleTextDocumentDidOpen registers the document, reloads bundle info if the
+// opened file is the root config, and publishes initial diagnostics.
+func (s *Server) handleTextDocumentDidOpen(ctx context.Context, params DidOpenTextDocumentParams) error {
+	s.documents.Open(params.TextDocument.URI, params.TextDocument.Version, params.TextDocument.Text)
+	if s.isRootConfig(params.TextDocument.URI) {
+		s.loadBundleInfo()
+	}
+	s.publishDiagnostics(ctx, params.TextDocument.URI)
+	return nil
+}
+
+func (s *Server) handleTextDocumentDidChange(ctx context.Context, params DidChangeTextDocumentParams) error {
+	// Full-document sync is advertised in initialize, so only the last change
+	// matters: each change event carries the complete document text.
+	if len(params.ContentChanges) > 0 {
+		s.documents.Change(params.TextDocument.URI, params.TextDocument.Version, params.ContentChanges[len(params.ContentChanges)-1].Text)
+	}
+	s.publishDiagnostics(ctx, params.TextDocument.URI)
+	return nil
+}
+
+func (s *Server) handleTextDocumentDidClose(ctx context.Context, params DidCloseTextDocumentParams) error {
+	s.documents.Close(params.TextDocument.URI)
+	// Clear diagnostics for the closed document.
+	s.notify(ctx, "textDocument/publishDiagnostics", PublishDiagnosticsParams{
+		URI:         params.TextDocument.URI,
+		Diagnostics: nil,
+	})
+	return nil
+}
+
+// publishDiagnostics computes diagnostics for a document and sends them to the client.
+// A nil/unknown document is ignored; the merged tree is read under the lock.
+func (s *Server) publishDiagnostics(ctx context.Context, uri string) {
+	doc := s.documents.Get(uri)
+	if doc == nil {
+		return
+	}
+
+	s.mu.RLock()
+	diags := DiagnoseInterpolations(doc.Lines, s.mergedTree)
+	s.mu.RUnlock()
+
+	s.notify(ctx, "textDocument/publishDiagnostics", PublishDiagnosticsParams{
+		URI:         uri,
+		Diagnostics: diags,
+	})
+}
+
+// notify sends a notification to the client (no response expected).
+func (s *Server) notify(ctx context.Context, method string, params any) { + if s.jrpcServer == nil { + return + } + s.jrpcServer.Notify(ctx, method, params) //nolint:errcheck +} + +func (s *Server) handleDocumentLink(_ context.Context, params DocumentLinkParams) ([]DocumentLink, error) { + doc := s.documents.Get(params.TextDocument.URI) + if doc == nil { + return nil, nil + } + + s.mu.RLock() + defer s.mu.RUnlock() + + entries := IndexResources(doc) + var links []DocumentLink + for _, entry := range entries { + u := s.resolveResourceURL(entry) + if u == "" { + continue + } + links = append(links, DocumentLink{ + Range: entry.KeyRange, + Target: u, + Tooltip: fmt.Sprintf("Open %s '%s' in Databricks", entry.Type, entry.Key), + }) + } + return links, nil +} + +func (s *Server) handleHover(_ context.Context, params HoverParams) (*Hover, error) { + doc := s.documents.Get(params.TextDocument.URI) + if doc == nil { + return nil, nil + } + + s.mu.RLock() + defer s.mu.RUnlock() + + entries := IndexResources(doc) + for _, entry := range entries { + if PositionInRange(params.Position, entry.KeyRange) { + content := s.buildHoverContent(entry) + return &Hover{ + Contents: MarkupContent{ + Kind: "markdown", + Value: content, + }, + Range: &entry.KeyRange, + }, nil + } + } + + return nil, nil +} + +// findRootConfig returns the path to the root bundle config file, or "" if not found. +func (s *Server) findRootConfig() string { + for _, name := range []string{"databricks.yml", "databricks.yaml", "bundle.yml", "bundle.yaml"} { + p := filepath.Join(s.bundleRoot, name) + if _, err := os.Stat(p); err == nil { + return p + } + } + return "" +} + +// loadBundleInfo reads bundle config and deployment state. 
+func (s *Server) loadBundleInfo() { + if s.bundleRoot == "" { + return + } + + configPath := s.findRootConfig() + if configPath == "" { + return + } + + data, err := os.ReadFile(configPath) + if err != nil { + return + } + + v, err := yamlloader.LoadYAML(configPath, strings.NewReader(string(data))) + if err != nil { + return + } + + s.mu.Lock() + defer s.mu.Unlock() + + s.workspaceHost = LoadWorkspaceHost(v) + + target := s.target + if target == "" { + target = LoadTarget(v) + s.target = target + } + + s.resourceState = LoadResourceState(s.bundleRoot, target) + populateResourceURLs(s.workspaceHost, s.resourceState) + + s.loadMergedTree(configPath, v) + s.loadAllTargetState(v) +} + +// populateResourceURLs fills in missing URLs for resources that have IDs. +func populateResourceURLs(host string, state map[string]ResourceInfo) { + if host == "" { + return + } + for key, info := range state { + if info.URL == "" && info.ID != "" { + parts := strings.SplitN(key, ".", 3) + if len(parts) == 3 { + info.URL = BuildResourceURL(host, parts[1], info.ID) + state[key] = info + } + } + } +} + +// loadMergedTree builds a merged dyn.Value from the root config and all included files. +func (s *Server) loadMergedTree(configPath string, rootValue dyn.Value) { + s.mergedTree = rootValue + + // Extract include patterns. + includes := rootValue.Get("include") + if includes.Kind() != dyn.KindSequence { + return + } + seq, ok := includes.AsSequence() + if !ok { + return + } + + // Collect and expand glob patterns. + seen := map[string]bool{configPath: true} + var paths []string + for _, item := range seq { + pattern, ok := item.AsString() + if !ok { + continue + } + matches, err := filepath.Glob(filepath.Join(s.bundleRoot, pattern)) + if err != nil { + continue + } + for _, m := range matches { + if !seen[m] { + seen[m] = true + paths = append(paths, m) + } + } + } + sort.Strings(paths) + + // Parse and merge each included file. 
+ merged := rootValue + for _, p := range paths { + data, err := os.ReadFile(p) + if err != nil { + continue + } + v, err := yamlloader.LoadYAML(p, strings.NewReader(string(data))) + if err != nil { + continue + } + merged, _ = merge.Merge(merged, v) + } + s.mergedTree = merged +} + +const maxTargets = 10 + +// loadAllTargetState loads resource state for all targets (up to maxTargets). +func (s *Server) loadAllTargetState(v dyn.Value) { + s.allTargetState = make(map[string]TargetState) + + targets := LoadAllTargets(v) + if len(targets) > maxTargets { + targets = targets[:maxTargets] + } + + for _, t := range targets { + host := LoadTargetWorkspaceHost(v, t) + rs := LoadResourceState(s.bundleRoot, t) + populateResourceURLs(host, rs) + + s.allTargetState[t] = TargetState{ + Host: host, + ResourceState: rs, + } + } +} + +func (s *Server) resolveResourceURL(entry ResourceEntry) string { + if info, ok := s.resourceState[entry.Path]; ok { + return info.URL + } + return "" +} + +func (s *Server) handleDefinition(_ context.Context, params DefinitionParams) ([]LSPLocation, error) { + doc := s.documents.Get(params.TextDocument.URI) + if doc == nil { + return nil, nil + } + + s.mu.RLock() + defer s.mu.RUnlock() + + // Check if cursor is on a ${...} reference. + ref, ok := FindInterpolationAtPosition(doc.Lines, params.Position) + if ok { + loc, found := ResolveDefinition(s.mergedTree, ref.Path) + if !found { + return nil, nil + } + return []LSPLocation{DynLocationToLSPLocation(loc)}, nil + } + + // Check if cursor is on a resource key. 
+ entries := IndexResources(doc) + for _, entry := range entries { + if PositionInRange(params.Position, entry.KeyRange) { + refs := FindInterpolationReferences(s.mergedTree, entry.Path) + if len(refs) == 0 { + return nil, nil + } + var locs []LSPLocation + for _, r := range refs { + locs = append(locs, DynLocationToLSPLocation(r.Location)) + } + return locs, nil + } + } + + return nil, nil +} + +func (s *Server) handleCompletion(_ context.Context, params CompletionParams) (*CompletionList, error) { + doc := s.documents.Get(params.TextDocument.URI) + if doc == nil { + return nil, nil + } + + cctx, ok := FindCompletionContext(doc.Lines, params.Position) + if !ok { + return nil, nil + } + + s.mu.RLock() + defer s.mu.RUnlock() + + // The edit range covers the text from after "${" to the cursor position. + // When a completion is accepted, this entire range is replaced with the full path. + editRange := &Range{ + Start: Position{Line: params.Position.Line, Character: cctx.Start + 2}, + End: params.Position, + } + + var items []CompletionItem + if cctx.PartialPath == "" { + items = TopLevelCompletions(s.mergedTree, editRange) + } else { + items = CompleteInterpolation(s.mergedTree, cctx.PartialPath, editRange) + } + + return &CompletionList{ + IsIncomplete: false, + Items: items, + }, nil +} + +func (s *Server) buildHoverContent(entry ResourceEntry) string { + var b strings.Builder + fmt.Fprintf(&b, "**%s** `%s`\n\n", entry.Type, entry.Key) + + // Show per-target state if available. + if len(s.allTargetState) > 0 { + hasAnyState := false + for _, ts := range s.allTargetState { + if _, ok := ts.ResourceState[entry.Path]; ok { + hasAnyState = true + break + } + } + + if hasAnyState { + b.WriteString("**Targets:**\n\n") + + // Sort target names for deterministic output. 
+ targets := LoadAllTargets(s.mergedTree) + for _, t := range targets { + ts, ok := s.allTargetState[t] + if !ok { + continue + } + info, ok := ts.ResourceState[entry.Path] + if !ok { + continue + } + if info.URL != "" { + fmt.Fprintf(&b, "- **%s**: [Open in Databricks](%s) (ID: `%s`)\n", t, info.URL, info.ID) + } else if info.ID != "" { + fmt.Fprintf(&b, "- **%s**: ID: `%s`\n", t, info.ID) + } + } + return b.String() + } + } + + // Fall back to default target state. + info, hasState := s.resourceState[entry.Path] + if hasState && info.ID != "" { + fmt.Fprintf(&b, "**ID:** `%s`\n\n", info.ID) + } + if hasState && info.Name != "" { + fmt.Fprintf(&b, "**Name:** %s\n\n", info.Name) + } + if hasState && info.URL != "" { + fmt.Fprintf(&b, "[Open in Databricks](%s)", info.URL) + } else if !hasState || info.ID == "" { + b.WriteString("_Not yet deployed. Run `databricks bundle deploy` to create this resource._") + } + + return b.String() +} + +func (s *Server) isRootConfig(uri string) bool { + base := filepath.Base(URIToPath(uri)) + return base == "databricks.yml" || base == "databricks.yaml" || base == "bundle.yml" || base == "bundle.yaml" +} + +// SetTarget sets the target for the server. +func (s *Server) SetTarget(target string) { + s.target = target +} + +// PositionInRange checks if a position is within a range (inclusive start, exclusive end). 
+func PositionInRange(pos Position, r Range) bool { + if pos.Line < r.Start.Line || pos.Line > r.End.Line { + return false + } + if pos.Line == r.Start.Line && pos.Character < r.Start.Character { + return false + } + if pos.Line == r.End.Line && pos.Character >= r.End.Character { + return false + } + return true +} diff --git a/bundle/lsp/server_integration_test.go b/bundle/lsp/server_integration_test.go new file mode 100644 index 0000000000..894c4da6b0 --- /dev/null +++ b/bundle/lsp/server_integration_test.go @@ -0,0 +1,659 @@ +package lsp + +import ( + "context" + "os" + "path/filepath" + "strings" + "testing" + + "github.com/creachadair/jrpc2" + "github.com/creachadair/jrpc2/channel" + "github.com/creachadair/jrpc2/handler" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +const testBundleYAML = `bundle: + name: test-bundle +workspace: + host: "https://my-workspace.databricks.com" +targets: + dev: + default: true +resources: + jobs: + my_job: + name: "My Job" + pipelines: + my_pipeline: + name: "My Pipeline" +` + +func setupTestBundleDir(t *testing.T) string { + t.Helper() + tmpDir := t.TempDir() + require.NoError(t, os.WriteFile(filepath.Join(tmpDir, "databricks.yml"), []byte(testBundleYAML), 0o644)) + + stateDir := filepath.Join(tmpDir, ".databricks", "bundle", "dev") + require.NoError(t, os.MkdirAll(stateDir, 0o755)) + + stateJSON := `{ + "state_version": 1, + "state": { + "resources.jobs.my_job": {"__id__": "12345"}, + "resources.pipelines.my_pipeline": {"__id__": "abc-def"} + } + }` + require.NoError(t, os.WriteFile(filepath.Join(stateDir, "resources.json"), []byte(stateJSON), 0o644)) + + return tmpDir +} + +func newTestClientServer(t *testing.T, srv *Server) *jrpc2.Client { + t.Helper() + + mux := handler.Map{ + "initialize": handler.New(srv.handleInitialize), + "initialized": handler.New(srv.handleInitialized), + "shutdown": handler.New(srv.handleShutdown), + "textDocument/didOpen": 
handler.New(srv.handleTextDocumentDidOpen), + "textDocument/didChange": handler.New(srv.handleTextDocumentDidChange), + "textDocument/didClose": handler.New(srv.handleTextDocumentDidClose), + "textDocument/documentLink": handler.New(srv.handleDocumentLink), + "textDocument/hover": handler.New(srv.handleHover), + "textDocument/definition": handler.New(srv.handleDefinition), + "textDocument/completion": handler.New(srv.handleCompletion), + "workspace/didChangeWatchedFiles": handler.New(srv.handleDidChangeWatchedFiles), + } + + clientCh, serverCh := channel.Direct() + + jrpcSrv := jrpc2.NewServer(mux, nil) + jrpcSrv.Start(serverCh) + t.Cleanup(func() { jrpcSrv.Stop() }) + + cli := jrpc2.NewClient(clientCh, nil) + t.Cleanup(func() { cli.Close() }) + + return cli +} + +// initializeClient sends the initialize request and returns the result. +func initializeClient(ctx context.Context, t *testing.T, cli *jrpc2.Client, rootURI string) InitializeResult { + t.Helper() + var result InitializeResult + err := cli.CallResult(ctx, "initialize", InitializeParams{ + ProcessID: 1, + RootURI: rootURI, + }, &result) + require.NoError(t, err) + return result +} + +func TestServerHandleInitialize(t *testing.T) { + tmpDir := setupTestBundleDir(t) + srv := NewServer() + cli := newTestClientServer(t, srv) + ctx := t.Context() + + result := initializeClient(ctx, t, cli, PathToURI(tmpDir)) + + assert.True(t, result.Capabilities.HoverProvider) + assert.True(t, result.Capabilities.DefinitionProvider) + require.NotNil(t, result.Capabilities.DocumentLinkProvider) + require.NotNil(t, result.Capabilities.TextDocumentSync) + assert.True(t, result.Capabilities.TextDocumentSync.OpenClose) + assert.Equal(t, 1, result.Capabilities.TextDocumentSync.Change) +} + +func TestServerHandleDocumentLink(t *testing.T) { + tmpDir := setupTestBundleDir(t) + srv := NewServer() + cli := newTestClientServer(t, srv) + ctx := t.Context() + + initializeClient(ctx, t, cli, PathToURI(tmpDir)) + + docURI := 
PathToURI(filepath.Join(tmpDir, "databricks.yml")) + err := cli.Notify(ctx, "textDocument/didOpen", DidOpenTextDocumentParams{ + TextDocument: TextDocumentItem{ + URI: docURI, + LanguageID: "yaml", + Version: 1, + Text: testBundleYAML, + }, + }) + require.NoError(t, err) + + var links []DocumentLink + err = cli.CallResult(ctx, "textDocument/documentLink", DocumentLinkParams{ + TextDocument: TextDocumentIdentifier{URI: docURI}, + }, &links) + require.NoError(t, err) + require.Len(t, links, 2) + + assert.Contains(t, links[0].Target, "/jobs/12345") + assert.Contains(t, links[0].Tooltip, "my_job") + assert.Contains(t, links[1].Target, "/pipelines/abc-def") + assert.Contains(t, links[1].Tooltip, "my_pipeline") +} + +func TestServerHandleDocumentLinkNoState(t *testing.T) { + tmpDir := t.TempDir() + require.NoError(t, os.WriteFile(filepath.Join(tmpDir, "databricks.yml"), []byte(testBundleYAML), 0o644)) + + srv := NewServer() + cli := newTestClientServer(t, srv) + ctx := t.Context() + + initializeClient(ctx, t, cli, PathToURI(tmpDir)) + + docURI := PathToURI(filepath.Join(tmpDir, "databricks.yml")) + err := cli.Notify(ctx, "textDocument/didOpen", DidOpenTextDocumentParams{ + TextDocument: TextDocumentItem{ + URI: docURI, + LanguageID: "yaml", + Version: 1, + Text: testBundleYAML, + }, + }) + require.NoError(t, err) + + var links []DocumentLink + err = cli.CallResult(ctx, "textDocument/documentLink", DocumentLinkParams{ + TextDocument: TextDocumentIdentifier{URI: docURI}, + }, &links) + require.NoError(t, err) + assert.Empty(t, links) +} + +func TestServerHandleHoverOnResource(t *testing.T) { + tmpDir := setupTestBundleDir(t) + srv := NewServer() + cli := newTestClientServer(t, srv) + ctx := t.Context() + + initializeClient(ctx, t, cli, PathToURI(tmpDir)) + + docURI := PathToURI(filepath.Join(tmpDir, "databricks.yml")) + err := cli.Notify(ctx, "textDocument/didOpen", DidOpenTextDocumentParams{ + TextDocument: TextDocumentItem{ + URI: docURI, + LanguageID: "yaml", + Version: 
1, + Text: testBundleYAML, + }, + }) + require.NoError(t, err) + + // Get links to find the position of my_job. + var links []DocumentLink + err = cli.CallResult(ctx, "textDocument/documentLink", DocumentLinkParams{ + TextDocument: TextDocumentIdentifier{URI: docURI}, + }, &links) + require.NoError(t, err) + require.NotEmpty(t, links) + + // Hover at the position of the first link (my_job key). + var hover Hover + err = cli.CallResult(ctx, "textDocument/hover", HoverParams{ + TextDocument: TextDocumentIdentifier{URI: docURI}, + Position: links[0].Range.Start, + }, &hover) + require.NoError(t, err) + assert.Contains(t, hover.Contents.Value, "12345") + assert.Contains(t, hover.Contents.Value, "Open in Databricks") +} + +func TestServerHandleHoverOffResource(t *testing.T) { + tmpDir := setupTestBundleDir(t) + srv := NewServer() + cli := newTestClientServer(t, srv) + ctx := t.Context() + + initializeClient(ctx, t, cli, PathToURI(tmpDir)) + + docURI := PathToURI(filepath.Join(tmpDir, "databricks.yml")) + err := cli.Notify(ctx, "textDocument/didOpen", DidOpenTextDocumentParams{ + TextDocument: TextDocumentItem{ + URI: docURI, + LanguageID: "yaml", + Version: 1, + Text: testBundleYAML, + }, + }) + require.NoError(t, err) + + // Hover at line 0, character 0 which is "bundle:" -- not a resource key. + rsp, err := cli.Call(ctx, "textDocument/hover", HoverParams{ + TextDocument: TextDocumentIdentifier{URI: docURI}, + Position: Position{Line: 0, Character: 0}, + }) + require.NoError(t, err) + + // The handler returns nil for non-resource positions, which is JSON null. + var hover *Hover + err = rsp.UnmarshalResult(&hover) + require.NoError(t, err) + assert.Nil(t, hover) +} + +func TestServerEndToEnd(t *testing.T) { + tmpDir := setupTestBundleDir(t) + srv := NewServer() + cli := newTestClientServer(t, srv) + ctx := t.Context() + + // 1. Initialize. + result := initializeClient(ctx, t, cli, PathToURI(tmpDir)) + assert.True(t, result.Capabilities.HoverProvider) + + // 2. 
Initialized notification. + err := cli.Notify(ctx, "initialized", nil) + require.NoError(t, err) + + // 3. Open document. + docURI := PathToURI(filepath.Join(tmpDir, "databricks.yml")) + err = cli.Notify(ctx, "textDocument/didOpen", DidOpenTextDocumentParams{ + TextDocument: TextDocumentItem{ + URI: docURI, + LanguageID: "yaml", + Version: 1, + Text: testBundleYAML, + }, + }) + require.NoError(t, err) + + // 4. Get document links. + var links []DocumentLink + err = cli.CallResult(ctx, "textDocument/documentLink", DocumentLinkParams{ + TextDocument: TextDocumentIdentifier{URI: docURI}, + }, &links) + require.NoError(t, err) + require.Len(t, links, 2) + assert.Contains(t, links[0].Target, "/jobs/12345") + assert.Contains(t, links[1].Target, "/pipelines/abc-def") + + // 5. Hover on resource key. + var hover Hover + err = cli.CallResult(ctx, "textDocument/hover", HoverParams{ + TextDocument: TextDocumentIdentifier{URI: docURI}, + Position: links[0].Range.Start, + }, &hover) + require.NoError(t, err) + assert.Contains(t, hover.Contents.Value, "12345") + assert.Contains(t, hover.Contents.Value, "Open in Databricks") + + // 6. Change document content (remove pipelines). + updatedYAML := `bundle: + name: test-bundle +workspace: + host: "https://my-workspace.databricks.com" +targets: + dev: + default: true +resources: + jobs: + my_job: + name: "My Job" +` + err = cli.Notify(ctx, "textDocument/didChange", DidChangeTextDocumentParams{ + TextDocument: VersionedTextDocumentIdentifier{ + URI: docURI, + Version: 2, + }, + ContentChanges: []TextDocumentContentChangeEvent{ + {Text: updatedYAML}, + }, + }) + require.NoError(t, err) + + // 7. Document links should now return only one link. 
+ var linksAfterChange []DocumentLink + err = cli.CallResult(ctx, "textDocument/documentLink", DocumentLinkParams{ + TextDocument: TextDocumentIdentifier{URI: docURI}, + }, &linksAfterChange) + require.NoError(t, err) + require.Len(t, linksAfterChange, 1) + assert.Contains(t, linksAfterChange[0].Target, "/jobs/12345") + + // 8. Close document. + err = cli.Notify(ctx, "textDocument/didClose", DidCloseTextDocumentParams{ + TextDocument: TextDocumentIdentifier{URI: docURI}, + }) + require.NoError(t, err) + + // 9. Document links should return empty after close. + var linksAfterClose []DocumentLink + err = cli.CallResult(ctx, "textDocument/documentLink", DocumentLinkParams{ + TextDocument: TextDocumentIdentifier{URI: docURI}, + }, &linksAfterClose) + require.NoError(t, err) + assert.Empty(t, linksAfterClose) + + // 10. Shutdown. + _, err = cli.Call(ctx, "shutdown", nil) + require.NoError(t, err) +} + +const testBundleYAMLWithInterpolation = `bundle: + name: test-bundle +workspace: + host: "https://my-workspace.databricks.com" +targets: + dev: + default: true +variables: + my_var: + default: "hello" +resources: + jobs: + my_job: + name: "${var.my_var}" + pipelines: + my_pipeline: + name: "My Pipeline" +` + +func TestServerDefinitionOnInterpolation(t *testing.T) { + tmpDir := t.TempDir() + require.NoError(t, os.WriteFile(filepath.Join(tmpDir, "databricks.yml"), []byte(testBundleYAMLWithInterpolation), 0o644)) + + srv := NewServer() + cli := newTestClientServer(t, srv) + ctx := t.Context() + + initializeClient(ctx, t, cli, PathToURI(tmpDir)) + + docURI := PathToURI(filepath.Join(tmpDir, "databricks.yml")) + err := cli.Notify(ctx, "textDocument/didOpen", DidOpenTextDocumentParams{ + TextDocument: TextDocumentItem{ + URI: docURI, + LanguageID: "yaml", + Version: 1, + Text: testBundleYAMLWithInterpolation, + }, + }) + require.NoError(t, err) + + // Find the line with "${var.my_var}" and position cursor on it. 
+ lines := strings.Split(testBundleYAMLWithInterpolation, "\n") + var targetLine int + var targetCol int + for i, line := range lines { + idx := strings.Index(line, "${var.my_var}") + if idx >= 0 { + targetLine = i + targetCol = idx + 2 // inside the "${...}" + break + } + } + + var locs []LSPLocation + err = cli.CallResult(ctx, "textDocument/definition", DefinitionParams{ + TextDocument: TextDocumentIdentifier{URI: docURI}, + Position: Position{Line: targetLine, Character: targetCol}, + }, &locs) + require.NoError(t, err) + require.Len(t, locs, 1) + assert.Contains(t, locs[0].URI, "databricks.yml") +} + +func TestServerDefinitionOnResourceKey(t *testing.T) { + tmpDir := t.TempDir() + require.NoError(t, os.WriteFile(filepath.Join(tmpDir, "databricks.yml"), []byte(testBundleYAMLWithInterpolation), 0o644)) + + srv := NewServer() + cli := newTestClientServer(t, srv) + ctx := t.Context() + + initializeClient(ctx, t, cli, PathToURI(tmpDir)) + + docURI := PathToURI(filepath.Join(tmpDir, "databricks.yml")) + err := cli.Notify(ctx, "textDocument/didOpen", DidOpenTextDocumentParams{ + TextDocument: TextDocumentItem{ + URI: docURI, + LanguageID: "yaml", + Version: 1, + Text: testBundleYAMLWithInterpolation, + }, + }) + require.NoError(t, err) + + // Find the resource key position. + lines := strings.Split(testBundleYAMLWithInterpolation, "\n") + var myJobLine int + var myJobCol int + for i, line := range lines { + idx := strings.Index(line, "my_job:") + if idx >= 0 { + myJobLine = i + myJobCol = idx + 1 // inside "my_job" + break + } + } + + // Ctrl+click on "my_job" key should return references (${...} expressions referencing it). + // The YAML has name: "${var.my_var}" which does NOT reference my_job, so this may return empty. 
+ rsp, err := cli.Call(ctx, "textDocument/definition", DefinitionParams{ + TextDocument: TextDocumentIdentifier{URI: docURI}, + Position: Position{Line: myJobLine, Character: myJobCol}, + }) + require.NoError(t, err) + assert.NotNil(t, rsp) +} + +func TestServerDefinitionVarShorthand(t *testing.T) { + tmpDir := t.TempDir() + require.NoError(t, os.WriteFile(filepath.Join(tmpDir, "databricks.yml"), []byte(testBundleYAMLWithInterpolation), 0o644)) + + srv := NewServer() + cli := newTestClientServer(t, srv) + ctx := t.Context() + + initializeClient(ctx, t, cli, PathToURI(tmpDir)) + + docURI := PathToURI(filepath.Join(tmpDir, "databricks.yml")) + err := cli.Notify(ctx, "textDocument/didOpen", DidOpenTextDocumentParams{ + TextDocument: TextDocumentItem{ + URI: docURI, + LanguageID: "yaml", + Version: 1, + Text: testBundleYAMLWithInterpolation, + }, + }) + require.NoError(t, err) + + // Find the line with "${var.my_var}" and position cursor on "var" part. + lines := strings.Split(testBundleYAMLWithInterpolation, "\n") + var targetLine int + var targetCol int + for i, line := range lines { + idx := strings.Index(line, "${var.my_var}") + if idx >= 0 { + targetLine = i + targetCol = idx + 2 // on "var" inside "${var.my_var}" + break + } + } + + var locs []LSPLocation + err = cli.CallResult(ctx, "textDocument/definition", DefinitionParams{ + TextDocument: TextDocumentIdentifier{URI: docURI}, + Position: Position{Line: targetLine, Character: targetCol}, + }, &locs) + require.NoError(t, err) + require.Len(t, locs, 1) + assert.Contains(t, locs[0].URI, "databricks.yml") +} + +func TestServerDefinitionNoMatch(t *testing.T) { + tmpDir := t.TempDir() + require.NoError(t, os.WriteFile(filepath.Join(tmpDir, "databricks.yml"), []byte(testBundleYAMLWithInterpolation), 0o644)) + + srv := NewServer() + cli := newTestClientServer(t, srv) + ctx := t.Context() + + initializeClient(ctx, t, cli, PathToURI(tmpDir)) + + docURI := PathToURI(filepath.Join(tmpDir, "databricks.yml")) + err := 
cli.Notify(ctx, "textDocument/didOpen", DidOpenTextDocumentParams{ + TextDocument: TextDocumentItem{ + URI: docURI, + LanguageID: "yaml", + Version: 1, + Text: testBundleYAMLWithInterpolation, + }, + }) + require.NoError(t, err) + + // Cursor on line 0, character 0 ("bundle:") — not an interpolation or resource key. + var result []LSPLocation + err = cli.CallResult(ctx, "textDocument/definition", DefinitionParams{ + TextDocument: TextDocumentIdentifier{URI: docURI}, + Position: Position{Line: 0, Character: 0}, + }, &result) + require.NoError(t, err) + assert.Nil(t, result) +} + +func TestServerDefinitionCrossFile(t *testing.T) { + mainYAML := `bundle: + name: test-bundle +include: + - "resources/*.yml" +variables: + my_var: + default: "hello" +` + resourceYAML := `resources: + jobs: + my_job: + name: "${var.my_var}" +` + tmpDir := t.TempDir() + require.NoError(t, os.WriteFile(filepath.Join(tmpDir, "databricks.yml"), []byte(mainYAML), 0o644)) + require.NoError(t, os.MkdirAll(filepath.Join(tmpDir, "resources"), 0o755)) + require.NoError(t, os.WriteFile(filepath.Join(tmpDir, "resources", "jobs.yml"), []byte(resourceYAML), 0o644)) + + srv := NewServer() + cli := newTestClientServer(t, srv) + ctx := t.Context() + + initializeClient(ctx, t, cli, PathToURI(tmpDir)) + + // Open the resource file with the interpolation. + resDocURI := PathToURI(filepath.Join(tmpDir, "resources", "jobs.yml")) + err := cli.Notify(ctx, "textDocument/didOpen", DidOpenTextDocumentParams{ + TextDocument: TextDocumentItem{ + URI: resDocURI, + LanguageID: "yaml", + Version: 1, + Text: resourceYAML, + }, + }) + require.NoError(t, err) + + // Find "${var.my_var}" in the resource file. + lines := strings.Split(resourceYAML, "\n") + var targetLine int + var targetCol int + for i, line := range lines { + idx := strings.Index(line, "${var.my_var}") + if idx >= 0 { + targetLine = i + targetCol = idx + 2 + break + } + } + + // Definition should resolve to variables.my_var in the main config file. 
+ var locs []LSPLocation + err = cli.CallResult(ctx, "textDocument/definition", DefinitionParams{ + TextDocument: TextDocumentIdentifier{URI: resDocURI}, + Position: Position{Line: targetLine, Character: targetCol}, + }, &locs) + require.NoError(t, err) + require.Len(t, locs, 1) + assert.Contains(t, locs[0].URI, "databricks.yml") +} + +func TestServerHoverMultiTarget(t *testing.T) { + bundleYAML := `bundle: + name: test-bundle +workspace: + host: "https://default.databricks.com" +targets: + dev: + default: true + workspace: + host: "https://dev.databricks.com" + prod: + workspace: + host: "https://prod.databricks.com" +resources: + jobs: + my_job: + name: "My Job" +` + tmpDir := t.TempDir() + require.NoError(t, os.WriteFile(filepath.Join(tmpDir, "databricks.yml"), []byte(bundleYAML), 0o644)) + + // Create state for both targets. + for _, target := range []struct { + name string + id string + }{ + {"dev", "111"}, + {"prod", "222"}, + } { + stateDir := filepath.Join(tmpDir, ".databricks", "bundle", target.name) + require.NoError(t, os.MkdirAll(stateDir, 0o755)) + stateJSON := `{"state_version": 1, "state": {"resources.jobs.my_job": {"__id__": "` + target.id + `"}}}` + require.NoError(t, os.WriteFile(filepath.Join(stateDir, "resources.json"), []byte(stateJSON), 0o644)) + } + + srv := NewServer() + cli := newTestClientServer(t, srv) + ctx := t.Context() + + initializeClient(ctx, t, cli, PathToURI(tmpDir)) + + docURI := PathToURI(filepath.Join(tmpDir, "databricks.yml")) + err := cli.Notify(ctx, "textDocument/didOpen", DidOpenTextDocumentParams{ + TextDocument: TextDocumentItem{ + URI: docURI, + LanguageID: "yaml", + Version: 1, + Text: bundleYAML, + }, + }) + require.NoError(t, err) + + // Find the position of my_job key. 
+ lines := strings.Split(bundleYAML, "\n") + var myJobLine int + var myJobCol int + for i, line := range lines { + idx := strings.Index(line, "my_job:") + if idx >= 0 { + myJobLine = i + myJobCol = idx + 1 + break + } + } + + var hover Hover + err = cli.CallResult(ctx, "textDocument/hover", HoverParams{ + TextDocument: TextDocumentIdentifier{URI: docURI}, + Position: Position{Line: myJobLine, Character: myJobCol}, + }, &hover) + require.NoError(t, err) + assert.Contains(t, hover.Contents.Value, "dev") + assert.Contains(t, hover.Contents.Value, "prod") + assert.Contains(t, hover.Contents.Value, "111") + assert.Contains(t, hover.Contents.Value, "222") + assert.Contains(t, hover.Contents.Value, "Open in Databricks") +} diff --git a/bundle/lsp/server_test.go b/bundle/lsp/server_test.go new file mode 100644 index 0000000000..234095f2ba --- /dev/null +++ b/bundle/lsp/server_test.go @@ -0,0 +1,386 @@ +package lsp_test + +import ( + "os" + "path/filepath" + "strings" + "testing" + + "github.com/databricks/cli/bundle/lsp" + "github.com/databricks/cli/libs/dyn" + "github.com/databricks/cli/libs/dyn/yamlloader" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestIndexResources(t *testing.T) { + yaml := ` +resources: + jobs: + my_etl_job: + name: "ETL Job" + pipelines: + data_pipeline: + name: "Pipeline" +` + v, err := yamlloader.LoadYAML("test.yml", strings.NewReader(yaml)) + require.NoError(t, err) + + doc := &lsp.Document{ + URI: "file:///test.yml", + Content: yaml, + Lines: strings.Split(yaml, "\n"), + Value: v, + } + + entries := lsp.IndexResources(doc) + require.Len(t, entries, 2) + + // Verify first entry (jobs.my_etl_job). + assert.Equal(t, "jobs", entries[0].Type) + assert.Equal(t, "my_etl_job", entries[0].Key) + assert.Equal(t, "resources.jobs.my_etl_job", entries[0].Path) + // Key should span the length of "my_etl_job". 
+ assert.Equal(t, entries[0].KeyRange.Start.Character+len("my_etl_job"), entries[0].KeyRange.End.Character) + + // Verify second entry (pipelines.data_pipeline). + assert.Equal(t, "pipelines", entries[1].Type) + assert.Equal(t, "data_pipeline", entries[1].Key) + assert.Equal(t, "resources.pipelines.data_pipeline", entries[1].Path) +} + +func TestIndexResourcesInvalidYAML(t *testing.T) { + doc := &lsp.Document{ + URI: "file:///bad.yml", + Value: dyn.InvalidValue, + } + + entries := lsp.IndexResources(doc) + assert.Nil(t, entries) +} + +func TestIndexResourcesNoResources(t *testing.T) { + yaml := ` +bundle: + name: "test" +` + v, err := yamlloader.LoadYAML("test.yml", strings.NewReader(yaml)) + require.NoError(t, err) + + doc := &lsp.Document{ + URI: "file:///test.yml", + Value: v, + } + + entries := lsp.IndexResources(doc) + assert.Nil(t, entries) +} + +func TestPositionInRange(t *testing.T) { + tests := []struct { + name string + pos lsp.Position + r lsp.Range + expected bool + }{ + { + name: "inside range", + pos: lsp.Position{Line: 3, Character: 5}, + r: lsp.Range{Start: lsp.Position{Line: 3, Character: 4}, End: lsp.Position{Line: 3, Character: 14}}, + expected: true, + }, + { + name: "at start of range", + pos: lsp.Position{Line: 3, Character: 4}, + r: lsp.Range{Start: lsp.Position{Line: 3, Character: 4}, End: lsp.Position{Line: 3, Character: 14}}, + expected: true, + }, + { + name: "at end of range (exclusive)", + pos: lsp.Position{Line: 3, Character: 14}, + r: lsp.Range{Start: lsp.Position{Line: 3, Character: 4}, End: lsp.Position{Line: 3, Character: 14}}, + expected: false, + }, + { + name: "before range", + pos: lsp.Position{Line: 3, Character: 2}, + r: lsp.Range{Start: lsp.Position{Line: 3, Character: 4}, End: lsp.Position{Line: 3, Character: 14}}, + expected: false, + }, + { + name: "after range", + pos: lsp.Position{Line: 3, Character: 20}, + r: lsp.Range{Start: lsp.Position{Line: 3, Character: 4}, End: lsp.Position{Line: 3, Character: 14}}, + expected: 
false, + }, + { + name: "wrong line above", + pos: lsp.Position{Line: 2, Character: 5}, + r: lsp.Range{Start: lsp.Position{Line: 3, Character: 4}, End: lsp.Position{Line: 3, Character: 14}}, + expected: false, + }, + { + name: "wrong line below", + pos: lsp.Position{Line: 4, Character: 5}, + r: lsp.Range{Start: lsp.Position{Line: 3, Character: 4}, End: lsp.Position{Line: 3, Character: 14}}, + expected: false, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + assert.Equal(t, tt.expected, lsp.PositionInRange(tt.pos, tt.r)) + }) + } +} + +func TestBuildResourceURL(t *testing.T) { + host := "https://my-workspace.databricks.com" + + tests := []struct { + resourceType string + id string + expected string + }{ + {"jobs", "123", host + "/jobs/123"}, + {"pipelines", "abc-def", host + "/pipelines/abc-def"}, + {"dashboards", "d1", host + "/dashboardsv3/d1/published"}, + {"model_serving_endpoints", "ep1", host + "/ml/endpoints/ep1"}, + {"experiments", "exp1", host + "/ml/experiments/exp1"}, + {"models", "m1", host + "/ml/models/m1"}, + {"clusters", "c1", host + "/compute/clusters/c1"}, + {"apps", "a1", host + "/apps/a1"}, + {"alerts", "al1", host + "/sql/alerts-v2/al1"}, + {"sql_warehouses", "sw1", host + "/sql/warehouses/sw1"}, + {"quality_monitors", "qm1", host + "/explore/data/qm1"}, + {"secret_scopes", "ss1", host + "/secrets/scopes/ss1"}, + {"unknown_type", "x1", host}, + } + + for _, tt := range tests { + t.Run(tt.resourceType, func(t *testing.T) { + assert.Equal(t, tt.expected, lsp.BuildResourceURL(host, tt.resourceType, tt.id)) + }) + } +} + +func TestBuildResourceURLEmptyInputs(t *testing.T) { + assert.Equal(t, "", lsp.BuildResourceURL("", "jobs", "123")) + assert.Equal(t, "", lsp.BuildResourceURL("https://host.com", "jobs", "")) +} + +func TestBuildResourceURLTrailingSlash(t *testing.T) { + assert.Equal(t, "https://host.com/jobs/123", lsp.BuildResourceURL("https://host.com/", "jobs", "123")) +} + +func TestLoadWorkspaceHost(t *testing.T) 
{ + yaml := ` +workspace: + host: "https://my-workspace.databricks.com" +` + v, err := yamlloader.LoadYAML("test.yml", strings.NewReader(yaml)) + require.NoError(t, err) + + assert.Equal(t, "https://my-workspace.databricks.com", lsp.LoadWorkspaceHost(v)) +} + +func TestLoadWorkspaceHostWithInterpolation(t *testing.T) { + yaml := ` +workspace: + host: "${var.host}" +` + v, err := yamlloader.LoadYAML("test.yml", strings.NewReader(yaml)) + require.NoError(t, err) + + assert.Equal(t, "", lsp.LoadWorkspaceHost(v)) +} + +func TestLoadWorkspaceHostMissing(t *testing.T) { + yaml := ` +bundle: + name: "test" +` + v, err := yamlloader.LoadYAML("test.yml", strings.NewReader(yaml)) + require.NoError(t, err) + + assert.Equal(t, "", lsp.LoadWorkspaceHost(v)) +} + +func TestLoadTarget(t *testing.T) { + tests := []struct { + name string + yaml string + expected string + }{ + { + name: "default target marked", + yaml: ` +targets: + dev: + default: true + prod: + workspace: + host: "https://prod.databricks.com" +`, + expected: "dev", + }, + { + name: "no default returns first", + yaml: ` +targets: + staging: + workspace: + host: "https://staging.databricks.com" + prod: + workspace: + host: "https://prod.databricks.com" +`, + expected: "staging", + }, + { + name: "no targets section", + yaml: ` +bundle: + name: "test" +`, + expected: "default", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + v, err := yamlloader.LoadYAML("test.yml", strings.NewReader(tt.yaml)) + require.NoError(t, err) + assert.Equal(t, tt.expected, lsp.LoadTarget(v)) + }) + } +} + +func TestURIToPathRoundTrip(t *testing.T) { + // Test that PathToURI and URIToPath are inverses of each other. 
+ tmpDir := t.TempDir() + path := filepath.Join(tmpDir, "databricks.yml") + uri := lsp.PathToURI(path) + assert.Equal(t, path, lsp.URIToPath(uri)) +} + +func TestDocumentStoreOpenGetClose(t *testing.T) { + store := lsp.NewDocumentStore() + + assert.Nil(t, store.Get("file:///test.yml")) + + store.Open("file:///test.yml", 1, "key: value") + doc := store.Get("file:///test.yml") + require.NotNil(t, doc) + assert.Equal(t, 1, doc.Version) + assert.Equal(t, "key: value", doc.Content) + assert.True(t, doc.Value.IsValid()) + + store.Close("file:///test.yml") + assert.Nil(t, store.Get("file:///test.yml")) +} + +func TestDocumentStoreChange(t *testing.T) { + store := lsp.NewDocumentStore() + + store.Open("file:///test.yml", 1, "key: value") + store.Change("file:///test.yml", 2, "key: updated") + + doc := store.Get("file:///test.yml") + require.NotNil(t, doc) + assert.Equal(t, 2, doc.Version) + assert.Equal(t, "key: updated", doc.Content) + assert.True(t, doc.Value.IsValid()) +} + +func TestDocumentStoreParseInvalidYAML(t *testing.T) { + store := lsp.NewDocumentStore() + store.Open("file:///bad.yml", 1, "{{{{invalid yaml") + doc := store.Get("file:///bad.yml") + require.NotNil(t, doc) + assert.False(t, doc.Value.IsValid()) +} + +func TestLoadResourceStateDirectEngine(t *testing.T) { + tmpDir := t.TempDir() + stateDir := filepath.Join(tmpDir, ".databricks", "bundle", "dev") + require.NoError(t, os.MkdirAll(stateDir, 0o755)) + + stateJSON := `{ + "state_version": 1, + "state": { + "resources.jobs.etl": {"__id__": "111"}, + "resources.pipelines.dlt": {"__id__": "222"} + } + }` + require.NoError(t, os.WriteFile(filepath.Join(stateDir, "resources.json"), []byte(stateJSON), 0o644)) + + result := lsp.LoadResourceState(tmpDir, "dev") + assert.Equal(t, "111", result["resources.jobs.etl"].ID) + assert.Equal(t, "222", result["resources.pipelines.dlt"].ID) +} + +func TestLoadResourceStateNoState(t *testing.T) { + result := lsp.LoadResourceState("/nonexistent", "dev") + assert.Empty(t, 
result) +} + +func TestLoadAllTargets(t *testing.T) { + yaml := ` +targets: + dev: + default: true + staging: + workspace: + host: "https://staging.databricks.com" + prod: + workspace: + host: "https://prod.databricks.com" +` + v, err := yamlloader.LoadYAML("test.yml", strings.NewReader(yaml)) + require.NoError(t, err) + + targets := lsp.LoadAllTargets(v) + require.Len(t, targets, 3) + assert.Equal(t, "dev", targets[0]) + assert.Equal(t, "staging", targets[1]) + assert.Equal(t, "prod", targets[2]) +} + +func TestLoadAllTargetsNoTargets(t *testing.T) { + yaml := ` +bundle: + name: "test" +` + v, err := yamlloader.LoadYAML("test.yml", strings.NewReader(yaml)) + require.NoError(t, err) + + targets := lsp.LoadAllTargets(v) + assert.Nil(t, targets) +} + +func TestLoadTargetWorkspaceHost(t *testing.T) { + yaml := ` +workspace: + host: "https://default.databricks.com" +targets: + dev: + workspace: + host: "https://dev.databricks.com" + prod: + workspace: + host: "https://prod.databricks.com" + staging: {} +` + v, err := yamlloader.LoadYAML("test.yml", strings.NewReader(yaml)) + require.NoError(t, err) + + assert.Equal(t, "https://dev.databricks.com", lsp.LoadTargetWorkspaceHost(v, "dev")) + assert.Equal(t, "https://prod.databricks.com", lsp.LoadTargetWorkspaceHost(v, "prod")) + // staging has no override, falls back to root. + assert.Equal(t, "https://default.databricks.com", lsp.LoadTargetWorkspaceHost(v, "staging")) +} + +func TestPathToURI(t *testing.T) { + assert.Equal(t, "file:///home/user/file.yml", lsp.PathToURI("/home/user/file.yml")) +} diff --git a/bundle/lsp/state.go b/bundle/lsp/state.go new file mode 100644 index 0000000000..097feabc9d --- /dev/null +++ b/bundle/lsp/state.go @@ -0,0 +1,159 @@ +package lsp + +import ( + "encoding/json" + "os" + "path/filepath" + "strings" + + "github.com/databricks/cli/libs/dyn" +) + +// ResourceInfo holds deployment info for a resource. 
+type ResourceInfo struct {
+	ID   string
+	URL  string
+	Name string
+}
+
+// LoadResourceState reads the direct engine deployment state to get resource IDs.
+// Returns a map from resource path (e.g., "resources.jobs.my_job") to ResourceInfo.
+// The result is never nil; it is empty when no state file exists or it is unreadable.
+func LoadResourceState(bundleRoot, target string) map[string]ResourceInfo {
+	directPath := filepath.Join(bundleRoot, ".databricks", "bundle", target, "resources.json")
+	if info := loadDirectState(directPath); info != nil {
+		return info
+	}
+	return make(map[string]ResourceInfo)
+}
+
+// directState mirrors the layout of the direct engine's resources.json file.
+type directState struct {
+	State map[string]directResourceEntry `json:"state"`
+}
+
+type directResourceEntry struct {
+	ID string `json:"__id__"`
+}
+
+// loadDirectState parses a direct engine state file. It returns nil when the
+// file is missing or malformed so the caller can substitute an empty map.
+func loadDirectState(path string) map[string]ResourceInfo {
+	data, err := os.ReadFile(path)
+	if err != nil {
+		return nil
+	}
+
+	var state directState
+	if err := json.Unmarshal(data, &state); err != nil {
+		return nil
+	}
+
+	result := make(map[string]ResourceInfo, len(state.State))
+	for key, entry := range state.State {
+		result[key] = ResourceInfo{ID: entry.ID}
+	}
+	return result
+}
+
+// LoadWorkspaceHost extracts the workspace host from the bundle YAML config.
+// Unresolved interpolations like ${var.host} are treated as absent.
+func LoadWorkspaceHost(v dyn.Value) string {
+	host := v.Get("workspace").Get("host")
+	s, ok := host.AsString()
+	if !ok {
+		return ""
+	}
+	// Skip interpolation references like ${var.host}.
+	if strings.Contains(s, "${") {
+		return ""
+	}
+	return s
+}
+
+// LoadTarget extracts the default target name from the bundle config:
+// the target marked default: true, else the first declared target,
+// else the literal "default".
+func LoadTarget(v dyn.Value) string {
+	targets := v.Get("targets")
+	if targets.Kind() != dyn.KindMap {
+		return "default"
+	}
+	tm, ok := targets.AsMap()
+	if !ok {
+		return "default"
+	}
+
+	// Look for a target with default: true.
+	for _, pair := range tm.Pairs() {
+		d := pair.Value.Get("default")
+		if b, ok := d.AsBool(); ok && b {
+			return pair.Key.MustString()
+		}
+	}
+
+	// Return first target if none marked as default.
+	if tm.Len() > 0 {
+		return tm.Pairs()[0].Key.MustString()
+	}
+	return "default"
+}
+
+// LoadAllTargets returns all target names from the parsed config, in
+// declaration order. Returns nil when there is no targets mapping.
+func LoadAllTargets(v dyn.Value) []string {
+	targets := v.Get("targets")
+	if targets.Kind() != dyn.KindMap {
+		return nil
+	}
+	tm, ok := targets.AsMap()
+	if !ok {
+		return nil
+	}
+	var names []string
+	for _, pair := range tm.Pairs() {
+		names = append(names, pair.Key.MustString())
+	}
+	return names
+}
+
+// LoadTargetWorkspaceHost extracts workspace host from a specific target override,
+// falling back to the root-level workspace host.
+func LoadTargetWorkspaceHost(v dyn.Value, target string) string {
+	// Try target-specific override first. An empty or interpolated override
+	// does not count as a usable host; fall back to the root level instead.
+	host := v.Get("targets").Get(target).Get("workspace").Get("host")
+	if s, ok := host.AsString(); ok && s != "" && !strings.Contains(s, "${") {
+		return s
+	}
+	// Fall back to root-level.
+	return LoadWorkspaceHost(v)
+}
+
+// BuildResourceURL constructs the workspace URL for a resource. It returns ""
+// when host or id is empty, and the bare host for unknown resource types.
+func BuildResourceURL(host, resourceType, id string) string {
+	if host == "" || id == "" {
+		return ""
+	}
+	host = strings.TrimRight(host, "/")
+
+	switch resourceType {
+	case "jobs":
+		return host + "/jobs/" + id
+	case "pipelines":
+		return host + "/pipelines/" + id
+	case "dashboards":
+		return host + "/dashboardsv3/" + id + "/published"
+	case "model_serving_endpoints":
+		return host + "/ml/endpoints/" + id
+	case "experiments":
+		return host + "/ml/experiments/" + id
+	case "models":
+		return host + "/ml/models/" + id
+	case "clusters":
+		return host + "/compute/clusters/" + id
+	case "apps":
+		return host + "/apps/" + id
+	case "alerts":
+		return host + "/sql/alerts-v2/" + id
+	case "sql_warehouses":
+		return host + "/sql/warehouses/" + id
+	case "quality_monitors":
+		return host + "/explore/data/" + id
+	case "secret_scopes":
+		return host + "/secrets/scopes/" + id
+	default:
+		return host
+	}
+}
diff --git a/cmd/experimental/experimental.go b/cmd/experimental/experimental.go
index eb3b7814e1..1a843b7249 100644
--- a/cmd/experimental/experimental.go
+++ b/cmd/experimental/experimental.go
@@ -21,6 +21,7 @@ development. They may change or be removed in future versions without notice.`,
 	}
 
 	cmd.AddCommand(aitoolscmd.NewAitoolsCmd())
+	cmd.AddCommand(newBundleLspCommand())
 
 	return cmd
 }
diff --git a/cmd/experimental/lsp.go b/cmd/experimental/lsp.go
new file mode 100644
index 0000000000..b242da96cd
--- /dev/null
+++ b/cmd/experimental/lsp.go
@@ -0,0 +1,28 @@
+package experimental
+
+import (
+	"github.com/databricks/cli/bundle/lsp"
+	"github.com/databricks/cli/cmd/root"
+	"github.com/spf13/cobra"
+)
+
+// newBundleLspCommand builds the experimental `bundle-lsp` subcommand, which
+// starts the bundle Language Server and blocks until the context is canceled
+// or the server exits.
+// NOTE(review): the transport (stdio vs. socket) is decided inside
+// lsp.Server.Run and is not visible here — confirm and consider stating it
+// in the command help text.
+func newBundleLspCommand() *cobra.Command {
+	var target string
+
+	cmd := &cobra.Command{
+		Use:   "bundle-lsp",
+		Short: "Start a Language Server Protocol server for bundle files",
+		Args:  root.NoArgs,
+		RunE: func(cmd *cobra.Command, _ []string) error {
+			srv := lsp.NewServer()
+			// An explicit --target overrides the server's default target
+			// detection; otherwise the server picks one itself.
+			if target != "" {
+				srv.SetTarget(target)
+			}
+			return srv.Run(cmd.Context())
+		},
+	}
+
+	cmd.Flags().StringVar(&target, "target", "", "Bundle target to use for resource resolution")
+
+	return cmd
+}
diff --git a/go.mod b/go.mod
index 4cf2047691..24ec07b6c2 100644
--- a/go.mod
+++ b/go.mod
@@ -48,6 +48,8 @@ require gopkg.in/yaml.v3 v3.0.1 // indirect
 
 // Dependencies for experimental SSH commands
 require github.com/tailscale/hujson v0.0.0-20250605163823-992244df8c5a // BSD-3-Clause
 
+require github.com/creachadair/jrpc2 v1.3.5
+
 require (
 	cloud.google.com/go/auth v0.18.1 // indirect
 	cloud.google.com/go/auth/oauth2adapt v0.2.8 // indirect
@@ -68,6 +70,7 @@ require (
 	github.com/clipperhouse/stringish v0.1.1 // indirect
 	github.com/clipperhouse/uax29/v2 v2.5.0 // indirect
 	github.com/cloudflare/circl v1.6.3 // indirect
+	github.com/creachadair/mds v0.26.1 // indirect
 	github.com/davecgh/go-spew v1.1.1 // indirect
 	github.com/dustin/go-humanize v1.0.1 // indirect
 	github.com/erikgeiser/coninput v0.0.0-20211004153227-1c3628e74d0f // indirect
diff --git a/go.sum b/go.sum
index 
affad9a015..0b40af3a13 100644 --- a/go.sum +++ b/go.sum @@ -71,6 +71,10 @@ github.com/clipperhouse/uax29/v2 v2.5.0/go.mod h1:Wn1g7MK6OoeDT0vL+Q0SQLDz/KpfsV github.com/cloudflare/circl v1.6.3 h1:9GPOhQGF9MCYUeXyMYlqTR6a5gTrgR/fBLXvUgtVcg8= github.com/cloudflare/circl v1.6.3/go.mod h1:2eXP6Qfat4O/Yhh8BznvKnJ+uzEoTQ6jVKJRn81BiS4= github.com/cpuguy83/go-md2man/v2 v2.0.6/go.mod h1:oOW0eioCTA6cOiMLiUPZOpcVxMig6NIQQ7OS05n1F4g= +github.com/creachadair/jrpc2 v1.3.5 h1:onJko+1u6xoiRph3xwWmfNISR91teCRhbJwSyS9Svzo= +github.com/creachadair/jrpc2 v1.3.5/go.mod h1:YXDmS53AavsiytbAwskrczJPcVHvKC9GoyWzwfSQXoE= +github.com/creachadair/mds v0.26.1 h1:CQG8f4cueHX/c20q5Sy/Ubk8Bvy+aRzVgbpxVieMBAs= +github.com/creachadair/mds v0.26.1/go.mod h1:dMBTCSy3iS3dwh4Rb1zxeZz2d7K8+N24GCTsayWtQRI= github.com/creack/pty v1.1.24 h1:bJrF4RRfyJnbTJqzRLHzcGaZK1NeM5kTC9jGgovnR1s= github.com/creack/pty v1.1.24/go.mod h1:08sCNb52WyoAwi2QDyzUCTgcvVFhUzewun7wtTfvcwE= github.com/cyphar/filepath-securejoin v0.4.1 h1:JyxxyPEaktOD+GAnqIqTf9A8tHyAG22rowi7HkoSU1s=