Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
25 commits
Select commit Hold shift + click to select a range
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 5 additions & 0 deletions acceptance/pipelines/deploy/auto-approve/databricks.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
# Bundle definition for the auto-approve acceptance test.
bundle:
  name: test-pipeline-auto-approve

# Include sibling YAML files (notably resources.yml, which the test script
# deletes mid-run to trigger a destructive redeploy).
include:
  - "./*.yml"
5 changes: 5 additions & 0 deletions acceptance/pipelines/deploy/auto-approve/out.test.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
Local = true
Cloud = false

[EnvMatrix]
DATABRICKS_CLI_DEPLOYMENT = ["terraform"]
36 changes: 36 additions & 0 deletions acceptance/pipelines/deploy/auto-approve/output.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,36 @@

>>> [PIPELINES] deploy
Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/test-pipeline-auto-approve/default/files...
Deploying resources...
Updating deployment state...
Deployment complete!

=== Remove resources from configuration to test auto-approve
>>> rm resources.yml

=== Try to redeploy without --auto-approve - should fail
>>> errcode [PIPELINES] deploy
Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/test-pipeline-auto-approve/default/files...

This action will result in the deletion or recreation of the following DLT Pipelines along with the
Streaming Tables (STs) and Materialized Views (MVs) managed by them. Recreating the Pipelines will
restore the defined STs and MVs through full refresh. Note that recreation is necessary when pipeline
properties such as the 'catalog' or 'storage' are changed:
delete pipeline foo
Error: the deployment requires destructive actions, but current console does not support prompting. Please specify --auto-approve if you would like to skip prompts and proceed


Exit code: 1

=== Redeploy with --auto-approve - should succeed
>>> [PIPELINES] deploy --auto-approve
Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/test-pipeline-auto-approve/default/files...

This action will result in the deletion or recreation of the following DLT Pipelines along with the
Streaming Tables (STs) and Materialized Views (MVs) managed by them. Recreating the Pipelines will
restore the defined STs and MVs through full refresh. Note that recreation is necessary when pipeline
properties such as the 'catalog' or 'storage' are changed:
delete pipeline foo
Deploying resources...
Updating deployment state...
Deployment complete!
4 changes: 4 additions & 0 deletions acceptance/pipelines/deploy/auto-approve/resources.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
# Pipeline resource that the auto-approve test removes mid-test to force a
# destructive (delete) action during redeploy.
resources:
  pipelines:
    foo:
      name: test-pipeline
10 changes: 10 additions & 0 deletions acceptance/pipelines/deploy/auto-approve/script
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
# Initial deploy creates the pipeline defined in resources.yml.
trace $PIPELINES deploy

title "Remove resources from configuration to test auto-approve"
trace rm resources.yml

title "Try to redeploy without --auto-approve - should fail"
# errcode records the non-zero exit status (compared in output.txt) —
# presumably so the failing command does not abort the script; TODO confirm
# against the acceptance-test harness.
trace errcode $PIPELINES deploy

title "Redeploy with --auto-approve - should succeed"
trace $PIPELINES deploy --auto-approve
1 change: 1 addition & 0 deletions acceptance/pipelines/deploy/auto-approve/test.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
EnvMatrix.DATABRICKS_CLI_DEPLOYMENT = ["terraform"]
10 changes: 10 additions & 0 deletions acceptance/pipelines/deploy/create-pipeline/databricks.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
# Bundle definition for the create-pipeline acceptance test: a single
# pipeline whose only library is the local SQL notebook nb.sql.
bundle:
  name: test-create-pipeline

resources:
  pipelines:
    foo:
      name: test-pipeline
      libraries:
        - notebook:
            path: ./nb.sql
2 changes: 2 additions & 0 deletions acceptance/pipelines/deploy/create-pipeline/nb.sql
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
-- Databricks notebook source
select 1
5 changes: 5 additions & 0 deletions acceptance/pipelines/deploy/create-pipeline/out.test.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
Local = true
Cloud = false

[EnvMatrix]
DATABRICKS_CLI_DEPLOYMENT = ["terraform"]
28 changes: 28 additions & 0 deletions acceptance/pipelines/deploy/create-pipeline/output.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,28 @@

>>> [PIPELINES] deploy
Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/test-create-pipeline/default/files...
Deploying resources...
Updating deployment state...
Deployment complete!

>>> [CLI] pipelines get [UUID]
{
"spec": {
"channel": "CURRENT",
"deployment": {
"kind": "BUNDLE",
"metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/test-create-pipeline/default/state/metadata.json"
},
"edition": "ADVANCED",
"id": "[UUID]",
"libraries": [
{
"notebook": {
"path": "/Workspace/Users/[USERNAME]/.bundle/test-create-pipeline/default/files/nb"
}
}
],
"name": "test-pipeline",
"storage": "dbfs:/pipelines/[UUID]"
}
}
3 changes: 3 additions & 0 deletions acceptance/pipelines/deploy/create-pipeline/script
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
# Deploy the bundle to create the pipeline.
trace $PIPELINES deploy
# Resolve the created pipeline's ID from the bundle summary JSON.
PIPELINE_ID=$($CLI bundle summary -o json | jq -r '.resources.pipelines.foo.id')
# Fetch the pipeline and print only its spec, compared against output.txt.
trace $CLI pipelines get "${PIPELINE_ID}" | jq "{spec}"
1 change: 1 addition & 0 deletions acceptance/pipelines/deploy/create-pipeline/test.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
EnvMatrix.DATABRICKS_CLI_DEPLOYMENT = ["terraform"]
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
bundle:
name: render-diagnostics-warning-pipeline

resources:
pipelines:
test-pipeline:
# This is an unknown property that should trigger a warning
unknown_property: "this_should_trigger_a_warning"
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
Local = true
Cloud = false

[EnvMatrix]
DATABRICKS_CLI_DEPLOYMENT = ["terraform"]
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@

>>> [PIPELINES] deploy
Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/render-diagnostics-warning-pipeline/default/files...
Deploying resources...
Updating deployment state...
Deployment complete!
Warning: unknown field: unknown_property
at resources.pipelines.test-pipeline
in databricks.yml:8:7

<EOL>
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
# Deploy a bundle containing an unknown pipeline property; the expected
# output (output.txt) asserts that the warning diagnostic is rendered.
trace $PIPELINES deploy
# print newline to comply with whitespace linter
printf "<EOL>\n"
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
EnvMatrix.DATABRICKS_CLI_DEPLOYMENT = ["terraform"]
14 changes: 14 additions & 0 deletions acceptance/pipelines/deploy/var-flag/databricks.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
# Bundle definition for the --var flag acceptance test: catalog and schema
# are variables with defaults that the test overrides on the command line.
bundle:
  name: test-pipeline-var-flag

variables:
  catalog:
    default: main
  schema:
    default: test_schema

resources:
  pipelines:
    foo:
      # Substituted from the --var=catalog=... / --var=schema=... flags.
      catalog: ${var.catalog}
      target: ${var.schema}
5 changes: 5 additions & 0 deletions acceptance/pipelines/deploy/var-flag/out.test.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
Local = true
Cloud = false

[EnvMatrix]
DATABRICKS_CLI_DEPLOYMENT = ["terraform"]
14 changes: 14 additions & 0 deletions acceptance/pipelines/deploy/var-flag/output.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@

=== Test --var flag usage
>>> [PIPELINES] deploy --var=catalog=custom_catalog --var=schema=custom_schema --auto-approve
Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/test-pipeline-var-flag/default/files...
Deploying resources...
Updating deployment state...
Deployment complete!

=== Verify: Check that variables were substituted correctly
>>> [CLI] pipelines get [UUID]
{
"catalog": "custom_catalog",
"target": "custom_schema"
}
7 changes: 7 additions & 0 deletions acceptance/pipelines/deploy/var-flag/script
Original file line number Diff line number Diff line change
@@ -0,0 +1,7 @@
title "Test --var flag usage"
# Override both variable defaults on the command line.
trace $PIPELINES deploy --var="catalog=custom_catalog" --var="schema=custom_schema" --auto-approve

title "Verify: Check that variables were substituted correctly"
# Resolve the deployed pipeline's ID from the bundle summary JSON.
PIPELINE_ID=$($CLI bundle summary -o json | jq -r '.resources.pipelines.foo.id')
trace $CLI pipelines get "${PIPELINE_ID}" | jq '.spec | {catalog: .catalog, target: .target}'
# Expected: catalog should be "custom_catalog" and target should contain "custom_schema"
1 change: 1 addition & 0 deletions acceptance/pipelines/deploy/var-flag/test.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
EnvMatrix.DATABRICKS_CLI_DEPLOYMENT = ["terraform"]
14 changes: 9 additions & 5 deletions acceptance/pipelines/install-pipelines-cli/output.txt
Original file line number Diff line number Diff line change
@@ -1,9 +1,9 @@

=== install pipelines cli
>>> errcode [CLI] install-pipelines-cli -d ./subdir
>>> [CLI] install-pipelines-cli -d ./subdir
pipelines successfully installed in directory "./subdir"

>>> errcode ./subdir/pipelines
>>> ./subdir/pipelines
Pipelines CLI

Usage:
Expand All @@ -12,6 +12,7 @@ Usage:
Available Commands:
auth Authentication related commands
completion Generate the autocompletion script for the specified shell
deploy Deploy pipelines
help Help about any command
init Initialize a new pipelines project

Expand All @@ -21,12 +22,13 @@ Flags:
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string project target to use (if applicable)
--var strings set values for variables defined in project config. Example: --var="foo=bar"
-v, --version version for pipelines

Use "pipelines [command] --help" for more information about a command.

=== pipelines already installed
>>> errcode [CLI] install-pipelines-cli -d ./subdir
>>> [CLI] install-pipelines-cli -d ./subdir
pipelines already installed in directory "./subdir"

=== pipelines file exists, should not overwrite
Expand All @@ -36,10 +38,10 @@ Error: cannot install pipelines CLI: "subdir/pipelines" already exists
Exit code: 1

=== databricks executable called with alias
>>> errcode ./subdir/notdatabricks install-pipelines-cli -d ./subdir
>>> ./subdir/notdatabricks install-pipelines-cli -d ./subdir
pipelines successfully installed in directory "./subdir"

>>> errcode ./subdir/pipelines
>>> ./subdir/pipelines
Pipelines CLI

Usage:
Expand All @@ -48,6 +50,7 @@ Usage:
Available Commands:
auth Authentication related commands
completion Generate the autocompletion script for the specified shell
deploy Deploy pipelines
help Help about any command
init Initialize a new pipelines project

Expand All @@ -57,6 +60,7 @@ Flags:
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string project target to use (if applicable)
--var strings set values for variables defined in project config. Example: --var="foo=bar"
-v, --version version for pipelines

Use "pipelines [command] --help" for more information about a command.
14 changes: 6 additions & 8 deletions acceptance/pipelines/install-pipelines-cli/script
Original file line number Diff line number Diff line change
Expand Up @@ -3,11 +3,11 @@ pipelines="$tmpdir/pipelines"
mkdir -p $tmpdir

title "install pipelines cli"
trace errcode $CLI install-pipelines-cli -d $tmpdir
trace errcode $pipelines
trace $CLI install-pipelines-cli -d $tmpdir
trace $pipelines

title "pipelines already installed"
trace errcode $CLI install-pipelines-cli -d $tmpdir
trace $CLI install-pipelines-cli -d $tmpdir
rm -f $pipelines

title "pipelines file exists, should not overwrite"
Expand All @@ -17,9 +17,7 @@ rm -f $pipelines

title "databricks executable called with alias"
cp $CLI $tmpdir/notdatabricks
trace errcode $tmpdir/notdatabricks install-pipelines-cli -d $tmpdir
trace errcode $pipelines
trace $tmpdir/notdatabricks install-pipelines-cli -d $tmpdir
trace $pipelines

# Cleanup
rm -f $tmpdir/notdatabricks $pipelines
rm -rf $tmpdir
rm -rf $tmpdir/notdatabricks $pipelines $tmpdir
3 changes: 3 additions & 0 deletions acceptance/pipelines/test.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
# All pipelines tests are local only
Local = true
Cloud = false
69 changes: 69 additions & 0 deletions cmd/pipelines/deploy.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,69 @@
// Copied from cmd/bundle/deploy.go and adapted for pipelines use.
package pipelines

import (
"context"

"github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/config/validate"
"github.com/databricks/cli/bundle/phases"
"github.com/databricks/cli/cmd/bundle/utils"
"github.com/databricks/cli/cmd/root"
"github.com/databricks/cli/libs/diag"
"github.com/databricks/cli/libs/sync"
"github.com/spf13/cobra"
)

// deployCommand returns the "deploy" command for the pipelines CLI.
// It loads the bundle configuration (including --var overrides), then runs
// the initialize, validate, build, and deploy phases, accumulating
// diagnostics across all of them and rendering the result at the end.
func deployCommand() *cobra.Command {
	cmd := &cobra.Command{
		Use:   "deploy",
		Short: "Deploy pipelines",
		Args:  root.NoArgs,
	}

	var forceLock bool
	var autoApprove bool
	var verbose bool
	cmd.Flags().BoolVar(&forceLock, "force-lock", false, "Force acquisition of deployment lock.")
	cmd.Flags().BoolVar(&autoApprove, "auto-approve", false, "Skip interactive approvals that might be required for deployment.")
	cmd.Flags().BoolVar(&verbose, "verbose", false, "Enable verbose output.")
	// Verbose flag currently only affects file sync output, it's used by the vscode extension
	cmd.Flags().MarkHidden("verbose")

	cmd.RunE = func(cmd *cobra.Command, args []string) error {
		ctx := cmd.Context()
		// Load the bundle configuration, applying any --var flag values.
		b, diags := utils.ConfigureBundleWithVariables(cmd)

		if !diags.HasError() {
			// Copy flag values onto the bundle before any phase runs; the
			// closure ignores its parameters and captures b directly.
			bundle.ApplyFunc(ctx, b, func(context.Context, *bundle.Bundle) diag.Diagnostics {
				b.Config.Bundle.Deployment.Lock.Force = forceLock
				b.AutoApprove = autoApprove
				return nil
			})

			// File-sync progress output is only wired up under --verbose;
			// otherwise outputHandler stays nil.
			var outputHandler sync.OutputHandler
			if verbose {
				outputHandler = func(ctx context.Context, c <-chan sync.Event) {
					sync.TextOutput(ctx, c, cmd.OutOrStdout())
				}
			}

			// Each subsequent phase runs only while no error has been
			// accumulated; non-error diagnostics (warnings) are carried
			// through to the final render.
			diags = diags.Extend(phases.Initialize(ctx, b))

			if !diags.HasError() {
				diags = diags.Extend(bundle.Apply(ctx, b, validate.FastValidate()))
			}

			if !diags.HasError() {
				diags = diags.Extend(phases.Build(ctx, b))
			}

			if !diags.HasError() {
				diags = diags.Extend(phases.Deploy(ctx, b, outputHandler))
			}
		}

		// Render all accumulated diagnostics and return the resulting error
		// (nil when nothing fatal occurred).
		return renderDiagnostics(cmd.OutOrStdout(), b, diags)
	}
	return cmd
}
2 changes: 2 additions & 0 deletions cmd/pipelines/pipelines.go
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,9 @@ import (

// New constructs the root command for the pipelines CLI: it registers the
// shared --var flag and attaches the init, deploy, and auth subcommands.
func New(ctx context.Context) *cobra.Command {
	rootCmd := root.New(ctx)
	initVariableFlag(rootCmd)
	for _, sub := range []*cobra.Command{
		initCommand(),
		deployCommand(),
		authCommand(),
	} {
		rootCmd.AddCommand(sub)
	}
	return rootCmd
}
Loading