Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
8 changes: 4 additions & 4 deletions acceptance/bundle/paths/fallback_metric/output.txt
Original file line number Diff line number Diff line change
Expand Up @@ -23,11 +23,11 @@ Deployment complete!
"value": false
},
{
"key": "skip_artifact_cleanup",
Copy link
Copy Markdown
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

The order changed because we now set the wheel wrapper earlier than we do the artifact cleanup

"key": "python_wheel_wrapper_is_set",
"value": false
},
{
"key": "python_wheel_wrapper_is_set",
"key": "skip_artifact_cleanup",
"value": false
},
{
Expand Down Expand Up @@ -57,11 +57,11 @@ Deployment complete!
"value": true
},
{
"key": "skip_artifact_cleanup",
"key": "python_wheel_wrapper_is_set",
"value": false
},
{
"key": "python_wheel_wrapper_is_set",
"key": "skip_artifact_cleanup",
"value": false
},
{
Expand Down
19 changes: 19 additions & 0 deletions acceptance/bundle/plan/no_upload/databricks.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,19 @@
bundle:
Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

For a more comprehensive test, can we add "bundle plan" + request recording to the default-python local and integration test variants and check that there are no side effects there?

Copy link
Copy Markdown
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I tried to include bundle plan in the default-python template tests, but it turned out to be non-trivial: these are quite complex tests, and adding bundle plan produced additional output such as bundle.tf.json and terraform.lock files. I will follow up on those tests separately so as not to block this PR.

name: plan-no-upload

resources:
jobs:
my_job:
name: my-job
tasks:
- task_key: task1
spark_python_task:
python_file: "./my_script.py"
environment_key: "env"

environments:
- environment_key: "env"
spec:
client: "1"
dependencies:
- "*.whl"
1 change: 1 addition & 0 deletions acceptance/bundle/plan/no_upload/my_script.py
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
print("Hello, World!")
5 changes: 5 additions & 0 deletions acceptance/bundle/plan/no_upload/out.test.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
Local = true
Cloud = false

[EnvMatrix]
DATABRICKS_CLI_DEPLOYMENT = ["terraform", "direct-exp"]
5 changes: 5 additions & 0 deletions acceptance/bundle/plan/no_upload/output.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@

>>> [CLI] bundle plan
create jobs.my_job

>>> jq -s .[] | select(.method != "GET") out.requests.txt
6 changes: 6 additions & 0 deletions acceptance/bundle/plan/no_upload/script
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
trace $CLI bundle plan

# Expect no non-GET requests
trace jq -s '.[] | select(.method != "GET")' out.requests.txt

rm out.requests.txt
3 changes: 3 additions & 0 deletions acceptance/bundle/plan/no_upload/test.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
Local = true
Cloud = false
RecordRequests = true
Empty file.
Original file line number Diff line number Diff line change
Expand Up @@ -29,18 +29,3 @@
"method": "GET",
"path": "/api/2.0/workspace/get-status"
}
{
"method": "POST",
"path": "/api/2.0/workspace/delete",
"body": {
"path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/artifacts/.internal",
"recursive": true
}
}
{
"method": "POST",
"path": "/api/2.0/workspace/mkdirs",
"body": {
"path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/artifacts/.internal"
}
}
8 changes: 4 additions & 4 deletions acceptance/bundle/telemetry/deploy-compute-type/output.txt
Original file line number Diff line number Diff line change
Expand Up @@ -22,11 +22,11 @@ Deployment complete!
"value": false
},
{
"key": "skip_artifact_cleanup",
"key": "python_wheel_wrapper_is_set",
"value": false
},
{
"key": "python_wheel_wrapper_is_set",
"key": "skip_artifact_cleanup",
"value": false
},
{
Expand All @@ -52,11 +52,11 @@ Deployment complete!
"value": false
},
{
"key": "skip_artifact_cleanup",
"key": "python_wheel_wrapper_is_set",
"value": false
},
{
"key": "python_wheel_wrapper_is_set",
"key": "skip_artifact_cleanup",
"value": false
},
{
Expand Down
4 changes: 2 additions & 2 deletions acceptance/bundle/telemetry/deploy-experimental/output.txt
Original file line number Diff line number Diff line change
Expand Up @@ -21,11 +21,11 @@ Deployment complete!
"value": false
},
{
"key": "skip_artifact_cleanup",
"key": "python_wheel_wrapper_is_set",
"value": false
},
{
"key": "python_wheel_wrapper_is_set",
"key": "skip_artifact_cleanup",
"value": false
},
{
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,11 +17,11 @@ Deployment complete!
"value": true
},
{
"key": "skip_artifact_cleanup",
"key": "python_wheel_wrapper_is_set",
"value": false
},
{
"key": "python_wheel_wrapper_is_set",
"key": "skip_artifact_cleanup",
"value": false
},
{
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,11 +17,11 @@ Deployment complete!
"value": true
},
{
"key": "skip_artifact_cleanup",
"key": "python_wheel_wrapper_is_set",
"value": false
},
{
"key": "python_wheel_wrapper_is_set",
"key": "skip_artifact_cleanup",
"value": false
},
{
Expand Down
8 changes: 4 additions & 4 deletions acceptance/bundle/telemetry/deploy-whl-artifacts/output.txt
Original file line number Diff line number Diff line change
Expand Up @@ -25,11 +25,11 @@ Deployment complete!
"value": false
},
{
"key": "skip_artifact_cleanup",
"key": "python_wheel_wrapper_is_set",
"value": false
},
{
"key": "python_wheel_wrapper_is_set",
"key": "skip_artifact_cleanup",
"value": false
},
{
Expand Down Expand Up @@ -61,11 +61,11 @@ Deployment complete!
"value": true
},
{
"key": "skip_artifact_cleanup",
"key": "python_wheel_wrapper_is_set",
"value": true
},
{
"key": "python_wheel_wrapper_is_set",
"key": "skip_artifact_cleanup",
"value": true
},
{
Expand Down
4 changes: 2 additions & 2 deletions acceptance/bundle/telemetry/deploy/out.telemetry.txt
Original file line number Diff line number Diff line change
Expand Up @@ -51,11 +51,11 @@
"value": false
},
{
"key": "skip_artifact_cleanup",
"key": "python_wheel_wrapper_is_set",
"value": false
},
{
"key": "python_wheel_wrapper_is_set",
"key": "skip_artifact_cleanup",
"value": false
},
{
Expand Down
18 changes: 16 additions & 2 deletions bundle/libraries/filer.go
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@ package libraries
import (
"context"
"path"
"strings"

"github.com/databricks/cli/bundle"
"github.com/databricks/cli/libs/diag"
Expand All @@ -24,6 +25,7 @@ func GetFilerForLibraries(ctx context.Context, b *bundle.Bundle) (filer.Filer, s
}

uploadPath := path.Join(b.Config.Workspace.ArtifactPath, InternalDirName)
uploadPath = ensureWorkspaceOrVolumesPrefix(uploadPath)
Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Could you add a comment explaining why this is needed? It is unclear why the prefix would be missing now.

Copy link
Copy Markdown
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Added, it's not that we miss it now, I just moved it from the other place to here, but I added the comment to the function


switch {
case IsVolumesPath(artifactPath):
Expand All @@ -40,11 +42,23 @@ func GetFilerForLibrariesCleanup(ctx context.Context, b *bundle.Bundle) (filer.F
return nil, "", diag.Errorf("remote artifact path not configured")
}

artifactPath = ensureWorkspaceOrVolumesPrefix(artifactPath)

switch {
case IsVolumesPath(artifactPath):
return filerForVolume(b, b.Config.Workspace.ArtifactPath)
return filerForVolume(b, artifactPath)

default:
return filerForWorkspace(b, b.Config.Workspace.ArtifactPath)
return filerForWorkspace(b, artifactPath)
}
}

// If the remote path does not start with /Workspace or /Volumes, prepend /Workspace
// Some of the bundle configuration might use workspace paths like /Users or /Shared.
// While this is still a valid workspace path, the backend converts it to /Workspace/Users or /Workspace/Shared.
func ensureWorkspaceOrVolumesPrefix(path string) string {
if !strings.HasPrefix(path, "/Workspace") && !strings.HasPrefix(path, "/Volumes") {
path = "/Workspace" + path
}
return path
}
4 changes: 2 additions & 2 deletions bundle/libraries/filer_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@ func TestGetFilerForLibrariesValidWsfs(t *testing.T) {

client, uploadPath, diags := GetFilerForLibraries(context.Background(), b)
require.NoError(t, diags.Error())
assert.Equal(t, "/foo/bar/artifacts/.internal", uploadPath)
assert.Equal(t, "/Workspace/foo/bar/artifacts/.internal", uploadPath)

assert.IsType(t, &filer.WorkspaceFilesClient{}, client)
}
Expand All @@ -48,7 +48,7 @@ func TestGetFilerForLibrariesCleanupValidWsfs(t *testing.T) {

client, uploadPath, diags := GetFilerForLibrariesCleanup(context.Background(), b)
require.NoError(t, diags.Error())
assert.Equal(t, "/foo/bar/artifacts", uploadPath)
assert.Equal(t, "/Workspace/foo/bar/artifacts", uploadPath)

assert.IsType(t, &filer.WorkspaceFilesClient{}, client)
}
Expand Down
Loading
Loading