Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions NEXT_CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@
### CLI

### Bundles
* Add support for configurable catalog/schema for dashboards ([#4130](https://github.com/databricks/cli/pull/4130))
* engine/direct: Fix dependency-ordered deletion by persisting depends_on in state ([#4105](https://github.com/databricks/cli/pull/4105))
* Pass SYSTEM_ACCESSTOKEN from env to the Terraform provider ([#4135](https://github.com/databricks/cli/pull/4135))

Expand Down
2 changes: 2 additions & 0 deletions acceptance/bundle/refschema/out.fields.txt
Original file line number Diff line number Diff line change
Expand Up @@ -424,6 +424,8 @@ resources.clusters.*.permissions.permissions[*].service_principal_name string AL
resources.clusters.*.permissions.permissions[*].user_name string ALL
resources.dashboards.*.create_time string ALL
resources.dashboards.*.dashboard_id string ALL
resources.dashboards.*.dataset_catalog string ALL
resources.dashboards.*.dataset_schema string ALL
resources.dashboards.*.display_name string ALL
resources.dashboards.*.embed_credentials bool ALL
resources.dashboards.*.etag string ALL
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,17 @@
#
# Acceptance test for deploying dashboards with dataset_catalog and dataset_schema.
# These fields override the catalog/schema for all datasets in the dashboard.
#
bundle:
name: deploy-dashboard-dataset-test-$UNIQUE_NAME

resources:
dashboards:
dashboard1:
display_name: $DASHBOARD_DISPLAY_NAME
warehouse_id: $TEST_DEFAULT_WAREHOUSE_ID
embed_credentials: true
dataset_catalog: main
dataset_schema: default
file_path: "sample-dashboard.lvdash.json"
parent_path: /Users/$CURRENT_USER_NAME
Original file line number Diff line number Diff line change
@@ -0,0 +1,45 @@
{
"plan_version": 1,
"cli_version": "[DEV_VERSION]",
"plan": {
"resources.dashboards.dashboard1": {
"action": "skip",
"remote_state": {
"create_time": "[TIMESTAMP]",
"dashboard_id": "[DASHBOARD_ID]",
"display_name": "test bundle-deploy-dashboard-dataset [UUID]",
"embed_credentials": true,
"etag": [ETAG],
"lifecycle_state": "ACTIVE",
"parent_path": "/Workspace/Users/[USERNAME]",
"path": "/Users/[USERNAME]/test bundle-deploy-dashboard-dataset [UUID].lvdash.json",
"serialized_dashboard": "[SERIALIZED_FIXTURE]",
"update_time": "[TIMESTAMP]",
"warehouse_id": "[TEST_DEFAULT_WAREHOUSE_ID]"
},
"changes": {
"local": {
"etag": {
"action": "skip",
"old": [ETAG]
}
},
"remote": {
"dataset_catalog": {
"action": "skip",
"old": "main"
},
"dataset_schema": {
"action": "skip",
"old": "default"
},
"serialized_dashboard": {
"action": "skip",
"old": "[SERIALIZED_FIXTURE_OLD]",
"new": "[SERIALIZED_FIXTURE_NEW]"
}
}
}
}
}
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
{
"plan_version": 1,
"cli_version": "[DEV_VERSION]",
"plan": {
"resources.dashboards.dashboard1": {
"action": "skip"
}
}
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
{
"method": "POST",
"path": "/api/2.0/lakeview/dashboards",
"q": {
"dataset_catalog": "main",
"dataset_schema": "default"
},
"body": {
"display_name": "test bundle-deploy-dashboard-dataset [UUID]",
"parent_path": "/Workspace/Users/[USERNAME]",
"serialized_dashboard": "{\n \"pages\": [\n {\n \"name\": \"test_page\",\n \"displayName\": \"Test Page\",\n \"pageType\": \"PAGE_TYPE_CANVAS\"\n }\n ],\n \"datasets\": [\n {\n \"name\": \"bf8f76f4\",\n \"displayName\": \"Test Dataset\",\n \"queryLines\": [\n \"SELECT 1\\n\"\n ],\n \"catalog\": \"foobar\",\n \"schema\": \"foobar\"\n }\n ]\n}\n",
"warehouse_id": "[TEST_DEFAULT_WAREHOUSE_ID]"
}
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
{
"method": "POST",
"path": "/api/2.0/lakeview/dashboards",
"q": {
"dataset_catalog": "main",
"dataset_schema": "default"
},
"body": {
"display_name": "test bundle-deploy-dashboard-dataset [UUID]",
"parent_path": "/Workspace/Users/[USERNAME]",
"serialized_dashboard": "{\n \"pages\": [\n {\n \"name\": \"test_page\",\n \"displayName\": \"Test Page\",\n \"pageType\": \"PAGE_TYPE_CANVAS\"\n }\n ],\n \"datasets\": [\n {\n \"name\": \"bf8f76f4\",\n \"displayName\": \"Test Dataset\",\n \"queryLines\": [\n \"SELECT 1\\n\"\n ],\n \"catalog\": \"foobar\",\n \"schema\": \"foobar\"\n }\n ]\n}\n",
"warehouse_id": "[TEST_DEFAULT_WAREHOUSE_ID]"
}
}

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

Original file line number Diff line number Diff line change
@@ -0,0 +1,30 @@

>>> [CLI] bundle deploy
Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/deploy-dashboard-dataset-test-[UNIQUE_NAME]/default/files...
Deploying resources...
Updating deployment state...
Deployment complete!

>>> [CLI] lakeview get [DASHBOARD_ID]
{
"lifecycle_state": "ACTIVE",
"parent_path": "/Users/[USERNAME]",
"path": "/Users/[USERNAME]/test bundle-deploy-dashboard-dataset [UUID].lvdash.json"
}

>>> [CLI] lakeview get [DASHBOARD_ID]
{
"catalog": "main",
"schema": "default"
}

>>> [CLI] bundle plan -o json

>>> [CLI] bundle destroy --auto-approve
The following resources will be deleted:
delete resources.dashboards.dashboard1

All files and directories at the following location will be deleted: /Workspace/Users/[USERNAME]/.bundle/deploy-dashboard-dataset-test-[UNIQUE_NAME]/default

Deleting files...
Destroy complete!
Original file line number Diff line number Diff line change
@@ -0,0 +1,20 @@
{
"pages": [
{
"name": "test_page",
"displayName": "Test Page",
"pageType": "PAGE_TYPE_CANVAS"
}
],
"datasets": [
{
"name": "bf8f76f4",
"displayName": "Test Dataset",
"queryLines": [
"SELECT 1\n"
],
"catalog": "foobar",
"schema": "foobar"
}
]
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,49 @@
#!/bin/bash
# Acceptance test: deploy a dashboard with dataset_catalog/dataset_schema overrides,
# verify the overrides are sent to the Lakeview API and applied to the datasets,
# and confirm there is no drift immediately after deploy.

# Give the dashboard a unique display name so concurrent runs do not collide.
DASHBOARD_DISPLAY_NAME="test bundle-deploy-dashboard-dataset $(uuid)"
# Outside a real cloud environment, use a fixed warehouse ID and register it as
# an output replacement so recorded output stays stable.
if [ -z "$CLOUD_ENV" ]; then
export TEST_DEFAULT_WAREHOUSE_ID="warehouse-1234"
echo "warehouse-1234:TEST_DEFAULT_WAREHOUSE_ID" >> ACC_REPLS
fi

export DASHBOARD_DISPLAY_NAME
# Render the bundle configuration from the template with the env vars above.
envsubst < databricks.yml.tmpl > databricks.yml

# Always tear down deployed resources and drop the recorded requests file,
# even if an assertion below fails.
cleanup() {
trace $CLI bundle destroy --auto-approve
rm -f out.requests.txt
}
trap cleanup EXIT

trace $CLI bundle deploy
DASHBOARD_ID=$($CLI bundle summary --output json | jq -r '.resources.dashboards.dashboard1.id')

# Capture the dashboard ID as a replacement.
echo "$DASHBOARD_ID:DASHBOARD_ID" >> ACC_REPLS

trace $CLI lakeview get $DASHBOARD_ID | jq '{lifecycle_state, parent_path, path}'

# Verify that the serialized_dashboard datasets have the overridden catalog/schema values.
# The dataset_catalog and dataset_schema parameters should override the values in the datasets.
trace $CLI lakeview get $DASHBOARD_ID | jq '.serialized_dashboard | fromjson | .datasets[] | {catalog, schema}'

# Verify that there is no drift right after deploy.
trace $CLI bundle plan -o json > out.plan.$DATABRICKS_BUNDLE_ENGINE.json

# Modify the direct plan to replace "serialized_dashboard" with a fixture.
# It is normalized on the backend so we cannot compare reliably across local and cloud.
if [ "$DATABRICKS_BUNDLE_ENGINE" = "direct" ]; then
jq '.plan["resources.dashboards.dashboard1"].remote_state.serialized_dashboard |=
if . then "[SERIALIZED_FIXTURE]" else . end' \
out.plan.direct.json > out.plan.direct.json.tmp && mv out.plan.direct.json.tmp out.plan.direct.json
jq '.plan["resources.dashboards.dashboard1"].changes.remote.serialized_dashboard |=
if . then {"action": .action, "old": "[SERIALIZED_FIXTURE_OLD]", "new": "[SERIALIZED_FIXTURE_NEW]"} else . end' \
out.plan.direct.json > out.plan.direct.json.tmp && mv out.plan.direct.json.tmp out.plan.direct.json
fi

# Print API requests made to create the dashboard.
# This verifies that dataset_catalog and dataset_schema are passed to the API.
cat out.requests.txt | \
jq 'select(.method == "POST")' | \
jq 'select(.path | contains("/api/2.0/lakeview/dashboards"))' | \
jq 'select(.path | contains("/published") | not)' \
> out.post.requests.$DATABRICKS_BUNDLE_ENGINE.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,12 @@
Local = true
Cloud = true
RequiresWarehouse = true
RecordRequests = true
EnvVaryOutput = "DATABRICKS_BUNDLE_ENGINE"

[EnvMatrix]
DATABRICKS_BUNDLE_ENGINE = ["direct", "terraform"]

Ignore = [
"databricks.yml",
]
10 changes: 10 additions & 0 deletions bundle/config/resources/dashboard.go
Original file line number Diff line number Diff line change
Expand Up @@ -56,6 +56,16 @@ type DashboardConfig struct {
//
// Defaults to false if not set.
EmbedCredentials bool `json:"embed_credentials,omitempty"`

// DatasetCatalog sets the default catalog for all datasets in this dashboard.
// When set, this overrides the catalog specified in individual dataset definitions.
// This is a request only parameter and not returned by the GET API.
DatasetCatalog string `json:"dataset_catalog,omitempty"`

// DatasetSchema sets the default schema for all datasets in this dashboard.
// When set, this overrides the schema specified in individual dataset definitions.
// This is a request only parameter and not returned by the GET API.
DatasetSchema string `json:"dataset_schema,omitempty"`
}

func (c *DashboardConfig) UnmarshalJSON(b []byte) error {
Expand Down
23 changes: 23 additions & 0 deletions bundle/config/resources/dashboard_test.go
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
package resources

import (
"encoding/json"
"reflect"
"testing"

Expand Down Expand Up @@ -59,3 +60,25 @@ func TestDashboardConfigIsSupersetOfSDKDashboard(t *testing.T) {
}
}
}

// TestDashboardConfigWithDatasetCatalogSchema verifies that the optional
// dataset_catalog and dataset_schema fields are decoded from JSON together
// with the standard dashboard configuration fields.
func TestDashboardConfigWithDatasetCatalogSchema(t *testing.T) {
	payload := `{
"display_name": "Test Dashboard",
"warehouse_id": "test_warehouse_id",
"dataset_catalog": "main",
"dataset_schema": "default",
"embed_credentials": true,
"serialized_dashboard": "{\"key\": \"value\"}"
}`

	var cfg DashboardConfig
	assert.NoError(t, json.Unmarshal([]byte(payload), &cfg))

	// The catalog/schema overrides introduced by this change are populated.
	assert.Equal(t, "main", cfg.DatasetCatalog)
	assert.Equal(t, "default", cfg.DatasetSchema)

	// The pre-existing fields continue to decode unchanged.
	assert.Equal(t, "Test Dashboard", cfg.DisplayName)
	assert.Equal(t, "test_warehouse_id", cfg.WarehouseId)
	assert.True(t, cfg.EmbedCredentials)
	assert.Equal(t, `{"key": "value"}`, cfg.SerializedDashboard)
}
31 changes: 31 additions & 0 deletions bundle/deploy/terraform/tfdyn/convert_dashboard_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -109,3 +109,34 @@ func TestConvertDashboardSerializedDashboardAny(t *testing.T) {
// Assert that the "file_path" is dropped.
assert.NotContains(t, out.Dashboard["my_dashboard"], "file_path")
}

// TestConvertDashboardDatasetCatalogSchema verifies that the dataset
// catalog/schema overrides survive conversion of a dashboard resource into
// its Terraform representation.
func TestConvertDashboardDatasetCatalogSchema(t *testing.T) {
	dashboard := resources.Dashboard{
		DashboardConfig: resources.DashboardConfig{
			DisplayName:      "my dashboard",
			WarehouseId:      "f00dcafe",
			ParentPath:       "/some/path",
			DatasetCatalog:   "main",
			DatasetSchema:    "default",
			EmbedCredentials: true,
		},
	}

	// Convert the typed resource to its dynamic representation first.
	vin, err := convert.FromTyped(dashboard, dyn.NilValue)
	require.NoError(t, err)

	resourcesOut := schema.NewResources()
	require.NoError(t, dashboardConverter{}.Convert(context.Background(), "my_dashboard", vin, resourcesOut))

	// The converted resource must carry the dataset overrides alongside the
	// standard dashboard attributes.
	expected := map[string]any{
		"display_name":      "my dashboard",
		"warehouse_id":      "f00dcafe",
		"parent_path":       "/some/path",
		"dataset_catalog":   "main",
		"dataset_schema":    "default",
		"embed_credentials": true,
	}
	assert.Subset(t, resourcesOut.Dashboard["my_dashboard"], expected)
}
Loading