From b4dcf34585e4f93bbcecae3a6aeffdca32c86007 Mon Sep 17 00:00:00 2001 From: Alex Ott Date: Thu, 24 Oct 2024 13:52:02 +0200 Subject: [PATCH] [Exporter] **Breaking changes** Use new query and alert resources instead of legacy ones This change replaces the legacy `databricks_sql_query` and `databricks_sql_alert` resources with the new `databricks_query` and `databricks_alert` resources. The `sql-queries` and `sql-alerts` services are renamed to `queries` and `alerts` accordingly. Other changes include: * Improve performance of the Lakeview dashboards scan by using a bigger page size * Generalize the `isMatchingCatalogAndSchema` implementation so it can be reused across resources whose attribute names differ * Generalize handling of the `/Workspace` prefix when emitting notebooks, workspace files, and directories. Illustrative sketches of the new listing pattern and of the generalized catalog/schema matcher follow the diff. --- docs/guides/experimental-exporter.md | 6 +- exporter/context.go | 4 +- exporter/exporter_test.go | 83 +++++++---- exporter/importables.go | 184 ++++++++++++++---------- exporter/importables_test.go | 1 + exporter/test-data/get-alert.json | 25 ++++ exporter/test-data/get-alerts.json | 29 ++++ exporter/test-data/get-queries.json | 17 +++ exporter/test-data/get-query.json | 13 ++ exporter/test-data/get-sql-alert.json | 83 ----------- exporter/test-data/get-sql-alerts.json | 84 ----------- exporter/test-data/get-sql-queries.json | 39 ----- exporter/util.go | 62 +++----- exporter/util_workspace.go | 26 ++-- sql/resource_query.go | 12 +- 15 files changed, 295 insertions(+), 373 deletions(-) create mode 100644 exporter/test-data/get-alert.json create mode 100644 exporter/test-data/get-alerts.json create mode 100644 exporter/test-data/get-queries.json create mode 100644 exporter/test-data/get-query.json delete mode 100644 exporter/test-data/get-sql-alert.json delete mode 100644 exporter/test-data/get-sql-alerts.json delete mode 100644 exporter/test-data/get-sql-queries.json diff --git a/docs/guides/experimental-exporter.md b/docs/guides/experimental-exporter.md index e709f4765d..6f41bf6154 100644 --- a/docs/guides/experimental-exporter.md +++ b/docs/guides/experimental-exporter.md @@ -111,6 +111,7 @@ Services are just logical groups of resources used for filtering and organizatio Please note that for services not marked with **listing**, we'll export resources only if they are referenced from other resources. * `access` - [databricks_permissions](../resources/permissions.md), [databricks_instance_profile](../resources/instance_profile.md), [databricks_ip_access_list](../resources/ip_access_list.md), [databricks_mws_permission_assignment](../resources/mws_permission_assignment.md) and [databricks_access_control_rule_set](../resources/access_control_rule_set.md). +* `alerts` - **listing** [databricks_alert](../resources/alert.md). * `compute` - **listing** [databricks_cluster](../resources/cluster.md). * `dashboards` - **listing** [databricks_dashboard](../resources/dashboard.md). * `directories` - **listing** [databricks_directory](../resources/directory.md). *Please note that directories aren't listed when running in the incremental mode! Only directories with updated notebooks will be emitted.* @@ -123,13 +124,12 @@ Services are just logical groups of resources used for filtering and organizatio * `notebooks` - **listing** [databricks_notebook](../resources/notebook.md). * `policies` - **listing** [databricks_cluster_policy](../resources/cluster_policy). * `pools` - **listing** [instance pools](../resources/instance_pool.md). +* `queries` - **listing** [databricks_query](../resources/query.md). 
* `repos` - **listing** [databricks_repo](../resources/repo.md) * `secrets` - **listing** [databricks_secret_scope](../resources/secret_scope.md) along with [keys](../resources/secret.md) and [ACLs](../resources/secret_acl.md). * `settings` - **listing** [databricks_notification_destination](../resources/notification_destination.md). -* `sql-alerts` - **listing** [databricks_sql_alert](../resources/sql_alert.md). -* `sql-dashboards` - **listing** [databricks_sql_dashboard](../resources/sql_dashboard.md) along with associated [databricks_sql_widget](../resources/sql_widget.md) and [databricks_sql_visualization](../resources/sql_visualization.md). +* `sql-dashboards` - **listing** Legacy [databricks_sql_dashboard](../resources/sql_dashboard.md) along with associated [databricks_sql_widget](../resources/sql_widget.md) and [databricks_sql_visualization](../resources/sql_visualization.md). * `sql-endpoints` - **listing** [databricks_sql_endpoint](../resources/sql_endpoint.md) along with [databricks_sql_global_config](../resources/sql_global_config.md). -* `sql-queries` - **listing** [databricks_sql_query](../resources/sql_query.md). * `storage` - only [databricks_dbfs_file](../resources/dbfs_file.md) and [databricks_file](../resources/file.md) referenced in other resources (libraries, init scripts, ...) will be downloaded locally and properly arranged into terraform state. * `uc-artifact-allowlist` - **listing** exports [databricks_artifact_allowlist](../resources/artifact_allowlist.md) resources for Unity Catalog Allow Lists attached to the current metastore. * `uc-catalogs` - **listing** [databricks_catalog](../resources/catalog.md) and [databricks_workspace_binding](../resources/workspace_binding.md) diff --git a/exporter/context.go b/exporter/context.go index 70ab462029..c7f2b18235 100644 --- a/exporter/context.go +++ b/exporter/context.go @@ -204,8 +204,8 @@ var goroutinesNumber = map[string]int{ "databricks_sql_dashboard": 3, "databricks_sql_widget": 4, "databricks_sql_visualization": 4, - "databricks_sql_query": 5, - "databricks_sql_alert": 2, + "databricks_query": 4, + "databricks_alert": 2, "databricks_permissions": 11, } diff --git a/exporter/exporter_test.go b/exporter/exporter_test.go index 2b528f56b8..9c2f64cf15 100644 --- a/exporter/exporter_test.go +++ b/exporter/exporter_test.go @@ -56,20 +56,6 @@ func getJSONObject(filename string) any { return obj } -func getJSONArray(filename string) any { - data, err := os.ReadFile(filename) - if err != nil { - panic(err) - } - var obj []any - err = json.Unmarshal(data, &obj) - if err != nil { - fmt.Printf("[ERROR] error! 
file=%s err=%v\n", filename, err) - fmt.Printf("[ERROR] data=%s\n", string(data)) - } - return obj -} - func workspaceConfKeysToURL() string { keys := make([]string, 0, len(workspaceConfKeys)) for k := range workspaceConfKeys { @@ -379,14 +365,14 @@ var emptySqlDashboards = qa.HTTPFixture{ var emptySqlQueries = qa.HTTPFixture{ Method: "GET", - Resource: "/api/2.0/preview/sql/queries?page_size=100", + Resource: "/api/2.0/sql/queries?page_size=100", Response: map[string]any{}, ReuseRequest: true, } var emptySqlAlerts = qa.HTTPFixture{ Method: "GET", - Resource: "/api/2.0/preview/sql/alerts", + Resource: "/api/2.0/sql/alerts?page_size=100", Response: []tfsql.AlertEntity{}, ReuseRequest: true, } @@ -447,7 +433,7 @@ var emptyMetastoreList = qa.HTTPFixture{ var emptyLakeviewList = qa.HTTPFixture{ Method: "GET", - Resource: "/api/2.0/lakeview/dashboards?page_size=100", + Resource: "/api/2.0/lakeview/dashboards?page_size=1000", Response: sdk_dashboards.ListDashboardsResponse{}, ReuseRequest: true, } @@ -1015,6 +1001,16 @@ func TestImportingClusters(t *testing.T) { }, }, }, + { + Method: "GET", + Resource: "/api/2.0/preview/scim/v2/Users?attributes=id%2CuserName&count=100&startIndex=1", + ReuseRequest: true, + Response: scim.UserList{ + Resources: []scim.User{ + {ID: "123", DisplayName: "test@test.com", UserName: "test@test.com"}, + }, + }, + }, }, func(ctx context.Context, client *common.DatabricksClient) { os.Setenv("EXPORTER_PARALLELISM_default", "1") @@ -1950,16 +1946,21 @@ func TestImportingSqlObjects(t *testing.T) { }, { Method: "GET", - Resource: "/api/2.0/preview/sql/queries?page_size=100", - Response: getJSONObject("test-data/get-sql-queries.json"), + Resource: "/api/2.0/sql/queries?page_size=100", + Response: getJSONObject("test-data/get-queries.json"), ReuseRequest: true, }, { Method: "GET", - Resource: "/api/2.0/preview/sql/queries/16c4f969-eea0-4aad-8f82-03d79b078dcc", - Response: getJSONObject("test-data/get-sql-query.json"), + Resource: "/api/2.0/sql/queries/16c4f969-eea0-4aad-8f82-03d79b078dcc?", + Response: getJSONObject("test-data/get-query.json"), ReuseRequest: true, }, + { + Method: "GET", + Resource: "/api/2.0/preview/sql/queries/16c4f969-eea0-4aad-8f82-03d79b078dcc", + Response: getJSONObject("test-data/get-sql-query.json"), + }, { Method: "GET", Resource: "/api/2.0/permissions/sql/queries/16c4f969-eea0-4aad-8f82-03d79b078dcc?", @@ -1972,14 +1973,14 @@ func TestImportingSqlObjects(t *testing.T) { }, { Method: "GET", - Resource: "/api/2.0/preview/sql/alerts", - Response: getJSONArray("test-data/get-sql-alerts.json"), + Resource: "/api/2.0/sql/alerts?page_size=100", + Response: getJSONObject("test-data/get-alerts.json"), ReuseRequest: true, }, { Method: "GET", - Resource: "/api/2.0/preview/sql/alerts/3cf91a42-6217-4f3c-a6f0-345d489051b9?", - Response: getJSONObject("test-data/get-sql-alert.json"), + Resource: "/api/2.0/sql/alerts/3cf91a42-6217-4f3c-a6f0-345d489051b9?", + Response: getJSONObject("test-data/get-alert.json"), }, { Method: "GET", @@ -1993,18 +1994,44 @@ func TestImportingSqlObjects(t *testing.T) { ic := newImportContext(client) ic.Directory = tmpDir - ic.enableListing("sql-dashboards,sql-queries,sql-endpoints,sql-alerts") - ic.enableServices("sql-dashboards,sql-queries,sql-alerts,sql-endpoints,access,notebooks") + ic.enableListing("sql-dashboards,queries,sql-endpoints,alerts") + ic.enableServices("sql-dashboards,queries,alerts,sql-endpoints,access") err := ic.Run() assert.NoError(t, err) + // check the generated HCL for SQL Warehouses content, err := 
os.ReadFile(tmpDir + "/sql-endpoints.tf") assert.NoError(t, err) contentStr := string(content) assert.True(t, strings.Contains(contentStr, `enable_serverless_compute = false`)) assert.True(t, strings.Contains(contentStr, `resource "databricks_sql_endpoint" "test" {`)) assert.False(t, strings.Contains(contentStr, `tags {`)) + // check the generated HCL for SQL Dashboards + content, err = os.ReadFile(tmpDir + "/sql-dashboards.tf") + assert.NoError(t, err) + contentStr = string(content) + assert.True(t, strings.Contains(contentStr, `resource "databricks_sql_dashboard" "test_9cb0c8f5_6262_4a1f_a741_2181de76028f" {`)) + assert.True(t, strings.Contains(contentStr, `dashboard_id = databricks_sql_dashboard.test_9cb0c8f5_6262_4a1f_a741_2181de76028f.id`)) + assert.True(t, strings.Contains(contentStr, `resource "databricks_sql_widget" "rd4dd2082685" {`)) + assert.True(t, strings.Contains(contentStr, `resource "databricks_sql_visualization" "chart_16c4f969_eea0_4aad_8f82_03d79b078dcc_1a062d3a_eefe_11eb_9559_dc7cd9c86087"`)) + // check the generated HCL for Queries + content, err = os.ReadFile(tmpDir + "/queries.tf") + assert.NoError(t, err) + contentStr = string(content) + assert.True(t, strings.Contains(contentStr, `resource "databricks_query" "jobs_per_day_per_status_last_30_days_16c4f969_eea0_4aad_8f82_03d79b078dcc"`)) + assert.True(t, strings.Contains(contentStr, `warehouse_id = databricks_sql_endpoint.test.id`)) + assert.True(t, strings.Contains(contentStr, `owner_user_name = "user@domain.com"`)) + assert.True(t, strings.Contains(contentStr, `display_name = "Jobs per day per status last 30 days"`)) + // check the generated HCL for Alerts + content, err = os.ReadFile(tmpDir + "/alerts.tf") + assert.NoError(t, err) + contentStr = string(content) + assert.True(t, strings.Contains(contentStr, `resource "databricks_alert" "test_alert_3cf91a42_6217_4f3c_a6f0_345d489051b9"`)) + assert.True(t, strings.Contains(contentStr, `query_id = databricks_query.jobs_per_day_per_status_last_30_days_16c4f969_eea0_4aad_8f82_03d79b078dcc.id`)) + assert.True(t, strings.Contains(contentStr, `display_name = "Test Alert"`)) + assert.True(t, strings.Contains(contentStr, `op = "GREATER_THAN"`)) + assert.True(t, strings.Contains(contentStr, `owner_user_name = "test@domain.com"`)) }) } @@ -2795,7 +2822,7 @@ func TestImportingLakeviewDashboards(t *testing.T) { noCurrentMetastoreAttached, { Method: "GET", - Resource: "/api/2.0/lakeview/dashboards?page_size=100", + Resource: "/api/2.0/lakeview/dashboards?page_size=1000", Response: sdk_dashboards.ListDashboardsResponse{ Dashboards: []sdk_dashboards.Dashboard{ { diff --git a/exporter/importables.go b/exporter/importables.go index 54f0fb6da3..04833df814 100644 --- a/exporter/importables.go +++ b/exporter/importables.go @@ -437,9 +437,9 @@ var resourcesMap map[string]importable = map[string]importable{ {Path: "task.spark_submit_task.parameters", Resource: "databricks_workspace_file", Match: "workspace_path"}, {Path: "task.sql_task.file.path", Resource: "databricks_workspace_file", Match: "path"}, {Path: "task.dbt_task.project_directory", Resource: "databricks_directory", Match: "path"}, - {Path: "task.sql_task.alert.alert_id", Resource: "databricks_sql_alert"}, + {Path: "task.sql_task.alert.alert_id", Resource: "databricks_alert"}, {Path: "task.sql_task.dashboard.dashboard_id", Resource: "databricks_sql_dashboard"}, - {Path: "task.sql_task.query.query_id", Resource: "databricks_sql_query"}, + {Path: "task.sql_task.query.query_id", Resource: "databricks_query"}, {Path: 
"task.sql_task.warehouse_id", Resource: "databricks_sql_endpoint"}, {Path: "task.webhook_notifications.on_duration_warning_threshold_exceeded.id", Resource: "databricks_notification_destination"}, {Path: "task.webhook_notifications.on_failure.id", Resource: "databricks_notification_destination"}, @@ -544,7 +544,7 @@ var resourcesMap map[string]importable = map[string]importable{ if task.SqlTask != nil { if task.SqlTask.Query != nil { ic.Emit(&resource{ - Resource: "databricks_sql_query", + Resource: "databricks_query", ID: task.SqlTask.Query.QueryId, }) } @@ -556,7 +556,7 @@ var resourcesMap map[string]importable = map[string]importable{ } if task.SqlTask.Alert != nil { ic.Emit(&resource{ - Resource: "databricks_sql_alert", + Resource: "databricks_alert", ID: task.SqlTask.Alert.AlertId, }) } @@ -1169,8 +1169,8 @@ var resourcesMap map[string]importable = map[string]importable{ {Path: "cluster_id", Resource: "databricks_cluster"}, {Path: "instance_pool_id", Resource: "databricks_instance_pool"}, {Path: "cluster_policy_id", Resource: "databricks_cluster_policy"}, - {Path: "sql_query_id", Resource: "databricks_sql_query"}, - {Path: "sql_alert_id", Resource: "databricks_sql_alert"}, + {Path: "sql_query_id", Resource: "databricks_query"}, + {Path: "sql_alert_id", Resource: "databricks_alert"}, {Path: "sql_dashboard_id", Resource: "databricks_sql_dashboard"}, {Path: "sql_endpoint_id", Resource: "databricks_sql_endpoint"}, {Path: "dashboard_id", Resource: "databricks_dashboard"}, @@ -1674,65 +1674,84 @@ var resourcesMap map[string]importable = map[string]importable{ MatchType: MatchPrefix, SearchValueTransformFunc: appendEndingSlashToDirName}, }, }, - "databricks_sql_query": { + "databricks_query": { WorkspaceLevel: true, - Service: "sql-queries", + Service: "queries", Name: func(ic *importContext, d *schema.ResourceData) string { - return d.Get("name").(string) + "_" + d.Id() + return d.Get("display_name").(string) + "_" + d.Id() }, List: func(ic *importContext) error { - qs, err := dbsqlListObjects(ic, "/preview/sql/queries") - if err != nil { - return nil - } - for i, q := range qs { - name := q["name"].(string) - if !ic.MatchesName(name) { + it := ic.workspaceClient.Queries.List(ic.Context, sql.ListQueriesRequest{PageSize: 100}) + i := 0 + for it.HasNext(ic.Context) { + q, err := it.Next(ic.Context) + if err != nil { + return err + } + i++ + if !ic.MatchesName(q.DisplayName) { continue } + // TODO: look if we can create data based on the response, without calling Get ic.EmitIfUpdatedAfterIsoString(&resource{ - Resource: "databricks_sql_query", - ID: q["id"].(string), + Resource: "databricks_query", + ID: q.Id, Incremental: ic.incremental, - }, q["updated_at"].(string), fmt.Sprintf("query '%s'", name)) - log.Printf("[INFO] Imported %d of %d SQL queries", i+1, len(qs)) + }, q.UpdateTime, fmt.Sprintf("query '%s'", q.DisplayName)) + if i%50 == 0 { + log.Printf("[INFO] Imported %d Queries", i) + } } - + log.Printf("[INFO] Listed %d Queries", i) return nil }, Import: func(ic *importContext, r *resource) error { - var query tfsql.QueryEntity - s := ic.Resources["databricks_sql_query"].Schema + var query tfsql.QueryStruct + s := ic.Resources["databricks_query"].Schema common.DataToStructPointer(r.Data, s, &query) - sqlEndpointID, err := ic.getSqlEndpoint(query.DataSourceID) - if err == nil { + if query.WarehouseId != "" { ic.Emit(&resource{ Resource: "databricks_sql_endpoint", - ID: sqlEndpointID, + ID: query.WarehouseId, }) - } else { - log.Printf("[WARN] Can't find SQL endpoint for data source 
'%s'", query.DataSourceID) } // emit queries specified as parameters - for _, p := range query.Parameter { - if p.Query != nil { + for _, p := range query.Parameters { + if p.QueryBackedValue != nil { ic.Emit(&resource{ - Resource: "databricks_sql_query", - ID: p.Query.QueryID, + Resource: "databricks_query", + ID: p.QueryBackedValue.QueryId, }) } } - ic.emitSqlParentDirectory(query.Parent) + ic.emitUserOrServicePrincipal(query.OwnerUserName) + ic.emitDirectoryOrRepo(query.ParentPath) + // TODO: r.AddExtraData(ParentDirectoryExtraKey, directoryPath) ? ic.emitPermissionsIfNotIgnored(r, fmt.Sprintf("/sql/queries/%s", r.ID), - "sql_query_"+ic.Importables["databricks_sql_query"].Name(ic, r.Data)) + "query_"+ic.Importables["databricks_query"].Name(ic, r.Data)) + if query.Catalog != "" && query.Schema != "" { + ic.Emit(&resource{ + Resource: "databricks_schema", + ID: fmt.Sprintf("%s.%s", query.Catalog, query.Schema), + }) + } return nil }, - Ignore: generateIgnoreObjectWithEmptyAttributeValue("databricks_sql_query", "name"), + // TODO: exclude owner if it's the current user? + Ignore: generateIgnoreObjectWithEmptyAttributeValue("databricks_query", "display_name"), Depends: []reference{ - {Path: "data_source_id", Resource: "databricks_sql_endpoint", Match: "data_source_id"}, - {Path: "parameter.query.query_id", Resource: "databricks_sql_query", Match: "id"}, - {Path: "parent", Resource: "databricks_directory", Match: "object_id", MatchType: MatchRegexp, - Regexp: sqlParentRegexp}, + {Path: "warehouse_id", Resource: "databricks_sql_endpoint"}, + {Path: "parameter.query_backed_value.query_id", Resource: "databricks_query", Match: "id"}, + {Path: "owner_user_name", Resource: "databricks_user", Match: "user_name", MatchType: MatchCaseInsensitive}, + {Path: "owner_user_name", Resource: "databricks_service_principal", Match: "application_id"}, + {Path: "catalog", Resource: "databricks_catalog"}, + {Path: "schema", Resource: "databricks_schema", Match: "name", + IsValidApproximation: createIsMatchingCatalogAndSchema("catalog", "schema"), + SkipDirectLookup: true}, + // TODO: add match like for workspace files? + {Path: "parent_path", Resource: "databricks_directory"}, + {Path: "parent_path", Resource: "databricks_directory", Match: "workspace_path"}, + // TODO: add support for Repos? 
}, }, "databricks_sql_endpoint": { @@ -1901,7 +1920,7 @@ var resourcesMap map[string]importable = map[string]importable{ ID: visualizationID, }) ic.Emit(&resource{ - Resource: "databricks_sql_query", + Resource: "databricks_query", ID: query.ID, }) sqlEndpointID, err := ic.getSqlEndpoint(query.DataSourceID) @@ -1933,7 +1952,7 @@ var resourcesMap map[string]importable = map[string]importable{ }, Depends: []reference{ {Path: "visualization_id", Resource: "databricks_sql_visualization", Match: "visualization_id"}, - {Path: "dashboard_id", Resource: "databricks_sql_dashboard", Match: "id"}, + {Path: "dashboard_id", Resource: "databricks_sql_dashboard"}, }, }, "databricks_sql_visualization": { @@ -1944,51 +1963,63 @@ var resourcesMap map[string]importable = map[string]importable{ return name }, Depends: []reference{ - {Path: "query_id", Resource: "databricks_sql_query", Match: "id"}, + {Path: "query_id", Resource: "databricks_query"}, }, }, - "databricks_sql_alert": { + "databricks_alert": { WorkspaceLevel: true, - Service: "sql-alerts", + Service: "alerts", Name: func(ic *importContext, d *schema.ResourceData) string { - return d.Get("name").(string) + "_" + d.Id() + return d.Get("display_name").(string) + "_" + d.Id() }, List: func(ic *importContext) error { - alerts, err := ic.workspaceClient.AlertsLegacy.List(ic.Context) - if err != nil { - return err - } - for i, alert := range alerts { - name := alert.Name - if !ic.MatchesName(name) { + it := ic.workspaceClient.Alerts.List(ic.Context, sql.ListAlertsRequest{PageSize: 100}) + i := 0 + for it.HasNext(ic.Context) { + a, err := it.Next(ic.Context) + if err != nil { + return err + } + i++ + if !ic.MatchesName(a.DisplayName) { continue } + // TODO: look if we can create data based on the response, without calling Get ic.EmitIfUpdatedAfterIsoString(&resource{ - Resource: "databricks_sql_alert", - ID: alert.Id, + Resource: "databricks_alert", + ID: a.Id, Incremental: ic.incremental, - }, alert.UpdatedAt, fmt.Sprintf("alert '%s'", name)) - log.Printf("[INFO] Imported %d of %d SQL alerts", i+1, len(alerts)) + }, a.UpdateTime, fmt.Sprintf("alert '%s'", a.DisplayName)) + if i%50 == 0 { + log.Printf("[INFO] Imported %d Alerts", i) + } } + log.Printf("[INFO] Listed %d Alerts", i) return nil }, Import: func(ic *importContext, r *resource) error { - var alert tfsql.AlertEntity - s := ic.Resources["databricks_sql_alert"].Schema + var alert sql.Alert + s := ic.Resources["databricks_alert"].Schema common.DataToStructPointer(r.Data, s, &alert) if alert.QueryId != "" { - ic.Emit(&resource{Resource: "databricks_sql_query", ID: alert.QueryId}) + ic.Emit(&resource{Resource: "databricks_query", ID: alert.QueryId}) } - ic.emitSqlParentDirectory(alert.Parent) + ic.emitDirectoryOrRepo(alert.ParentPath) + ic.emitUserOrServicePrincipal(alert.OwnerUserName) + // TODO: r.AddExtraData(ParentDirectoryExtraKey, directoryPath) ? ic.emitPermissionsIfNotIgnored(r, fmt.Sprintf("/sql/alerts/%s", r.ID), - "sql_alert_"+ic.Importables["databricks_sql_alert"].Name(ic, r.Data)) + "alert_"+ic.Importables["databricks_alert"].Name(ic, r.Data)) return nil }, - Ignore: generateIgnoreObjectWithEmptyAttributeValue("databricks_sql_alert", "name"), + // TODO: exclude owner if it's the current user? 
+ Ignore: generateIgnoreObjectWithEmptyAttributeValue("databricks_alert", "display_name"), Depends: []reference{ - {Path: "query_id", Resource: "databricks_sql_query", Match: "id"}, - {Path: "parent", Resource: "databricks_directory", Match: "object_id", - MatchType: MatchRegexp, Regexp: sqlParentRegexp}, + {Path: "query_id", Resource: "databricks_query"}, + {Path: "owner_user_name", Resource: "databricks_user", Match: "user_name", MatchType: MatchCaseInsensitive}, + {Path: "owner_user_name", Resource: "databricks_service_principal", Match: "application_id"}, + // TODO: add match like for workspace files? + {Path: "parent_path", Resource: "databricks_directory"}, + {Path: "parent_path", Resource: "databricks_directory", Match: "workspace_path"}, }, }, "databricks_pipeline": { @@ -2145,7 +2176,11 @@ var resourcesMap map[string]importable = map[string]importable{ Depends: []reference{ {Path: "catalog", Resource: "databricks_catalog"}, {Path: "target", Resource: "databricks_schema", Match: "name", - IsValidApproximation: dltIsMatchingCatalogAndSchema, SkipDirectLookup: true}, + IsValidApproximation: createIsMatchingCatalogAndSchema("catalog", "target"), + SkipDirectLookup: true}, + {Path: "schema", Resource: "databricks_schema", Match: "name", + IsValidApproximation: createIsMatchingCatalogAndSchema("catalog", "schema"), + SkipDirectLookup: true}, {Path: "cluster.aws_attributes.instance_profile_arn", Resource: "databricks_instance_profile"}, {Path: "cluster.init_scripts.dbfs.destination", Resource: "databricks_dbfs_file", Match: "dbfs_path"}, {Path: "cluster.init_scripts.volumes.destination", Resource: "databricks_file"}, @@ -2352,7 +2387,8 @@ var resourcesMap map[string]importable = map[string]importable{ {Path: "config.served_entities.entity_name", Resource: "databricks_registered_model"}, {Path: "config.auto_capture_config.catalog_name", Resource: "databricks_catalog"}, {Path: "config.auto_capture_config.schema_name", Resource: "databricks_schema", Match: "name", - IsValidApproximation: isMatchingCatalogAndSchemaInModelServing, SkipDirectLookup: true}, + IsValidApproximation: createIsMatchingCatalogAndSchema("config.0.auto_capture_config.0.catalog_name", "config.0.auto_capture_config.0.schema_name"), + SkipDirectLookup: true}, }, }, "databricks_mlflow_webhook": { @@ -2760,7 +2796,8 @@ var resourcesMap map[string]importable = map[string]importable{ Depends: []reference{ {Path: "catalog_name", Resource: "databricks_catalog"}, {Path: "schema_name", Resource: "databricks_schema", Match: "name", - IsValidApproximation: isMatchingCatalogAndSchema, SkipDirectLookup: true}, + IsValidApproximation: createIsMatchingCatalogAndSchema("catalog_name", "schema_name"), + SkipDirectLookup: true}, {Path: "storage_location", Resource: "databricks_external_location", Match: "url", MatchType: MatchLongestPrefix}, }, @@ -2794,7 +2831,8 @@ var resourcesMap map[string]importable = map[string]importable{ Depends: []reference{ {Path: "catalog_name", Resource: "databricks_catalog"}, {Path: "schema_name", Resource: "databricks_schema", Match: "name", - IsValidApproximation: isMatchingCatalogAndSchema, SkipDirectLookup: true}, + IsValidApproximation: createIsMatchingCatalogAndSchema("catalog_name", "schema_name"), + SkipDirectLookup: true}, {Path: "storage_location", Resource: "databricks_external_location", Match: "url", MatchType: MatchLongestPrefix}, }, @@ -3106,7 +3144,8 @@ var resourcesMap map[string]importable = map[string]importable{ Depends: []reference{ {Path: "catalog_name", Resource: "databricks_catalog"}, 
{Path: "schema_name", Resource: "databricks_schema", Match: "name", - IsValidApproximation: isMatchingCatalogAndSchema, SkipDirectLookup: true}, + IsValidApproximation: createIsMatchingCatalogAndSchema("catalog_name", "schema_name"), + SkipDirectLookup: true}, {Path: "storage_root", Resource: "databricks_external_location", Match: "url", MatchType: MatchLongestPrefix}, }, }, @@ -3321,7 +3360,7 @@ var resourcesMap map[string]importable = map[string]importable{ WorkspaceLevel: true, Service: "dashboards", List: func(ic *importContext) error { - it := ic.workspaceClient.Lakeview.List(ic.Context, dashboards.ListDashboardsRequest{PageSize: 100}) + it := ic.workspaceClient.Lakeview.List(ic.Context, dashboards.ListDashboardsRequest{PageSize: 1000}) i := 0 for it.HasNext(ic.Context) { d, err := it.Next(ic.Context) @@ -3516,7 +3555,8 @@ var resourcesMap map[string]importable = map[string]importable{ Depends: []reference{ {Path: "catalog_name", Resource: "databricks_catalog"}, {Path: "schema_name", Resource: "databricks_schema", Match: "name", - IsValidApproximation: isMatchingCatalogAndSchema, SkipDirectLookup: true}, + IsValidApproximation: createIsMatchingCatalogAndSchema("catalog_name", "schema_name"), + SkipDirectLookup: true}, {Path: "spec.source_table_full_name", Resource: "databricks_sql_table"}, }, }, diff --git a/exporter/importables_test.go b/exporter/importables_test.go index 645fba93e4..34a25b88ce 100644 --- a/exporter/importables_test.go +++ b/exporter/importables_test.go @@ -1343,6 +1343,7 @@ func TestDbfsFileGeneration(t *testing.T) { }) } +// TODO: remove it completely after we remove support for legacy dashboards func TestSqlListObjects(t *testing.T) { qa.HTTPFixturesApply(t, []qa.HTTPFixture{ { diff --git a/exporter/test-data/get-alert.json b/exporter/test-data/get-alert.json new file mode 100644 index 0000000000..2d367e2332 --- /dev/null +++ b/exporter/test-data/get-alert.json @@ -0,0 +1,25 @@ +{ + "condition": { + "op":"GREATER_THAN", + "operand": { + "column": { + "name":"threshold" + } + }, + "threshold": { + "value": { + "string_value":"50" + } + } + }, + "create_time":"2023-04-10T08:14:47Z", + "display_name":"Test Alert", + "id":"3cf91a42-6217-4f3c-a6f0-345d489051b9", + "lifecycle_state":"ACTIVE", + "notify_on_ok":true, + "owner_user_name":"test@domain.com", + "query_id":"16c4f969-eea0-4aad-8f82-03d79b078dcc", + "state":"OK", + "trigger_time":"2023-04-10T08:15:56Z", + "update_time":"2023-04-10T08:14:47Z" +} diff --git a/exporter/test-data/get-alerts.json b/exporter/test-data/get-alerts.json new file mode 100644 index 0000000000..cc52cd4834 --- /dev/null +++ b/exporter/test-data/get-alerts.json @@ -0,0 +1,29 @@ +{ + "results": [ + { + "condition": { + "op":"GREATER_THAN", + "operand": { + "column": { + "name":"threshold" + } + }, + "threshold": { + "value": { + "string_value":"50" + } + } + }, + "create_time":"2023-04-10T08:14:47Z", + "display_name":"Test Alert", + "id":"3cf91a42-6217-4f3c-a6f0-345d489051b9", + "lifecycle_state":"ACTIVE", + "notify_on_ok":true, + "owner_user_name":"test@domain.com", + "query_id":"16c4f969-eea0-4aad-8f82-03d79b078dcc", + "state":"OK", + "trigger_time":"2023-04-10T08:15:56Z", + "update_time":"2023-04-10T08:14:47Z" + } + ] +} diff --git a/exporter/test-data/get-queries.json b/exporter/test-data/get-queries.json new file mode 100644 index 0000000000..951a154266 --- /dev/null +++ b/exporter/test-data/get-queries.json @@ -0,0 +1,17 @@ +{ + "results": [ + { + "create_time":"2021-04-03T13:03:51Z", + "description":"", + "display_name":"Jobs per day 
per status last 30 days", + "id":"16c4f969-eea0-4aad-8f82-03d79b078dcc", + "last_modifier_user_name":"user@domain.com", + "lifecycle_state":"ACTIVE", + "owner_user_name":"user@domain.com", + "query_text":"select\n to_date(job_runtime.startTS) as day,\n job_terminal_state,\n count(1) as cnt\nfrom\n overwatch.jobrun\ngroup by\n to_date(job_runtime.startTS),\n job_terminal_state\nhaving day \u003e date_sub(current_date(), 30)\norder by\n day desc", + "run_as_mode":"OWNER", + "update_time":"2021-04-03T13:03:51Z", + "warehouse_id":"f562046bc1272886" + } + ] +} diff --git a/exporter/test-data/get-query.json b/exporter/test-data/get-query.json new file mode 100644 index 0000000000..d172a1074e --- /dev/null +++ b/exporter/test-data/get-query.json @@ -0,0 +1,13 @@ +{ + "create_time":"2021-04-03T13:03:51Z", + "description":"", + "display_name":"Jobs per day per status last 30 days", + "id":"16c4f969-eea0-4aad-8f82-03d79b078dcc", + "last_modifier_user_name":"user@domain.com", + "lifecycle_state":"ACTIVE", + "owner_user_name":"user@domain.com", + "query_text":"select\n to_date(job_runtime.startTS) as day,\n job_terminal_state,\n count(1) as cnt\nfrom\n overwatch.jobrun\ngroup by\n to_date(job_runtime.startTS),\n job_terminal_state\nhaving day \u003e date_sub(current_date(), 30)\norder by\n day desc", + "run_as_mode":"OWNER", + "update_time":"2021-04-03T13:03:51Z", + "warehouse_id":"f562046bc1272886" +} diff --git a/exporter/test-data/get-sql-alert.json b/exporter/test-data/get-sql-alert.json deleted file mode 100644 index 8723224c2c..0000000000 --- a/exporter/test-data/get-sql-alert.json +++ /dev/null @@ -1,83 +0,0 @@ -{ - "conditions": { - "alert": { - "column": { - "aggregation": null, - "display": "threshold", - "name": "threshold" - } - }, - "op": "\u003e", - "query_plan": null, - "threshold": { - "value": "50" - } - }, - "created_at": "2023-04-10T08:14:47Z", - "id": "3cf91a42-6217-4f3c-a6f0-345d489051b9", - "last_triggered_at": "2023-04-10T08:15:56Z", - "name": "Test Alert", - "parent": "folders/4451965692354143", - "options": { - "aggregation": null, - "column": "threshold", - "display_column": "threshold", - "folder_node_internal_name": "tree/3467386930489745", - "folder_node_status": "ACTIVE", - "muted": false, - "op": "\u003e", - "parent": "folders/4451965692354143", - "query_plan": null, - "value": "50" - }, - "query": { - "created_at": "2023-04-10T08:13:33Z", - "data_source_id": "78520023-ab69-44a4-84d0-4fda0c69ea91", - "description": null, - "id": "16c4f969-eea0-4aad-8f82-03d79b078dcc", - "is_archived": false, - "is_draft": false, - "is_safe": true, - "name": "Alert Query", - "options": { - "apply_auto_limit": true, - "folder_node_internal_name": "tree/3467386930489744", - "folder_node_status": "ACTIVE", - "parameters": null, - "parent": "folders/4451965692354143", - "visualization_control_order": null - }, - "query": "select 42 as threshold", - "run_as_role": null, - "run_as_service_principal_id": null, - "schedule": null, - "tags": null, - "updated_at": "2023-04-10T08:14:13Z", - "user_id": 661448457191611, - "version": 1 - }, - "rearm": null, - "refresh_schedules": [ - { - "cron": "1 15 8 * * ?", - "data_source_id": "78520023-ab69-44a4-84d0-4fda0c69ea91", - "id": "71cebca8-3684-4b60-95f1-5d9b3786b9f8", - "job_id": "91aeb0a4644e0d357a36f61824a8c71436b61506" - } - ], - "state": "ok", - "subscriptions": [ - { - "user_id": 661448457191611 - } - ], - "updated_at": "2023-04-10T08:17:21Z", - "user": { - "email": "user@domain.com", - "id": 661448457191611, - "is_db_admin": false, - "name": 
"user@domain.com", - "profile_image_url": "https://www.gravatar.com/avatar/1111?s=40\u0026d=identicon" - }, - "user_id": 661448457191611 -} diff --git a/exporter/test-data/get-sql-alerts.json b/exporter/test-data/get-sql-alerts.json deleted file mode 100644 index 088b20bbdf..0000000000 --- a/exporter/test-data/get-sql-alerts.json +++ /dev/null @@ -1,84 +0,0 @@ -[ - { - "conditions": { - "alert": { - "column": { - "aggregation": null, - "display": "threshold", - "name": "threshold" - } - }, - "op": "\u003e", - "query_plan": null, - "threshold": { - "value": "50" - } - }, - "created_at": "2023-04-10T08:14:47Z", - "id": "3cf91a42-6217-4f3c-a6f0-345d489051b9", - "last_triggered_at": "2023-04-10T08:15:56Z", - "name": "Test Alert", - "options": { - "aggregation": null, - "column": "threshold", - "display_column": "threshold", - "folder_node_internal_name": "tree/3467386930489745", - "folder_node_status": "ACTIVE", - "muted": false, - "op": "\u003e", - "parent": "folders/4451965692354143", - "query_plan": null, - "value": "50" - }, - "query": { - "created_at": "2023-04-10T08:13:33Z", - "data_source_id": "78520023-ab69-44a4-84d0-4fda0c69ea91", - "description": null, - "id": "16c4f969-eea0-4aad-8f82-03d79b078dcc", - "is_archived": false, - "is_draft": false, - "is_safe": true, - "name": "Alert Query", - "options": { - "apply_auto_limit": true, - "folder_node_internal_name": "tree/3467386930489744", - "folder_node_status": "ACTIVE", - "parameters": null, - "parent": "folders/4451965692354143", - "visualization_control_order": null - }, - "query": "select 42 as threshold", - "run_as_role": null, - "run_as_service_principal_id": null, - "schedule": null, - "tags": null, - "updated_at": "2023-04-10T08:14:13Z", - "user_id": 661448457191611, - "version": 1 - }, - "rearm": null, - "refresh_schedules": [ - { - "cron": "1 15 8 * * ?", - "data_source_id": "78520023-ab69-44a4-84d0-4fda0c69ea91", - "id": "71cebca8-3684-4b60-95f1-5d9b3786b9f8", - "job_id": "91aeb0a4644e0d357a36f61824a8c71436b61506" - } - ], - "state": "ok", - "subscriptions": [ - { - "user_id": 661448457191611 - } - ], - "updated_at": "2023-04-10T08:17:21Z", - "user": { - "email": "user@domain.com", - "id": 661448457191611, - "is_db_admin": false, - "name": "user@domain.com", - "profile_image_url": "https://www.gravatar.com/avatar/1111?s=40\u0026d=identicon" - }, - "user_id": 661448457191611 - } -] diff --git a/exporter/test-data/get-sql-queries.json b/exporter/test-data/get-sql-queries.json deleted file mode 100644 index 3ebb168036..0000000000 --- a/exporter/test-data/get-sql-queries.json +++ /dev/null @@ -1,39 +0,0 @@ -{ - "count": 1, - "page": 1, - "page_size": 25, - "results": [ - { - "created_at": "2021-04-03T13:03:51Z", - "data_source_id": "147164a6-8316-4a9d-beff-f57261801374", - "description": null, - "id": "16c4f969-eea0-4aad-8f82-03d79b078dcc", - "is_archived": false, - "is_draft": false, - "is_favorite": false, - "is_safe": true, - "name": "Jobs per day per status last 30 days", - "options": { - "apply_auto_limit": true, - "parameters": [] - }, - "query": "select\n to_date(job_runtime.startTS) as day,\n job_terminal_state,\n count(1) as cnt\nfrom\n overwatch.jobrun\ngroup by\n to_date(job_runtime.startTS),\n job_terminal_state\nhaving day \u003e date_sub(current_date(), 30)\norder by\n day desc", - "retrieved_at": "2022-01-07T13:53:08Z", - "runtime": 32.9793, - "schedule": null, - "tags": [ - "overwatch" - ], - "updated_at": "2021-09-21T16:04:23Z", - "user": { - "email": "user@domain.com", - "id": 661448457191611, - "is_db_admin": 
false, - "name": "Test", - "profile_image_url": "https://www.gravatar.com/avatar/12242?s=40\u0026d=identicon" - }, - "user_id": 661448457191611, - "version": 1 - } - ] -} diff --git a/exporter/util.go b/exporter/util.go index 6687f4dc8f..e9380a9b56 100644 --- a/exporter/util.go +++ b/exporter/util.go @@ -438,33 +438,27 @@ func appendEndingSlashToDirName(dir string) string { return dir + "/" } -func isMatchingCatalogAndSchema(ic *importContext, res *resource, ra *resourceApproximation, origPath string) bool { - res_catalog_name := res.Data.Get("catalog_name").(string) - res_schema_name := res.Data.Get("schema_name").(string) - ra_catalog_name, cat_found := ra.Get("catalog_name") - ra_schema_name, schema_found := ra.Get("name") - if !cat_found || !schema_found { - log.Printf("[WARN] Can't find attributes in approximation: %s %s, catalog='%v' (found? %v) schema='%v' (found? %v). Resource: %s, catalog='%s', schema='%s'", - ra.Type, ra.Name, ra_catalog_name, cat_found, ra_schema_name, schema_found, res.Resource, res_catalog_name, res_schema_name) - return true - } - result := ra_catalog_name.(string) == res_catalog_name && ra_schema_name.(string) == res_schema_name - return result -} +func createIsMatchingCatalogAndSchema(catalog_name_attr, schema_name_attr string) func(ic *importContext, res *resource, ra *resourceApproximation, origPath string) bool { + return func(ic *importContext, res *resource, ra *resourceApproximation, origPath string) bool { + // catalog and schema names for the source resource + res_catalog_name := res.Data.Get(catalog_name_attr).(string) + res_schema_name := res.Data.Get(schema_name_attr).(string) + // In some cases the catalog or schema name could be empty (e.g., in non-UC DLT pipelines), so we skip such resources + if res_catalog_name == "" || res_schema_name == "" { + return false + } + // catalog and schema names for target resource approximation + ra_catalog_name, cat_found := ra.Get("catalog_name") + ra_schema_name, schema_found := ra.Get("name") + if !cat_found || !schema_found { + log.Printf("[WARN] Can't find attributes in approximation: %s %s, catalog='%v' (found? %v) schema='%v' (found? %v). Resource: %s, catalog='%s', schema='%s'", + ra.Type, ra.Name, ra_catalog_name, cat_found, ra_schema_name, schema_found, res.Resource, res_catalog_name, res_schema_name) + return false + } + result := ra_catalog_name.(string) == res_catalog_name && ra_schema_name.(string) == res_schema_name + return result -func isMatchingCatalogAndSchemaInModelServing(ic *importContext, res *resource, ra *resourceApproximation, origPath string) bool { - res_catalog_name := res.Data.Get("config.0.auto_capture_config.0.catalog_name").(string) - res_schema_name := res.Data.Get("config.0.auto_capture_config.0.schema_name").(string) - ra_catalog_name, cat_found := ra.Get("catalog_name") - ra_schema_name, schema_found := ra.Get("name") - if !cat_found || !schema_found { - log.Printf("[WARN] Can't find attributes in approximation: %s %s, catalog='%v' (found? %v) schema='%v' (found? %v). 
Resource: %s, catalog='%s', schema='%s'", - ra.Type, ra.Name, ra_catalog_name, cat_found, ra_schema_name, schema_found, res.Resource, res_catalog_name, res_schema_name) - return true } - - result := ra_catalog_name.(string) == res_catalog_name && ra_schema_name.(string) == res_schema_name - return result } func isMatchingShareRecipient(ic *importContext, res *resource, ra *resourceApproximation, origPath string) bool { @@ -537,24 +531,6 @@ func (ic *importContext) emitPermissionsIfNotIgnored(r *resource, id, name strin } } -func dltIsMatchingCatalogAndSchema(ic *importContext, res *resource, ra *resourceApproximation, origPath string) bool { - res_catalog_name := res.Data.Get("catalog").(string) - if res_catalog_name == "" { - return false - } - res_schema_name := res.Data.Get("target").(string) - ra_catalog_name, cat_found := ra.Get("catalog_name") - ra_schema_name, schema_found := ra.Get("name") - if !cat_found || !schema_found { - log.Printf("[WARN] Can't find attributes in approximation: %s %s, catalog='%v' (found? %v) schema='%v' (found? %v). Resource: %s, catalog='%s', schema='%s'", - ra.Type, ra.Name, ra_catalog_name, cat_found, ra_schema_name, schema_found, res.Resource, res_catalog_name, res_schema_name) - return true - } - - result := ra_catalog_name.(string) == res_catalog_name && ra_schema_name.(string) == res_schema_name - return result -} - func (ic *importContext) emitWorkspaceBindings(securableType, securableName string) { bindings, err := ic.workspaceClient.WorkspaceBindings.GetBindingsAll(ic.Context, catalog.GetBindingsRequest{ SecurableName: securableName, diff --git a/exporter/util_workspace.go b/exporter/util_workspace.go index 470e590ef0..5a5621f806 100644 --- a/exporter/util_workspace.go +++ b/exporter/util_workspace.go @@ -52,25 +52,25 @@ func maybeStringWorkspacePrefix(path string) string { return path } -func (ic *importContext) emitWorkspaceFileOrRepo(path string) { +func (ic *importContext) emitWorkspaceObject(objType, path string) { + path = maybeStringWorkspacePrefix(path) if isRepoPath(path) { - ic.emitRepoByPath(maybeStringWorkspacePrefix(path)) + ic.emitRepoByPath(path) } else { - // TODO: wrap this into ic.shouldEmit... - // TODO: strip /Workspace prefix if it's provided - ic.Emit(&resource{ - Resource: "databricks_workspace_file", - ID: maybeStringWorkspacePrefix(path), - }) + ic.maybeEmitWorkspaceObject(objType, path, nil) } } +func (ic *importContext) emitDirectoryOrRepo(path string) { + ic.emitWorkspaceObject("databricks_directory", path) +} + +func (ic *importContext) emitWorkspaceFileOrRepo(path string) { + ic.emitWorkspaceObject("databricks_workspace_file", path) +} + func (ic *importContext) emitNotebookOrRepo(path string) { - if isRepoPath(path) { - ic.emitRepoByPath(maybeStringWorkspacePrefix(path)) - } else { - ic.maybeEmitWorkspaceObject("databricks_notebook", maybeStringWorkspacePrefix(path), nil) - } + ic.emitWorkspaceObject("databricks_notebook", path) } func (ic *importContext) getAllDirectories() []workspace.ObjectStatus { diff --git a/sql/resource_query.go b/sql/resource_query.go index 80a69a385c..120353c171 100644 --- a/sql/resource_query.go +++ b/sql/resource_query.go @@ -12,7 +12,7 @@ import ( ) // Need a struct for Query because there are aliases we need and it'll be needed in the create method. 
-type queryStruct struct { +type QueryStruct struct { sql.Query } @@ -20,13 +20,13 @@ var queryAliasMap = map[string]string{ "parameters": "parameter", } -func (queryStruct) Aliases() map[string]map[string]string { +func (QueryStruct) Aliases() map[string]map[string]string { return map[string]map[string]string{ - "sql.queryStruct": queryAliasMap, + "sql.QueryStruct": queryAliasMap, } } -func (queryStruct) CustomizeSchema(m *common.CustomizableSchema) *common.CustomizableSchema { +func (QueryStruct) CustomizeSchema(m *common.CustomizableSchema) *common.CustomizableSchema { m.SchemaPath("display_name").SetRequired().SetValidateFunc(validation.StringIsNotWhiteSpace) m.SchemaPath("query_text").SetRequired() m.SchemaPath("warehouse_id").SetRequired().SetValidateFunc(validation.StringIsNotWhiteSpace) @@ -92,7 +92,7 @@ func (queryUpdateStruct) CustomizeSchema(s *common.CustomizableSchema) *common.C } func ResourceQuery() common.Resource { - s := common.StructToSchema(queryStruct{}, nil) + s := common.StructToSchema(QueryStruct{}, nil) return common.Resource{ Create: func(ctx context.Context, d *schema.ResourceData, c *common.DatabricksClient) error { w, err := c.WorkspaceClient() @@ -134,7 +134,7 @@ func ResourceQuery() common.Resource { if parentPath != "" && strings.HasPrefix(apiQuery.ParentPath, "/Workspace") && !strings.HasPrefix(parentPath, "/Workspace") { apiQuery.ParentPath = strings.TrimPrefix(parentPath, "/Workspace") } - return common.StructToData(queryStruct{Query: *apiQuery}, s, d) + return common.StructToData(QueryStruct{Query: *apiQuery}, s, d) }, Update: func(ctx context.Context, d *schema.ResourceData, c *common.DatabricksClient) error { w, err := c.WorkspaceClient()
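
The new `List` implementations for `databricks_query` and `databricks_alert` replace the hand-rolled `dbsqlListObjects` pagination with the databricks-sdk-go listing iterators used in the hunks above. Below is a minimal, self-contained sketch of that pattern; the client setup, error handling, and printed output are illustrative and not part of the patch.

package main

import (
	"context"
	"fmt"
	"log"

	"github.com/databricks/databricks-sdk-go"
	"github.com/databricks/databricks-sdk-go/service/sql"
)

func main() {
	ctx := context.Background()
	// The client picks up credentials from the environment (DATABRICKS_HOST, DATABRICKS_TOKEN, ...).
	w := databricks.Must(databricks.NewWorkspaceClient())
	// The iterator follows next_page_token transparently; the caller only chooses a page size
	// and consumes items one by one, as the exporter's List functions do.
	it := w.Queries.List(ctx, sql.ListQueriesRequest{PageSize: 100})
	count := 0
	for it.HasNext(ctx) {
		q, err := it.Next(ctx)
		if err != nil {
			log.Fatalf("listing queries failed: %v", err)
		}
		count++
		fmt.Printf("%s: %q (updated %s)\n", q.Id, q.DisplayName, q.UpdateTime)
	}
	fmt.Printf("listed %d queries\n", count)
}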
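
Likewise, `createIsMatchingCatalogAndSchema` collapses three near-identical predicates (`isMatchingCatalogAndSchema`, `isMatchingCatalogAndSchemaInModelServing`, `dltIsMatchingCatalogAndSchema`) into one closure factory parameterized by attribute names. A stripped-down sketch of the same pattern, using plain maps in place of the exporter's *resource and *resourceApproximation types (all names here are illustrative):

package main

import "fmt"

// attrs stands in for a resource's flattened attribute map.
type attrs map[string]string

// newCatalogSchemaMatcher returns a predicate that reports whether the source
// resource's catalog/schema (read from the given attribute names) match a
// candidate databricks_schema approximation. Parameterizing the attribute
// names lets one factory serve DLT pipelines ("catalog"/"target"), model
// serving ("config.0.auto_capture_config.0.catalog_name"/"...schema_name"),
// and UC objects ("catalog_name"/"schema_name").
func newCatalogSchemaMatcher(catalogAttr, schemaAttr string) func(src, candidate attrs) bool {
	return func(src, candidate attrs) bool {
		srcCatalog, srcSchema := src[catalogAttr], src[schemaAttr]
		if srcCatalog == "" || srcSchema == "" {
			return false // e.g. non-UC DLT pipelines carry no catalog/schema
		}
		return candidate["catalog_name"] == srcCatalog && candidate["name"] == srcSchema
	}
}

func main() {
	dltMatch := newCatalogSchemaMatcher("catalog", "target")
	pipeline := attrs{"catalog": "main", "target": "prod"}
	schema := attrs{"catalog_name": "main", "name": "prod"}
	fmt.Println(dltMatch(pipeline, schema)) // true
}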