Skip to content

Commit

Permalink
[Exporter] **Breaking changes** Use new query and alert resources ins…
Browse files Browse the repository at this point in the history
…tead of legacy ones

This change replaces legacy `databricks_sql_query` and `databricks_sql_alert` with new
resources `databricks_query` and `databricks_alert`.  Also, services `sql-queries` and
`sql-alerts` are renamed to `queries` and `alerts`.

Other changes include:

* Improve performance of Lakeview dashboards scan by using bigger page size
* Generalize `isMatchingCatalogAndSchema` implementation for use in multiple resources
  where attribute names could be different
* Generalize handling of `/Workspace` prefix when emitting notebooks, workspace files and
  directories.
  • Loading branch information
alexott committed Oct 24, 2024
1 parent 8b00572 commit b4dcf34
Show file tree
Hide file tree
Showing 15 changed files with 295 additions and 373 deletions.
6 changes: 3 additions & 3 deletions docs/guides/experimental-exporter.md
Original file line number Diff line number Diff line change
Expand Up @@ -111,6 +111,7 @@ Services are just logical groups of resources used for filtering and organizatio
Please note that for services not marked with **listing**, we'll export resources only if they are referenced from other resources.

* `access` - [databricks_permissions](../resources/permissions.md), [databricks_instance_profile](../resources/instance_profile.md), [databricks_ip_access_list](../resources/ip_access_list.md), [databricks_mws_permission_assignment](../resources/mws_permission_assignment.md) and [databricks_access_control_rule_set](../resources/access_control_rule_set.md).
* `alerts` - **listing** [databricks_alert](../resources/alert.md).
* `compute` - **listing** [databricks_cluster](../resources/cluster.md).
* `dashboards` - **listing** [databricks_dashboard](../resources/dashboard.md).
* `directories` - **listing** [databricks_directory](../resources/directory.md). *Please note that directories aren't listed when running in the incremental mode! Only directories with updated notebooks will be emitted.*
Expand All @@ -123,13 +124,12 @@ Services are just logical groups of resources used for filtering and organizatio
* `notebooks` - **listing** [databricks_notebook](../resources/notebook.md).
* `policies` - **listing** [databricks_cluster_policy](../resources/cluster_policy.md).
* `pools` - **listing** [instance pools](../resources/instance_pool.md).
* `queries` - **listing** [databricks_query](../resources/query.md).
* `repos` - **listing** [databricks_repo](../resources/repo.md).
* `secrets` - **listing** [databricks_secret_scope](../resources/secret_scope.md) along with [keys](../resources/secret.md) and [ACLs](../resources/secret_acl.md).
* `settings` - **listing** [databricks_notification_destination](../resources/notification_destination.md).
* `sql-alerts` - **listing** [databricks_sql_alert](../resources/sql_alert.md).
* `sql-dashboards` - **listing** [databricks_sql_dashboard](../resources/sql_dashboard.md) along with associated [databricks_sql_widget](../resources/sql_widget.md) and [databricks_sql_visualization](../resources/sql_visualization.md).
* `sql-dashboards` - **listing** Legacy [databricks_sql_dashboard](../resources/sql_dashboard.md) along with associated [databricks_sql_widget](../resources/sql_widget.md) and [databricks_sql_visualization](../resources/sql_visualization.md).
* `sql-endpoints` - **listing** [databricks_sql_endpoint](../resources/sql_endpoint.md) along with [databricks_sql_global_config](../resources/sql_global_config.md).
* `sql-queries` - **listing** [databricks_sql_query](../resources/sql_query.md).
* `storage` - only [databricks_dbfs_file](../resources/dbfs_file.md) and [databricks_file](../resources/file.md) referenced in other resources (libraries, init scripts, ...) will be downloaded locally and properly arranged into terraform state.
* `uc-artifact-allowlist` - **listing** exports [databricks_artifact_allowlist](../resources/artifact_allowlist.md) resources for Unity Catalog Allow Lists attached to the current metastore.
* `uc-catalogs` - **listing** [databricks_catalog](../resources/catalog.md) and [databricks_workspace_binding](../resources/workspace_binding.md)
Expand Down
4 changes: 2 additions & 2 deletions exporter/context.go
Original file line number Diff line number Diff line change
Expand Up @@ -204,8 +204,8 @@ var goroutinesNumber = map[string]int{
"databricks_sql_dashboard": 3,
"databricks_sql_widget": 4,
"databricks_sql_visualization": 4,
"databricks_sql_query": 5,
"databricks_sql_alert": 2,
"databricks_query": 4,
"databricks_alert": 2,
"databricks_permissions": 11,
}

Expand Down
83 changes: 55 additions & 28 deletions exporter/exporter_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -56,20 +56,6 @@ func getJSONObject(filename string) any {
return obj
}

// getJSONArray reads the named file and decodes it as a top-level JSON
// array, returning the decoded value. A failure to read the file panics
// (test fixtures must exist); a failure to decode is logged together with
// the raw payload, and the zero value is returned so the caller's fixture
// still resolves.
func getJSONArray(filename string) any {
	raw, readErr := os.ReadFile(filename)
	if readErr != nil {
		panic(readErr)
	}
	var parsed []any
	if decodeErr := json.Unmarshal(raw, &parsed); decodeErr != nil {
		// Keep going on malformed fixtures: surface the problem in the
		// test log rather than aborting the whole run.
		fmt.Printf("[ERROR] error! file=%s err=%v\n", filename, decodeErr)
		fmt.Printf("[ERROR] data=%s\n", string(raw))
	}
	return parsed
}

func workspaceConfKeysToURL() string {
keys := make([]string, 0, len(workspaceConfKeys))
for k := range workspaceConfKeys {
Expand Down Expand Up @@ -379,14 +365,14 @@ var emptySqlDashboards = qa.HTTPFixture{

var emptySqlQueries = qa.HTTPFixture{
Method: "GET",
Resource: "/api/2.0/preview/sql/queries?page_size=100",
Resource: "/api/2.0/sql/queries?page_size=100",
Response: map[string]any{},
ReuseRequest: true,
}

var emptySqlAlerts = qa.HTTPFixture{
Method: "GET",
Resource: "/api/2.0/preview/sql/alerts",
Resource: "/api/2.0/sql/alerts?page_size=100",
Response: []tfsql.AlertEntity{},
ReuseRequest: true,
}
Expand Down Expand Up @@ -447,7 +433,7 @@ var emptyMetastoreList = qa.HTTPFixture{

var emptyLakeviewList = qa.HTTPFixture{
Method: "GET",
Resource: "/api/2.0/lakeview/dashboards?page_size=100",
Resource: "/api/2.0/lakeview/dashboards?page_size=1000",
Response: sdk_dashboards.ListDashboardsResponse{},
ReuseRequest: true,
}
Expand Down Expand Up @@ -1015,6 +1001,16 @@ func TestImportingClusters(t *testing.T) {
},
},
},
{
Method: "GET",
Resource: "/api/2.0/preview/scim/v2/Users?attributes=id%2CuserName&count=100&startIndex=1",
ReuseRequest: true,
Response: scim.UserList{
Resources: []scim.User{
{ID: "123", DisplayName: "[email protected]", UserName: "[email protected]"},
},
},
},
},
func(ctx context.Context, client *common.DatabricksClient) {
os.Setenv("EXPORTER_PARALLELISM_default", "1")
Expand Down Expand Up @@ -1950,16 +1946,21 @@ func TestImportingSqlObjects(t *testing.T) {
},
{
Method: "GET",
Resource: "/api/2.0/preview/sql/queries?page_size=100",
Response: getJSONObject("test-data/get-sql-queries.json"),
Resource: "/api/2.0/sql/queries?page_size=100",
Response: getJSONObject("test-data/get-queries.json"),
ReuseRequest: true,
},
{
Method: "GET",
Resource: "/api/2.0/preview/sql/queries/16c4f969-eea0-4aad-8f82-03d79b078dcc",
Response: getJSONObject("test-data/get-sql-query.json"),
Resource: "/api/2.0/sql/queries/16c4f969-eea0-4aad-8f82-03d79b078dcc?",
Response: getJSONObject("test-data/get-query.json"),
ReuseRequest: true,
},
{
Method: "GET",
Resource: "/api/2.0/preview/sql/queries/16c4f969-eea0-4aad-8f82-03d79b078dcc",
Response: getJSONObject("test-data/get-sql-query.json"),
},
{
Method: "GET",
Resource: "/api/2.0/permissions/sql/queries/16c4f969-eea0-4aad-8f82-03d79b078dcc?",
Expand All @@ -1972,14 +1973,14 @@ func TestImportingSqlObjects(t *testing.T) {
},
{
Method: "GET",
Resource: "/api/2.0/preview/sql/alerts",
Response: getJSONArray("test-data/get-sql-alerts.json"),
Resource: "/api/2.0/sql/alerts?page_size=100",
Response: getJSONObject("test-data/get-alerts.json"),
ReuseRequest: true,
},
{
Method: "GET",
Resource: "/api/2.0/preview/sql/alerts/3cf91a42-6217-4f3c-a6f0-345d489051b9?",
Response: getJSONObject("test-data/get-sql-alert.json"),
Resource: "/api/2.0/sql/alerts/3cf91a42-6217-4f3c-a6f0-345d489051b9?",
Response: getJSONObject("test-data/get-alert.json"),
},
{
Method: "GET",
Expand All @@ -1993,18 +1994,44 @@ func TestImportingSqlObjects(t *testing.T) {

ic := newImportContext(client)
ic.Directory = tmpDir
ic.enableListing("sql-dashboards,sql-queries,sql-endpoints,sql-alerts")
ic.enableServices("sql-dashboards,sql-queries,sql-alerts,sql-endpoints,access,notebooks")
ic.enableListing("sql-dashboards,queries,sql-endpoints,alerts")
ic.enableServices("sql-dashboards,queries,alerts,sql-endpoints,access")

err := ic.Run()
assert.NoError(t, err)

// check the generated HCL for SQL Warehouses
content, err := os.ReadFile(tmpDir + "/sql-endpoints.tf")
assert.NoError(t, err)
contentStr := string(content)
assert.True(t, strings.Contains(contentStr, `enable_serverless_compute = false`))
assert.True(t, strings.Contains(contentStr, `resource "databricks_sql_endpoint" "test" {`))
assert.False(t, strings.Contains(contentStr, `tags {`))
// check the generated HCL for SQL Dashboards
content, err = os.ReadFile(tmpDir + "/sql-dashboards.tf")
assert.NoError(t, err)
contentStr = string(content)
assert.True(t, strings.Contains(contentStr, `resource "databricks_sql_dashboard" "test_9cb0c8f5_6262_4a1f_a741_2181de76028f" {`))
assert.True(t, strings.Contains(contentStr, `dashboard_id = databricks_sql_dashboard.test_9cb0c8f5_6262_4a1f_a741_2181de76028f.id`))
assert.True(t, strings.Contains(contentStr, `resource "databricks_sql_widget" "rd4dd2082685" {`))
assert.True(t, strings.Contains(contentStr, `resource "databricks_sql_visualization" "chart_16c4f969_eea0_4aad_8f82_03d79b078dcc_1a062d3a_eefe_11eb_9559_dc7cd9c86087"`))
// check the generated HCL for Queries
content, err = os.ReadFile(tmpDir + "/queries.tf")
assert.NoError(t, err)
contentStr = string(content)
assert.True(t, strings.Contains(contentStr, `resource "databricks_query" "jobs_per_day_per_status_last_30_days_16c4f969_eea0_4aad_8f82_03d79b078dcc"`))
assert.True(t, strings.Contains(contentStr, `warehouse_id = databricks_sql_endpoint.test.id`))
assert.True(t, strings.Contains(contentStr, `owner_user_name = "[email protected]"`))
assert.True(t, strings.Contains(contentStr, `display_name = "Jobs per day per status last 30 days"`))
// check the generated HCL for Alerts
content, err = os.ReadFile(tmpDir + "/alerts.tf")
assert.NoError(t, err)
contentStr = string(content)
assert.True(t, strings.Contains(contentStr, `resource "databricks_alert" "test_alert_3cf91a42_6217_4f3c_a6f0_345d489051b9"`))
assert.True(t, strings.Contains(contentStr, `query_id = databricks_query.jobs_per_day_per_status_last_30_days_16c4f969_eea0_4aad_8f82_03d79b078dcc.id`))
assert.True(t, strings.Contains(contentStr, `display_name = "Test Alert"`))
assert.True(t, strings.Contains(contentStr, `op = "GREATER_THAN"`))
assert.True(t, strings.Contains(contentStr, `owner_user_name = "[email protected]"`))
})
}

Expand Down Expand Up @@ -2795,7 +2822,7 @@ func TestImportingLakeviewDashboards(t *testing.T) {
noCurrentMetastoreAttached,
{
Method: "GET",
Resource: "/api/2.0/lakeview/dashboards?page_size=100",
Resource: "/api/2.0/lakeview/dashboards?page_size=1000",
Response: sdk_dashboards.ListDashboardsResponse{
Dashboards: []sdk_dashboards.Dashboard{
{
Expand Down
Loading

0 comments on commit b4dcf34

Please sign in to comment.