From 39dd7dbdfbb3ad7067900a3b5830c566bc545547 Mon Sep 17 00:00:00 2001 From: Andrew Nester Date: Wed, 21 Jan 2026 11:43:40 +0100 Subject: [PATCH 1/5] Added support for UC catalogs (only in direct mode) --- .../catalogs/basic/databricks.yml.tmpl | 17 ++ .../resources/catalogs/basic/out.test.toml | 6 + .../resources/catalogs/basic/output.txt | 44 ++++++ .../bundle/resources/catalogs/basic/script | 27 ++++ .../bundle/resources/catalogs/basic/test.toml | 12 ++ .../catalogs/with-schemas/databricks.yml.tmpl | 23 +++ .../catalogs/with-schemas/out.test.toml | 6 + .../catalogs/with-schemas/output.txt | 64 ++++++++ .../resources/catalogs/with-schemas/script | 45 ++++++ .../resources/catalogs/with-schemas/test.toml | 12 ++ .../databricks.yml | 8 + .../out.test.toml | 5 + .../catalog_requires_direct_mode/output.txt | 8 + .../catalog_requires_direct_mode/script | 1 + .../catalog_requires_direct_mode/test.toml | 7 + .../apply_bundle_permissions_test.go | 1 + .../resourcemutator/apply_target_mode_test.go | 7 +- .../mutator/resourcemutator/run_as_test.go | 2 + .../validate_catalogs_only_with_direct.go | 39 +++++ bundle/config/resources.go | 9 ++ bundle/config/resources/catalog.go | 148 ++++++++++++++++++ bundle/config/resources_test.go | 6 + bundle/direct/dresources/all.go | 2 + bundle/direct/dresources/all_test.go | 20 +++ bundle/direct/dresources/catalog.go | 87 ++++++++++ bundle/phases/plan.go | 1 + 26 files changed, 606 insertions(+), 1 deletion(-) create mode 100644 acceptance/bundle/resources/catalogs/basic/databricks.yml.tmpl create mode 100644 acceptance/bundle/resources/catalogs/basic/out.test.toml create mode 100644 acceptance/bundle/resources/catalogs/basic/output.txt create mode 100644 acceptance/bundle/resources/catalogs/basic/script create mode 100644 acceptance/bundle/resources/catalogs/basic/test.toml create mode 100644 acceptance/bundle/resources/catalogs/with-schemas/databricks.yml.tmpl create mode 100644 acceptance/bundle/resources/catalogs/with-schemas/out.test.toml create mode 100644 acceptance/bundle/resources/catalogs/with-schemas/output.txt create mode 100644 acceptance/bundle/resources/catalogs/with-schemas/script create mode 100644 acceptance/bundle/resources/catalogs/with-schemas/test.toml create mode 100644 acceptance/bundle/validate/catalog_requires_direct_mode/databricks.yml create mode 100644 acceptance/bundle/validate/catalog_requires_direct_mode/out.test.toml create mode 100644 acceptance/bundle/validate/catalog_requires_direct_mode/output.txt create mode 100755 acceptance/bundle/validate/catalog_requires_direct_mode/script create mode 100644 acceptance/bundle/validate/catalog_requires_direct_mode/test.toml create mode 100644 bundle/config/mutator/validate_catalogs_only_with_direct.go create mode 100644 bundle/config/resources/catalog.go create mode 100644 bundle/direct/dresources/catalog.go diff --git a/acceptance/bundle/resources/catalogs/basic/databricks.yml.tmpl b/acceptance/bundle/resources/catalogs/basic/databricks.yml.tmpl new file mode 100644 index 0000000000..d6bf57a618 --- /dev/null +++ b/acceptance/bundle/resources/catalogs/basic/databricks.yml.tmpl @@ -0,0 +1,17 @@ +bundle: + name: uc-catalog-$UNIQUE_NAME + +workspace: + root_path: ~/.bundle/$UNIQUE_NAME + +resources: + catalogs: + test_catalog: + name: test_catalog_$UNIQUE_NAME + comment: "This catalog was created from DABs" + properties: + owner: "dabs" + +targets: + development: + default: true diff --git a/acceptance/bundle/resources/catalogs/basic/out.test.toml 
b/acceptance/bundle/resources/catalogs/basic/out.test.toml new file mode 100644 index 0000000000..f1d40380d0 --- /dev/null +++ b/acceptance/bundle/resources/catalogs/basic/out.test.toml @@ -0,0 +1,6 @@ +Local = true +Cloud = true +RequiresUnityCatalog = true + +[EnvMatrix] + DATABRICKS_BUNDLE_ENGINE = ["direct"] diff --git a/acceptance/bundle/resources/catalogs/basic/output.txt b/acceptance/bundle/resources/catalogs/basic/output.txt new file mode 100644 index 0000000000..1e70309242 --- /dev/null +++ b/acceptance/bundle/resources/catalogs/basic/output.txt @@ -0,0 +1,44 @@ + +=== Deploy bundle with catalog +>>> [CLI] bundle deploy +Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/[UNIQUE_NAME]/files... +Deploying resources... +Updating deployment state... +Deployment complete! + +=== Assert the catalog is created +>>> [CLI] catalogs get test_catalog_[UNIQUE_NAME] +{ + "name": "test_catalog_[UNIQUE_NAME]", + "comment": "This catalog was created from DABs", + "properties": { + "owner": "dabs" + } +} + +=== Update catalog comment +=== Redeploy with updated comment +>>> [CLI] bundle deploy +Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/[UNIQUE_NAME]/files... +Deploying resources... +Updating deployment state... +Deployment complete! + +=== Assert the catalog comment is updated +>>> [CLI] catalogs get test_catalog_[UNIQUE_NAME] +{ + "name": "test_catalog_[UNIQUE_NAME]", + "comment": "Updated comment from DABs" +} + +=== Test cleanup +>>> [CLI] bundle destroy --auto-approve +The following resources will be deleted: + delete resources.catalogs.test_catalog + +All files and directories at the following location will be deleted: /Workspace/Users/[USERNAME]/.bundle/[UNIQUE_NAME] + +Deleting files... +Destroy complete! + +=== Assert the catalog is deleted \ No newline at end of file diff --git a/acceptance/bundle/resources/catalogs/basic/script b/acceptance/bundle/resources/catalogs/basic/script new file mode 100644 index 0000000000..3ce4540463 --- /dev/null +++ b/acceptance/bundle/resources/catalogs/basic/script @@ -0,0 +1,27 @@ +envsubst < databricks.yml.tmpl > databricks.yml + +CATALOG_NAME="test_catalog_${UNIQUE_NAME}" + +cleanup() { + title "Test cleanup" + trace $CLI bundle destroy --auto-approve + + title "Assert the catalog is deleted" + trace errcode $CLI catalogs get "${CATALOG_NAME}" 2>/dev/null +} +trap cleanup EXIT + +title "Deploy bundle with catalog" +trace $CLI bundle deploy + +title "Assert the catalog is created" +trace $CLI catalogs get "${CATALOG_NAME}" | jq "{name, comment, properties}" + +title "Update catalog comment" +update_file.py databricks.yml "This catalog was created from DABs" "Updated comment from DABs" + +title "Redeploy with updated comment" +trace $CLI bundle deploy + +title "Assert the catalog comment is updated" +trace $CLI catalogs get "${CATALOG_NAME}" | jq "{name, comment}" diff --git a/acceptance/bundle/resources/catalogs/basic/test.toml b/acceptance/bundle/resources/catalogs/basic/test.toml new file mode 100644 index 0000000000..d2b122411f --- /dev/null +++ b/acceptance/bundle/resources/catalogs/basic/test.toml @@ -0,0 +1,12 @@ +Local = true +Cloud = true +RecordRequests = false +RequiresUnityCatalog = true + +Ignore = [ + ".databricks", + "databricks.yml", +] + +[EnvMatrix] + DATABRICKS_BUNDLE_ENGINE = ["direct"] diff --git a/acceptance/bundle/resources/catalogs/with-schemas/databricks.yml.tmpl b/acceptance/bundle/resources/catalogs/with-schemas/databricks.yml.tmpl new file mode 100644 index 0000000000..8abd2f1dbd --- /dev/null +++ 
b/acceptance/bundle/resources/catalogs/with-schemas/databricks.yml.tmpl @@ -0,0 +1,23 @@ +bundle: + name: uc-catalog-schemas-$UNIQUE_NAME + +workspace: + root_path: ~/.bundle/$UNIQUE_NAME + +resources: + catalogs: + test_catalog: + name: test_catalog_$UNIQUE_NAME + comment: "Catalog created from DABs" + properties: + purpose: "testing" + + schemas: + test_schema: + name: schema1 + catalog_name: ${resources.catalogs.test_catalog.name} + comment: "Schema in custom catalog" + +targets: + development: + default: true diff --git a/acceptance/bundle/resources/catalogs/with-schemas/out.test.toml b/acceptance/bundle/resources/catalogs/with-schemas/out.test.toml new file mode 100644 index 0000000000..f1d40380d0 --- /dev/null +++ b/acceptance/bundle/resources/catalogs/with-schemas/out.test.toml @@ -0,0 +1,6 @@ +Local = true +Cloud = true +RequiresUnityCatalog = true + +[EnvMatrix] + DATABRICKS_BUNDLE_ENGINE = ["direct"] diff --git a/acceptance/bundle/resources/catalogs/with-schemas/output.txt b/acceptance/bundle/resources/catalogs/with-schemas/output.txt new file mode 100644 index 0000000000..8034f5eb2f --- /dev/null +++ b/acceptance/bundle/resources/catalogs/with-schemas/output.txt @@ -0,0 +1,64 @@ + +=== Deploy bundle with catalog and schema +>>> [CLI] bundle deploy +Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/[UNIQUE_NAME]/files... +Deploying resources... +Updating deployment state... +Deployment complete! + +=== Assert the catalog is created +>>> [CLI] catalogs get test_catalog_[UNIQUE_NAME] +{ + "name": "test_catalog_[UNIQUE_NAME]", + "comment": "Catalog created from DABs", + "properties": { + "purpose": "testing" + } +} + +=== Assert schema is created in the custom catalog +>>> [CLI] schemas get test_catalog_[UNIQUE_NAME].schema1 +{ + "full_name": "test_catalog_[UNIQUE_NAME].schema1", + "catalog_name": "test_catalog_[UNIQUE_NAME]", + "comment": "Schema in custom catalog" +} + +=== Verify schema belongs to the test catalog +=== Update catalog comment +=== Redeploy with updated catalog +>>> [CLI] bundle deploy +Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/[UNIQUE_NAME]/files... +Deploying resources... +Updating deployment state... +Deployment complete! + +=== Assert catalog comment is updated +>>> [CLI] catalogs get test_catalog_[UNIQUE_NAME] +{ + "name": "test_catalog_[UNIQUE_NAME]", + "comment": "Updated catalog comment" +} + +=== Assert schema still exists after catalog update +>>> [CLI] schemas get test_catalog_[UNIQUE_NAME].schema1 +{ + "full_name": "test_catalog_[UNIQUE_NAME].schema1" +} + +=== Test cleanup +>>> [CLI] bundle destroy --auto-approve +The following resources will be deleted: + delete resources.catalogs.test_catalog + delete resources.schemas.test_schema + +This action will result in the deletion of the following UC schemas. Any underlying data may be lost: + delete resources.schemas.test_schema + +All files and directories at the following location will be deleted: /Workspace/Users/[USERNAME]/.bundle/[UNIQUE_NAME] + +Deleting files... +Destroy complete! 
+ +=== Assert schema is deleted +=== Assert the catalog is deleted \ No newline at end of file diff --git a/acceptance/bundle/resources/catalogs/with-schemas/script b/acceptance/bundle/resources/catalogs/with-schemas/script new file mode 100644 index 0000000000..6693092729 --- /dev/null +++ b/acceptance/bundle/resources/catalogs/with-schemas/script @@ -0,0 +1,45 @@ +envsubst < databricks.yml.tmpl > databricks.yml + +CATALOG_NAME="test_catalog_${UNIQUE_NAME}" +SCHEMA_FULL_NAME="${CATALOG_NAME}.schema1" + +cleanup() { + title "Test cleanup" + trace $CLI bundle destroy --auto-approve + + title "Assert schema is deleted" + trace errcode $CLI schemas get "${SCHEMA_FULL_NAME}" 2>/dev/null + + title "Assert the catalog is deleted" + trace errcode $CLI catalogs get "${CATALOG_NAME}" 2>/dev/null +} +trap cleanup EXIT + +title "Deploy bundle with catalog and schema" +trace $CLI bundle deploy + +title "Assert the catalog is created" +trace $CLI catalogs get "${CATALOG_NAME}" | jq "{name, comment, properties}" + +title "Assert schema is created in the custom catalog" +trace $CLI schemas get "${SCHEMA_FULL_NAME}" | jq "{full_name, catalog_name, comment}" + +title "Verify schema belongs to the test catalog" +SCHEMA_CATALOG=$($CLI schemas get "${SCHEMA_FULL_NAME}" | jq -r ".catalog_name") + +if [ "$SCHEMA_CATALOG" != "$CATALOG_NAME" ]; then + echo "Error: Schema catalog_name is $SCHEMA_CATALOG, expected $CATALOG_NAME" + exit 1 +fi + +title "Update catalog comment" +update_file.py databricks.yml "Catalog created from DABs" "Updated catalog comment" + +title "Redeploy with updated catalog" +trace $CLI bundle deploy + +title "Assert catalog comment is updated" +trace $CLI catalogs get "${CATALOG_NAME}" | jq "{name, comment}" + +title "Assert schema still exists after catalog update" +trace $CLI schemas get "${SCHEMA_FULL_NAME}" | jq "{full_name}" diff --git a/acceptance/bundle/resources/catalogs/with-schemas/test.toml b/acceptance/bundle/resources/catalogs/with-schemas/test.toml new file mode 100644 index 0000000000..d2b122411f --- /dev/null +++ b/acceptance/bundle/resources/catalogs/with-schemas/test.toml @@ -0,0 +1,12 @@ +Local = true +Cloud = true +RecordRequests = false +RequiresUnityCatalog = true + +Ignore = [ + ".databricks", + "databricks.yml", +] + +[EnvMatrix] + DATABRICKS_BUNDLE_ENGINE = ["direct"] diff --git a/acceptance/bundle/validate/catalog_requires_direct_mode/databricks.yml b/acceptance/bundle/validate/catalog_requires_direct_mode/databricks.yml new file mode 100644 index 0000000000..d215964bb7 --- /dev/null +++ b/acceptance/bundle/validate/catalog_requires_direct_mode/databricks.yml @@ -0,0 +1,8 @@ +bundle: + name: "catalog_validation_test" + +resources: + catalogs: + test_catalog: + name: "test_catalog" + comment: "This should fail in Terraform mode" diff --git a/acceptance/bundle/validate/catalog_requires_direct_mode/out.test.toml b/acceptance/bundle/validate/catalog_requires_direct_mode/out.test.toml new file mode 100644 index 0000000000..90061dedb1 --- /dev/null +++ b/acceptance/bundle/validate/catalog_requires_direct_mode/out.test.toml @@ -0,0 +1,5 @@ +Local = true +Cloud = false + +[EnvMatrix] + DATABRICKS_BUNDLE_ENGINE = ["terraform"] diff --git a/acceptance/bundle/validate/catalog_requires_direct_mode/output.txt b/acceptance/bundle/validate/catalog_requires_direct_mode/output.txt new file mode 100644 index 0000000000..8672f73c51 --- /dev/null +++ b/acceptance/bundle/validate/catalog_requires_direct_mode/output.txt @@ -0,0 +1,8 @@ +Error: Catalog resources are only supported with 
direct deployment mode + in databricks.yml:6:5 + +Catalog resources require direct deployment mode. Please set the DATABRICKS_BUNDLE_ENGINE environment variable to 'direct' to use catalog resources. +Learn more at https://docs.databricks.com/dev-tools/bundles/deployment-modes.html + + +Exit code: 1 diff --git a/acceptance/bundle/validate/catalog_requires_direct_mode/script b/acceptance/bundle/validate/catalog_requires_direct_mode/script new file mode 100755 index 0000000000..b260e836a7 --- /dev/null +++ b/acceptance/bundle/validate/catalog_requires_direct_mode/script @@ -0,0 +1 @@ +$CLI bundle plan diff --git a/acceptance/bundle/validate/catalog_requires_direct_mode/test.toml b/acceptance/bundle/validate/catalog_requires_direct_mode/test.toml new file mode 100644 index 0000000000..f59176e360 --- /dev/null +++ b/acceptance/bundle/validate/catalog_requires_direct_mode/test.toml @@ -0,0 +1,7 @@ +Local = true +Cloud = false + +Ignore = [".databricks"] + +[EnvMatrix] + DATABRICKS_BUNDLE_ENGINE = ["terraform"] diff --git a/bundle/config/mutator/resourcemutator/apply_bundle_permissions_test.go b/bundle/config/mutator/resourcemutator/apply_bundle_permissions_test.go index 4446e7c53e..d873c2b6b2 100644 --- a/bundle/config/mutator/resourcemutator/apply_bundle_permissions_test.go +++ b/bundle/config/mutator/resourcemutator/apply_bundle_permissions_test.go @@ -19,6 +19,7 @@ import ( // This list exists to ensure that this mutator is updated when new resource is added. // These resources are there because they use grants, not permissions: var unsupportedResources = []string{ + "catalogs", "volumes", "schemas", "quality_monitors", diff --git a/bundle/config/mutator/resourcemutator/apply_target_mode_test.go b/bundle/config/mutator/resourcemutator/apply_target_mode_test.go index ec7980348f..cc4a5d00da 100644 --- a/bundle/config/mutator/resourcemutator/apply_target_mode_test.go +++ b/bundle/config/mutator/resourcemutator/apply_target_mode_test.go @@ -131,6 +131,9 @@ func mockBundle(mode config.Mode) *bundle.Bundle { }, }, }, + Catalogs: map[string]*resources.Catalog{ + "catalog1": {CreateCatalog: catalog.CreateCatalog{Name: "catalog1"}}, + }, Schemas: map[string]*resources.Schema{ "schema1": {CreateSchema: catalog.CreateSchema{Name: "schema1"}}, }, @@ -313,6 +316,7 @@ func TestProcessTargetModeDefault(t *testing.T) { assert.Equal(t, "servingendpoint1", b.Config.Resources.ModelServingEndpoints["servingendpoint1"].Name) assert.Equal(t, "registeredmodel1", b.Config.Resources.RegisteredModels["registeredmodel1"].Name) assert.Equal(t, "qualityMonitor1", b.Config.Resources.QualityMonitors["qualityMonitor1"].TableName) + assert.Equal(t, "catalog1", b.Config.Resources.Catalogs["catalog1"].Name) assert.Equal(t, "schema1", b.Config.Resources.Schemas["schema1"].Name) assert.Equal(t, "volume1", b.Config.Resources.Volumes["volume1"].Name) assert.Equal(t, "cluster1", b.Config.Resources.Clusters["cluster1"].ClusterName) @@ -342,9 +346,10 @@ func TestAllNonUcResourcesAreRenamed(t *testing.T) { b := mockBundle(config.Development) // UC resources should not have a prefix added to their name. Right now - // this list only contains the Volume resource since we have yet to remove + // this list only contains the Volume and Catalog resources since we have yet to remove // prefixing support for UC schemas and registered models. 
ucFields := []reflect.Type{ + reflect.TypeOf(&resources.Catalog{}), reflect.TypeOf(&resources.Volume{}), } diff --git a/bundle/config/mutator/resourcemutator/run_as_test.go b/bundle/config/mutator/resourcemutator/run_as_test.go index 7500374adc..b5f8bf166d 100644 --- a/bundle/config/mutator/resourcemutator/run_as_test.go +++ b/bundle/config/mutator/resourcemutator/run_as_test.go @@ -34,6 +34,7 @@ func allResourceTypes(t *testing.T) []string { require.Equal(t, []string{ "alerts", "apps", + "catalogs", "clusters", "dashboards", "database_catalogs", @@ -145,6 +146,7 @@ func TestRunAsWorksForAllowedResources(t *testing.T) { // some point in the future. These resources are (implicitly) on the deny list, since // they are not on the allow list below. var allowList = []string{ + "catalogs", "clusters", "database_catalogs", "database_instances", diff --git a/bundle/config/mutator/validate_catalogs_only_with_direct.go b/bundle/config/mutator/validate_catalogs_only_with_direct.go new file mode 100644 index 0000000000..9a54effc62 --- /dev/null +++ b/bundle/config/mutator/validate_catalogs_only_with_direct.go @@ -0,0 +1,39 @@ +package mutator + +import ( + "context" + + "github.com/databricks/cli/bundle" + "github.com/databricks/cli/bundle/config/engine" + "github.com/databricks/cli/libs/diag" +) + +type validateCatalogsOnlyWithDirect struct { + engine engine.EngineType +} + +// ValidateCatalogsOnlyWithDirect returns a mutator that validates catalog resources +// are only used with direct deployment mode. +func ValidateCatalogsOnlyWithDirect(engine engine.EngineType) bundle.Mutator { + return &validateCatalogsOnlyWithDirect{engine: engine} +} + +func (m *validateCatalogsOnlyWithDirect) Name() string { + return "ValidateCatalogsOnlyWithDirect" +} + +func (m *validateCatalogsOnlyWithDirect) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { + // Catalogs are only supported in direct deployment mode + if !m.engine.IsDirect() && len(b.Config.Resources.Catalogs) > 0 { + return diag.Diagnostics{{ + Severity: diag.Error, + Summary: "Catalog resources are only supported with direct deployment mode", + Detail: "Catalog resources require direct deployment mode. 
" + + "Please set the DATABRICKS_BUNDLE_ENGINE environment variable to 'direct' to use catalog resources.\n" + + "Learn more at https://docs.databricks.com/dev-tools/bundles/deployment-modes.html", + Locations: b.Config.GetLocations("resources.catalogs"), + }} + } + + return nil +} diff --git a/bundle/config/resources.go b/bundle/config/resources.go index 4cb5da53ce..68570e4167 100644 --- a/bundle/config/resources.go +++ b/bundle/config/resources.go @@ -19,6 +19,7 @@ type Resources struct { ModelServingEndpoints map[string]*resources.ModelServingEndpoint `json:"model_serving_endpoints,omitempty"` RegisteredModels map[string]*resources.RegisteredModel `json:"registered_models,omitempty"` QualityMonitors map[string]*resources.QualityMonitor `json:"quality_monitors,omitempty"` + Catalogs map[string]*resources.Catalog `json:"catalogs,omitempty"` Schemas map[string]*resources.Schema `json:"schemas,omitempty"` Volumes map[string]*resources.Volume `json:"volumes,omitempty"` Clusters map[string]*resources.Cluster `json:"clusters,omitempty"` @@ -87,6 +88,7 @@ func (r *Resources) AllResources() []ResourceGroup { collectResourceMap(descriptions["model_serving_endpoints"], r.ModelServingEndpoints), collectResourceMap(descriptions["registered_models"], r.RegisteredModels), collectResourceMap(descriptions["quality_monitors"], r.QualityMonitors), + collectResourceMap(descriptions["catalogs"], r.Catalogs), collectResourceMap(descriptions["schemas"], r.Schemas), collectResourceMap(descriptions["clusters"], r.Clusters), collectResourceMap(descriptions["dashboards"], r.Dashboards), @@ -121,6 +123,12 @@ func (r *Resources) FindResourceByConfigKey(key string) (ConfigResource, error) } } + for k := range r.Catalogs { + if k == key { + found = append(found, r.Catalogs[k]) + } + } + for k := range r.Schemas { if k == key { found = append(found, r.Schemas[k]) @@ -230,6 +238,7 @@ func SupportedResources() map[string]resources.ResourceDescription { "model_serving_endpoints": (&resources.ModelServingEndpoint{}).ResourceDescription(), "registered_models": (&resources.RegisteredModel{}).ResourceDescription(), "quality_monitors": (&resources.QualityMonitor{}).ResourceDescription(), + "catalogs": (&resources.Catalog{}).ResourceDescription(), "schemas": (&resources.Schema{}).ResourceDescription(), "clusters": (&resources.Cluster{}).ResourceDescription(), "dashboards": (&resources.Dashboard{}).ResourceDescription(), diff --git a/bundle/config/resources/catalog.go b/bundle/config/resources/catalog.go new file mode 100644 index 0000000000..47b6ca61f2 --- /dev/null +++ b/bundle/config/resources/catalog.go @@ -0,0 +1,148 @@ +package resources + +import ( + "context" + "net/url" + "strings" + + "github.com/databricks/databricks-sdk-go" + "github.com/databricks/databricks-sdk-go/apierr" + "github.com/databricks/databricks-sdk-go/marshal" + "github.com/databricks/databricks-sdk-go/service/catalog" + + "github.com/databricks/cli/libs/log" +) + +type CatalogGrantPrivilege string + +const ( + CatalogGrantPrivilegeAllPrivileges CatalogGrantPrivilege = "ALL_PRIVILEGES" + CatalogGrantPrivilegeApplyTag CatalogGrantPrivilege = "APPLY_TAG" + CatalogGrantPrivilegeCreateConnection CatalogGrantPrivilege = "CREATE_CONNECTION" + CatalogGrantPrivilegeCreateExternalLocation CatalogGrantPrivilege = "CREATE_EXTERNAL_LOCATION" + CatalogGrantPrivilegeCreateExternalTable CatalogGrantPrivilege = "CREATE_EXTERNAL_TABLE" + CatalogGrantPrivilegeCreateExternalVolume CatalogGrantPrivilege = "CREATE_EXTERNAL_VOLUME" + 
CatalogGrantPrivilegeCreateForeignCatalog CatalogGrantPrivilege = "CREATE_FOREIGN_CATALOG" + CatalogGrantPrivilegeCreateFunction CatalogGrantPrivilege = "CREATE_FUNCTION" + CatalogGrantPrivilegeCreateManagedStorage CatalogGrantPrivilege = "CREATE_MANAGED_STORAGE" + CatalogGrantPrivilegeCreateMaterializedView CatalogGrantPrivilege = "CREATE_MATERIALIZED_VIEW" + CatalogGrantPrivilegeCreateModel CatalogGrantPrivilege = "CREATE_MODEL" + CatalogGrantPrivilegeCreateSchema CatalogGrantPrivilege = "CREATE_SCHEMA" + CatalogGrantPrivilegeCreateStorageCredential CatalogGrantPrivilege = "CREATE_STORAGE_CREDENTIAL" + CatalogGrantPrivilegeCreateTable CatalogGrantPrivilege = "CREATE_TABLE" + CatalogGrantPrivilegeCreateVolume CatalogGrantPrivilege = "CREATE_VOLUME" + CatalogGrantPrivilegeExecute CatalogGrantPrivilege = "EXECUTE" + CatalogGrantPrivilegeManage CatalogGrantPrivilege = "MANAGE" + CatalogGrantPrivilegeModify CatalogGrantPrivilege = "MODIFY" + CatalogGrantPrivilegeReadVolume CatalogGrantPrivilege = "READ_VOLUME" + CatalogGrantPrivilegeRefresh CatalogGrantPrivilege = "REFRESH" + CatalogGrantPrivilegeSelect CatalogGrantPrivilege = "SELECT" + CatalogGrantPrivilegeUseCatalog CatalogGrantPrivilege = "USE_CATALOG" + CatalogGrantPrivilegeUseConnection CatalogGrantPrivilege = "USE_CONNECTION" + CatalogGrantPrivilegeUseSchema CatalogGrantPrivilege = "USE_SCHEMA" + CatalogGrantPrivilegeWriteVolume CatalogGrantPrivilege = "WRITE_VOLUME" +) + +// Values returns all valid CatalogGrantPrivilege values +func (CatalogGrantPrivilege) Values() []CatalogGrantPrivilege { + return []CatalogGrantPrivilege{ + CatalogGrantPrivilegeAllPrivileges, + CatalogGrantPrivilegeApplyTag, + CatalogGrantPrivilegeCreateConnection, + CatalogGrantPrivilegeCreateExternalLocation, + CatalogGrantPrivilegeCreateExternalTable, + CatalogGrantPrivilegeCreateExternalVolume, + CatalogGrantPrivilegeCreateForeignCatalog, + CatalogGrantPrivilegeCreateFunction, + CatalogGrantPrivilegeCreateManagedStorage, + CatalogGrantPrivilegeCreateMaterializedView, + CatalogGrantPrivilegeCreateModel, + CatalogGrantPrivilegeCreateSchema, + CatalogGrantPrivilegeCreateStorageCredential, + CatalogGrantPrivilegeCreateTable, + CatalogGrantPrivilegeCreateVolume, + CatalogGrantPrivilegeExecute, + CatalogGrantPrivilegeManage, + CatalogGrantPrivilegeModify, + CatalogGrantPrivilegeReadVolume, + CatalogGrantPrivilegeRefresh, + CatalogGrantPrivilegeSelect, + CatalogGrantPrivilegeUseCatalog, + CatalogGrantPrivilegeUseConnection, + CatalogGrantPrivilegeUseSchema, + CatalogGrantPrivilegeWriteVolume, + } +} + +// CatalogGrant holds the grant level settings for a single principal in Unity Catalog. +// Multiple of these can be defined on any catalog. +type CatalogGrant struct { + Privileges []CatalogGrantPrivilege `json:"privileges"` + + Principal string `json:"principal"` +} + +type Catalog struct { + BaseResource + catalog.CreateCatalog + + // Whether predictive optimization should be enabled for this object and objects under it. + // This field is only used for updates and cannot be set during creation. + EnablePredictiveOptimization catalog.EnablePredictiveOptimization `json:"enable_predictive_optimization,omitempty"` + + // Whether the current securable is accessible from all workspaces or a specific set of workspaces. + // This field is only used for updates and cannot be set during creation. + IsolationMode catalog.CatalogIsolationMode `json:"isolation_mode,omitempty"` + + // List of grants to apply on this catalog. 
+ Grants []CatalogGrant `json:"grants,omitempty"` +} + +func (c *Catalog) Exists(ctx context.Context, w *databricks.WorkspaceClient, name string) (bool, error) { + log.Tracef(ctx, "Checking if catalog with name=%s exists", name) + + _, err := w.Catalogs.GetByName(ctx, name) + if err != nil { + log.Debugf(ctx, "catalog with name %s does not exist: %v", name, err) + + if apierr.IsMissing(err) { + return false, nil + } + + return false, err + } + return true, nil +} + +func (*Catalog) ResourceDescription() ResourceDescription { + return ResourceDescription{ + SingularName: "catalog", + PluralName: "catalogs", + SingularTitle: "Catalog", + PluralTitle: "Catalogs", + } +} + +func (c *Catalog) InitializeURL(baseURL url.URL) { + if c.ID == "" { + return + } + baseURL.Path = "explore/data/" + strings.ReplaceAll(c.ID, ".", "/") + c.URL = baseURL.String() +} + +func (c *Catalog) GetURL() string { + return c.URL +} + +func (c *Catalog) GetName() string { + return c.Name +} + +func (c *Catalog) UnmarshalJSON(b []byte) error { + return marshal.Unmarshal(b, c) +} + +func (c Catalog) MarshalJSON() ([]byte, error) { + return marshal.Marshal(c) +} diff --git a/bundle/config/resources_test.go b/bundle/config/resources_test.go index 6bc61196e2..b5a3808482 100644 --- a/bundle/config/resources_test.go +++ b/bundle/config/resources_test.go @@ -139,6 +139,11 @@ func TestResourcesBindSupport(t *testing.T) { CreateRegisteredModelRequest: catalog.CreateRegisteredModelRequest{}, }, }, + Catalogs: map[string]*resources.Catalog{ + "my_catalog": { + CreateCatalog: catalog.CreateCatalog{}, + }, + }, Schemas: map[string]*resources.Schema{ "my_schema": { CreateSchema: catalog.CreateSchema{}, @@ -209,6 +214,7 @@ func TestResourcesBindSupport(t *testing.T) { m.GetMockPipelinesAPI().EXPECT().Get(mock.Anything, mock.Anything).Return(nil, nil) m.GetMockExperimentsAPI().EXPECT().GetExperiment(mock.Anything, mock.Anything).Return(nil, nil) m.GetMockRegisteredModelsAPI().EXPECT().Get(mock.Anything, mock.Anything).Return(nil, nil) + m.GetMockCatalogsAPI().EXPECT().GetByName(mock.Anything, mock.Anything).Return(nil, nil) m.GetMockSchemasAPI().EXPECT().GetByFullName(mock.Anything, mock.Anything).Return(nil, nil) m.GetMockClustersAPI().EXPECT().GetByClusterId(mock.Anything, mock.Anything).Return(nil, nil) m.GetMockLakeviewAPI().EXPECT().Get(mock.Anything, mock.Anything).Return(nil, nil) diff --git a/bundle/direct/dresources/all.go b/bundle/direct/dresources/all.go index 3c00f6b262..600602777a 100644 --- a/bundle/direct/dresources/all.go +++ b/bundle/direct/dresources/all.go @@ -10,6 +10,7 @@ var SupportedResources = map[string]any{ "jobs": (*ResourceJob)(nil), "pipelines": (*ResourcePipeline)(nil), "experiments": (*ResourceExperiment)(nil), + "catalogs": (*ResourceCatalog)(nil), "schemas": (*ResourceSchema)(nil), "volumes": (*ResourceVolume)(nil), "models": (*ResourceMlflowModel)(nil), @@ -41,6 +42,7 @@ var SupportedResources = map[string]any{ "dashboards.permissions": (*ResourcePermissions)(nil), // Grants + "catalogs.grants": (*ResourceGrants)(nil), "schemas.grants": (*ResourceGrants)(nil), "volumes.grants": (*ResourceGrants)(nil), "registered_models.grants": (*ResourceGrants)(nil), diff --git a/bundle/direct/dresources/all_test.go b/bundle/direct/dresources/all_test.go index 9357b6fc81..9beadebcda 100644 --- a/bundle/direct/dresources/all_test.go +++ b/bundle/direct/dresources/all_test.go @@ -39,6 +39,15 @@ var testConfig map[string]any = map[string]any{ }, }, + "catalogs": &resources.Catalog{ + CreateCatalog: 
catalog.CreateCatalog{ + Name: "mycatalog", + Comment: "Test catalog", + }, + // Note: EnablePredictiveOptimization and IsolationMode cannot be set during creation, + // only during updates. They are not included in the test config. + }, + "schemas": &resources.Schema{ CreateSchema: catalog.CreateSchema{ CatalogName: "main", @@ -435,6 +444,17 @@ var testDeps = map[string]prepareWorkspace{ }, nil }, + "catalogs.grants": func(client *databricks.WorkspaceClient) (any, error) { + return &GrantsState{ + SecurableType: "catalog", + FullName: "mycatalog", + Grants: []GrantAssignment{{ + Privileges: []catalog.Privilege{catalog.PrivilegeUseCatalog}, + Principal: "user@example.com", + }}, + }, nil + }, + "schemas.grants": func(client *databricks.WorkspaceClient) (any, error) { return &GrantsState{ SecurableType: "schema", diff --git a/bundle/direct/dresources/catalog.go b/bundle/direct/dresources/catalog.go new file mode 100644 index 0000000000..bf32588837 --- /dev/null +++ b/bundle/direct/dresources/catalog.go @@ -0,0 +1,87 @@ +package dresources + +import ( + "context" + + "github.com/databricks/cli/bundle/config/resources" + "github.com/databricks/cli/libs/log" + "github.com/databricks/cli/libs/utils" + "github.com/databricks/databricks-sdk-go" + "github.com/databricks/databricks-sdk-go/service/catalog" +) + +type ResourceCatalog struct { + client *databricks.WorkspaceClient +} + +func (*ResourceCatalog) New(client *databricks.WorkspaceClient) *ResourceCatalog { + return &ResourceCatalog{client: client} +} + +func (*ResourceCatalog) PrepareState(input *resources.Catalog) *resources.Catalog { + return input +} + +func (*ResourceCatalog) RemapState(info *catalog.CatalogInfo) *resources.Catalog { + return &resources.Catalog{ + CreateCatalog: catalog.CreateCatalog{ + Comment: info.Comment, + ConnectionName: info.ConnectionName, + Name: info.Name, + Options: info.Options, + Properties: info.Properties, + ProviderName: info.ProviderName, + ShareName: info.ShareName, + StorageRoot: info.StorageRoot, + ForceSendFields: utils.FilterFields[catalog.CreateCatalog](info.ForceSendFields), + }, + EnablePredictiveOptimization: info.EnablePredictiveOptimization, + IsolationMode: info.IsolationMode, + } +} + +func (r *ResourceCatalog) DoRead(ctx context.Context, id string) (*catalog.CatalogInfo, error) { + return r.client.Catalogs.GetByName(ctx, id) +} + +func (r *ResourceCatalog) DoCreate(ctx context.Context, input *resources.Catalog) (string, *catalog.CatalogInfo, error) { + response, err := r.client.Catalogs.Create(ctx, input.CreateCatalog) + if err != nil || response == nil { + return "", nil, err + } + return response.Name, response, nil +} + +// DoUpdate updates the catalog in place and returns remote state. +func (r *ResourceCatalog) DoUpdate(ctx context.Context, id string, input *resources.Catalog, _ Changes) (*catalog.CatalogInfo, error) { + updateRequest := catalog.UpdateCatalog{ + Comment: input.Comment, + EnablePredictiveOptimization: input.EnablePredictiveOptimization, + IsolationMode: input.IsolationMode, + Name: id, + NewName: "", // We recreate catalogs on name change intentionally. 
+ Options: input.Options, + Owner: "", // Not supported by DABs + Properties: input.Properties, + ForceSendFields: utils.FilterFields[catalog.UpdateCatalog](input.ForceSendFields, "NewName", "Owner"), + } + + response, err := r.client.Catalogs.Update(ctx, updateRequest) + if err != nil { + return nil, err + } + + if response != nil && response.Name != id { + log.Warnf(ctx, "catalogs: response contains unexpected name=%#v (expected %#v)", response.Name, id) + } + + return response, nil +} + +func (r *ResourceCatalog) DoDelete(ctx context.Context, id string) error { + return r.client.Catalogs.Delete(ctx, catalog.DeleteCatalogRequest{ + Name: id, + Force: true, + ForceSendFields: nil, + }) +} diff --git a/bundle/phases/plan.go b/bundle/phases/plan.go index a8926e707f..e5e242138f 100644 --- a/bundle/phases/plan.go +++ b/bundle/phases/plan.go @@ -24,6 +24,7 @@ func PreDeployChecks(ctx context.Context, b *bundle.Bundle, isPlan bool, engine resourcemutator.SecretScopeFixups(engine), deploy.StatePull(), mutator.ValidateGitDetails(), + mutator.ValidateCatalogsOnlyWithDirect(engine), statemgmt.CheckRunningResource(engine), ) } From c16afc67a1d537622678117d96f2ba520d26f960 Mon Sep 17 00:00:00 2001 From: Andrew Nester Date: Wed, 21 Jan 2026 12:02:48 +0100 Subject: [PATCH 2/5] schema + fix --- bundle/config/resources/catalog.go | 56 +++++------- bundle/direct/dresources/catalog.go | 55 +++++------- bundle/internal/schema/annotations.yml | 41 +++++++++ bundle/schema/jsonschema.json | 119 +++++++++++++++++++++++++ 4 files changed, 207 insertions(+), 64 deletions(-) diff --git a/bundle/config/resources/catalog.go b/bundle/config/resources/catalog.go index 47b6ca61f2..cece884153 100644 --- a/bundle/config/resources/catalog.go +++ b/bundle/config/resources/catalog.go @@ -16,31 +16,31 @@ import ( type CatalogGrantPrivilege string const ( - CatalogGrantPrivilegeAllPrivileges CatalogGrantPrivilege = "ALL_PRIVILEGES" - CatalogGrantPrivilegeApplyTag CatalogGrantPrivilege = "APPLY_TAG" - CatalogGrantPrivilegeCreateConnection CatalogGrantPrivilege = "CREATE_CONNECTION" - CatalogGrantPrivilegeCreateExternalLocation CatalogGrantPrivilege = "CREATE_EXTERNAL_LOCATION" - CatalogGrantPrivilegeCreateExternalTable CatalogGrantPrivilege = "CREATE_EXTERNAL_TABLE" - CatalogGrantPrivilegeCreateExternalVolume CatalogGrantPrivilege = "CREATE_EXTERNAL_VOLUME" - CatalogGrantPrivilegeCreateForeignCatalog CatalogGrantPrivilege = "CREATE_FOREIGN_CATALOG" - CatalogGrantPrivilegeCreateFunction CatalogGrantPrivilege = "CREATE_FUNCTION" - CatalogGrantPrivilegeCreateManagedStorage CatalogGrantPrivilege = "CREATE_MANAGED_STORAGE" - CatalogGrantPrivilegeCreateMaterializedView CatalogGrantPrivilege = "CREATE_MATERIALIZED_VIEW" - CatalogGrantPrivilegeCreateModel CatalogGrantPrivilege = "CREATE_MODEL" - CatalogGrantPrivilegeCreateSchema CatalogGrantPrivilege = "CREATE_SCHEMA" + CatalogGrantPrivilegeAllPrivileges CatalogGrantPrivilege = "ALL_PRIVILEGES" + CatalogGrantPrivilegeApplyTag CatalogGrantPrivilege = "APPLY_TAG" + CatalogGrantPrivilegeCreateConnection CatalogGrantPrivilege = "CREATE_CONNECTION" + CatalogGrantPrivilegeCreateExternalLocation CatalogGrantPrivilege = "CREATE_EXTERNAL_LOCATION" + CatalogGrantPrivilegeCreateExternalTable CatalogGrantPrivilege = "CREATE_EXTERNAL_TABLE" + CatalogGrantPrivilegeCreateExternalVolume CatalogGrantPrivilege = "CREATE_EXTERNAL_VOLUME" + CatalogGrantPrivilegeCreateForeignCatalog CatalogGrantPrivilege = "CREATE_FOREIGN_CATALOG" + CatalogGrantPrivilegeCreateFunction CatalogGrantPrivilege = 
"CREATE_FUNCTION" + CatalogGrantPrivilegeCreateManagedStorage CatalogGrantPrivilege = "CREATE_MANAGED_STORAGE" + CatalogGrantPrivilegeCreateMaterializedView CatalogGrantPrivilege = "CREATE_MATERIALIZED_VIEW" + CatalogGrantPrivilegeCreateModel CatalogGrantPrivilege = "CREATE_MODEL" + CatalogGrantPrivilegeCreateSchema CatalogGrantPrivilege = "CREATE_SCHEMA" CatalogGrantPrivilegeCreateStorageCredential CatalogGrantPrivilege = "CREATE_STORAGE_CREDENTIAL" - CatalogGrantPrivilegeCreateTable CatalogGrantPrivilege = "CREATE_TABLE" - CatalogGrantPrivilegeCreateVolume CatalogGrantPrivilege = "CREATE_VOLUME" - CatalogGrantPrivilegeExecute CatalogGrantPrivilege = "EXECUTE" - CatalogGrantPrivilegeManage CatalogGrantPrivilege = "MANAGE" - CatalogGrantPrivilegeModify CatalogGrantPrivilege = "MODIFY" - CatalogGrantPrivilegeReadVolume CatalogGrantPrivilege = "READ_VOLUME" - CatalogGrantPrivilegeRefresh CatalogGrantPrivilege = "REFRESH" - CatalogGrantPrivilegeSelect CatalogGrantPrivilege = "SELECT" - CatalogGrantPrivilegeUseCatalog CatalogGrantPrivilege = "USE_CATALOG" - CatalogGrantPrivilegeUseConnection CatalogGrantPrivilege = "USE_CONNECTION" - CatalogGrantPrivilegeUseSchema CatalogGrantPrivilege = "USE_SCHEMA" - CatalogGrantPrivilegeWriteVolume CatalogGrantPrivilege = "WRITE_VOLUME" + CatalogGrantPrivilegeCreateTable CatalogGrantPrivilege = "CREATE_TABLE" + CatalogGrantPrivilegeCreateVolume CatalogGrantPrivilege = "CREATE_VOLUME" + CatalogGrantPrivilegeExecute CatalogGrantPrivilege = "EXECUTE" + CatalogGrantPrivilegeManage CatalogGrantPrivilege = "MANAGE" + CatalogGrantPrivilegeModify CatalogGrantPrivilege = "MODIFY" + CatalogGrantPrivilegeReadVolume CatalogGrantPrivilege = "READ_VOLUME" + CatalogGrantPrivilegeRefresh CatalogGrantPrivilege = "REFRESH" + CatalogGrantPrivilegeSelect CatalogGrantPrivilege = "SELECT" + CatalogGrantPrivilegeUseCatalog CatalogGrantPrivilege = "USE_CATALOG" + CatalogGrantPrivilegeUseConnection CatalogGrantPrivilege = "USE_CONNECTION" + CatalogGrantPrivilegeUseSchema CatalogGrantPrivilege = "USE_SCHEMA" + CatalogGrantPrivilegeWriteVolume CatalogGrantPrivilege = "WRITE_VOLUME" ) // Values returns all valid CatalogGrantPrivilege values @@ -86,14 +86,6 @@ type Catalog struct { BaseResource catalog.CreateCatalog - // Whether predictive optimization should be enabled for this object and objects under it. - // This field is only used for updates and cannot be set during creation. - EnablePredictiveOptimization catalog.EnablePredictiveOptimization `json:"enable_predictive_optimization,omitempty"` - - // Whether the current securable is accessible from all workspaces or a specific set of workspaces. - // This field is only used for updates and cannot be set during creation. - IsolationMode catalog.CatalogIsolationMode `json:"isolation_mode,omitempty"` - // List of grants to apply on this catalog. 
Grants []CatalogGrant `json:"grants,omitempty"` } diff --git a/bundle/direct/dresources/catalog.go b/bundle/direct/dresources/catalog.go index bf32588837..45b5747bf6 100644 --- a/bundle/direct/dresources/catalog.go +++ b/bundle/direct/dresources/catalog.go @@ -4,7 +4,6 @@ import ( "context" "github.com/databricks/cli/bundle/config/resources" - "github.com/databricks/cli/libs/log" "github.com/databricks/cli/libs/utils" "github.com/databricks/databricks-sdk-go" "github.com/databricks/databricks-sdk-go/service/catalog" @@ -18,25 +17,21 @@ func (*ResourceCatalog) New(client *databricks.WorkspaceClient) *ResourceCatalog return &ResourceCatalog{client: client} } -func (*ResourceCatalog) PrepareState(input *resources.Catalog) *resources.Catalog { - return input +func (*ResourceCatalog) PrepareState(input *resources.Catalog) *catalog.CreateCatalog { + return &input.CreateCatalog } -func (*ResourceCatalog) RemapState(info *catalog.CatalogInfo) *resources.Catalog { - return &resources.Catalog{ - CreateCatalog: catalog.CreateCatalog{ - Comment: info.Comment, - ConnectionName: info.ConnectionName, - Name: info.Name, - Options: info.Options, - Properties: info.Properties, - ProviderName: info.ProviderName, - ShareName: info.ShareName, - StorageRoot: info.StorageRoot, - ForceSendFields: utils.FilterFields[catalog.CreateCatalog](info.ForceSendFields), - }, - EnablePredictiveOptimization: info.EnablePredictiveOptimization, - IsolationMode: info.IsolationMode, +func (*ResourceCatalog) RemapState(info *catalog.CatalogInfo) *catalog.CreateCatalog { + return &catalog.CreateCatalog{ + Comment: info.Comment, + ConnectionName: info.ConnectionName, + Name: info.Name, + Options: info.Options, + Properties: info.Properties, + ProviderName: info.ProviderName, + ShareName: info.ShareName, + StorageRoot: info.StorageRoot, + ForceSendFields: utils.FilterFields[catalog.CreateCatalog](info.ForceSendFields), } } @@ -44,8 +39,8 @@ func (r *ResourceCatalog) DoRead(ctx context.Context, id string) (*catalog.Catal return r.client.Catalogs.GetByName(ctx, id) } -func (r *ResourceCatalog) DoCreate(ctx context.Context, input *resources.Catalog) (string, *catalog.CatalogInfo, error) { - response, err := r.client.Catalogs.Create(ctx, input.CreateCatalog) +func (r *ResourceCatalog) DoCreate(ctx context.Context, config *catalog.CreateCatalog) (string, *catalog.CatalogInfo, error) { + response, err := r.client.Catalogs.Create(ctx, *config) if err != nil || response == nil { return "", nil, err } @@ -53,17 +48,17 @@ func (r *ResourceCatalog) DoCreate(ctx context.Context, input *resources.Catalog } // DoUpdate updates the catalog in place and returns remote state. -func (r *ResourceCatalog) DoUpdate(ctx context.Context, id string, input *resources.Catalog, _ Changes) (*catalog.CatalogInfo, error) { +func (r *ResourceCatalog) DoUpdate(ctx context.Context, id string, config *catalog.CreateCatalog, _ Changes) (*catalog.CatalogInfo, error) { updateRequest := catalog.UpdateCatalog{ - Comment: input.Comment, - EnablePredictiveOptimization: input.EnablePredictiveOptimization, - IsolationMode: input.IsolationMode, + Comment: config.Comment, + EnablePredictiveOptimization: "", // Not supported by DABs + IsolationMode: "", // Not supported by DABs Name: id, - NewName: "", // We recreate catalogs on name change intentionally. 
- Options: input.Options, + NewName: config.Name, // Support renaming catalogs + Options: config.Options, Owner: "", // Not supported by DABs - Properties: input.Properties, - ForceSendFields: utils.FilterFields[catalog.UpdateCatalog](input.ForceSendFields, "NewName", "Owner"), + Properties: config.Properties, + ForceSendFields: utils.FilterFields[catalog.UpdateCatalog](config.ForceSendFields, "EnablePredictiveOptimization", "IsolationMode", "Owner"), } response, err := r.client.Catalogs.Update(ctx, updateRequest) @@ -71,10 +66,6 @@ func (r *ResourceCatalog) DoUpdate(ctx context.Context, id string, input *resour return nil, err } - if response != nil && response.Name != id { - log.Warnf(ctx, "catalogs: response contains unexpected name=%#v (expected %#v)", response.Name, id) - } - return response, nil } diff --git a/bundle/internal/schema/annotations.yml b/bundle/internal/schema/annotations.yml index 4a9c477f8a..ec90e99972 100644 --- a/bundle/internal/schema/annotations.yml +++ b/bundle/internal/schema/annotations.yml @@ -162,6 +162,9 @@ github.com/databricks/cli/bundle/config.Resources: The app resource defines a Databricks app. "markdown_description": |- The app resource defines a [Databricks app](/api/workspace/apps/create). For information about Databricks Apps, see [\_](/dev-tools/databricks-apps/index.md). + "catalogs": + "description": |- + PLACEHOLDER "clusters": "description": |- The cluster definitions for the bundle, where each key is the name of a cluster. @@ -531,6 +534,44 @@ github.com/databricks/cli/bundle/config/resources.AppPermission: "user_name": "description": |- PLACEHOLDER +github.com/databricks/cli/bundle/config/resources.Catalog: + "comment": + "description": |- + PLACEHOLDER + "connection_name": + "description": |- + PLACEHOLDER + "grants": + "description": |- + PLACEHOLDER + "lifecycle": + "description": |- + PLACEHOLDER + "name": + "description": |- + PLACEHOLDER + "options": + "description": |- + PLACEHOLDER + "properties": + "description": |- + PLACEHOLDER + "provider_name": + "description": |- + PLACEHOLDER + "share_name": + "description": |- + PLACEHOLDER + "storage_root": + "description": |- + PLACEHOLDER +github.com/databricks/cli/bundle/config/resources.CatalogGrant: + "principal": + "description": |- + PLACEHOLDER + "privileges": + "description": |- + PLACEHOLDER github.com/databricks/cli/bundle/config/resources.ClusterPermission: "group_name": "description": |- diff --git a/bundle/schema/jsonschema.json b/bundle/schema/jsonschema.json index c564dce172..04a0464d5f 100644 --- a/bundle/schema/jsonschema.json +++ b/bundle/schema/jsonschema.json @@ -318,6 +318,80 @@ } ] }, + "resources.Catalog": { + "oneOf": [ + { + "type": "object", + "properties": { + "comment": { + "$ref": "#/$defs/string" + }, + "connection_name": { + "$ref": "#/$defs/string" + }, + "grants": { + "$ref": "#/$defs/slice/github.com/databricks/cli/bundle/config/resources.CatalogGrant" + }, + "lifecycle": { + "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.Lifecycle" + }, + "name": { + "$ref": "#/$defs/string" + }, + "options": { + "$ref": "#/$defs/map/string" + }, + "properties": { + "$ref": "#/$defs/map/string" + }, + "provider_name": { + "$ref": "#/$defs/string" + }, + "share_name": { + "$ref": "#/$defs/string" + }, + "storage_root": { + "$ref": "#/$defs/string" + } + }, + "additionalProperties": false, + "required": [ + "name" + ] + }, + { + "type": "string", + "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" + } + ] + }, + 
"resources.CatalogGrant": { + "oneOf": [ + { + "type": "object", + "properties": { + "principal": { + "$ref": "#/$defs/string" + }, + "privileges": { + "$ref": "#/$defs/slice/github.com/databricks/cli/bundle/config/resources.CatalogGrantPrivilege" + } + }, + "additionalProperties": false, + "required": [ + "privileges", + "principal" + ] + }, + { + "type": "string", + "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" + } + ] + }, + "resources.CatalogGrantPrivilege": { + "type": "string" + }, "resources.Cluster": { "oneOf": [ { @@ -2504,6 +2578,9 @@ "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.App", "markdownDescription": "The app resource defines a [Databricks app](https://docs.databricks.com/api/workspace/apps/create). For information about Databricks Apps, see [link](https://docs.databricks.com/dev-tools/databricks-apps/index.html)." }, + "catalogs": { + "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.Catalog" + }, "clusters": { "description": "The cluster definitions for the bundle, where each key is the name of a cluster.", "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.Cluster", @@ -9801,6 +9878,20 @@ } ] }, + "resources.Catalog": { + "oneOf": [ + { + "type": "object", + "additionalProperties": { + "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.Catalog" + } + }, + { + "type": "string", + "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" + } + ] + }, "resources.Cluster": { "oneOf": [ { @@ -10177,6 +10268,34 @@ } ] }, + "resources.CatalogGrant": { + "oneOf": [ + { + "type": "array", + "items": { + "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.CatalogGrant" + } + }, + { + "type": "string", + "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" + } + ] + }, + "resources.CatalogGrantPrivilege": { + "oneOf": [ + { + "type": "array", + "items": { + "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.CatalogGrantPrivilege" + } + }, + { + "type": "string", + "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" + } + ] + }, "resources.ClusterPermission": { "oneOf": [ { From 38d059a9726747bc748b8e9a83f20eb43dc60797 Mon Sep 17 00:00:00 2001 From: Andrew Nester Date: Wed, 21 Jan 2026 13:04:26 +0100 Subject: [PATCH 3/5] addressed feedback --- .../grants/catalogs/databricks.yml.tmpl | 13 ++++ .../resources/grants/catalogs/output.txt | 73 +++++++++++++++++++ .../bundle/resources/grants/catalogs/script | 25 +++++++ .../capture_schema_dependency.go | 39 +++++++++- .../capture_schema_dependency_test.go | 61 ++++++++++++++++ .../validate_catalogs_only_with_direct.go | 39 ---------- .../mutator/validate_direct_only_resources.go | 72 ++++++++++++++++++ bundle/deploy/terraform/lifecycle_test.go | 11 +++ bundle/phases/plan.go | 2 +- bundle/statemgmt/state_load_test.go | 35 +++++++++ 10 files changed, 328 insertions(+), 42 deletions(-) create mode 100644 acceptance/bundle/resources/grants/catalogs/databricks.yml.tmpl create mode 100644 acceptance/bundle/resources/grants/catalogs/output.txt create mode 100755 acceptance/bundle/resources/grants/catalogs/script delete mode 100644 bundle/config/mutator/validate_catalogs_only_with_direct.go create mode 100644 bundle/config/mutator/validate_direct_only_resources.go diff --git a/acceptance/bundle/resources/grants/catalogs/databricks.yml.tmpl b/acceptance/bundle/resources/grants/catalogs/databricks.yml.tmpl new file mode 100644 index 
0000000000..2187c3aa5f --- /dev/null +++ b/acceptance/bundle/resources/grants/catalogs/databricks.yml.tmpl @@ -0,0 +1,13 @@ +bundle: + name: catalog-grants-$UNIQUE_NAME + +resources: + catalogs: + grants_catalog: + name: catalog_grants_$UNIQUE_NAME + comment: "Test catalog for grants" + grants: + - principal: deco-test-user@databricks.com + privileges: + - USE_CATALOG + - CREATE_SCHEMA diff --git a/acceptance/bundle/resources/grants/catalogs/output.txt b/acceptance/bundle/resources/grants/catalogs/output.txt new file mode 100644 index 0000000000..acd8f08c79 --- /dev/null +++ b/acceptance/bundle/resources/grants/catalogs/output.txt @@ -0,0 +1,73 @@ + +>>> [CLI] bundle plan +create catalogs.grants_catalog +create catalogs.grants_catalog.grants + +Plan: 2 to add, 0 to change, 0 to delete, 0 unchanged + +>>> [CLI] bundle plan -o json + +>>> print_requests.py --get //permissions + +>>> [CLI] bundle deploy +Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/catalog-grants-[UNIQUE_NAME]/default/files... +Deploying resources... +Updating deployment state... +Deployment complete! + +>>> print_requests.py //permissions + +>>> [CLI] bundle plan +Plan: 0 to add, 0 to change, 0 to delete, 2 unchanged + +>>> [CLI] grants get catalog catalog_grants_[UNIQUE_NAME] +{ + "privilege_assignments": [ + { + "principal": "deco-test-user@databricks.com", + "privileges": [ + "CREATE_SCHEMA", + "USE_CATALOG" + ] + } + ] +} + +>>> [CLI] bundle plan -o json + +>>> print_requests.py --get //permissions + +>>> [CLI] bundle deploy +Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/catalog-grants-[UNIQUE_NAME]/default/files... +Deploying resources... +Updating deployment state... +Deployment complete! + +>>> print_requests.py //permissions + +>>> [CLI] grants get catalog catalog_grants_[UNIQUE_NAME] +{ + "privilege_assignments": [ + { + "principal": "deco-test-user@databricks.com", + "privileges": [ + "CREATE_SCHEMA", + "USE_SCHEMA" + ] + } + ] +} + +>>> [CLI] bundle destroy --auto-approve +The following resources will be deleted: + delete resources.catalogs.grants_catalog + +This action will result in the deletion of the following UC catalogs. Any underlying data may be lost: + delete resources.catalogs.grants_catalog + +All files and directories at the following location will be deleted: /Workspace/Users/[USERNAME]/.bundle/catalog-grants-[UNIQUE_NAME]/default + +Deleting files... +Destroy complete! + +>>> print_requests.py //permissions diff --git a/acceptance/bundle/resources/grants/catalogs/script b/acceptance/bundle/resources/grants/catalogs/script new file mode 100755 index 0000000000..c8db2c8900 --- /dev/null +++ b/acceptance/bundle/resources/grants/catalogs/script @@ -0,0 +1,25 @@ +envsubst < databricks.yml.tmpl > databricks.yml + +trace $CLI bundle plan +trace $CLI bundle plan -o json > out.plan1.$DATABRICKS_BUNDLE_ENGINE.json +trace print_requests.py --get //permissions +trace $CLI bundle deploy +trace print_requests.py //permissions > out.deploy1.requests.$DATABRICKS_BUNDLE_ENGINE.json + +trace $CLI bundle plan + +# Since we're sending different requests between terraform and direct, assert that the end result is the same: +trace $CLI grants get catalog catalog_grants_$UNIQUE_NAME | jq --sort-keys + +update_file.py databricks.yml CREATE_SCHEMA USE_SCHEMA + +trace $CLI bundle plan -o json > out.plan2.$DATABRICKS_BUNDLE_ENGINE.json +jq 'walk(if type == "object" then del(.created_at, .created_by, .updated_at, .updated_by, .metastore_id) else . 
end)' out.plan2.$DATABRICKS_BUNDLE_ENGINE.json > tmp.json && mv tmp.json out.plan2.$DATABRICKS_BUNDLE_ENGINE.json +trace print_requests.py --get //permissions +trace $CLI bundle deploy +trace print_requests.py //permissions > out.deploy2.requests.$DATABRICKS_BUNDLE_ENGINE.json + +trace $CLI grants get catalog catalog_grants_$UNIQUE_NAME | jq --sort-keys + +trace $CLI bundle destroy --auto-approve +trace print_requests.py //permissions > out.destroy.requests.$DATABRICKS_BUNDLE_ENGINE.json diff --git a/bundle/config/mutator/resourcemutator/capture_schema_dependency.go b/bundle/config/mutator/resourcemutator/capture_schema_dependency.go index bb5a863483..ef1581393a 100644 --- a/bundle/config/mutator/resourcemutator/capture_schema_dependency.go +++ b/bundle/config/mutator/resourcemutator/capture_schema_dependency.go @@ -16,8 +16,11 @@ type captureSchemaDependency struct{} // syntax allows TF to capture the deploy time dependency this DLT pipeline or UC Volume // has on the schema and deploy changes to the schema before deploying the pipeline or volume. // -// This mutator translates any implicit schema references in DLT pipelines or UC Volumes -// to the explicit syntax. +// Similarly, if a user defines a UC catalog in the bundle, they can refer to it in UC schemas +// using the `${resources.catalogs.<catalog_key>.name}` syntax. This captures the deploy time +// dependency the schema has on the catalog. +// +// This mutator translates any implicit catalog or schema references to the explicit syntax. func CaptureSchemaDependency() bundle.Mutator { return &captureSchemaDependency{} } @@ -30,6 +33,10 @@ func schemaNameRef(key string) string { return fmt.Sprintf("${resources.schemas.%s.name}", key) } +func catalogNameRef(key string) string { + return fmt.Sprintf("${resources.catalogs.%s.name}", key) +} + func findSchema(b *bundle.Bundle, catalogName, schemaName string) (string, *resources.Schema) { if catalogName == "" || schemaName == "" { return "", nil @@ -84,7 +91,35 @@ func resolvePipelineTarget(p *resources.Pipeline, b *bundle.Bundle) { p.Target = schemaNameRef(schemaK) } +func findCatalog(b *bundle.Bundle, catalogName string) (string, *resources.Catalog) { + if catalogName == "" { + return "", nil + } + + for k, c := range b.Config.Resources.Catalogs { + if c != nil && c.Name == catalogName { + return k, c + } + } + return "", nil +} + +func resolveSchema(s *resources.Schema, b *bundle.Bundle) { + if s == nil { + return + } + catalogK, catalog := findCatalog(b, s.CatalogName) + if catalog == nil { + return + } + + s.CatalogName = catalogNameRef(catalogK) +} + func (m *captureSchemaDependency) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { + for _, s := range b.Config.Resources.Schemas { + resolveSchema(s, b) + } for _, p := range b.Config.Resources.Pipelines { // "schema" and "target" have the same semantics in the DLT API but are mutually // exclusive i.e. only one can be set at a time. 
If schema is set, the pipeline diff --git a/bundle/config/mutator/resourcemutator/capture_schema_dependency_test.go b/bundle/config/mutator/resourcemutator/capture_schema_dependency_test.go index 9edc60ceed..8e37a9ba16 100644 --- a/bundle/config/mutator/resourcemutator/capture_schema_dependency_test.go +++ b/bundle/config/mutator/resourcemutator/capture_schema_dependency_test.go @@ -275,3 +275,64 @@ func TestCaptureSchemaDependencyForPipelinesWithSchema(t *testing.T) { assert.Empty(t, b.Config.Resources.Pipelines[k].Schema) } } + +func TestCaptureCatalogDependencyForSchema(t *testing.T) { + b := &bundle.Bundle{ + Config: config.Root{ + Resources: config.Resources{ + Catalogs: map[string]*resources.Catalog{ + "catalog1": { + CreateCatalog: catalog.CreateCatalog{ + Name: "catalog1", + }, + }, + "catalog2": { + CreateCatalog: catalog.CreateCatalog{ + Name: "catalog2", + }, + }, + "nilcatalog": nil, + "emptycatalog": {}, + }, + Schemas: map[string]*resources.Schema{ + "schema1": { + CreateSchema: catalog.CreateSchema{ + CatalogName: "catalog1", + Name: "schema1", + }, + }, + "schema2": { + CreateSchema: catalog.CreateSchema{ + CatalogName: "catalog2", + Name: "schema2", + }, + }, + "schema3": { + CreateSchema: catalog.CreateSchema{ + CatalogName: "catalogX", + Name: "schema3", + }, + }, + "schema4": { + CreateSchema: catalog.CreateSchema{ + CatalogName: "", + Name: "schema4", + }, + }, + "nilschema": nil, + "emptyschema": {}, + }, + }, + }, + } + + d := bundle.Apply(context.Background(), b, CaptureSchemaDependency()) + require.Nil(t, d) + + assert.Equal(t, "${resources.catalogs.catalog1.name}", b.Config.Resources.Schemas["schema1"].CatalogName) + assert.Equal(t, "${resources.catalogs.catalog2.name}", b.Config.Resources.Schemas["schema2"].CatalogName) + assert.Equal(t, "catalogX", b.Config.Resources.Schemas["schema3"].CatalogName) + assert.Equal(t, "", b.Config.Resources.Schemas["schema4"].CatalogName) + + assert.Nil(t, b.Config.Resources.Schemas["nilschema"]) +} diff --git a/bundle/config/mutator/validate_catalogs_only_with_direct.go b/bundle/config/mutator/validate_catalogs_only_with_direct.go deleted file mode 100644 index 9a54effc62..0000000000 --- a/bundle/config/mutator/validate_catalogs_only_with_direct.go +++ /dev/null @@ -1,39 +0,0 @@ -package mutator - -import ( - "context" - - "github.com/databricks/cli/bundle" - "github.com/databricks/cli/bundle/config/engine" - "github.com/databricks/cli/libs/diag" -) - -type validateCatalogsOnlyWithDirect struct { - engine engine.EngineType -} - -// ValidateCatalogsOnlyWithDirect returns a mutator that validates catalog resources -// are only used with direct deployment mode. -func ValidateCatalogsOnlyWithDirect(engine engine.EngineType) bundle.Mutator { - return &validateCatalogsOnlyWithDirect{engine: engine} -} - -func (m *validateCatalogsOnlyWithDirect) Name() string { - return "ValidateCatalogsOnlyWithDirect" -} - -func (m *validateCatalogsOnlyWithDirect) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { - // Catalogs are only supported in direct deployment mode - if !m.engine.IsDirect() && len(b.Config.Resources.Catalogs) > 0 { - return diag.Diagnostics{{ - Severity: diag.Error, - Summary: "Catalog resources are only supported with direct deployment mode", - Detail: "Catalog resources require direct deployment mode. 
" + - "Please set the DATABRICKS_BUNDLE_ENGINE environment variable to 'direct' to use catalog resources.\n" + - "Learn more at https://docs.databricks.com/dev-tools/bundles/deployment-modes.html", - Locations: b.Config.GetLocations("resources.catalogs"), - }} - } - - return nil -} diff --git a/bundle/config/mutator/validate_direct_only_resources.go b/bundle/config/mutator/validate_direct_only_resources.go new file mode 100644 index 0000000000..e39153d987 --- /dev/null +++ b/bundle/config/mutator/validate_direct_only_resources.go @@ -0,0 +1,72 @@ +package mutator + +import ( + "context" + "fmt" + + "github.com/databricks/cli/bundle" + "github.com/databricks/cli/bundle/config/engine" + "github.com/databricks/cli/libs/diag" +) + +type directOnlyResource struct { + resourceType string + pluralName string + singularName string + getResources func(*bundle.Bundle) map[string]any +} + +// Resources that are only supported in direct deployment mode +var directOnlyResources = []directOnlyResource{ + { + resourceType: "catalogs", + pluralName: "Catalog", + singularName: "catalog", + getResources: func(b *bundle.Bundle) map[string]any { + result := make(map[string]any) + for k, v := range b.Config.Resources.Catalogs { + result[k] = v + } + return result + }, + }, +} + +type validateDirectOnlyResources struct { + engine engine.EngineType +} + +// ValidateDirectOnlyResources returns a mutator that validates resources +// that are only supported in direct deployment mode. +func ValidateDirectOnlyResources(engine engine.EngineType) bundle.Mutator { + return &validateDirectOnlyResources{engine: engine} +} + +func (m *validateDirectOnlyResources) Name() string { + return "ValidateDirectOnlyResources" +} + +func (m *validateDirectOnlyResources) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { + if m.engine.IsDirect() { + return nil + } + + var diags diag.Diagnostics + + for _, resource := range directOnlyResources { + resourceMap := resource.getResources(b) + if len(resourceMap) > 0 { + diags = diags.Append(diag.Diagnostic{ + Severity: diag.Error, + Summary: resource.pluralName + " resources are only supported with direct deployment mode", + Detail: fmt.Sprintf("%s resources require direct deployment mode. "+ + "Please set the DATABRICKS_BUNDLE_ENGINE environment variable to 'direct' to use %s resources.\n"+ + "Learn more at https://docs.databricks.com/dev-tools/bundles/deployment-modes.html", + resource.pluralName, resource.singularName), + Locations: b.Config.GetLocations("resources." 
+ resource.resourceType), + }) + } + } + + return diags +} diff --git a/bundle/deploy/terraform/lifecycle_test.go b/bundle/deploy/terraform/lifecycle_test.go index 697a2270cd..028b328645 100644 --- a/bundle/deploy/terraform/lifecycle_test.go +++ b/bundle/deploy/terraform/lifecycle_test.go @@ -14,8 +14,19 @@ func TestConvertLifecycleForAllResources(t *testing.T) { supportedResources := config.SupportedResources() ctx := context.Background() + // Resources that are only supported in direct mode and should not be converted to Terraform + ignoredResources := []string{ + "catalogs", + } + for resourceType := range supportedResources { t.Run(resourceType, func(t *testing.T) { + for _, ignored := range ignoredResources { + if resourceType == ignored { + t.Skipf("%s is only supported in direct mode", resourceType) + } + } + vin := dyn.NewValue(map[string]dyn.Value{ "resources": dyn.NewValue(map[string]dyn.Value{ resourceType: dyn.NewValue(map[string]dyn.Value{ diff --git a/bundle/phases/plan.go b/bundle/phases/plan.go index e5e242138f..6e43f42291 100644 --- a/bundle/phases/plan.go +++ b/bundle/phases/plan.go @@ -24,7 +24,7 @@ func PreDeployChecks(ctx context.Context, b *bundle.Bundle, isPlan bool, engine resourcemutator.SecretScopeFixups(engine), deploy.StatePull(), mutator.ValidateGitDetails(), - mutator.ValidateCatalogsOnlyWithDirect(engine), + mutator.ValidateDirectOnlyResources(engine), statemgmt.CheckRunningResource(engine), ) } diff --git a/bundle/statemgmt/state_load_test.go b/bundle/statemgmt/state_load_test.go index 3c0b53b0c9..f8fc110812 100644 --- a/bundle/statemgmt/state_load_test.go +++ b/bundle/statemgmt/state_load_test.go @@ -32,6 +32,7 @@ func TestStateToBundleEmptyLocalResources(t *testing.T) { "resources.model_serving_endpoints.test_model_serving": {ID: "1"}, "resources.registered_models.test_registered_model": {ID: "1"}, "resources.quality_monitors.test_monitor": {ID: "1"}, + "resources.catalogs.test_catalog": {ID: "1"}, "resources.schemas.test_schema": {ID: "1"}, "resources.volumes.test_volume": {ID: "1"}, "resources.clusters.test_cluster": {ID: "1"}, @@ -68,6 +69,9 @@ func TestStateToBundleEmptyLocalResources(t *testing.T) { assert.Equal(t, "1", config.Resources.QualityMonitors["test_monitor"].ID) assert.Equal(t, resources.ModifiedStatusDeleted, config.Resources.QualityMonitors["test_monitor"].ModifiedStatus) + assert.Equal(t, "1", config.Resources.Catalogs["test_catalog"].ID) + assert.Equal(t, resources.ModifiedStatusDeleted, config.Resources.Catalogs["test_catalog"].ModifiedStatus) + assert.Equal(t, "1", config.Resources.Schemas["test_schema"].ID) assert.Equal(t, resources.ModifiedStatusDeleted, config.Resources.Schemas["test_schema"].ModifiedStatus) @@ -151,6 +155,13 @@ func TestStateToBundleEmptyRemoteResources(t *testing.T) { }, }, }, + Catalogs: map[string]*resources.Catalog{ + "test_catalog": { + CreateCatalog: catalog.CreateCatalog{ + Name: "test_catalog", + }, + }, + }, Schemas: map[string]*resources.Schema{ "test_schema": { CreateSchema: catalog.CreateSchema{ @@ -253,6 +264,9 @@ func TestStateToBundleEmptyRemoteResources(t *testing.T) { assert.Equal(t, "", config.Resources.QualityMonitors["test_monitor"].ID) assert.Equal(t, resources.ModifiedStatusCreated, config.Resources.QualityMonitors["test_monitor"].ModifiedStatus) + assert.Equal(t, "", config.Resources.Catalogs["test_catalog"].ID) + assert.Equal(t, resources.ModifiedStatusCreated, config.Resources.Catalogs["test_catalog"].ModifiedStatus) + assert.Equal(t, "", config.Resources.Schemas["test_schema"].ID) 
assert.Equal(t, resources.ModifiedStatusCreated, config.Resources.Schemas["test_schema"].ModifiedStatus) @@ -376,6 +390,18 @@ func TestStateToBundleModifiedResources(t *testing.T) { }, }, }, + Catalogs: map[string]*resources.Catalog{ + "test_catalog": { + CreateCatalog: catalog.CreateCatalog{ + Name: "test_catalog", + }, + }, + "test_catalog_new": { + CreateCatalog: catalog.CreateCatalog{ + Name: "test_catalog_new", + }, + }, + }, Schemas: map[string]*resources.Schema{ "test_schema": { CreateSchema: catalog.CreateSchema{ @@ -521,6 +547,8 @@ func TestStateToBundleModifiedResources(t *testing.T) { "resources.registered_models.test_registered_model_old": {ID: "2"}, "resources.quality_monitors.test_monitor": {ID: "test_monitor"}, "resources.quality_monitors.test_monitor_old": {ID: "test_monitor_old"}, + "resources.catalogs.test_catalog": {ID: "1"}, + "resources.catalogs.test_catalog_old": {ID: "2"}, "resources.schemas.test_schema": {ID: "1"}, "resources.schemas.test_schema_old": {ID: "2"}, "resources.volumes.test_volume": {ID: "1"}, @@ -592,6 +620,13 @@ func TestStateToBundleModifiedResources(t *testing.T) { assert.Equal(t, "", config.Resources.QualityMonitors["test_monitor_new"].ID) assert.Equal(t, resources.ModifiedStatusCreated, config.Resources.QualityMonitors["test_monitor_new"].ModifiedStatus) + assert.Equal(t, "1", config.Resources.Catalogs["test_catalog"].ID) + assert.Equal(t, "", config.Resources.Catalogs["test_catalog"].ModifiedStatus) + assert.Equal(t, "2", config.Resources.Catalogs["test_catalog_old"].ID) + assert.Equal(t, resources.ModifiedStatusDeleted, config.Resources.Catalogs["test_catalog_old"].ModifiedStatus) + assert.Equal(t, "", config.Resources.Catalogs["test_catalog_new"].ID) + assert.Equal(t, resources.ModifiedStatusCreated, config.Resources.Catalogs["test_catalog_new"].ModifiedStatus) + assert.Equal(t, "1", config.Resources.Schemas["test_schema"].ID) assert.Equal(t, "", config.Resources.Schemas["test_schema"].ModifiedStatus) assert.Equal(t, "2", config.Resources.Schemas["test_schema_old"].ID) From eb46e0b0ef1b5bc93bdab8a6963e349e74b88ece Mon Sep 17 00:00:00 2001 From: Andrew Nester Date: Wed, 21 Jan 2026 13:37:35 +0100 Subject: [PATCH 4/5] annotations + coedgen --- bundle/internal/schema/annotations.yml | 53 +++++++++++++ .../validation/generated/enum_fields.go | 2 + .../validation/generated/required_fields.go | 3 + bundle/schema/jsonschema.json | 37 ++++++++- python/codegen/codegen/packages.py | 1 + .../databricks/bundles/catalogs/__init__.py | 34 ++++++++ .../bundles/catalogs/_models/catalog.py | 79 +++++++++++++++++++ .../bundles/catalogs/_models/catalog_grant.py | 40 ++++++++++ .../_models/catalog_grant_privilege.py | 62 +++++++++++++++ .../bundles/catalogs/_models/lifecycle.py | 38 +++++++++ 10 files changed, 348 insertions(+), 1 deletion(-) create mode 100644 python/databricks/bundles/catalogs/__init__.py create mode 100644 python/databricks/bundles/catalogs/_models/catalog.py create mode 100644 python/databricks/bundles/catalogs/_models/catalog_grant.py create mode 100644 python/databricks/bundles/catalogs/_models/catalog_grant_privilege.py create mode 100644 python/databricks/bundles/catalogs/_models/lifecycle.py diff --git a/bundle/internal/schema/annotations.yml b/bundle/internal/schema/annotations.yml index ec90e99972..478aa11c69 100644 --- a/bundle/internal/schema/annotations.yml +++ b/bundle/internal/schema/annotations.yml @@ -572,6 +572,59 @@ github.com/databricks/cli/bundle/config/resources.CatalogGrant: "privileges": "description": |- PLACEHOLDER 
+github.com/databricks/cli/bundle/config/resources.CatalogGrantPrivilege: + "_": + "enum": + - |- + ALL_PRIVILEGES + - |- + APPLY_TAG + - |- + CREATE_CONNECTION + - |- + CREATE_EXTERNAL_LOCATION + - |- + CREATE_EXTERNAL_TABLE + - |- + CREATE_EXTERNAL_VOLUME + - |- + CREATE_FOREIGN_CATALOG + - |- + CREATE_FUNCTION + - |- + CREATE_MANAGED_STORAGE + - |- + CREATE_MATERIALIZED_VIEW + - |- + CREATE_MODEL + - |- + CREATE_SCHEMA + - |- + CREATE_STORAGE_CREDENTIAL + - |- + CREATE_TABLE + - |- + CREATE_VOLUME + - |- + EXECUTE + - |- + MANAGE + - |- + MODIFY + - |- + READ_VOLUME + - |- + REFRESH + - |- + SELECT + - |- + USE_CATALOG + - |- + USE_CONNECTION + - |- + USE_SCHEMA + - |- + WRITE_VOLUME github.com/databricks/cli/bundle/config/resources.ClusterPermission: "group_name": "description": |- diff --git a/bundle/internal/validation/generated/enum_fields.go b/bundle/internal/validation/generated/enum_fields.go index ecb042e270..210c17d7fd 100644 --- a/bundle/internal/validation/generated/enum_fields.go +++ b/bundle/internal/validation/generated/enum_fields.go @@ -33,6 +33,8 @@ var EnumFields = map[string][]string{ "resources.apps.*.resources[*].uc_securable.permission": {"EXECUTE", "READ_VOLUME", "SELECT", "USE_CONNECTION", "WRITE_VOLUME"}, "resources.apps.*.resources[*].uc_securable.securable_type": {"CONNECTION", "FUNCTION", "TABLE", "VOLUME"}, + "resources.catalogs.*.grants[*].privileges[*]": {"ALL_PRIVILEGES", "APPLY_TAG", "CREATE_CONNECTION", "CREATE_EXTERNAL_LOCATION", "CREATE_EXTERNAL_TABLE", "CREATE_EXTERNAL_VOLUME", "CREATE_FOREIGN_CATALOG", "CREATE_FUNCTION", "CREATE_MANAGED_STORAGE", "CREATE_MATERIALIZED_VIEW", "CREATE_MODEL", "CREATE_SCHEMA", "CREATE_STORAGE_CREDENTIAL", "CREATE_TABLE", "CREATE_VOLUME", "EXECUTE", "MANAGE", "MODIFY", "READ_VOLUME", "REFRESH", "SELECT", "USE_CATALOG", "USE_CONNECTION", "USE_SCHEMA", "WRITE_VOLUME"}, + "resources.clusters.*.aws_attributes.availability": {"ON_DEMAND", "SPOT", "SPOT_WITH_FALLBACK"}, "resources.clusters.*.aws_attributes.ebs_volume_type": {"GENERAL_PURPOSE_SSD", "THROUGHPUT_OPTIMIZED_HDD"}, "resources.clusters.*.azure_attributes.availability": {"ON_DEMAND_AZURE", "SPOT_AZURE", "SPOT_WITH_FALLBACK_AZURE"}, diff --git a/bundle/internal/validation/generated/required_fields.go b/bundle/internal/validation/generated/required_fields.go index df5397fc1f..86575e7529 100644 --- a/bundle/internal/validation/generated/required_fields.go +++ b/bundle/internal/validation/generated/required_fields.go @@ -34,6 +34,9 @@ var RequiredFields = map[string][]string{ "resources.apps.*.resources[*].sql_warehouse": {"id", "permission"}, "resources.apps.*.resources[*].uc_securable": {"permission", "securable_full_name", "securable_type"}, + "resources.catalogs.*": {"name"}, + "resources.catalogs.*.grants[*]": {"privileges", "principal"}, + "resources.clusters.*.cluster_log_conf.dbfs": {"destination"}, "resources.clusters.*.cluster_log_conf.s3": {"destination"}, "resources.clusters.*.cluster_log_conf.volumes": {"destination"}, diff --git a/bundle/schema/jsonschema.json b/bundle/schema/jsonschema.json index 04a0464d5f..39622516b8 100644 --- a/bundle/schema/jsonschema.json +++ b/bundle/schema/jsonschema.json @@ -390,7 +390,42 @@ ] }, "resources.CatalogGrantPrivilege": { - "type": "string" + "oneOf": [ + { + "type": "string", + "enum": [ + "ALL_PRIVILEGES", + "APPLY_TAG", + "CREATE_CONNECTION", + "CREATE_EXTERNAL_LOCATION", + "CREATE_EXTERNAL_TABLE", + "CREATE_EXTERNAL_VOLUME", + "CREATE_FOREIGN_CATALOG", + "CREATE_FUNCTION", + "CREATE_MANAGED_STORAGE", + 
"CREATE_MATERIALIZED_VIEW", + "CREATE_MODEL", + "CREATE_SCHEMA", + "CREATE_STORAGE_CREDENTIAL", + "CREATE_TABLE", + "CREATE_VOLUME", + "EXECUTE", + "MANAGE", + "MODIFY", + "READ_VOLUME", + "REFRESH", + "SELECT", + "USE_CATALOG", + "USE_CONNECTION", + "USE_SCHEMA", + "WRITE_VOLUME" + ] + }, + { + "type": "string", + "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" + } + ] }, "resources.Cluster": { "oneOf": [ diff --git a/python/codegen/codegen/packages.py b/python/codegen/codegen/packages.py index 48fe8270ab..9774911feb 100644 --- a/python/codegen/codegen/packages.py +++ b/python/codegen/codegen/packages.py @@ -5,6 +5,7 @@ RESOURCE_NAMESPACE = { "resources.Job": "jobs", "resources.Pipeline": "pipelines", + "resources.Catalog": "catalogs", "resources.Schema": "schemas", "resources.Volume": "volumes", } diff --git a/python/databricks/bundles/catalogs/__init__.py b/python/databricks/bundles/catalogs/__init__.py new file mode 100644 index 0000000000..b0b79f2dd5 --- /dev/null +++ b/python/databricks/bundles/catalogs/__init__.py @@ -0,0 +1,34 @@ +__all__ = [ + "Catalog", + "CatalogDict", + "CatalogGrant", + "CatalogGrantDict", + "CatalogGrantParam", + "CatalogGrantPrivilege", + "CatalogGrantPrivilegeParam", + "CatalogParam", + "Lifecycle", + "LifecycleDict", + "LifecycleParam", +] + + +from databricks.bundles.catalogs._models.catalog import ( + Catalog, + CatalogDict, + CatalogParam, +) +from databricks.bundles.catalogs._models.catalog_grant import ( + CatalogGrant, + CatalogGrantDict, + CatalogGrantParam, +) +from databricks.bundles.catalogs._models.catalog_grant_privilege import ( + CatalogGrantPrivilege, + CatalogGrantPrivilegeParam, +) +from databricks.bundles.catalogs._models.lifecycle import ( + Lifecycle, + LifecycleDict, + LifecycleParam, +) diff --git a/python/databricks/bundles/catalogs/_models/catalog.py b/python/databricks/bundles/catalogs/_models/catalog.py new file mode 100644 index 0000000000..9d59a29265 --- /dev/null +++ b/python/databricks/bundles/catalogs/_models/catalog.py @@ -0,0 +1,79 @@ +from dataclasses import dataclass, field +from typing import TYPE_CHECKING, TypedDict + +from databricks.bundles.catalogs._models.catalog_grant import ( + CatalogGrant, + CatalogGrantParam, +) +from databricks.bundles.catalogs._models.lifecycle import Lifecycle, LifecycleParam +from databricks.bundles.core._resource import Resource +from databricks.bundles.core._transform import _transform +from databricks.bundles.core._transform_to_json import _transform_to_json_value +from databricks.bundles.core._variable import ( + VariableOr, + VariableOrDict, + VariableOrList, + VariableOrOptional, +) + +if TYPE_CHECKING: + from typing_extensions import Self + + +@dataclass(kw_only=True) +class Catalog(Resource): + """""" + + name: VariableOr[str] + + comment: VariableOrOptional[str] = None + + connection_name: VariableOrOptional[str] = None + + grants: VariableOrList[CatalogGrant] = field(default_factory=list) + + lifecycle: VariableOrOptional[Lifecycle] = None + + options: VariableOrDict[str] = field(default_factory=dict) + + properties: VariableOrDict[str] = field(default_factory=dict) + + provider_name: VariableOrOptional[str] = None + + share_name: VariableOrOptional[str] = None + + storage_root: VariableOrOptional[str] = None + + @classmethod + def from_dict(cls, value: "CatalogDict") -> "Self": + return _transform(cls, value) + + def as_dict(self) -> "CatalogDict": + return _transform_to_json_value(self) # type:ignore + + +class CatalogDict(TypedDict, total=False): + 
"""""" + + name: VariableOr[str] + + comment: VariableOrOptional[str] + + connection_name: VariableOrOptional[str] + + grants: VariableOrList[CatalogGrantParam] + + lifecycle: VariableOrOptional[LifecycleParam] + + options: VariableOrDict[str] + + properties: VariableOrDict[str] + + provider_name: VariableOrOptional[str] + + share_name: VariableOrOptional[str] + + storage_root: VariableOrOptional[str] + + +CatalogParam = CatalogDict | Catalog diff --git a/python/databricks/bundles/catalogs/_models/catalog_grant.py b/python/databricks/bundles/catalogs/_models/catalog_grant.py new file mode 100644 index 0000000000..6c0ddebb5e --- /dev/null +++ b/python/databricks/bundles/catalogs/_models/catalog_grant.py @@ -0,0 +1,40 @@ +from dataclasses import dataclass, field +from typing import TYPE_CHECKING, TypedDict + +from databricks.bundles.catalogs._models.catalog_grant_privilege import ( + CatalogGrantPrivilege, + CatalogGrantPrivilegeParam, +) +from databricks.bundles.core._transform import _transform +from databricks.bundles.core._transform_to_json import _transform_to_json_value +from databricks.bundles.core._variable import VariableOr, VariableOrList + +if TYPE_CHECKING: + from typing_extensions import Self + + +@dataclass(kw_only=True) +class CatalogGrant: + """""" + + principal: VariableOr[str] + + privileges: VariableOrList[CatalogGrantPrivilege] = field(default_factory=list) + + @classmethod + def from_dict(cls, value: "CatalogGrantDict") -> "Self": + return _transform(cls, value) + + def as_dict(self) -> "CatalogGrantDict": + return _transform_to_json_value(self) # type:ignore + + +class CatalogGrantDict(TypedDict, total=False): + """""" + + principal: VariableOr[str] + + privileges: VariableOrList[CatalogGrantPrivilegeParam] + + +CatalogGrantParam = CatalogGrantDict | CatalogGrant diff --git a/python/databricks/bundles/catalogs/_models/catalog_grant_privilege.py b/python/databricks/bundles/catalogs/_models/catalog_grant_privilege.py new file mode 100644 index 0000000000..97c433876f --- /dev/null +++ b/python/databricks/bundles/catalogs/_models/catalog_grant_privilege.py @@ -0,0 +1,62 @@ +from enum import Enum +from typing import Literal + + +class CatalogGrantPrivilege(Enum): + ALL_PRIVILEGES = "ALL_PRIVILEGES" + APPLY_TAG = "APPLY_TAG" + CREATE_CONNECTION = "CREATE_CONNECTION" + CREATE_EXTERNAL_LOCATION = "CREATE_EXTERNAL_LOCATION" + CREATE_EXTERNAL_TABLE = "CREATE_EXTERNAL_TABLE" + CREATE_EXTERNAL_VOLUME = "CREATE_EXTERNAL_VOLUME" + CREATE_FOREIGN_CATALOG = "CREATE_FOREIGN_CATALOG" + CREATE_FUNCTION = "CREATE_FUNCTION" + CREATE_MANAGED_STORAGE = "CREATE_MANAGED_STORAGE" + CREATE_MATERIALIZED_VIEW = "CREATE_MATERIALIZED_VIEW" + CREATE_MODEL = "CREATE_MODEL" + CREATE_SCHEMA = "CREATE_SCHEMA" + CREATE_STORAGE_CREDENTIAL = "CREATE_STORAGE_CREDENTIAL" + CREATE_TABLE = "CREATE_TABLE" + CREATE_VOLUME = "CREATE_VOLUME" + EXECUTE = "EXECUTE" + MANAGE = "MANAGE" + MODIFY = "MODIFY" + READ_VOLUME = "READ_VOLUME" + REFRESH = "REFRESH" + SELECT = "SELECT" + USE_CATALOG = "USE_CATALOG" + USE_CONNECTION = "USE_CONNECTION" + USE_SCHEMA = "USE_SCHEMA" + WRITE_VOLUME = "WRITE_VOLUME" + + +CatalogGrantPrivilegeParam = ( + Literal[ + "ALL_PRIVILEGES", + "APPLY_TAG", + "CREATE_CONNECTION", + "CREATE_EXTERNAL_LOCATION", + "CREATE_EXTERNAL_TABLE", + "CREATE_EXTERNAL_VOLUME", + "CREATE_FOREIGN_CATALOG", + "CREATE_FUNCTION", + "CREATE_MANAGED_STORAGE", + "CREATE_MATERIALIZED_VIEW", + "CREATE_MODEL", + "CREATE_SCHEMA", + "CREATE_STORAGE_CREDENTIAL", + "CREATE_TABLE", + "CREATE_VOLUME", + "EXECUTE", + "MANAGE", 
+ "MODIFY", + "READ_VOLUME", + "REFRESH", + "SELECT", + "USE_CATALOG", + "USE_CONNECTION", + "USE_SCHEMA", + "WRITE_VOLUME", + ] + | CatalogGrantPrivilege +) diff --git a/python/databricks/bundles/catalogs/_models/lifecycle.py b/python/databricks/bundles/catalogs/_models/lifecycle.py new file mode 100644 index 0000000000..c934967f37 --- /dev/null +++ b/python/databricks/bundles/catalogs/_models/lifecycle.py @@ -0,0 +1,38 @@ +from dataclasses import dataclass +from typing import TYPE_CHECKING, TypedDict + +from databricks.bundles.core._transform import _transform +from databricks.bundles.core._transform_to_json import _transform_to_json_value +from databricks.bundles.core._variable import VariableOrOptional + +if TYPE_CHECKING: + from typing_extensions import Self + + +@dataclass(kw_only=True) +class Lifecycle: + """""" + + prevent_destroy: VariableOrOptional[bool] = None + """ + Lifecycle setting to prevent the resource from being destroyed. + """ + + @classmethod + def from_dict(cls, value: "LifecycleDict") -> "Self": + return _transform(cls, value) + + def as_dict(self) -> "LifecycleDict": + return _transform_to_json_value(self) # type:ignore + + +class LifecycleDict(TypedDict, total=False): + """""" + + prevent_destroy: VariableOrOptional[bool] + """ + Lifecycle setting to prevent the resource from being destroyed. + """ + + +LifecycleParam = LifecycleDict | Lifecycle From ce3a055cfc5aa95f21a134273414f0f26d8862d3 Mon Sep 17 00:00:00 2001 From: Andrew Nester Date: Wed, 21 Jan 2026 14:43:35 +0100 Subject: [PATCH 5/5] fixed test output --- acceptance/bundle/refschema/out.fields.txt | 46 +++++++++++++ .../resources/grants/catalogs/databricks.yml | 13 ++++ .../catalogs/out.deploy1.requests.direct.json | 18 +++++ .../catalogs/out.deploy2.requests.direct.json | 18 +++++ .../catalogs/out.destroy.requests.direct.json | 0 .../grants/catalogs/out.plan1.direct.json | 42 +++++++++++ .../grants/catalogs/out.plan2.direct.json | 69 +++++++++++++++++++ .../resources/grants/catalogs/out.test.toml | 6 ++ .../resources/grants/catalogs/output.txt | 17 +++-- .../resources/grants/catalogs/test.toml | 2 + bundle/direct/dresources/grants.go | 1 + 11 files changed, 228 insertions(+), 4 deletions(-) create mode 100644 acceptance/bundle/resources/grants/catalogs/databricks.yml create mode 100644 acceptance/bundle/resources/grants/catalogs/out.deploy1.requests.direct.json create mode 100644 acceptance/bundle/resources/grants/catalogs/out.deploy2.requests.direct.json create mode 100644 acceptance/bundle/resources/grants/catalogs/out.destroy.requests.direct.json create mode 100644 acceptance/bundle/resources/grants/catalogs/out.plan1.direct.json create mode 100644 acceptance/bundle/resources/grants/catalogs/out.plan2.direct.json create mode 100644 acceptance/bundle/resources/grants/catalogs/out.test.toml create mode 100644 acceptance/bundle/resources/grants/catalogs/test.toml diff --git a/acceptance/bundle/refschema/out.fields.txt b/acceptance/bundle/refschema/out.fields.txt index 52ca8c3802..7638c964a0 100644 --- a/acceptance/bundle/refschema/out.fields.txt +++ b/acceptance/bundle/refschema/out.fields.txt @@ -208,6 +208,52 @@ resources.apps.*.permissions.permissions[*].group_name string ALL resources.apps.*.permissions.permissions[*].permission_level iam.PermissionLevel ALL resources.apps.*.permissions.permissions[*].service_principal_name string ALL resources.apps.*.permissions.permissions[*].user_name string ALL +resources.catalogs.*.browse_only bool REMOTE +resources.catalogs.*.catalog_type catalog.CatalogType 
REMOTE +resources.catalogs.*.comment string ALL +resources.catalogs.*.connection_name string ALL +resources.catalogs.*.created_at int64 REMOTE +resources.catalogs.*.created_by string REMOTE +resources.catalogs.*.effective_predictive_optimization_flag *catalog.EffectivePredictiveOptimizationFlag REMOTE +resources.catalogs.*.effective_predictive_optimization_flag.inherited_from_name string REMOTE +resources.catalogs.*.effective_predictive_optimization_flag.inherited_from_type catalog.EffectivePredictiveOptimizationFlagInheritedFromType REMOTE +resources.catalogs.*.effective_predictive_optimization_flag.value catalog.EnablePredictiveOptimization REMOTE +resources.catalogs.*.enable_predictive_optimization catalog.EnablePredictiveOptimization REMOTE +resources.catalogs.*.full_name string REMOTE +resources.catalogs.*.grants []resources.CatalogGrant INPUT +resources.catalogs.*.grants[*] resources.CatalogGrant INPUT +resources.catalogs.*.grants[*].principal string INPUT +resources.catalogs.*.grants[*].privileges []resources.CatalogGrantPrivilege INPUT +resources.catalogs.*.grants[*].privileges[*] resources.CatalogGrantPrivilege INPUT +resources.catalogs.*.id string INPUT +resources.catalogs.*.isolation_mode catalog.CatalogIsolationMode REMOTE +resources.catalogs.*.lifecycle resources.Lifecycle INPUT +resources.catalogs.*.lifecycle.prevent_destroy bool INPUT +resources.catalogs.*.metastore_id string REMOTE +resources.catalogs.*.modified_status string INPUT +resources.catalogs.*.name string ALL +resources.catalogs.*.options map[string]string ALL +resources.catalogs.*.options.* string ALL +resources.catalogs.*.owner string REMOTE +resources.catalogs.*.properties map[string]string ALL +resources.catalogs.*.properties.* string ALL +resources.catalogs.*.provider_name string ALL +resources.catalogs.*.provisioning_info *catalog.ProvisioningInfo REMOTE +resources.catalogs.*.provisioning_info.state catalog.ProvisioningInfoState REMOTE +resources.catalogs.*.securable_type catalog.SecurableType REMOTE +resources.catalogs.*.share_name string ALL +resources.catalogs.*.storage_location string REMOTE +resources.catalogs.*.storage_root string ALL +resources.catalogs.*.updated_at int64 REMOTE +resources.catalogs.*.updated_by string REMOTE +resources.catalogs.*.url string INPUT +resources.catalogs.*.grants.full_name string ALL +resources.catalogs.*.grants.grants []dresources.GrantAssignment ALL +resources.catalogs.*.grants.grants[*] dresources.GrantAssignment ALL +resources.catalogs.*.grants.grants[*].principal string ALL +resources.catalogs.*.grants.grants[*].privileges []catalog.Privilege ALL +resources.catalogs.*.grants.grants[*].privileges[*] catalog.Privilege ALL +resources.catalogs.*.grants.securable_type string ALL resources.clusters.*.apply_policy_default_values bool INPUT STATE resources.clusters.*.autoscale *compute.AutoScale ALL resources.clusters.*.autoscale.max_workers int ALL diff --git a/acceptance/bundle/resources/grants/catalogs/databricks.yml b/acceptance/bundle/resources/grants/catalogs/databricks.yml new file mode 100644 index 0000000000..f90139a033 --- /dev/null +++ b/acceptance/bundle/resources/grants/catalogs/databricks.yml @@ -0,0 +1,13 @@ +bundle: + name: catalog-grants-test_cli_12345 + +resources: + catalogs: + grants_catalog: + name: catalog_grants_test_cli_12345 + comment: "Test catalog for grants" + grants: + - principal: deco-test-user@databricks.com + privileges: + - USE_CATALOG + - CREATE_SCHEMA diff --git a/acceptance/bundle/resources/grants/catalogs/out.deploy1.requests.direct.json 
b/acceptance/bundle/resources/grants/catalogs/out.deploy1.requests.direct.json new file mode 100644 index 0000000000..16f913b8a7 --- /dev/null +++ b/acceptance/bundle/resources/grants/catalogs/out.deploy1.requests.direct.json @@ -0,0 +1,18 @@ +{ + "method": "PATCH", + "path": "/api/2.1/unity-catalog/permissions/catalog/catalog_grants_[UNIQUE_NAME]", + "body": { + "changes": [ + { + "add": [ + "CREATE_SCHEMA", + "USE_CATALOG" + ], + "principal": "deco-test-user@databricks.com", + "remove": [ + "ALL_PRIVILEGES" + ] + } + ] + } +} diff --git a/acceptance/bundle/resources/grants/catalogs/out.deploy2.requests.direct.json b/acceptance/bundle/resources/grants/catalogs/out.deploy2.requests.direct.json new file mode 100644 index 0000000000..47e1335fea --- /dev/null +++ b/acceptance/bundle/resources/grants/catalogs/out.deploy2.requests.direct.json @@ -0,0 +1,18 @@ +{ + "method": "PATCH", + "path": "/api/2.1/unity-catalog/permissions/catalog/catalog_grants_[UNIQUE_NAME]", + "body": { + "changes": [ + { + "add": [ + "USE_CATALOG", + "USE_SCHEMA" + ], + "principal": "deco-test-user@databricks.com", + "remove": [ + "ALL_PRIVILEGES" + ] + } + ] + } +} diff --git a/acceptance/bundle/resources/grants/catalogs/out.destroy.requests.direct.json b/acceptance/bundle/resources/grants/catalogs/out.destroy.requests.direct.json new file mode 100644 index 0000000000..e69de29bb2 diff --git a/acceptance/bundle/resources/grants/catalogs/out.plan1.direct.json b/acceptance/bundle/resources/grants/catalogs/out.plan1.direct.json new file mode 100644 index 0000000000..1a225b9fb0 --- /dev/null +++ b/acceptance/bundle/resources/grants/catalogs/out.plan1.direct.json @@ -0,0 +1,42 @@ +{ + "plan_version": 2, + "cli_version": "[DEV_VERSION]", + "plan": { + "resources.catalogs.grants_catalog": { + "action": "create", + "new_state": { + "value": { + "comment": "Test catalog for grants", + "name": "catalog_grants_[UNIQUE_NAME]" + } + } + }, + "resources.catalogs.grants_catalog.grants": { + "depends_on": [ + { + "node": "resources.catalogs.grants_catalog", + "label": "${resources.catalogs.grants_catalog.id}" + } + ], + "action": "create", + "new_state": { + "value": { + "securable_type": "catalog", + "full_name": "", + "grants": [ + { + "principal": "deco-test-user@databricks.com", + "privileges": [ + "CREATE_SCHEMA", + "USE_CATALOG" + ] + } + ] + }, + "vars": { + "full_name": "${resources.catalogs.grants_catalog.id}" + } + } + } + } +} diff --git a/acceptance/bundle/resources/grants/catalogs/out.plan2.direct.json b/acceptance/bundle/resources/grants/catalogs/out.plan2.direct.json new file mode 100644 index 0000000000..cb899ab3cf --- /dev/null +++ b/acceptance/bundle/resources/grants/catalogs/out.plan2.direct.json @@ -0,0 +1,69 @@ +{ + "plan_version": 2, + "cli_version": "[DEV_VERSION]", + "lineage": "[UUID]", + "serial": 1, + "plan": { + "resources.catalogs.grants_catalog": { + "action": "skip", + "remote_state": { + "catalog_type": "MANAGED_CATALOG", + "comment": "Test catalog for grants", + "full_name": "catalog_grants_[UNIQUE_NAME]", + "name": "catalog_grants_[UNIQUE_NAME]", + "owner": "[USERNAME]" + } + }, + "resources.catalogs.grants_catalog.grants": { + "depends_on": [ + { + "node": "resources.catalogs.grants_catalog", + "label": "${resources.catalogs.grants_catalog.id}" + } + ], + "action": "update", + "new_state": { + "value": { + "securable_type": "catalog", + "full_name": "catalog_grants_[UNIQUE_NAME]", + "grants": [ + { + "principal": "deco-test-user@databricks.com", + "privileges": [ + "USE_CATALOG", + "USE_SCHEMA" + ] + 
} + ] + } + }, + "remote_state": { + "securable_type": "catalog", + "full_name": "catalog_grants_[UNIQUE_NAME]", + "grants": [ + { + "principal": "deco-test-user@databricks.com", + "privileges": [ + "CREATE_SCHEMA", + "USE_CATALOG" + ] + } + ] + }, + "changes": { + "grants[0].privileges[0]": { + "action": "update", + "old": "CREATE_SCHEMA", + "new": "USE_CATALOG", + "remote": "CREATE_SCHEMA" + }, + "grants[0].privileges[1]": { + "action": "update", + "old": "USE_CATALOG", + "new": "USE_SCHEMA", + "remote": "USE_CATALOG" + } + } + } + } +} diff --git a/acceptance/bundle/resources/grants/catalogs/out.test.toml b/acceptance/bundle/resources/grants/catalogs/out.test.toml new file mode 100644 index 0000000000..f1d40380d0 --- /dev/null +++ b/acceptance/bundle/resources/grants/catalogs/out.test.toml @@ -0,0 +1,6 @@ +Local = true +Cloud = true +RequiresUnityCatalog = true + +[EnvMatrix] + DATABRICKS_BUNDLE_ENGINE = ["direct"] diff --git a/acceptance/bundle/resources/grants/catalogs/output.txt b/acceptance/bundle/resources/grants/catalogs/output.txt index acd8f08c79..067bd10a70 100644 --- a/acceptance/bundle/resources/grants/catalogs/output.txt +++ b/acceptance/bundle/resources/grants/catalogs/output.txt @@ -36,6 +36,18 @@ Plan: 0 to add, 0 to change, 0 to delete, 2 unchanged >>> [CLI] bundle plan -o json >>> print_requests.py --get //permissions +{ + "method": "GET", + "path": "/api/2.1/unity-catalog/permissions/catalog/catalog_grants_[UNIQUE_NAME]" +} +{ + "method": "GET", + "path": "/api/2.1/unity-catalog/permissions/catalog/catalog_grants_[UNIQUE_NAME]" +} +{ + "method": "GET", + "path": "/api/2.1/unity-catalog/permissions/catalog/catalog_grants_[UNIQUE_NAME]" +} >>> [CLI] bundle deploy Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/catalog-grants-[UNIQUE_NAME]/default/files... @@ -51,7 +63,7 @@ Deployment complete! { "principal": "deco-test-user@databricks.com", "privileges": [ - "CREATE_SCHEMA", + "USE_CATALOG", "USE_SCHEMA" ] } @@ -62,9 +74,6 @@ Deployment complete! The following resources will be deleted: delete resources.catalogs.grants_catalog -This action will result in the deletion of the following UC catalogs. Any underlying data may be lost: - delete resources.catalogs.grants_catalog - All files and directories at the following location will be deleted: /Workspace/Users/[USERNAME]/.bundle/catalog-grants-[UNIQUE_NAME]/default Deleting files... diff --git a/acceptance/bundle/resources/grants/catalogs/test.toml b/acceptance/bundle/resources/grants/catalogs/test.toml new file mode 100644 index 0000000000..f32a753074 --- /dev/null +++ b/acceptance/bundle/resources/grants/catalogs/test.toml @@ -0,0 +1,2 @@ +[EnvMatrix] + DATABRICKS_BUNDLE_ENGINE = ["direct"] diff --git a/bundle/direct/dresources/grants.go b/bundle/direct/dresources/grants.go index 1f6b8678e5..dcdc882e0c 100644 --- a/bundle/direct/dresources/grants.go +++ b/bundle/direct/dresources/grants.go @@ -14,6 +14,7 @@ import ( ) var grantResourceToSecurableType = map[string]string{ + "catalogs": "catalog", "schemas": "schema", "volumes": "volume", "registered_models": "function",