diff --git a/provider/cmd/pulumi-resource-databricks/bridge-metadata.json b/provider/cmd/pulumi-resource-databricks/bridge-metadata.json index 41dad9c1..092c8100 100644 --- a/provider/cmd/pulumi-resource-databricks/bridge-metadata.json +++ b/provider/cmd/pulumi-resource-databricks/bridge-metadata.json @@ -1015,6 +1015,9 @@ } } } + }, + "tags": { + "maxItemsOne": false } } }, @@ -3358,6 +3361,9 @@ "clientSecret": "client_secret", "directoryId": "directory_id" }, + "databricks:index/MetastoreDataAccessDatabricksGcpServiceAccount:MetastoreDataAccessDatabricksGcpServiceAccount": { + "credentialId": "credential_id" + }, "databricks:index/MetastoreDataAccessGcpServiceAccountKey:MetastoreDataAccessGcpServiceAccountKey": { "privateKey": "private_key", "privateKeyId": "private_key_id" @@ -3621,6 +3627,9 @@ "clientSecret": "client_secret", "directoryId": "directory_id" }, + "databricks:index/StorageCredentialDatabricksGcpServiceAccount:StorageCredentialDatabricksGcpServiceAccount": { + "credentialId": "credential_id" + }, "databricks:index/StorageCredentialGcpServiceAccountKey:StorageCredentialGcpServiceAccountKey": { "privateKey": "private_key", "privateKeyId": "private_key_id" @@ -4645,11 +4654,12 @@ "awsIamRole": "aws_iam_role", "azureManagedIdentity": "azure_managed_identity", "azureServicePrincipal": "azure_service_principal", - "configurationType": "configuration_type", "databricksGcpServiceAccount": "databricks_gcp_service_account", + "forceDestroy": "force_destroy", "gcpServiceAccountKey": "gcp_service_account_key", "isDefault": "is_default", - "metastoreId": "metastore_id" + "metastoreId": "metastore_id", + "readOnly": "read_only" }, "databricks:index/metastoreProvider:MetastoreProvider": { "authenticationType": "authentication_type", @@ -4665,6 +4675,7 @@ "databricks:index/mlflowModel:MlflowModel": { "creationTimestamp": "creation_timestamp", "lastUpdatedTimestamp": "last_updated_timestamp", + "registeredModelId": "registered_model_id", "userId": "user_id" }, "databricks:index/mlflowWebhook:MlflowWebhook": { diff --git a/provider/cmd/pulumi-resource-databricks/schema.json b/provider/cmd/pulumi-resource-databricks/schema.json index c0869284..cfc32608 100644 --- a/provider/cmd/pulumi-resource-databricks/schema.json +++ b/provider/cmd/pulumi-resource-databricks/schema.json @@ -3450,19 +3450,15 @@ }, "databricks:index/MetastoreDataAccessDatabricksGcpServiceAccount:MetastoreDataAccessDatabricksGcpServiceAccount": { "properties": { + "credentialId": { + "type": "string" + }, "email": { "type": "string", "description": "The email of the GCP service account created, to be granted access to relevant buckets.\n\n`azure_service_principal` optional configuration block for credential details for Azure (Legacy):\n" } }, - "type": "object", - "language": { - "nodejs": { - "requiredOutputs": [ - "email" - ] - } - } + "type": "object" }, "databricks:index/MetastoreDataAccessGcpServiceAccountKey:MetastoreDataAccessGcpServiceAccountKey": { "properties": { @@ -3651,6 +3647,20 @@ "trafficPercentage" ] }, + "databricks:index/ModelServingTag:ModelServingTag": { + "properties": { + "key": { + "type": "string" + }, + "value": { + "type": "string" + } + }, + "type": "object", + "required": [ + "key" + ] + }, "databricks:index/MountAbfs:MountAbfs": { "properties": { "clientId": { @@ -5297,7 +5307,7 @@ }, "managedIdentityId": { "type": "string", - "description": "The Resource ID of the Azure User Assigned Managed Identity associated with Azure Databricks Access Connector, of the form 
`/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg-name/providers/Microsoft.ManagedIdentity/userAssignedIdentities/user-managed-identity-name`.\n\n`azure_service_principal` optional configuration block to use service principal as credential details for Azure:\n" + "description": "The Resource ID of the Azure User Assigned Managed Identity associated with Azure Databricks Access Connector, of the form `/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg-name/providers/Microsoft.ManagedIdentity/userAssignedIdentities/user-managed-identity-name`.\n\n`databricks_gcp_service_account` optional configuration block for creating a Databricks-managed GCP Service Account:\n" } }, "type": "object", @@ -5321,7 +5331,7 @@ }, "clientSecret": { "type": "string", - "description": "The client secret generated for the above app ID in AAD. **This field is redacted on output**\n\n`databricks_gcp_service_account` optional configuration block for creating a Databricks-managed GCP Service Account:\n" + "description": "The client secret generated for the above app ID in AAD. **This field is redacted on output**\n" }, "directoryId": { "type": "string", @@ -5337,19 +5347,15 @@ }, "databricks:index/StorageCredentialDatabricksGcpServiceAccount:StorageCredentialDatabricksGcpServiceAccount": { "properties": { + "credentialId": { + "type": "string" + }, "email": { "type": "string", "description": "The email of the GCP service account created, to be granted access to relevant buckets.\n" } }, - "type": "object", - "language": { - "nodejs": { - "requiredOutputs": [ - "email" - ] - } - } + "type": "object" }, "databricks:index/StorageCredentialGcpServiceAccountKey:StorageCredentialGcpServiceAccountKey": { "properties": { @@ -9568,7 +9574,8 @@ "type": "array", "items": { "$ref": "#/types/databricks:index/ClusterClusterMountInfo:ClusterClusterMountInfo" - } + }, + "deprecationMessage": "cluster_mount_info block is deprecated due the Clusters API changes." }, "clusterName": { "type": "string", @@ -9733,7 +9740,8 @@ "type": "array", "items": { "$ref": "#/types/databricks:index/ClusterClusterMountInfo:ClusterClusterMountInfo" - } + }, + "deprecationMessage": "cluster_mount_info block is deprecated due the Clusters API changes." }, "clusterName": { "type": "string", @@ -9878,7 +9886,8 @@ "type": "array", "items": { "$ref": "#/types/databricks:index/ClusterClusterMountInfo:ClusterClusterMountInfo" - } + }, + "deprecationMessage": "cluster_mount_info block is deprecated due the Clusters API changes." 
}, "clusterName": { "type": "string", @@ -12458,12 +12467,16 @@ "azureServicePrincipal": { "$ref": "#/types/databricks:index/MetastoreDataAccessAzureServicePrincipal:MetastoreDataAccessAzureServicePrincipal" }, - "configurationType": { + "comment": { "type": "string" }, "databricksGcpServiceAccount": { "$ref": "#/types/databricks:index/MetastoreDataAccessDatabricksGcpServiceAccount:MetastoreDataAccessDatabricksGcpServiceAccount" }, + "forceDestroy": { + "type": "boolean", + "description": "Delete the data access configuration regardless of its dependencies.\n\n`aws_iam_role` optional configuration block for credential details for AWS:\n" + }, "gcpServiceAccountKey": { "$ref": "#/types/databricks:index/MetastoreDataAccessGcpServiceAccountKey:MetastoreDataAccessGcpServiceAccountKey" }, @@ -12472,17 +12485,25 @@ }, "metastoreId": { "type": "string", - "description": "Unique identifier of the parent Metastore\n\n`aws_iam_role` optional configuration block for credential details for AWS:\n" + "description": "Unique identifier of the parent Metastore\n" }, "name": { "type": "string", "description": "Name of Data Access Configuration, which must be unique within the databricks_metastore. Change forces creation of a new resource.\n" + }, + "owner": { + "type": "string", + "description": "Username/groupname/sp application_id of the data access configuration owner.\n" + }, + "readOnly": { + "type": "boolean" } }, "required": [ - "configurationType", + "databricksGcpServiceAccount", "metastoreId", - "name" + "name", + "owner" ], "inputProperties": { "awsIamRole": { @@ -12497,11 +12518,16 @@ "$ref": "#/types/databricks:index/MetastoreDataAccessAzureServicePrincipal:MetastoreDataAccessAzureServicePrincipal", "willReplaceOnChanges": true }, - "configurationType": { - "type": "string" + "comment": { + "type": "string", + "willReplaceOnChanges": true }, "databricksGcpServiceAccount": { - "$ref": "#/types/databricks:index/MetastoreDataAccessDatabricksGcpServiceAccount:MetastoreDataAccessDatabricksGcpServiceAccount", + "$ref": "#/types/databricks:index/MetastoreDataAccessDatabricksGcpServiceAccount:MetastoreDataAccessDatabricksGcpServiceAccount" + }, + "forceDestroy": { + "type": "boolean", + "description": "Delete the data access configuration regardless of its dependencies.\n\n`aws_iam_role` optional configuration block for credential details for AWS:\n", "willReplaceOnChanges": true }, "gcpServiceAccountKey": { @@ -12514,13 +12540,21 @@ }, "metastoreId": { "type": "string", - "description": "Unique identifier of the parent Metastore\n\n`aws_iam_role` optional configuration block for credential details for AWS:\n", + "description": "Unique identifier of the parent Metastore\n", "willReplaceOnChanges": true }, "name": { "type": "string", "description": "Name of Data Access Configuration, which must be unique within the databricks_metastore. 
Change forces creation of a new resource.\n", "willReplaceOnChanges": true + }, + "owner": { + "type": "string", + "description": "Username/groupname/sp application_id of the data access configuration owner.\n" + }, + "readOnly": { + "type": "boolean", + "willReplaceOnChanges": true } }, "requiredInputs": [ @@ -12541,11 +12575,16 @@ "$ref": "#/types/databricks:index/MetastoreDataAccessAzureServicePrincipal:MetastoreDataAccessAzureServicePrincipal", "willReplaceOnChanges": true }, - "configurationType": { - "type": "string" + "comment": { + "type": "string", + "willReplaceOnChanges": true }, "databricksGcpServiceAccount": { - "$ref": "#/types/databricks:index/MetastoreDataAccessDatabricksGcpServiceAccount:MetastoreDataAccessDatabricksGcpServiceAccount", + "$ref": "#/types/databricks:index/MetastoreDataAccessDatabricksGcpServiceAccount:MetastoreDataAccessDatabricksGcpServiceAccount" + }, + "forceDestroy": { + "type": "boolean", + "description": "Delete the data access configuration regardless of its dependencies.\n\n`aws_iam_role` optional configuration block for credential details for AWS:\n", "willReplaceOnChanges": true }, "gcpServiceAccountKey": { @@ -12558,13 +12597,21 @@ }, "metastoreId": { "type": "string", - "description": "Unique identifier of the parent Metastore\n\n`aws_iam_role` optional configuration block for credential details for AWS:\n", + "description": "Unique identifier of the parent Metastore\n", "willReplaceOnChanges": true }, "name": { "type": "string", "description": "Name of Data Access Configuration, which must be unique within the databricks_metastore. Change forces creation of a new resource.\n", "willReplaceOnChanges": true + }, + "owner": { + "type": "string", + "description": "Username/groupname/sp application_id of the data access configuration owner.\n" + }, + "readOnly": { + "type": "boolean", + "willReplaceOnChanges": true } }, "type": "object" @@ -12756,6 +12803,9 @@ "type": "string", "description": "Name of MLflow model. Change of name triggers new resource.\n" }, + "registeredModelId": { + "type": "string" + }, "tags": { "type": "array", "items": { @@ -12768,7 +12818,8 @@ } }, "required": [ - "name" + "name", + "registeredModelId" ], "inputProperties": { "creationTimestamp": { @@ -12815,6 +12866,9 @@ "description": "Name of MLflow model. Change of name triggers new resource.\n", "willReplaceOnChanges": true }, + "registeredModelId": { + "type": "string" + }, "tags": { "type": "array", "items": { @@ -12937,6 +12991,12 @@ "servingEndpointId": { "type": "string", "description": "Unique identifier of the serving endpoint primarily used to set permissions and refer to this instance for other operations.\n" + }, + "tags": { + "type": "array", + "items": { + "$ref": "#/types/databricks:index/ModelServingTag:ModelServingTag" + } } }, "required": [ @@ -12953,6 +13013,12 @@ "type": "string", "description": "The name of the model serving endpoint. This field is required and must be unique across a workspace. An endpoint name can consist of alphanumeric characters, dashes, and underscores. 
NOTE: Changing this name will delete the existing endpoint and create a new endpoint with the update name.\n", "willReplaceOnChanges": true + }, + "tags": { + "type": "array", + "items": { + "$ref": "#/types/databricks:index/ModelServingTag:ModelServingTag" + } } }, "requiredInputs": [ @@ -12973,6 +13039,12 @@ "servingEndpointId": { "type": "string", "description": "Unique identifier of the serving endpoint primarily used to set permissions and refer to this instance for other operations.\n" + }, + "tags": { + "type": "array", + "items": { + "$ref": "#/types/databricks:index/ModelServingTag:ModelServingTag" + } } }, "type": "object" @@ -17489,7 +17561,8 @@ "$ref": "#/types/databricks:index/StorageCredentialDatabricksGcpServiceAccount:StorageCredentialDatabricksGcpServiceAccount" }, "forceDestroy": { - "type": "boolean" + "type": "boolean", + "description": "Delete storage credential regardless of its dependencies.\n\n`aws_iam_role` optional configuration block for credential details for AWS:\n" }, "gcpServiceAccountKey": { "$ref": "#/types/databricks:index/StorageCredentialGcpServiceAccountKey:StorageCredentialGcpServiceAccountKey" @@ -17504,11 +17577,11 @@ }, "owner": { "type": "string", - "description": "Username/groupname/sp application_id of the storage credential owner.\n\n`aws_iam_role` optional configuration block for credential details for AWS:\n" + "description": "Username/groupname/sp application_id of the storage credential owner.\n" }, "readOnly": { "type": "boolean", - "description": "Indicates whether the storage credential is only usable for read operations.\n" + "description": "Indicates whether the storage credential is only usable for read operations.\n\n`azure_service_principal` optional configuration block to use service principal as credential details for Azure (Legacy):\n" } }, "required": [ @@ -17534,7 +17607,8 @@ "$ref": "#/types/databricks:index/StorageCredentialDatabricksGcpServiceAccount:StorageCredentialDatabricksGcpServiceAccount" }, "forceDestroy": { - "type": "boolean" + "type": "boolean", + "description": "Delete storage credential regardless of its dependencies.\n\n`aws_iam_role` optional configuration block for credential details for AWS:\n" }, "gcpServiceAccountKey": { "$ref": "#/types/databricks:index/StorageCredentialGcpServiceAccountKey:StorageCredentialGcpServiceAccountKey" @@ -17550,11 +17624,11 @@ }, "owner": { "type": "string", - "description": "Username/groupname/sp application_id of the storage credential owner.\n\n`aws_iam_role` optional configuration block for credential details for AWS:\n" + "description": "Username/groupname/sp application_id of the storage credential owner.\n" }, "readOnly": { "type": "boolean", - "description": "Indicates whether the storage credential is only usable for read operations.\n" + "description": "Indicates whether the storage credential is only usable for read operations.\n\n`azure_service_principal` optional configuration block to use service principal as credential details for Azure (Legacy):\n" } }, "stateInputs": { @@ -17576,7 +17650,8 @@ "$ref": "#/types/databricks:index/StorageCredentialDatabricksGcpServiceAccount:StorageCredentialDatabricksGcpServiceAccount" }, "forceDestroy": { - "type": "boolean" + "type": "boolean", + "description": "Delete storage credential regardless of its dependencies.\n\n`aws_iam_role` optional configuration block for credential details for AWS:\n" }, "gcpServiceAccountKey": { "$ref": 
"#/types/databricks:index/StorageCredentialGcpServiceAccountKey:StorageCredentialGcpServiceAccountKey" @@ -17592,11 +17667,11 @@ }, "owner": { "type": "string", - "description": "Username/groupname/sp application_id of the storage credential owner.\n\n`aws_iam_role` optional configuration block for credential details for AWS:\n" + "description": "Username/groupname/sp application_id of the storage credential owner.\n" }, "readOnly": { "type": "boolean", - "description": "Indicates whether the storage credential is only usable for read operations.\n" + "description": "Indicates whether the storage credential is only usable for read operations.\n\n`azure_service_principal` optional configuration block to use service principal as credential details for Azure (Legacy):\n" } }, "type": "object" diff --git a/provider/go.mod b/provider/go.mod index 632b01f8..7eb8c5eb 100644 --- a/provider/go.mod +++ b/provider/go.mod @@ -5,9 +5,9 @@ go 1.21 replace github.com/hashicorp/terraform-plugin-sdk/v2 => github.com/pulumi/terraform-plugin-sdk/v2 v2.0.0-20230912190043-e6d96b3b8f7e require ( - github.com/databricks/databricks-sdk-go v0.19.2 - github.com/databricks/terraform-provider-databricks v1.26.0 - github.com/pulumi/pulumi-terraform-bridge/v3 v3.60.0 + github.com/databricks/databricks-sdk-go v0.20.0 + github.com/databricks/terraform-provider-databricks v1.27.0 + github.com/pulumi/pulumi-terraform-bridge/v3 v3.60.1 ) require ( @@ -99,7 +99,7 @@ require ( github.com/google/go-querystring v1.1.0 // indirect github.com/google/s2a-go v0.1.7 // indirect github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510 // indirect - github.com/google/uuid v1.3.0 // indirect + github.com/google/uuid v1.3.1 // indirect github.com/google/wire v0.5.0 // indirect github.com/googleapis/enterprise-certificate-proxy v0.2.5 // indirect github.com/googleapis/gax-go/v2 v2.12.0 // indirect @@ -236,11 +236,11 @@ require ( golang.org/x/time v0.3.0 // indirect golang.org/x/tools v0.13.0 // indirect golang.org/x/xerrors v0.0.0-20220907171357-04be3eba64a2 // indirect - google.golang.org/api v0.138.0 // indirect + google.golang.org/api v0.140.0 // indirect google.golang.org/appengine v1.6.8 // indirect google.golang.org/genproto v0.0.0-20230803162519-f966b187b2e5 // indirect google.golang.org/genproto/googleapis/api v0.0.0-20230803162519-f966b187b2e5 // indirect - google.golang.org/genproto/googleapis/rpc v0.0.0-20230822172742-b8732ec3820d // indirect + google.golang.org/genproto/googleapis/rpc v0.0.0-20230911183012-2d3300fd4832 // indirect google.golang.org/grpc v1.58.0 // indirect google.golang.org/protobuf v1.31.0 // indirect gopkg.in/ini.v1 v1.67.0 // indirect diff --git a/provider/go.sum b/provider/go.sum index ba9bc2dc..b92a8b6b 100644 --- a/provider/go.sum +++ b/provider/go.sum @@ -1154,10 +1154,10 @@ github.com/d2g/dhcp4 v0.0.0-20170904100407-a1d1b6c41b1c/go.mod h1:Ct2BUK8SB0YC1S github.com/d2g/dhcp4client v1.0.0/go.mod h1:j0hNfjhrt2SxUOw55nL0ATM/z4Yt3t2Kd1mW34z5W5s= github.com/d2g/dhcp4server v0.0.0-20181031114812-7d4a0a7f59a5/go.mod h1:Eo87+Kg/IX2hfWJfwxMzLyuSZyxSoAug2nGa1G2QAi8= github.com/d2g/hardwareaddr v0.0.0-20190221164911-e7d9fbe030e4/go.mod h1:bMl4RjIciD2oAxI7DmWRx6gbeqrkoLqv3MV0vzNad+I= -github.com/databricks/databricks-sdk-go v0.19.2 h1:5a8q8U8iWVj5TU8e4909xPmbqHs+iV7v/MtUbprDqCQ= -github.com/databricks/databricks-sdk-go v0.19.2/go.mod h1:Bt/3i3ry/rQdE6Y+psvkAENlp+LzJHaQK5PsLIstQb4= -github.com/databricks/terraform-provider-databricks v1.26.0 h1:nXGsit+dNNtOfygy599BT1w5IkTTr1wu0WzeBLKlwbs= 
-github.com/databricks/terraform-provider-databricks v1.26.0/go.mod h1:4A+x/uiO6CuqigxO+4VoJyh9YeDxnjQbuoaXa4Hl2YY= +github.com/databricks/databricks-sdk-go v0.20.0 h1:KyVQkvyFYgIlTBjQbazAtW/Y5tZaHy2pF5DRpC35s0s= +github.com/databricks/databricks-sdk-go v0.20.0/go.mod h1:COiklTN3IdieazXcs4TnMou5GQFwIM7uhMGrz7nEAAk= +github.com/databricks/terraform-provider-databricks v1.27.0 h1:XWscImzq4y3b/NgSBPCG7LzqbpNPC8dgXXjId8f8LqI= +github.com/databricks/terraform-provider-databricks v1.27.0/go.mod h1:NBVipn/lRiFh9CB6Y72EU8o8EUuXNUfogT2dnzi1OXs= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= @@ -1562,8 +1562,9 @@ github.com/google/uuid v1.0.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+ github.com/google/uuid v1.1.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/google/uuid v1.2.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= -github.com/google/uuid v1.3.0 h1:t6JiXgmwXMjEs8VusXIJk2BXHsn+wx8BZdTaoZ5fu7I= github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/google/uuid v1.3.1 h1:KjJaJ9iWZ3jOFZIf1Lqf4laDRCasjl0BCmnEGxkdLb4= +github.com/google/uuid v1.3.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/google/wire v0.5.0 h1:I7ELFeVBr3yfPIcc8+MWvrjk+3VjbcSzoXm3JVa+jD8= github.com/google/wire v0.5.0/go.mod h1:ngWDr9Qvq3yZA10YrxfyGELY/AFWGVpy9c1LTRi1EoU= github.com/googleapis/enterprise-certificate-proxy v0.0.0-20220520183353-fd19c99a87aa/go.mod h1:17drOmN3MwGY7t0e+Ei9b45FFGA3fBs3x36SsCg1hq8= @@ -2342,8 +2343,8 @@ github.com/pulumi/pulumi-java/pkg v0.9.6 h1:UJrOAsYHRchwb4QlfI9Q224qg1TOI3rIsI6D github.com/pulumi/pulumi-java/pkg v0.9.6/go.mod h1:c6rSw/+q4O0IImgJ9axxoC6QesbPYWBaG5gimbHouUQ= github.com/pulumi/pulumi-terraform-bridge/testing v0.0.1 h1:SCg1gjfY9N4yn8U8peIUYATifjoDABkyR7H9lmefsfc= github.com/pulumi/pulumi-terraform-bridge/testing v0.0.1/go.mod h1:7OeUPH8rpt5ipyj9EFcnXpuzQ8SHL0dyqdfa8nOacdk= -github.com/pulumi/pulumi-terraform-bridge/v3 v3.60.0 h1:MPhSwNLJJlqLFHGfrXIRXZHzFIu05YLQldAJRYpOHRs= -github.com/pulumi/pulumi-terraform-bridge/v3 v3.60.0/go.mod h1:o0Vfch2UXtHOnGYpNElzGg4htT6B8X8hS9fa5AguP7g= +github.com/pulumi/pulumi-terraform-bridge/v3 v3.60.1 h1:+W2JHLi4y+G57jLPLJbDLv1xvm/9L2NO0gWXrtR8MDM= +github.com/pulumi/pulumi-terraform-bridge/v3 v3.60.1/go.mod h1:o0Vfch2UXtHOnGYpNElzGg4htT6B8X8hS9fa5AguP7g= github.com/pulumi/pulumi-terraform-bridge/x/muxer v0.0.4 h1:rIzMmtcVpPX8ynaz6/nW5AHNY63DiNfCohqmxWvMpM4= github.com/pulumi/pulumi-terraform-bridge/x/muxer v0.0.4/go.mod h1:Kt8RIZWa/N8rW3+0g6NrqCBmF3o+HuIhFaZpssEkG6w= github.com/pulumi/pulumi-yaml v1.2.2 h1:W6BeUBLhDrJ2GSU0em1AUVelG9PBI4ABY61DdhJOO3E= @@ -3408,8 +3409,8 @@ google.golang.org/api v0.108.0/go.mod h1:2Ts0XTHNVWxypznxWOYUeI4g3WdP9Pk2Qk58+a/ google.golang.org/api v0.110.0/go.mod h1:7FC4Vvx1Mooxh8C5HWjzZHcavuS2f6pmJpZx60ca7iI= google.golang.org/api v0.111.0/go.mod h1:qtFHvU9mhgTJegR31csQ+rwxyUTHOKFqCKWp1J0fdw0= google.golang.org/api v0.114.0/go.mod h1:ifYI2ZsFK6/uGddGfAD5BMxlnkBqCmqHSDUVi45N5Yg= -google.golang.org/api v0.138.0 h1:K/tVp05MxNVbHShRw9m7e9VJGdagNeTdMzqPH7AUqr0= -google.golang.org/api v0.138.0/go.mod h1:4xyob8CxC+0GChNBvEUAk8VBKNvYOTWM9T3v3UfRxuY= +google.golang.org/api v0.140.0 h1:CaXNdYOH5oQQI7l6iKTHHiMTdxZca4/02hRg2U8c2hM= 
+google.golang.org/api v0.140.0/go.mod h1:aGbCiFgtwb2P6badchFbSBUurV6oR5d50Af4iNJtDdI= google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= google.golang.org/appengine v1.2.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= @@ -3583,8 +3584,8 @@ google.golang.org/genproto/googleapis/api v0.0.0-20230803162519-f966b187b2e5 h1: google.golang.org/genproto/googleapis/api v0.0.0-20230803162519-f966b187b2e5/go.mod h1:5DZzOUPCLYL3mNkQ0ms0F3EuUNZ7py1Bqeq6sxzI7/Q= google.golang.org/genproto/googleapis/rpc v0.0.0-20230525234015-3fc162c6f38a/go.mod h1:xURIpW9ES5+/GZhnV6beoEtxQrnkRGIfP5VQG2tCBLc= google.golang.org/genproto/googleapis/rpc v0.0.0-20230525234030-28d5490b6b19/go.mod h1:66JfowdXAEgad5O9NnYcsNPLCPZJD++2L9X0PCMODrA= -google.golang.org/genproto/googleapis/rpc v0.0.0-20230822172742-b8732ec3820d h1:uvYuEyMHKNt+lT4K3bN6fGswmK8qSvcreM3BwjDh+y4= -google.golang.org/genproto/googleapis/rpc v0.0.0-20230822172742-b8732ec3820d/go.mod h1:+Bk1OCOj40wS2hwAMA+aCW9ypzm63QTBBHp6lQ3p+9M= +google.golang.org/genproto/googleapis/rpc v0.0.0-20230911183012-2d3300fd4832 h1:o4LtQxebKIJ4vkzyhtD2rfUNZ20Zf0ik5YVP5E7G7VE= +google.golang.org/genproto/googleapis/rpc v0.0.0-20230911183012-2d3300fd4832/go.mod h1:+Bk1OCOj40wS2hwAMA+aCW9ypzm63QTBBHp6lQ3p+9M= google.golang.org/grpc v0.0.0-20160317175043-d3ddb4469d5a/go.mod h1:yo6s7OP7yaDglbqo1J04qKzAhqBH6lvTonzMVmEdcZw= google.golang.org/grpc v1.8.0/go.mod h1:yo6s7OP7yaDglbqo1J04qKzAhqBH6lvTonzMVmEdcZw= google.golang.org/grpc v1.14.0/go.mod h1:yo6s7OP7yaDglbqo1J04qKzAhqBH6lvTonzMVmEdcZw= diff --git a/sdk/dotnet/Cluster.cs b/sdk/dotnet/Cluster.cs index 52fbb8ae..3e31800e 100644 --- a/sdk/dotnet/Cluster.cs +++ b/sdk/dotnet/Cluster.cs @@ -309,6 +309,7 @@ public sealed class ClusterArgs : global::Pulumi.ResourceArgs [Input("clusterMountInfos")] private InputList? _clusterMountInfos; + [Obsolete(@"cluster_mount_info block is deprecated due the Clusters API changes.")] public InputList ClusterMountInfos { get => _clusterMountInfos ?? (_clusterMountInfos = new InputList()); @@ -554,6 +555,7 @@ public sealed class ClusterState : global::Pulumi.ResourceArgs [Input("clusterMountInfos")] private InputList? _clusterMountInfos; + [Obsolete(@"cluster_mount_info block is deprecated due the Clusters API changes.")] public InputList ClusterMountInfos { get => _clusterMountInfos ?? (_clusterMountInfos = new InputList()); diff --git a/sdk/dotnet/Inputs/MetastoreDataAccessDatabricksGcpServiceAccountArgs.cs b/sdk/dotnet/Inputs/MetastoreDataAccessDatabricksGcpServiceAccountArgs.cs index 13f60e7c..6239b841 100644 --- a/sdk/dotnet/Inputs/MetastoreDataAccessDatabricksGcpServiceAccountArgs.cs +++ b/sdk/dotnet/Inputs/MetastoreDataAccessDatabricksGcpServiceAccountArgs.cs @@ -12,6 +12,9 @@ namespace Pulumi.Databricks.Inputs public sealed class MetastoreDataAccessDatabricksGcpServiceAccountArgs : global::Pulumi.ResourceArgs { + [Input("credentialId")] + public Input? CredentialId { get; set; } + /// /// The email of the GCP service account created, to be granted access to relevant buckets. 
/// diff --git a/sdk/dotnet/Inputs/MetastoreDataAccessDatabricksGcpServiceAccountGetArgs.cs b/sdk/dotnet/Inputs/MetastoreDataAccessDatabricksGcpServiceAccountGetArgs.cs index 42fb2b70..4ce373e8 100644 --- a/sdk/dotnet/Inputs/MetastoreDataAccessDatabricksGcpServiceAccountGetArgs.cs +++ b/sdk/dotnet/Inputs/MetastoreDataAccessDatabricksGcpServiceAccountGetArgs.cs @@ -12,6 +12,9 @@ namespace Pulumi.Databricks.Inputs public sealed class MetastoreDataAccessDatabricksGcpServiceAccountGetArgs : global::Pulumi.ResourceArgs { + [Input("credentialId")] + public Input? CredentialId { get; set; } + /// /// The email of the GCP service account created, to be granted access to relevant buckets. /// diff --git a/sdk/dotnet/Inputs/ModelServingTagArgs.cs b/sdk/dotnet/Inputs/ModelServingTagArgs.cs new file mode 100644 index 00000000..4cbb2ad4 --- /dev/null +++ b/sdk/dotnet/Inputs/ModelServingTagArgs.cs @@ -0,0 +1,26 @@ +// *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. *** +// *** Do not edit by hand unless you're certain you know what you are doing! *** + +using System; +using System.Collections.Generic; +using System.Collections.Immutable; +using System.Threading.Tasks; +using Pulumi.Serialization; + +namespace Pulumi.Databricks.Inputs +{ + + public sealed class ModelServingTagArgs : global::Pulumi.ResourceArgs + { + [Input("key", required: true)] + public Input Key { get; set; } = null!; + + [Input("value")] + public Input? Value { get; set; } + + public ModelServingTagArgs() + { + } + public static new ModelServingTagArgs Empty => new ModelServingTagArgs(); + } +} diff --git a/sdk/dotnet/Inputs/ModelServingTagGetArgs.cs b/sdk/dotnet/Inputs/ModelServingTagGetArgs.cs new file mode 100644 index 00000000..b06c7003 --- /dev/null +++ b/sdk/dotnet/Inputs/ModelServingTagGetArgs.cs @@ -0,0 +1,26 @@ +// *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. *** +// *** Do not edit by hand unless you're certain you know what you are doing! *** + +using System; +using System.Collections.Generic; +using System.Collections.Immutable; +using System.Threading.Tasks; +using Pulumi.Serialization; + +namespace Pulumi.Databricks.Inputs +{ + + public sealed class ModelServingTagGetArgs : global::Pulumi.ResourceArgs + { + [Input("key", required: true)] + public Input Key { get; set; } = null!; + + [Input("value")] + public Input? Value { get; set; } + + public ModelServingTagGetArgs() + { + } + public static new ModelServingTagGetArgs Empty => new ModelServingTagGetArgs(); + } +} diff --git a/sdk/dotnet/Inputs/StorageCredentialAzureManagedIdentityArgs.cs b/sdk/dotnet/Inputs/StorageCredentialAzureManagedIdentityArgs.cs index 9fb937bb..1b01edd2 100644 --- a/sdk/dotnet/Inputs/StorageCredentialAzureManagedIdentityArgs.cs +++ b/sdk/dotnet/Inputs/StorageCredentialAzureManagedIdentityArgs.cs @@ -24,7 +24,7 @@ public sealed class StorageCredentialAzureManagedIdentityArgs : global::Pulumi.R /// /// The Resource ID of the Azure User Assigned Managed Identity associated with Azure Databricks Access Connector, of the form `/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg-name/providers/Microsoft.ManagedIdentity/userAssignedIdentities/user-managed-identity-name`. /// - /// `azure_service_principal` optional configuration block to use service principal as credential details for Azure: + /// `databricks_gcp_service_account` optional configuration block for creating a Databricks-managed GCP Service Account: /// [Input("managedIdentityId")] public Input? 
ManagedIdentityId { get; set; } diff --git a/sdk/dotnet/Inputs/StorageCredentialAzureManagedIdentityGetArgs.cs b/sdk/dotnet/Inputs/StorageCredentialAzureManagedIdentityGetArgs.cs index 43dfebd5..f5516929 100644 --- a/sdk/dotnet/Inputs/StorageCredentialAzureManagedIdentityGetArgs.cs +++ b/sdk/dotnet/Inputs/StorageCredentialAzureManagedIdentityGetArgs.cs @@ -24,7 +24,7 @@ public sealed class StorageCredentialAzureManagedIdentityGetArgs : global::Pulum /// /// The Resource ID of the Azure User Assigned Managed Identity associated with Azure Databricks Access Connector, of the form `/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg-name/providers/Microsoft.ManagedIdentity/userAssignedIdentities/user-managed-identity-name`. /// - /// `azure_service_principal` optional configuration block to use service principal as credential details for Azure: + /// `databricks_gcp_service_account` optional configuration block for creating a Databricks-managed GCP Service Account: /// [Input("managedIdentityId")] public Input? ManagedIdentityId { get; set; } diff --git a/sdk/dotnet/Inputs/StorageCredentialAzureServicePrincipalArgs.cs b/sdk/dotnet/Inputs/StorageCredentialAzureServicePrincipalArgs.cs index a7b955f4..6dc1a6f7 100644 --- a/sdk/dotnet/Inputs/StorageCredentialAzureServicePrincipalArgs.cs +++ b/sdk/dotnet/Inputs/StorageCredentialAzureServicePrincipalArgs.cs @@ -20,8 +20,6 @@ public sealed class StorageCredentialAzureServicePrincipalArgs : global::Pulumi. /// /// The client secret generated for the above app ID in AAD. **This field is redacted on output** - /// - /// `databricks_gcp_service_account` optional configuration block for creating a Databricks-managed GCP Service Account: /// [Input("clientSecret", required: true)] public Input ClientSecret { get; set; } = null!; diff --git a/sdk/dotnet/Inputs/StorageCredentialAzureServicePrincipalGetArgs.cs b/sdk/dotnet/Inputs/StorageCredentialAzureServicePrincipalGetArgs.cs index 292e548a..a2dc9523 100644 --- a/sdk/dotnet/Inputs/StorageCredentialAzureServicePrincipalGetArgs.cs +++ b/sdk/dotnet/Inputs/StorageCredentialAzureServicePrincipalGetArgs.cs @@ -20,8 +20,6 @@ public sealed class StorageCredentialAzureServicePrincipalGetArgs : global::Pulu /// /// The client secret generated for the above app ID in AAD. **This field is redacted on output** - /// - /// `databricks_gcp_service_account` optional configuration block for creating a Databricks-managed GCP Service Account: /// [Input("clientSecret", required: true)] public Input ClientSecret { get; set; } = null!; diff --git a/sdk/dotnet/Inputs/StorageCredentialDatabricksGcpServiceAccountArgs.cs b/sdk/dotnet/Inputs/StorageCredentialDatabricksGcpServiceAccountArgs.cs index 20129f3f..baabf0df 100644 --- a/sdk/dotnet/Inputs/StorageCredentialDatabricksGcpServiceAccountArgs.cs +++ b/sdk/dotnet/Inputs/StorageCredentialDatabricksGcpServiceAccountArgs.cs @@ -12,6 +12,9 @@ namespace Pulumi.Databricks.Inputs public sealed class StorageCredentialDatabricksGcpServiceAccountArgs : global::Pulumi.ResourceArgs { + [Input("credentialId")] + public Input? CredentialId { get; set; } + /// /// The email of the GCP service account created, to be granted access to relevant buckets. 
/// diff --git a/sdk/dotnet/Inputs/StorageCredentialDatabricksGcpServiceAccountGetArgs.cs b/sdk/dotnet/Inputs/StorageCredentialDatabricksGcpServiceAccountGetArgs.cs index 8e366206..d1b7ce36 100644 --- a/sdk/dotnet/Inputs/StorageCredentialDatabricksGcpServiceAccountGetArgs.cs +++ b/sdk/dotnet/Inputs/StorageCredentialDatabricksGcpServiceAccountGetArgs.cs @@ -12,6 +12,9 @@ namespace Pulumi.Databricks.Inputs public sealed class StorageCredentialDatabricksGcpServiceAccountGetArgs : global::Pulumi.ResourceArgs { + [Input("credentialId")] + public Input? CredentialId { get; set; } + /// /// The email of the GCP service account created, to be granted access to relevant buckets. /// diff --git a/sdk/dotnet/MetastoreDataAccess.cs b/sdk/dotnet/MetastoreDataAccess.cs index 8be65f39..3e015194 100644 --- a/sdk/dotnet/MetastoreDataAccess.cs +++ b/sdk/dotnet/MetastoreDataAccess.cs @@ -32,11 +32,19 @@ public partial class MetastoreDataAccess : global::Pulumi.CustomResource [Output("azureServicePrincipal")] public Output AzureServicePrincipal { get; private set; } = null!; - [Output("configurationType")] - public Output ConfigurationType { get; private set; } = null!; + [Output("comment")] + public Output Comment { get; private set; } = null!; [Output("databricksGcpServiceAccount")] - public Output DatabricksGcpServiceAccount { get; private set; } = null!; + public Output DatabricksGcpServiceAccount { get; private set; } = null!; + + /// + /// Delete the data access configuration regardless of its dependencies. + /// + /// `aws_iam_role` optional configuration block for credential details for AWS: + /// + [Output("forceDestroy")] + public Output ForceDestroy { get; private set; } = null!; [Output("gcpServiceAccountKey")] public Output GcpServiceAccountKey { get; private set; } = null!; @@ -46,8 +54,6 @@ public partial class MetastoreDataAccess : global::Pulumi.CustomResource /// /// Unique identifier of the parent Metastore - /// - /// `aws_iam_role` optional configuration block for credential details for AWS: /// [Output("metastoreId")] public Output MetastoreId { get; private set; } = null!; @@ -58,6 +64,15 @@ public partial class MetastoreDataAccess : global::Pulumi.CustomResource [Output("name")] public Output Name { get; private set; } = null!; + /// + /// Username/groupname/sp application_id of the data access configuration owner. + /// + [Output("owner")] + public Output Owner { get; private set; } = null!; + + [Output("readOnly")] + public Output ReadOnly { get; private set; } = null!; + /// /// Create a MetastoreDataAccess resource with the given unique name, arguments, and options. @@ -113,12 +128,20 @@ public sealed class MetastoreDataAccessArgs : global::Pulumi.ResourceArgs [Input("azureServicePrincipal")] public Input? AzureServicePrincipal { get; set; } - [Input("configurationType")] - public Input? ConfigurationType { get; set; } + [Input("comment")] + public Input? Comment { get; set; } [Input("databricksGcpServiceAccount")] public Input? DatabricksGcpServiceAccount { get; set; } + /// + /// Delete the data access configuration regardless of its dependencies. + /// + /// `aws_iam_role` optional configuration block for credential details for AWS: + /// + [Input("forceDestroy")] + public Input? ForceDestroy { get; set; } + [Input("gcpServiceAccountKey")] public Input? 
GcpServiceAccountKey { get; set; } @@ -127,8 +150,6 @@ public sealed class MetastoreDataAccessArgs : global::Pulumi.ResourceArgs /// /// Unique identifier of the parent Metastore - /// - /// `aws_iam_role` optional configuration block for credential details for AWS: /// [Input("metastoreId", required: true)] public Input MetastoreId { get; set; } = null!; @@ -139,6 +160,15 @@ public sealed class MetastoreDataAccessArgs : global::Pulumi.ResourceArgs [Input("name")] public Input? Name { get; set; } + /// + /// Username/groupname/sp application_id of the data access configuration owner. + /// + [Input("owner")] + public Input? Owner { get; set; } + + [Input("readOnly")] + public Input? ReadOnly { get; set; } + public MetastoreDataAccessArgs() { } @@ -156,12 +186,20 @@ public sealed class MetastoreDataAccessState : global::Pulumi.ResourceArgs [Input("azureServicePrincipal")] public Input? AzureServicePrincipal { get; set; } - [Input("configurationType")] - public Input? ConfigurationType { get; set; } + [Input("comment")] + public Input? Comment { get; set; } [Input("databricksGcpServiceAccount")] public Input? DatabricksGcpServiceAccount { get; set; } + /// + /// Delete the data access configuration regardless of its dependencies. + /// + /// `aws_iam_role` optional configuration block for credential details for AWS: + /// + [Input("forceDestroy")] + public Input? ForceDestroy { get; set; } + [Input("gcpServiceAccountKey")] public Input? GcpServiceAccountKey { get; set; } @@ -170,8 +208,6 @@ public sealed class MetastoreDataAccessState : global::Pulumi.ResourceArgs /// /// Unique identifier of the parent Metastore - /// - /// `aws_iam_role` optional configuration block for credential details for AWS: /// [Input("metastoreId")] public Input? MetastoreId { get; set; } @@ -182,6 +218,15 @@ public sealed class MetastoreDataAccessState : global::Pulumi.ResourceArgs [Input("name")] public Input? Name { get; set; } + /// + /// Username/groupname/sp application_id of the data access configuration owner. + /// + [Input("owner")] + public Input? Owner { get; set; } + + [Input("readOnly")] + public Input? ReadOnly { get; set; } + public MetastoreDataAccessState() { } diff --git a/sdk/dotnet/MlflowModel.cs b/sdk/dotnet/MlflowModel.cs index c182a2b1..766bb975 100644 --- a/sdk/dotnet/MlflowModel.cs +++ b/sdk/dotnet/MlflowModel.cs @@ -87,6 +87,9 @@ public partial class MlflowModel : global::Pulumi.CustomResource [Output("name")] public Output Name { get; private set; } = null!; + [Output("registeredModelId")] + public Output RegisteredModelId { get; private set; } = null!; + /// /// Tags for the MLflow model. /// @@ -201,6 +204,9 @@ public sealed class MlflowModelState : global::Pulumi.ResourceArgs [Input("name")] public Input? Name { get; set; } + [Input("registeredModelId")] + public Input? RegisteredModelId { get; set; } + [Input("tags")] private InputList? _tags; diff --git a/sdk/dotnet/ModelServing.cs b/sdk/dotnet/ModelServing.cs index 4e1b8256..df7d6add 100644 --- a/sdk/dotnet/ModelServing.cs +++ b/sdk/dotnet/ModelServing.cs @@ -110,6 +110,9 @@ public partial class ModelServing : global::Pulumi.CustomResource [Output("servingEndpointId")] public Output ServingEndpointId { get; private set; } = null!; + [Output("tags")] + public Output> Tags { get; private set; } = null!; + /// /// Create a ModelServing resource with the given unique name, arguments, and options. @@ -168,6 +171,14 @@ public sealed class ModelServingArgs : global::Pulumi.ResourceArgs [Input("name")] public Input? 
Name { get; set; } + [Input("tags")] + private InputList? _tags; + public InputList Tags + { + get => _tags ?? (_tags = new InputList()); + set => _tags = value; + } + public ModelServingArgs() { } @@ -194,6 +205,14 @@ public sealed class ModelServingState : global::Pulumi.ResourceArgs [Input("servingEndpointId")] public Input? ServingEndpointId { get; set; } + [Input("tags")] + private InputList? _tags; + public InputList Tags + { + get => _tags ?? (_tags = new InputList()); + set => _tags = value; + } + public ModelServingState() { } diff --git a/sdk/dotnet/Outputs/MetastoreDataAccessDatabricksGcpServiceAccount.cs b/sdk/dotnet/Outputs/MetastoreDataAccessDatabricksGcpServiceAccount.cs index 8aaab037..3e12059a 100644 --- a/sdk/dotnet/Outputs/MetastoreDataAccessDatabricksGcpServiceAccount.cs +++ b/sdk/dotnet/Outputs/MetastoreDataAccessDatabricksGcpServiceAccount.cs @@ -13,6 +13,7 @@ namespace Pulumi.Databricks.Outputs [OutputType] public sealed class MetastoreDataAccessDatabricksGcpServiceAccount { + public readonly string? CredentialId; /// /// The email of the GCP service account created, to be granted access to relevant buckets. /// @@ -21,8 +22,12 @@ public sealed class MetastoreDataAccessDatabricksGcpServiceAccount public readonly string? Email; [OutputConstructor] - private MetastoreDataAccessDatabricksGcpServiceAccount(string? email) + private MetastoreDataAccessDatabricksGcpServiceAccount( + string? credentialId, + + string? email) { + CredentialId = credentialId; Email = email; } } diff --git a/sdk/dotnet/Outputs/ModelServingTag.cs b/sdk/dotnet/Outputs/ModelServingTag.cs new file mode 100644 index 00000000..123989a1 --- /dev/null +++ b/sdk/dotnet/Outputs/ModelServingTag.cs @@ -0,0 +1,29 @@ +// *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. *** +// *** Do not edit by hand unless you're certain you know what you are doing! *** + +using System; +using System.Collections.Generic; +using System.Collections.Immutable; +using System.Threading.Tasks; +using Pulumi.Serialization; + +namespace Pulumi.Databricks.Outputs +{ + + [OutputType] + public sealed class ModelServingTag + { + public readonly string Key; + public readonly string? Value; + + [OutputConstructor] + private ModelServingTag( + string key, + + string? value) + { + Key = key; + Value = value; + } + } +} diff --git a/sdk/dotnet/Outputs/StorageCredentialAzureManagedIdentity.cs b/sdk/dotnet/Outputs/StorageCredentialAzureManagedIdentity.cs index 8bec7e49..a72cd024 100644 --- a/sdk/dotnet/Outputs/StorageCredentialAzureManagedIdentity.cs +++ b/sdk/dotnet/Outputs/StorageCredentialAzureManagedIdentity.cs @@ -21,7 +21,7 @@ public sealed class StorageCredentialAzureManagedIdentity /// /// The Resource ID of the Azure User Assigned Managed Identity associated with Azure Databricks Access Connector, of the form `/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg-name/providers/Microsoft.ManagedIdentity/userAssignedIdentities/user-managed-identity-name`. /// - /// `azure_service_principal` optional configuration block to use service principal as credential details for Azure: + /// `databricks_gcp_service_account` optional configuration block for creating a Databricks-managed GCP Service Account: /// public readonly string? 
ManagedIdentityId; diff --git a/sdk/dotnet/Outputs/StorageCredentialAzureServicePrincipal.cs b/sdk/dotnet/Outputs/StorageCredentialAzureServicePrincipal.cs index 977c0733..fc749400 100644 --- a/sdk/dotnet/Outputs/StorageCredentialAzureServicePrincipal.cs +++ b/sdk/dotnet/Outputs/StorageCredentialAzureServicePrincipal.cs @@ -19,8 +19,6 @@ public sealed class StorageCredentialAzureServicePrincipal public readonly string ApplicationId; /// /// The client secret generated for the above app ID in AAD. **This field is redacted on output** - /// - /// `databricks_gcp_service_account` optional configuration block for creating a Databricks-managed GCP Service Account: /// public readonly string ClientSecret; /// diff --git a/sdk/dotnet/Outputs/StorageCredentialDatabricksGcpServiceAccount.cs b/sdk/dotnet/Outputs/StorageCredentialDatabricksGcpServiceAccount.cs index bc8b6257..71d52f42 100644 --- a/sdk/dotnet/Outputs/StorageCredentialDatabricksGcpServiceAccount.cs +++ b/sdk/dotnet/Outputs/StorageCredentialDatabricksGcpServiceAccount.cs @@ -13,14 +13,19 @@ namespace Pulumi.Databricks.Outputs [OutputType] public sealed class StorageCredentialDatabricksGcpServiceAccount { + public readonly string? CredentialId; /// /// The email of the GCP service account created, to be granted access to relevant buckets. /// public readonly string? Email; [OutputConstructor] - private StorageCredentialDatabricksGcpServiceAccount(string? email) + private StorageCredentialDatabricksGcpServiceAccount( + string? credentialId, + + string? email) { + CredentialId = credentialId; Email = email; } } diff --git a/sdk/dotnet/StorageCredential.cs b/sdk/dotnet/StorageCredential.cs index 1a4e9900..657df983 100644 --- a/sdk/dotnet/StorageCredential.cs +++ b/sdk/dotnet/StorageCredential.cs @@ -153,6 +153,11 @@ public partial class StorageCredential : global::Pulumi.CustomResource [Output("databricksGcpServiceAccount")] public Output DatabricksGcpServiceAccount { get; private set; } = null!; + /// + /// Delete storage credential regardless of its dependencies. + /// + /// `aws_iam_role` optional configuration block for credential details for AWS: + /// [Output("forceDestroy")] public Output ForceDestroy { get; private set; } = null!; @@ -173,14 +178,14 @@ public partial class StorageCredential : global::Pulumi.CustomResource /// /// Username/groupname/sp application_id of the storage credential owner. - /// - /// `aws_iam_role` optional configuration block for credential details for AWS: /// [Output("owner")] public Output Owner { get; private set; } = null!; /// /// Indicates whether the storage credential is only usable for read operations. + /// + /// `azure_service_principal` optional configuration block to use service principal as credential details for Azure (Legacy): /// [Output("readOnly")] public Output ReadOnly { get; private set; } = null!; @@ -246,6 +251,11 @@ public sealed class StorageCredentialArgs : global::Pulumi.ResourceArgs [Input("databricksGcpServiceAccount")] public Input? DatabricksGcpServiceAccount { get; set; } + /// + /// Delete storage credential regardless of its dependencies. + /// + /// `aws_iam_role` optional configuration block for credential details for AWS: + /// [Input("forceDestroy")] public Input? ForceDestroy { get; set; } @@ -266,14 +276,14 @@ public sealed class StorageCredentialArgs : global::Pulumi.ResourceArgs /// /// Username/groupname/sp application_id of the storage credential owner. 
- /// - /// `aws_iam_role` optional configuration block for credential details for AWS: /// [Input("owner")] public Input? Owner { get; set; } /// /// Indicates whether the storage credential is only usable for read operations. + /// + /// `azure_service_principal` optional configuration block to use service principal as credential details for Azure (Legacy): /// [Input("readOnly")] public Input? ReadOnly { get; set; } @@ -301,6 +311,11 @@ public sealed class StorageCredentialState : global::Pulumi.ResourceArgs [Input("databricksGcpServiceAccount")] public Input? DatabricksGcpServiceAccount { get; set; } + /// + /// Delete storage credential regardless of its dependencies. + /// + /// `aws_iam_role` optional configuration block for credential details for AWS: + /// [Input("forceDestroy")] public Input? ForceDestroy { get; set; } @@ -321,14 +336,14 @@ public sealed class StorageCredentialState : global::Pulumi.ResourceArgs /// /// Username/groupname/sp application_id of the storage credential owner. - /// - /// `aws_iam_role` optional configuration block for credential details for AWS: /// [Input("owner")] public Input? Owner { get; set; } /// /// Indicates whether the storage credential is only usable for read operations. + /// + /// `azure_service_principal` optional configuration block to use service principal as credential details for Azure (Legacy): /// [Input("readOnly")] public Input? ReadOnly { get; set; } diff --git a/sdk/go/databricks/cluster.go b/sdk/go/databricks/cluster.go index 811418cc..e254b234 100644 --- a/sdk/go/databricks/cluster.go +++ b/sdk/go/databricks/cluster.go @@ -29,12 +29,13 @@ type Cluster struct { ApplyPolicyDefaultValues pulumi.BoolPtrOutput `pulumi:"applyPolicyDefaultValues"` Autoscale ClusterAutoscalePtrOutput `pulumi:"autoscale"` // Automatically terminate the cluster after being inactive for this time in minutes. If specified, the threshold must be between 10 and 10000 minutes. You can also set this value to 0 to explicitly disable automatic termination. Defaults to `60`. *We highly recommend having this setting present for Interactive/BI clusters.* - AutoterminationMinutes pulumi.IntPtrOutput `pulumi:"autoterminationMinutes"` - AwsAttributes ClusterAwsAttributesPtrOutput `pulumi:"awsAttributes"` - AzureAttributes ClusterAzureAttributesPtrOutput `pulumi:"azureAttributes"` - ClusterId pulumi.StringOutput `pulumi:"clusterId"` - ClusterLogConf ClusterClusterLogConfPtrOutput `pulumi:"clusterLogConf"` - ClusterMountInfos ClusterClusterMountInfoArrayOutput `pulumi:"clusterMountInfos"` + AutoterminationMinutes pulumi.IntPtrOutput `pulumi:"autoterminationMinutes"` + AwsAttributes ClusterAwsAttributesPtrOutput `pulumi:"awsAttributes"` + AzureAttributes ClusterAzureAttributesPtrOutput `pulumi:"azureAttributes"` + ClusterId pulumi.StringOutput `pulumi:"clusterId"` + ClusterLogConf ClusterClusterLogConfPtrOutput `pulumi:"clusterLogConf"` + // Deprecated: cluster_mount_info block is deprecated due the Clusters API changes. + ClusterMountInfos ClusterClusterMountInfoArrayOutput `pulumi:"clusterMountInfos"` // Cluster name, which doesn’t have to be unique. If not specified at creation, the cluster name will be an empty string. ClusterName pulumi.StringPtrOutput `pulumi:"clusterName"` // Additional tags for cluster resources. Databricks will tag all cluster resources (e.g., AWS EC2 instances and EBS volumes) with these tags in addition to `defaultTags`. 
If a custom cluster tag has the same name as a default cluster tag, the custom tag is prefixed with an `x_` when it is propagated. @@ -168,12 +169,13 @@ type clusterState struct { ApplyPolicyDefaultValues *bool `pulumi:"applyPolicyDefaultValues"` Autoscale *ClusterAutoscale `pulumi:"autoscale"` // Automatically terminate the cluster after being inactive for this time in minutes. If specified, the threshold must be between 10 and 10000 minutes. You can also set this value to 0 to explicitly disable automatic termination. Defaults to `60`. *We highly recommend having this setting present for Interactive/BI clusters.* - AutoterminationMinutes *int `pulumi:"autoterminationMinutes"` - AwsAttributes *ClusterAwsAttributes `pulumi:"awsAttributes"` - AzureAttributes *ClusterAzureAttributes `pulumi:"azureAttributes"` - ClusterId *string `pulumi:"clusterId"` - ClusterLogConf *ClusterClusterLogConf `pulumi:"clusterLogConf"` - ClusterMountInfos []ClusterClusterMountInfo `pulumi:"clusterMountInfos"` + AutoterminationMinutes *int `pulumi:"autoterminationMinutes"` + AwsAttributes *ClusterAwsAttributes `pulumi:"awsAttributes"` + AzureAttributes *ClusterAzureAttributes `pulumi:"azureAttributes"` + ClusterId *string `pulumi:"clusterId"` + ClusterLogConf *ClusterClusterLogConf `pulumi:"clusterLogConf"` + // Deprecated: cluster_mount_info block is deprecated due the Clusters API changes. + ClusterMountInfos []ClusterClusterMountInfo `pulumi:"clusterMountInfos"` // Cluster name, which doesn’t have to be unique. If not specified at creation, the cluster name will be an empty string. ClusterName *string `pulumi:"clusterName"` // Additional tags for cluster resources. Databricks will tag all cluster resources (e.g., AWS EC2 instances and EBS volumes) with these tags in addition to `defaultTags`. If a custom cluster tag has the same name as a default cluster tag, the custom tag is prefixed with an `x_` when it is propagated. @@ -280,7 +282,8 @@ type ClusterState struct { AzureAttributes ClusterAzureAttributesPtrInput ClusterId pulumi.StringPtrInput ClusterLogConf ClusterClusterLogConfPtrInput - ClusterMountInfos ClusterClusterMountInfoArrayInput + // Deprecated: cluster_mount_info block is deprecated due the Clusters API changes. + ClusterMountInfos ClusterClusterMountInfoArrayInput // Cluster name, which doesn’t have to be unique. If not specified at creation, the cluster name will be an empty string. ClusterName pulumi.StringPtrInput // Additional tags for cluster resources. Databricks will tag all cluster resources (e.g., AWS EC2 instances and EBS volumes) with these tags in addition to `defaultTags`. If a custom cluster tag has the same name as a default cluster tag, the custom tag is prefixed with an `x_` when it is propagated. @@ -386,12 +389,13 @@ type clusterArgs struct { ApplyPolicyDefaultValues *bool `pulumi:"applyPolicyDefaultValues"` Autoscale *ClusterAutoscale `pulumi:"autoscale"` // Automatically terminate the cluster after being inactive for this time in minutes. If specified, the threshold must be between 10 and 10000 minutes. You can also set this value to 0 to explicitly disable automatic termination. Defaults to `60`. 
*We highly recommend having this setting present for Interactive/BI clusters.* - AutoterminationMinutes *int `pulumi:"autoterminationMinutes"` - AwsAttributes *ClusterAwsAttributes `pulumi:"awsAttributes"` - AzureAttributes *ClusterAzureAttributes `pulumi:"azureAttributes"` - ClusterId *string `pulumi:"clusterId"` - ClusterLogConf *ClusterClusterLogConf `pulumi:"clusterLogConf"` - ClusterMountInfos []ClusterClusterMountInfo `pulumi:"clusterMountInfos"` + AutoterminationMinutes *int `pulumi:"autoterminationMinutes"` + AwsAttributes *ClusterAwsAttributes `pulumi:"awsAttributes"` + AzureAttributes *ClusterAzureAttributes `pulumi:"azureAttributes"` + ClusterId *string `pulumi:"clusterId"` + ClusterLogConf *ClusterClusterLogConf `pulumi:"clusterLogConf"` + // Deprecated: cluster_mount_info block is deprecated due the Clusters API changes. + ClusterMountInfos []ClusterClusterMountInfo `pulumi:"clusterMountInfos"` // Cluster name, which doesn’t have to be unique. If not specified at creation, the cluster name will be an empty string. ClusterName *string `pulumi:"clusterName"` // Additional tags for cluster resources. Databricks will tag all cluster resources (e.g., AWS EC2 instances and EBS volumes) with these tags in addition to `defaultTags`. If a custom cluster tag has the same name as a default cluster tag, the custom tag is prefixed with an `x_` when it is propagated. @@ -494,7 +498,8 @@ type ClusterArgs struct { AzureAttributes ClusterAzureAttributesPtrInput ClusterId pulumi.StringPtrInput ClusterLogConf ClusterClusterLogConfPtrInput - ClusterMountInfos ClusterClusterMountInfoArrayInput + // Deprecated: cluster_mount_info block is deprecated due the Clusters API changes. + ClusterMountInfos ClusterClusterMountInfoArrayInput // Cluster name, which doesn’t have to be unique. If not specified at creation, the cluster name will be an empty string. ClusterName pulumi.StringPtrInput // Additional tags for cluster resources. Databricks will tag all cluster resources (e.g., AWS EC2 instances and EBS volumes) with these tags in addition to `defaultTags`. If a custom cluster tag has the same name as a default cluster tag, the custom tag is prefixed with an `x_` when it is propagated. @@ -727,6 +732,7 @@ func (o ClusterOutput) ClusterLogConf() ClusterClusterLogConfPtrOutput { return o.ApplyT(func(v *Cluster) ClusterClusterLogConfPtrOutput { return v.ClusterLogConf }).(ClusterClusterLogConfPtrOutput) } +// Deprecated: cluster_mount_info block is deprecated due the Clusters API changes. 
func (o ClusterOutput) ClusterMountInfos() ClusterClusterMountInfoArrayOutput { return o.ApplyT(func(v *Cluster) ClusterClusterMountInfoArrayOutput { return v.ClusterMountInfos }).(ClusterClusterMountInfoArrayOutput) } diff --git a/sdk/go/databricks/metastoreDataAccess.go b/sdk/go/databricks/metastoreDataAccess.go index 473820e4..fa6c57af 100644 --- a/sdk/go/databricks/metastoreDataAccess.go +++ b/sdk/go/databricks/metastoreDataAccess.go @@ -27,19 +27,24 @@ import ( type MetastoreDataAccess struct { pulumi.CustomResourceState - AwsIamRole MetastoreDataAccessAwsIamRolePtrOutput `pulumi:"awsIamRole"` - AzureManagedIdentity MetastoreDataAccessAzureManagedIdentityPtrOutput `pulumi:"azureManagedIdentity"` - AzureServicePrincipal MetastoreDataAccessAzureServicePrincipalPtrOutput `pulumi:"azureServicePrincipal"` - ConfigurationType pulumi.StringOutput `pulumi:"configurationType"` - DatabricksGcpServiceAccount MetastoreDataAccessDatabricksGcpServiceAccountPtrOutput `pulumi:"databricksGcpServiceAccount"` - GcpServiceAccountKey MetastoreDataAccessGcpServiceAccountKeyPtrOutput `pulumi:"gcpServiceAccountKey"` - IsDefault pulumi.BoolPtrOutput `pulumi:"isDefault"` - // Unique identifier of the parent Metastore + AwsIamRole MetastoreDataAccessAwsIamRolePtrOutput `pulumi:"awsIamRole"` + AzureManagedIdentity MetastoreDataAccessAzureManagedIdentityPtrOutput `pulumi:"azureManagedIdentity"` + AzureServicePrincipal MetastoreDataAccessAzureServicePrincipalPtrOutput `pulumi:"azureServicePrincipal"` + Comment pulumi.StringPtrOutput `pulumi:"comment"` + DatabricksGcpServiceAccount MetastoreDataAccessDatabricksGcpServiceAccountOutput `pulumi:"databricksGcpServiceAccount"` + // Delete the data access configuration regardless of its dependencies. // // `awsIamRole` optional configuration block for credential details for AWS: + ForceDestroy pulumi.BoolPtrOutput `pulumi:"forceDestroy"` + GcpServiceAccountKey MetastoreDataAccessGcpServiceAccountKeyPtrOutput `pulumi:"gcpServiceAccountKey"` + IsDefault pulumi.BoolPtrOutput `pulumi:"isDefault"` + // Unique identifier of the parent Metastore MetastoreId pulumi.StringOutput `pulumi:"metastoreId"` // Name of Data Access Configuration, which must be unique within the databricks_metastore. Change forces creation of a new resource. Name pulumi.StringOutput `pulumi:"name"` + // Username/groupname/sp applicationId of the data access configuration owner. + Owner pulumi.StringOutput `pulumi:"owner"` + ReadOnly pulumi.BoolPtrOutput `pulumi:"readOnly"` } // NewMetastoreDataAccess registers a new resource with the given unique name, arguments, and options. @@ -78,32 +83,42 @@ type metastoreDataAccessState struct { AwsIamRole *MetastoreDataAccessAwsIamRole `pulumi:"awsIamRole"` AzureManagedIdentity *MetastoreDataAccessAzureManagedIdentity `pulumi:"azureManagedIdentity"` AzureServicePrincipal *MetastoreDataAccessAzureServicePrincipal `pulumi:"azureServicePrincipal"` - ConfigurationType *string `pulumi:"configurationType"` + Comment *string `pulumi:"comment"` DatabricksGcpServiceAccount *MetastoreDataAccessDatabricksGcpServiceAccount `pulumi:"databricksGcpServiceAccount"` - GcpServiceAccountKey *MetastoreDataAccessGcpServiceAccountKey `pulumi:"gcpServiceAccountKey"` - IsDefault *bool `pulumi:"isDefault"` - // Unique identifier of the parent Metastore + // Delete the data access configuration regardless of its dependencies. 
// // `awsIamRole` optional configuration block for credential details for AWS: + ForceDestroy *bool `pulumi:"forceDestroy"` + GcpServiceAccountKey *MetastoreDataAccessGcpServiceAccountKey `pulumi:"gcpServiceAccountKey"` + IsDefault *bool `pulumi:"isDefault"` + // Unique identifier of the parent Metastore MetastoreId *string `pulumi:"metastoreId"` // Name of Data Access Configuration, which must be unique within the databricks_metastore. Change forces creation of a new resource. Name *string `pulumi:"name"` + // Username/groupname/sp applicationId of the data access configuration owner. + Owner *string `pulumi:"owner"` + ReadOnly *bool `pulumi:"readOnly"` } type MetastoreDataAccessState struct { AwsIamRole MetastoreDataAccessAwsIamRolePtrInput AzureManagedIdentity MetastoreDataAccessAzureManagedIdentityPtrInput AzureServicePrincipal MetastoreDataAccessAzureServicePrincipalPtrInput - ConfigurationType pulumi.StringPtrInput + Comment pulumi.StringPtrInput DatabricksGcpServiceAccount MetastoreDataAccessDatabricksGcpServiceAccountPtrInput - GcpServiceAccountKey MetastoreDataAccessGcpServiceAccountKeyPtrInput - IsDefault pulumi.BoolPtrInput - // Unique identifier of the parent Metastore + // Delete the data access configuration regardless of its dependencies. // // `awsIamRole` optional configuration block for credential details for AWS: + ForceDestroy pulumi.BoolPtrInput + GcpServiceAccountKey MetastoreDataAccessGcpServiceAccountKeyPtrInput + IsDefault pulumi.BoolPtrInput + // Unique identifier of the parent Metastore MetastoreId pulumi.StringPtrInput // Name of Data Access Configuration, which must be unique within the databricks_metastore. Change forces creation of a new resource. Name pulumi.StringPtrInput + // Username/groupname/sp applicationId of the data access configuration owner. + Owner pulumi.StringPtrInput + ReadOnly pulumi.BoolPtrInput } func (MetastoreDataAccessState) ElementType() reflect.Type { @@ -114,16 +129,21 @@ type metastoreDataAccessArgs struct { AwsIamRole *MetastoreDataAccessAwsIamRole `pulumi:"awsIamRole"` AzureManagedIdentity *MetastoreDataAccessAzureManagedIdentity `pulumi:"azureManagedIdentity"` AzureServicePrincipal *MetastoreDataAccessAzureServicePrincipal `pulumi:"azureServicePrincipal"` - ConfigurationType *string `pulumi:"configurationType"` + Comment *string `pulumi:"comment"` DatabricksGcpServiceAccount *MetastoreDataAccessDatabricksGcpServiceAccount `pulumi:"databricksGcpServiceAccount"` - GcpServiceAccountKey *MetastoreDataAccessGcpServiceAccountKey `pulumi:"gcpServiceAccountKey"` - IsDefault *bool `pulumi:"isDefault"` - // Unique identifier of the parent Metastore + // Delete the data access configuration regardless of its dependencies. // // `awsIamRole` optional configuration block for credential details for AWS: + ForceDestroy *bool `pulumi:"forceDestroy"` + GcpServiceAccountKey *MetastoreDataAccessGcpServiceAccountKey `pulumi:"gcpServiceAccountKey"` + IsDefault *bool `pulumi:"isDefault"` + // Unique identifier of the parent Metastore MetastoreId string `pulumi:"metastoreId"` // Name of Data Access Configuration, which must be unique within the databricks_metastore. Change forces creation of a new resource. Name *string `pulumi:"name"` + // Username/groupname/sp applicationId of the data access configuration owner. + Owner *string `pulumi:"owner"` + ReadOnly *bool `pulumi:"readOnly"` } // The set of arguments for constructing a MetastoreDataAccess resource. 
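For orientation, a minimal Pulumi Go sketch of the regenerated arguments above: comment, forceDestroy, owner, and readOnly are the new optional fields, and configurationType is gone. The resource name, metastore ID, and owner value are placeholders, not values taken from this change:

package main

import (
	"github.com/pulumi/pulumi-databricks/sdk/go/databricks"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		// Hypothetical data access configuration exercising the new optional args.
		_, err := databricks.NewMetastoreDataAccess(ctx, "example", &databricks.MetastoreDataAccessArgs{
			MetastoreId:  pulumi.String("11111111-2222-3333-4444-555555555555"), // placeholder UUID
			Name:         pulumi.String("primary-credential"),
			Comment:      pulumi.String("managed by pulumi"), // new optional field
			ForceDestroy: pulumi.Bool(true),                  // new: delete regardless of dependencies
			Owner:        pulumi.String("account-admins"),    // new: user/group/SP application_id
			ReadOnly:     pulumi.Bool(false),                 // new optional field
		})
		return err
	})
}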
@@ -131,16 +151,21 @@ type MetastoreDataAccessArgs struct { AwsIamRole MetastoreDataAccessAwsIamRolePtrInput AzureManagedIdentity MetastoreDataAccessAzureManagedIdentityPtrInput AzureServicePrincipal MetastoreDataAccessAzureServicePrincipalPtrInput - ConfigurationType pulumi.StringPtrInput + Comment pulumi.StringPtrInput DatabricksGcpServiceAccount MetastoreDataAccessDatabricksGcpServiceAccountPtrInput - GcpServiceAccountKey MetastoreDataAccessGcpServiceAccountKeyPtrInput - IsDefault pulumi.BoolPtrInput - // Unique identifier of the parent Metastore + // Delete the data access configuration regardless of its dependencies. // // `awsIamRole` optional configuration block for credential details for AWS: + ForceDestroy pulumi.BoolPtrInput + GcpServiceAccountKey MetastoreDataAccessGcpServiceAccountKeyPtrInput + IsDefault pulumi.BoolPtrInput + // Unique identifier of the parent Metastore MetastoreId pulumi.StringInput // Name of Data Access Configuration, which must be unique within the databricks_metastore. Change forces creation of a new resource. Name pulumi.StringPtrInput + // Username/groupname/sp applicationId of the data access configuration owner. + Owner pulumi.StringPtrInput + ReadOnly pulumi.BoolPtrInput } func (MetastoreDataAccessArgs) ElementType() reflect.Type { @@ -270,14 +295,21 @@ func (o MetastoreDataAccessOutput) AzureServicePrincipal() MetastoreDataAccessAz }).(MetastoreDataAccessAzureServicePrincipalPtrOutput) } -func (o MetastoreDataAccessOutput) ConfigurationType() pulumi.StringOutput { - return o.ApplyT(func(v *MetastoreDataAccess) pulumi.StringOutput { return v.ConfigurationType }).(pulumi.StringOutput) +func (o MetastoreDataAccessOutput) Comment() pulumi.StringPtrOutput { + return o.ApplyT(func(v *MetastoreDataAccess) pulumi.StringPtrOutput { return v.Comment }).(pulumi.StringPtrOutput) } -func (o MetastoreDataAccessOutput) DatabricksGcpServiceAccount() MetastoreDataAccessDatabricksGcpServiceAccountPtrOutput { - return o.ApplyT(func(v *MetastoreDataAccess) MetastoreDataAccessDatabricksGcpServiceAccountPtrOutput { +func (o MetastoreDataAccessOutput) DatabricksGcpServiceAccount() MetastoreDataAccessDatabricksGcpServiceAccountOutput { + return o.ApplyT(func(v *MetastoreDataAccess) MetastoreDataAccessDatabricksGcpServiceAccountOutput { return v.DatabricksGcpServiceAccount - }).(MetastoreDataAccessDatabricksGcpServiceAccountPtrOutput) + }).(MetastoreDataAccessDatabricksGcpServiceAccountOutput) +} + +// Delete the data access configuration regardless of its dependencies. 
+// +// `awsIamRole` optional configuration block for credential details for AWS: +func (o MetastoreDataAccessOutput) ForceDestroy() pulumi.BoolPtrOutput { + return o.ApplyT(func(v *MetastoreDataAccess) pulumi.BoolPtrOutput { return v.ForceDestroy }).(pulumi.BoolPtrOutput) } func (o MetastoreDataAccessOutput) GcpServiceAccountKey() MetastoreDataAccessGcpServiceAccountKeyPtrOutput { @@ -291,8 +323,6 @@ func (o MetastoreDataAccessOutput) IsDefault() pulumi.BoolPtrOutput { } // Unique identifier of the parent Metastore -// -// `awsIamRole` optional configuration block for credential details for AWS: func (o MetastoreDataAccessOutput) MetastoreId() pulumi.StringOutput { return o.ApplyT(func(v *MetastoreDataAccess) pulumi.StringOutput { return v.MetastoreId }).(pulumi.StringOutput) } @@ -302,6 +332,15 @@ func (o MetastoreDataAccessOutput) Name() pulumi.StringOutput { return o.ApplyT(func(v *MetastoreDataAccess) pulumi.StringOutput { return v.Name }).(pulumi.StringOutput) } +// Username/groupname/sp applicationId of the data access configuration owner. +func (o MetastoreDataAccessOutput) Owner() pulumi.StringOutput { + return o.ApplyT(func(v *MetastoreDataAccess) pulumi.StringOutput { return v.Owner }).(pulumi.StringOutput) +} + +func (o MetastoreDataAccessOutput) ReadOnly() pulumi.BoolPtrOutput { + return o.ApplyT(func(v *MetastoreDataAccess) pulumi.BoolPtrOutput { return v.ReadOnly }).(pulumi.BoolPtrOutput) +} + type MetastoreDataAccessArrayOutput struct{ *pulumi.OutputState } func (MetastoreDataAccessArrayOutput) ElementType() reflect.Type { diff --git a/sdk/go/databricks/mlflowModel.go b/sdk/go/databricks/mlflowModel.go index 9d2646dd..0f699fe3 100644 --- a/sdk/go/databricks/mlflowModel.go +++ b/sdk/go/databricks/mlflowModel.go @@ -82,7 +82,8 @@ type MlflowModel struct { Description pulumi.StringPtrOutput `pulumi:"description"` LastUpdatedTimestamp pulumi.IntPtrOutput `pulumi:"lastUpdatedTimestamp"` // Name of MLflow model. Change of name triggers new resource. - Name pulumi.StringOutput `pulumi:"name"` + Name pulumi.StringOutput `pulumi:"name"` + RegisteredModelId pulumi.StringOutput `pulumi:"registeredModelId"` // Tags for the MLflow model. Tags MlflowModelTagArrayOutput `pulumi:"tags"` UserId pulumi.StringPtrOutput `pulumi:"userId"` @@ -123,7 +124,8 @@ type mlflowModelState struct { Description *string `pulumi:"description"` LastUpdatedTimestamp *int `pulumi:"lastUpdatedTimestamp"` // Name of MLflow model. Change of name triggers new resource. - Name *string `pulumi:"name"` + Name *string `pulumi:"name"` + RegisteredModelId *string `pulumi:"registeredModelId"` // Tags for the MLflow model. Tags []MlflowModelTag `pulumi:"tags"` UserId *string `pulumi:"userId"` @@ -135,7 +137,8 @@ type MlflowModelState struct { Description pulumi.StringPtrInput LastUpdatedTimestamp pulumi.IntPtrInput // Name of MLflow model. Change of name triggers new resource. - Name pulumi.StringPtrInput + Name pulumi.StringPtrInput + RegisteredModelId pulumi.StringPtrInput // Tags for the MLflow model. Tags MlflowModelTagArrayInput UserId pulumi.StringPtrInput @@ -299,6 +302,10 @@ func (o MlflowModelOutput) Name() pulumi.StringOutput { return o.ApplyT(func(v *MlflowModel) pulumi.StringOutput { return v.Name }).(pulumi.StringOutput) } +func (o MlflowModelOutput) RegisteredModelId() pulumi.StringOutput { + return o.ApplyT(func(v *MlflowModel) pulumi.StringOutput { return v.RegisteredModelId }).(pulumi.StringOutput) +} + // Tags for the MLflow model. 
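The mlflowModel.go change above only adds a computed output; a short fragment (same imports and *pulumi.Context as the sketch earlier, model name is a placeholder) showing how it could be consumed:

model, err := databricks.NewMlflowModel(ctx, "example", &databricks.MlflowModelArgs{
	Name: pulumi.String("my-model"),
})
if err != nil {
	return err
}
// registered_model_id is now surfaced as a resource output.
ctx.Export("registeredModelId", model.RegisteredModelId)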
func (o MlflowModelOutput) Tags() MlflowModelTagArrayOutput { return o.ApplyT(func(v *MlflowModel) MlflowModelTagArrayOutput { return v.Tags }).(MlflowModelTagArrayOutput) diff --git a/sdk/go/databricks/modelServing.go b/sdk/go/databricks/modelServing.go index c5386d01..44b5628f 100644 --- a/sdk/go/databricks/modelServing.go +++ b/sdk/go/databricks/modelServing.go @@ -101,7 +101,8 @@ type ModelServing struct { // The name of the model serving endpoint. This field is required and must be unique across a workspace. An endpoint name can consist of alphanumeric characters, dashes, and underscores. NOTE: Changing this name will delete the existing endpoint and create a new endpoint with the update name. Name pulumi.StringOutput `pulumi:"name"` // Unique identifier of the serving endpoint primarily used to set permissions and refer to this instance for other operations. - ServingEndpointId pulumi.StringOutput `pulumi:"servingEndpointId"` + ServingEndpointId pulumi.StringOutput `pulumi:"servingEndpointId"` + Tags ModelServingTagArrayOutput `pulumi:"tags"` } // NewModelServing registers a new resource with the given unique name, arguments, and options. @@ -142,7 +143,8 @@ type modelServingState struct { // The name of the model serving endpoint. This field is required and must be unique across a workspace. An endpoint name can consist of alphanumeric characters, dashes, and underscores. NOTE: Changing this name will delete the existing endpoint and create a new endpoint with the update name. Name *string `pulumi:"name"` // Unique identifier of the serving endpoint primarily used to set permissions and refer to this instance for other operations. - ServingEndpointId *string `pulumi:"servingEndpointId"` + ServingEndpointId *string `pulumi:"servingEndpointId"` + Tags []ModelServingTag `pulumi:"tags"` } type ModelServingState struct { @@ -152,6 +154,7 @@ type ModelServingState struct { Name pulumi.StringPtrInput // Unique identifier of the serving endpoint primarily used to set permissions and refer to this instance for other operations. ServingEndpointId pulumi.StringPtrInput + Tags ModelServingTagArrayInput } func (ModelServingState) ElementType() reflect.Type { @@ -162,7 +165,8 @@ type modelServingArgs struct { // The model serving endpoint configuration. Config ModelServingConfig `pulumi:"config"` // The name of the model serving endpoint. This field is required and must be unique across a workspace. An endpoint name can consist of alphanumeric characters, dashes, and underscores. NOTE: Changing this name will delete the existing endpoint and create a new endpoint with the update name. - Name *string `pulumi:"name"` + Name *string `pulumi:"name"` + Tags []ModelServingTag `pulumi:"tags"` } // The set of arguments for constructing a ModelServing resource. @@ -171,6 +175,7 @@ type ModelServingArgs struct { Config ModelServingConfigInput // The name of the model serving endpoint. This field is required and must be unique across a workspace. An endpoint name can consist of alphanumeric characters, dashes, and underscores. NOTE: Changing this name will delete the existing endpoint and create a new endpoint with the update name. 
Name pulumi.StringPtrInput + Tags ModelServingTagArrayInput } func (ModelServingArgs) ElementType() reflect.Type { @@ -299,6 +304,10 @@ func (o ModelServingOutput) ServingEndpointId() pulumi.StringOutput { return o.ApplyT(func(v *ModelServing) pulumi.StringOutput { return v.ServingEndpointId }).(pulumi.StringOutput) } +func (o ModelServingOutput) Tags() ModelServingTagArrayOutput { + return o.ApplyT(func(v *ModelServing) ModelServingTagArrayOutput { return v.Tags }).(ModelServingTagArrayOutput) +} + type ModelServingArrayOutput struct{ *pulumi.OutputState } func (ModelServingArrayOutput) ElementType() reflect.Type { diff --git a/sdk/go/databricks/pulumiTypes.go b/sdk/go/databricks/pulumiTypes.go index ef006916..7c6c658e 100644 --- a/sdk/go/databricks/pulumiTypes.go +++ b/sdk/go/databricks/pulumiTypes.go @@ -32375,6 +32375,7 @@ func (o MetastoreDataAccessAzureServicePrincipalPtrOutput) DirectoryId() pulumi. } type MetastoreDataAccessDatabricksGcpServiceAccount struct { + CredentialId *string `pulumi:"credentialId"` // The email of the GCP service account created, to be granted access to relevant buckets. // // `azureServicePrincipal` optional configuration block for credential details for Azure (Legacy): @@ -32393,6 +32394,7 @@ type MetastoreDataAccessDatabricksGcpServiceAccountInput interface { } type MetastoreDataAccessDatabricksGcpServiceAccountArgs struct { + CredentialId pulumi.StringPtrInput `pulumi:"credentialId"` // The email of the GCP service account created, to be granted access to relevant buckets. // // `azureServicePrincipal` optional configuration block for credential details for Azure (Legacy): @@ -32494,6 +32496,10 @@ func (o MetastoreDataAccessDatabricksGcpServiceAccountOutput) ToOutput(ctx conte } } +func (o MetastoreDataAccessDatabricksGcpServiceAccountOutput) CredentialId() pulumi.StringPtrOutput { + return o.ApplyT(func(v MetastoreDataAccessDatabricksGcpServiceAccount) *string { return v.CredentialId }).(pulumi.StringPtrOutput) +} + // The email of the GCP service account created, to be granted access to relevant buckets. // // `azureServicePrincipal` optional configuration block for credential details for Azure (Legacy): @@ -32531,6 +32537,15 @@ func (o MetastoreDataAccessDatabricksGcpServiceAccountPtrOutput) Elem() Metastor }).(MetastoreDataAccessDatabricksGcpServiceAccountOutput) } +func (o MetastoreDataAccessDatabricksGcpServiceAccountPtrOutput) CredentialId() pulumi.StringPtrOutput { + return o.ApplyT(func(v *MetastoreDataAccessDatabricksGcpServiceAccount) *string { + if v == nil { + return nil + } + return v.CredentialId + }).(pulumi.StringPtrOutput) +} + // The email of the GCP service account created, to be granted access to relevant buckets. // // `azureServicePrincipal` optional configuration block for credential details for Azure (Legacy): @@ -33929,6 +33944,130 @@ func (o ModelServingConfigTrafficConfigRouteArrayOutput) Index(i pulumi.IntInput }).(ModelServingConfigTrafficConfigRouteOutput) } +type ModelServingTag struct { + Key string `pulumi:"key"` + Value *string `pulumi:"value"` +} + +// ModelServingTagInput is an input type that accepts ModelServingTagArgs and ModelServingTagOutput values. 
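A fragment (same assumptions as the sketches above; endpoint, model, and tag values are illustrative, and the config block is unchanged by this diff, shown only to make the example complete) wiring the new tags argument through the generated ModelServingTagArray type:

_, err := databricks.NewModelServing(ctx, "example", &databricks.ModelServingArgs{
	Name: pulumi.String("my-endpoint"),
	Config: &databricks.ModelServingConfigArgs{
		ServedModels: databricks.ModelServingConfigServedModelArray{
			&databricks.ModelServingConfigServedModelArgs{
				ModelName:          pulumi.String("my-model"),
				ModelVersion:       pulumi.String("1"),
				WorkloadSize:       pulumi.String("Small"),
				ScaleToZeroEnabled: pulumi.Bool(true),
			},
		},
	},
	// New endpoint-level tags: key is required, value is optional.
	Tags: databricks.ModelServingTagArray{
		&databricks.ModelServingTagArgs{
			Key:   pulumi.String("team"),
			Value: pulumi.String("ml-platform"),
		},
	},
})
if err != nil {
	return err
}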
+// You can construct a concrete instance of `ModelServingTagInput` via: +// +// ModelServingTagArgs{...} +type ModelServingTagInput interface { + pulumi.Input + + ToModelServingTagOutput() ModelServingTagOutput + ToModelServingTagOutputWithContext(context.Context) ModelServingTagOutput +} + +type ModelServingTagArgs struct { + Key pulumi.StringInput `pulumi:"key"` + Value pulumi.StringPtrInput `pulumi:"value"` +} + +func (ModelServingTagArgs) ElementType() reflect.Type { + return reflect.TypeOf((*ModelServingTag)(nil)).Elem() +} + +func (i ModelServingTagArgs) ToModelServingTagOutput() ModelServingTagOutput { + return i.ToModelServingTagOutputWithContext(context.Background()) +} + +func (i ModelServingTagArgs) ToModelServingTagOutputWithContext(ctx context.Context) ModelServingTagOutput { + return pulumi.ToOutputWithContext(ctx, i).(ModelServingTagOutput) +} + +func (i ModelServingTagArgs) ToOutput(ctx context.Context) pulumix.Output[ModelServingTag] { + return pulumix.Output[ModelServingTag]{ + OutputState: i.ToModelServingTagOutputWithContext(ctx).OutputState, + } +} + +// ModelServingTagArrayInput is an input type that accepts ModelServingTagArray and ModelServingTagArrayOutput values. +// You can construct a concrete instance of `ModelServingTagArrayInput` via: +// +// ModelServingTagArray{ ModelServingTagArgs{...} } +type ModelServingTagArrayInput interface { + pulumi.Input + + ToModelServingTagArrayOutput() ModelServingTagArrayOutput + ToModelServingTagArrayOutputWithContext(context.Context) ModelServingTagArrayOutput +} + +type ModelServingTagArray []ModelServingTagInput + +func (ModelServingTagArray) ElementType() reflect.Type { + return reflect.TypeOf((*[]ModelServingTag)(nil)).Elem() +} + +func (i ModelServingTagArray) ToModelServingTagArrayOutput() ModelServingTagArrayOutput { + return i.ToModelServingTagArrayOutputWithContext(context.Background()) +} + +func (i ModelServingTagArray) ToModelServingTagArrayOutputWithContext(ctx context.Context) ModelServingTagArrayOutput { + return pulumi.ToOutputWithContext(ctx, i).(ModelServingTagArrayOutput) +} + +func (i ModelServingTagArray) ToOutput(ctx context.Context) pulumix.Output[[]ModelServingTag] { + return pulumix.Output[[]ModelServingTag]{ + OutputState: i.ToModelServingTagArrayOutputWithContext(ctx).OutputState, + } +} + +type ModelServingTagOutput struct{ *pulumi.OutputState } + +func (ModelServingTagOutput) ElementType() reflect.Type { + return reflect.TypeOf((*ModelServingTag)(nil)).Elem() +} + +func (o ModelServingTagOutput) ToModelServingTagOutput() ModelServingTagOutput { + return o +} + +func (o ModelServingTagOutput) ToModelServingTagOutputWithContext(ctx context.Context) ModelServingTagOutput { + return o +} + +func (o ModelServingTagOutput) ToOutput(ctx context.Context) pulumix.Output[ModelServingTag] { + return pulumix.Output[ModelServingTag]{ + OutputState: o.OutputState, + } +} + +func (o ModelServingTagOutput) Key() pulumi.StringOutput { + return o.ApplyT(func(v ModelServingTag) string { return v.Key }).(pulumi.StringOutput) +} + +func (o ModelServingTagOutput) Value() pulumi.StringPtrOutput { + return o.ApplyT(func(v ModelServingTag) *string { return v.Value }).(pulumi.StringPtrOutput) +} + +type ModelServingTagArrayOutput struct{ *pulumi.OutputState } + +func (ModelServingTagArrayOutput) ElementType() reflect.Type { + return reflect.TypeOf((*[]ModelServingTag)(nil)).Elem() +} + +func (o ModelServingTagArrayOutput) ToModelServingTagArrayOutput() ModelServingTagArrayOutput { + return o +} + +func (o 
ModelServingTagArrayOutput) ToModelServingTagArrayOutputWithContext(ctx context.Context) ModelServingTagArrayOutput { + return o +} + +func (o ModelServingTagArrayOutput) ToOutput(ctx context.Context) pulumix.Output[[]ModelServingTag] { + return pulumix.Output[[]ModelServingTag]{ + OutputState: o.OutputState, + } +} + +func (o ModelServingTagArrayOutput) Index(i pulumi.IntInput) ModelServingTagOutput { + return pulumi.All(o, i).ApplyT(func(vs []interface{}) ModelServingTag { + return vs[0].([]ModelServingTag)[vs[1].(int)] + }).(ModelServingTagOutput) +} + type MountAbfs struct { ClientId string `pulumi:"clientId"` ClientSecretKey string `pulumi:"clientSecretKey"` @@ -47747,7 +47886,7 @@ type StorageCredentialAzureManagedIdentity struct { CredentialId *string `pulumi:"credentialId"` // The Resource ID of the Azure User Assigned Managed Identity associated with Azure Databricks Access Connector, of the form `/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg-name/providers/Microsoft.ManagedIdentity/userAssignedIdentities/user-managed-identity-name`. // - // `azureServicePrincipal` optional configuration block to use service principal as credential details for Azure: + // `databricksGcpServiceAccount` optional configuration block for creating a Databricks-managed GCP Service Account: ManagedIdentityId *string `pulumi:"managedIdentityId"` } @@ -47768,7 +47907,7 @@ type StorageCredentialAzureManagedIdentityArgs struct { CredentialId pulumi.StringPtrInput `pulumi:"credentialId"` // The Resource ID of the Azure User Assigned Managed Identity associated with Azure Databricks Access Connector, of the form `/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg-name/providers/Microsoft.ManagedIdentity/userAssignedIdentities/user-managed-identity-name`. // - // `azureServicePrincipal` optional configuration block to use service principal as credential details for Azure: + // `databricksGcpServiceAccount` optional configuration block for creating a Databricks-managed GCP Service Account: ManagedIdentityId pulumi.StringPtrInput `pulumi:"managedIdentityId"` } @@ -47878,7 +48017,7 @@ func (o StorageCredentialAzureManagedIdentityOutput) CredentialId() pulumi.Strin // The Resource ID of the Azure User Assigned Managed Identity associated with Azure Databricks Access Connector, of the form `/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg-name/providers/Microsoft.ManagedIdentity/userAssignedIdentities/user-managed-identity-name`. // -// `azureServicePrincipal` optional configuration block to use service principal as credential details for Azure: +// `databricksGcpServiceAccount` optional configuration block for creating a Databricks-managed GCP Service Account: func (o StorageCredentialAzureManagedIdentityOutput) ManagedIdentityId() pulumi.StringPtrOutput { return o.ApplyT(func(v StorageCredentialAzureManagedIdentity) *string { return v.ManagedIdentityId }).(pulumi.StringPtrOutput) } @@ -47934,7 +48073,7 @@ func (o StorageCredentialAzureManagedIdentityPtrOutput) CredentialId() pulumi.St // The Resource ID of the Azure User Assigned Managed Identity associated with Azure Databricks Access Connector, of the form `/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg-name/providers/Microsoft.ManagedIdentity/userAssignedIdentities/user-managed-identity-name`. 
// -// `azureServicePrincipal` optional configuration block to use service principal as credential details for Azure: +// `databricksGcpServiceAccount` optional configuration block for creating a Databricks-managed GCP Service Account: func (o StorageCredentialAzureManagedIdentityPtrOutput) ManagedIdentityId() pulumi.StringPtrOutput { return o.ApplyT(func(v *StorageCredentialAzureManagedIdentity) *string { if v == nil { @@ -47948,8 +48087,6 @@ type StorageCredentialAzureServicePrincipal struct { // The application ID of the application registration within the referenced AAD tenant ApplicationId string `pulumi:"applicationId"` // The client secret generated for the above app ID in AAD. **This field is redacted on output** - // - // `databricksGcpServiceAccount` optional configuration block for creating a Databricks-managed GCP Service Account: ClientSecret string `pulumi:"clientSecret"` // The directory ID corresponding to the Azure Active Directory (AAD) tenant of the application DirectoryId string `pulumi:"directoryId"` @@ -47970,8 +48107,6 @@ type StorageCredentialAzureServicePrincipalArgs struct { // The application ID of the application registration within the referenced AAD tenant ApplicationId pulumi.StringInput `pulumi:"applicationId"` // The client secret generated for the above app ID in AAD. **This field is redacted on output** - // - // `databricksGcpServiceAccount` optional configuration block for creating a Databricks-managed GCP Service Account: ClientSecret pulumi.StringInput `pulumi:"clientSecret"` // The directory ID corresponding to the Azure Active Directory (AAD) tenant of the application DirectoryId pulumi.StringInput `pulumi:"directoryId"` @@ -48078,8 +48213,6 @@ func (o StorageCredentialAzureServicePrincipalOutput) ApplicationId() pulumi.Str } // The client secret generated for the above app ID in AAD. **This field is redacted on output** -// -// `databricksGcpServiceAccount` optional configuration block for creating a Databricks-managed GCP Service Account: func (o StorageCredentialAzureServicePrincipalOutput) ClientSecret() pulumi.StringOutput { return o.ApplyT(func(v StorageCredentialAzureServicePrincipal) string { return v.ClientSecret }).(pulumi.StringOutput) } @@ -48130,8 +48263,6 @@ func (o StorageCredentialAzureServicePrincipalPtrOutput) ApplicationId() pulumi. } // The client secret generated for the above app ID in AAD. **This field is redacted on output** -// -// `databricksGcpServiceAccount` optional configuration block for creating a Databricks-managed GCP Service Account: func (o StorageCredentialAzureServicePrincipalPtrOutput) ClientSecret() pulumi.StringPtrOutput { return o.ApplyT(func(v *StorageCredentialAzureServicePrincipal) *string { if v == nil { @@ -48152,6 +48283,7 @@ func (o StorageCredentialAzureServicePrincipalPtrOutput) DirectoryId() pulumi.St } type StorageCredentialDatabricksGcpServiceAccount struct { + CredentialId *string `pulumi:"credentialId"` // The email of the GCP service account created, to be granted access to relevant buckets. Email *string `pulumi:"email"` } @@ -48168,6 +48300,7 @@ type StorageCredentialDatabricksGcpServiceAccountInput interface { } type StorageCredentialDatabricksGcpServiceAccountArgs struct { + CredentialId pulumi.StringPtrInput `pulumi:"credentialId"` // The email of the GCP service account created, to be granted access to relevant buckets. 
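A fragment (same assumptions as above; the credential name is a placeholder) reading the newly exposed credentialId from a storage credential's Databricks-managed GCP service account block:

cred, err := databricks.NewStorageCredential(ctx, "gcp", &databricks.StorageCredentialArgs{
	Name: pulumi.String("gcp-managed-sa"),
	// Empty block requests a Databricks-managed GCP service account.
	DatabricksGcpServiceAccount: &databricks.StorageCredentialDatabricksGcpServiceAccountArgs{},
})
if err != nil {
	return err
}
// credential_id and email are populated by the platform after creation.
ctx.Export("credentialId", cred.DatabricksGcpServiceAccount.CredentialId())
ctx.Export("serviceAccountEmail", cred.DatabricksGcpServiceAccount.Email())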
Email pulumi.StringPtrInput `pulumi:"email"` } @@ -48267,6 +48400,10 @@ func (o StorageCredentialDatabricksGcpServiceAccountOutput) ToOutput(ctx context } } +func (o StorageCredentialDatabricksGcpServiceAccountOutput) CredentialId() pulumi.StringPtrOutput { + return o.ApplyT(func(v StorageCredentialDatabricksGcpServiceAccount) *string { return v.CredentialId }).(pulumi.StringPtrOutput) +} + // The email of the GCP service account created, to be granted access to relevant buckets. func (o StorageCredentialDatabricksGcpServiceAccountOutput) Email() pulumi.StringPtrOutput { return o.ApplyT(func(v StorageCredentialDatabricksGcpServiceAccount) *string { return v.Email }).(pulumi.StringPtrOutput) @@ -48302,6 +48439,15 @@ func (o StorageCredentialDatabricksGcpServiceAccountPtrOutput) Elem() StorageCre }).(StorageCredentialDatabricksGcpServiceAccountOutput) } +func (o StorageCredentialDatabricksGcpServiceAccountPtrOutput) CredentialId() pulumi.StringPtrOutput { + return o.ApplyT(func(v *StorageCredentialDatabricksGcpServiceAccount) *string { + if v == nil { + return nil + } + return v.CredentialId + }).(pulumi.StringPtrOutput) +} + // The email of the GCP service account created, to be granted access to relevant buckets. func (o StorageCredentialDatabricksGcpServiceAccountPtrOutput) Email() pulumi.StringPtrOutput { return o.ApplyT(func(v *StorageCredentialDatabricksGcpServiceAccount) *string { @@ -82241,6 +82387,8 @@ func init() { pulumi.RegisterInputType(reflect.TypeOf((*ModelServingConfigTrafficConfigPtrInput)(nil)).Elem(), ModelServingConfigTrafficConfigArgs{}) pulumi.RegisterInputType(reflect.TypeOf((*ModelServingConfigTrafficConfigRouteInput)(nil)).Elem(), ModelServingConfigTrafficConfigRouteArgs{}) pulumi.RegisterInputType(reflect.TypeOf((*ModelServingConfigTrafficConfigRouteArrayInput)(nil)).Elem(), ModelServingConfigTrafficConfigRouteArray{}) + pulumi.RegisterInputType(reflect.TypeOf((*ModelServingTagInput)(nil)).Elem(), ModelServingTagArgs{}) + pulumi.RegisterInputType(reflect.TypeOf((*ModelServingTagArrayInput)(nil)).Elem(), ModelServingTagArray{}) pulumi.RegisterInputType(reflect.TypeOf((*MountAbfsInput)(nil)).Elem(), MountAbfsArgs{}) pulumi.RegisterInputType(reflect.TypeOf((*MountAbfsPtrInput)(nil)).Elem(), MountAbfsArgs{}) pulumi.RegisterInputType(reflect.TypeOf((*MountAdlInput)(nil)).Elem(), MountAdlArgs{}) @@ -83106,6 +83254,8 @@ func init() { pulumi.RegisterOutputType(ModelServingConfigTrafficConfigPtrOutput{}) pulumi.RegisterOutputType(ModelServingConfigTrafficConfigRouteOutput{}) pulumi.RegisterOutputType(ModelServingConfigTrafficConfigRouteArrayOutput{}) + pulumi.RegisterOutputType(ModelServingTagOutput{}) + pulumi.RegisterOutputType(ModelServingTagArrayOutput{}) pulumi.RegisterOutputType(MountAbfsOutput{}) pulumi.RegisterOutputType(MountAbfsPtrOutput{}) pulumi.RegisterOutputType(MountAdlOutput{}) diff --git a/sdk/go/databricks/storageCredential.go b/sdk/go/databricks/storageCredential.go index 6a675ce1..12661522 100644 --- a/sdk/go/databricks/storageCredential.go +++ b/sdk/go/databricks/storageCredential.go @@ -162,17 +162,20 @@ type StorageCredential struct { AzureServicePrincipal StorageCredentialAzureServicePrincipalPtrOutput `pulumi:"azureServicePrincipal"` Comment pulumi.StringPtrOutput `pulumi:"comment"` DatabricksGcpServiceAccount StorageCredentialDatabricksGcpServiceAccountOutput `pulumi:"databricksGcpServiceAccount"` - ForceDestroy pulumi.BoolPtrOutput `pulumi:"forceDestroy"` - GcpServiceAccountKey StorageCredentialGcpServiceAccountKeyPtrOutput 
`pulumi:"gcpServiceAccountKey"` + // Delete storage credential regardless of its dependencies. + // + // `awsIamRole` optional configuration block for credential details for AWS: + ForceDestroy pulumi.BoolPtrOutput `pulumi:"forceDestroy"` + GcpServiceAccountKey StorageCredentialGcpServiceAccountKeyPtrOutput `pulumi:"gcpServiceAccountKey"` // Unique identifier of the parent Metastore MetastoreId pulumi.StringOutput `pulumi:"metastoreId"` // Name of Storage Credentials, which must be unique within the databricks_metastore. Change forces creation of a new resource. Name pulumi.StringOutput `pulumi:"name"` // Username/groupname/sp applicationId of the storage credential owner. - // - // `awsIamRole` optional configuration block for credential details for AWS: Owner pulumi.StringOutput `pulumi:"owner"` // Indicates whether the storage credential is only usable for read operations. + // + // `azureServicePrincipal` optional configuration block to use service principal as credential details for Azure (Legacy): ReadOnly pulumi.BoolPtrOutput `pulumi:"readOnly"` } @@ -211,17 +214,20 @@ type storageCredentialState struct { AzureServicePrincipal *StorageCredentialAzureServicePrincipal `pulumi:"azureServicePrincipal"` Comment *string `pulumi:"comment"` DatabricksGcpServiceAccount *StorageCredentialDatabricksGcpServiceAccount `pulumi:"databricksGcpServiceAccount"` - ForceDestroy *bool `pulumi:"forceDestroy"` - GcpServiceAccountKey *StorageCredentialGcpServiceAccountKey `pulumi:"gcpServiceAccountKey"` + // Delete storage credential regardless of its dependencies. + // + // `awsIamRole` optional configuration block for credential details for AWS: + ForceDestroy *bool `pulumi:"forceDestroy"` + GcpServiceAccountKey *StorageCredentialGcpServiceAccountKey `pulumi:"gcpServiceAccountKey"` // Unique identifier of the parent Metastore MetastoreId *string `pulumi:"metastoreId"` // Name of Storage Credentials, which must be unique within the databricks_metastore. Change forces creation of a new resource. Name *string `pulumi:"name"` // Username/groupname/sp applicationId of the storage credential owner. - // - // `awsIamRole` optional configuration block for credential details for AWS: Owner *string `pulumi:"owner"` // Indicates whether the storage credential is only usable for read operations. + // + // `azureServicePrincipal` optional configuration block to use service principal as credential details for Azure (Legacy): ReadOnly *bool `pulumi:"readOnly"` } @@ -231,17 +237,20 @@ type StorageCredentialState struct { AzureServicePrincipal StorageCredentialAzureServicePrincipalPtrInput Comment pulumi.StringPtrInput DatabricksGcpServiceAccount StorageCredentialDatabricksGcpServiceAccountPtrInput - ForceDestroy pulumi.BoolPtrInput - GcpServiceAccountKey StorageCredentialGcpServiceAccountKeyPtrInput + // Delete storage credential regardless of its dependencies. + // + // `awsIamRole` optional configuration block for credential details for AWS: + ForceDestroy pulumi.BoolPtrInput + GcpServiceAccountKey StorageCredentialGcpServiceAccountKeyPtrInput // Unique identifier of the parent Metastore MetastoreId pulumi.StringPtrInput // Name of Storage Credentials, which must be unique within the databricks_metastore. Change forces creation of a new resource. Name pulumi.StringPtrInput // Username/groupname/sp applicationId of the storage credential owner. 
- // - // `awsIamRole` optional configuration block for credential details for AWS: Owner pulumi.StringPtrInput // Indicates whether the storage credential is only usable for read operations. + // + // `azureServicePrincipal` optional configuration block to use service principal as credential details for Azure (Legacy): ReadOnly pulumi.BoolPtrInput } @@ -255,17 +264,20 @@ type storageCredentialArgs struct { AzureServicePrincipal *StorageCredentialAzureServicePrincipal `pulumi:"azureServicePrincipal"` Comment *string `pulumi:"comment"` DatabricksGcpServiceAccount *StorageCredentialDatabricksGcpServiceAccount `pulumi:"databricksGcpServiceAccount"` - ForceDestroy *bool `pulumi:"forceDestroy"` - GcpServiceAccountKey *StorageCredentialGcpServiceAccountKey `pulumi:"gcpServiceAccountKey"` + // Delete storage credential regardless of its dependencies. + // + // `awsIamRole` optional configuration block for credential details for AWS: + ForceDestroy *bool `pulumi:"forceDestroy"` + GcpServiceAccountKey *StorageCredentialGcpServiceAccountKey `pulumi:"gcpServiceAccountKey"` // Unique identifier of the parent Metastore MetastoreId *string `pulumi:"metastoreId"` // Name of Storage Credentials, which must be unique within the databricks_metastore. Change forces creation of a new resource. Name *string `pulumi:"name"` // Username/groupname/sp applicationId of the storage credential owner. - // - // `awsIamRole` optional configuration block for credential details for AWS: Owner *string `pulumi:"owner"` // Indicates whether the storage credential is only usable for read operations. + // + // `azureServicePrincipal` optional configuration block to use service principal as credential details for Azure (Legacy): ReadOnly *bool `pulumi:"readOnly"` } @@ -276,17 +288,20 @@ type StorageCredentialArgs struct { AzureServicePrincipal StorageCredentialAzureServicePrincipalPtrInput Comment pulumi.StringPtrInput DatabricksGcpServiceAccount StorageCredentialDatabricksGcpServiceAccountPtrInput - ForceDestroy pulumi.BoolPtrInput - GcpServiceAccountKey StorageCredentialGcpServiceAccountKeyPtrInput + // Delete storage credential regardless of its dependencies. + // + // `awsIamRole` optional configuration block for credential details for AWS: + ForceDestroy pulumi.BoolPtrInput + GcpServiceAccountKey StorageCredentialGcpServiceAccountKeyPtrInput // Unique identifier of the parent Metastore MetastoreId pulumi.StringPtrInput // Name of Storage Credentials, which must be unique within the databricks_metastore. Change forces creation of a new resource. Name pulumi.StringPtrInput // Username/groupname/sp applicationId of the storage credential owner. - // - // `awsIamRole` optional configuration block for credential details for AWS: Owner pulumi.StringPtrInput // Indicates whether the storage credential is only usable for read operations. + // + // `azureServicePrincipal` optional configuration block to use service principal as credential details for Azure (Legacy): ReadOnly pulumi.BoolPtrInput } @@ -427,6 +442,9 @@ func (o StorageCredentialOutput) DatabricksGcpServiceAccount() StorageCredential }).(StorageCredentialDatabricksGcpServiceAccountOutput) } +// Delete storage credential regardless of its dependencies. 
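A fragment (same assumptions as above; the IAM role ARN is a placeholder) setting the storage credential fields whose doc comments moved in this hunk — owner, readOnly, and forceDestroy:

_, err := databricks.NewStorageCredential(ctx, "external", &databricks.StorageCredentialArgs{
	Name: pulumi.String("external-read-only"),
	AwsIamRole: &databricks.StorageCredentialAwsIamRoleArgs{
		RoleArn: pulumi.String("arn:aws:iam::123456789012:role/placeholder"),
	},
	Owner:        pulumi.String("account-admins"), // user/group/SP application_id
	ReadOnly:     pulumi.Bool(true),               // usable only for read operations
	ForceDestroy: pulumi.Bool(true),               // delete even if dependencies exist
})
if err != nil {
	return err
}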
+// +// `awsIamRole` optional configuration block for credential details for AWS: func (o StorageCredentialOutput) ForceDestroy() pulumi.BoolPtrOutput { return o.ApplyT(func(v *StorageCredential) pulumi.BoolPtrOutput { return v.ForceDestroy }).(pulumi.BoolPtrOutput) } @@ -448,13 +466,13 @@ func (o StorageCredentialOutput) Name() pulumi.StringOutput { } // Username/groupname/sp applicationId of the storage credential owner. -// -// `awsIamRole` optional configuration block for credential details for AWS: func (o StorageCredentialOutput) Owner() pulumi.StringOutput { return o.ApplyT(func(v *StorageCredential) pulumi.StringOutput { return v.Owner }).(pulumi.StringOutput) } // Indicates whether the storage credential is only usable for read operations. +// +// `azureServicePrincipal` optional configuration block to use service principal as credential details for Azure (Legacy): func (o StorageCredentialOutput) ReadOnly() pulumi.BoolPtrOutput { return o.ApplyT(func(v *StorageCredential) pulumi.BoolPtrOutput { return v.ReadOnly }).(pulumi.BoolPtrOutput) } diff --git a/sdk/java/src/main/java/com/pulumi/databricks/Cluster.java b/sdk/java/src/main/java/com/pulumi/databricks/Cluster.java index 636fc59f..8718f329 100644 --- a/sdk/java/src/main/java/com/pulumi/databricks/Cluster.java +++ b/sdk/java/src/main/java/com/pulumi/databricks/Cluster.java @@ -99,6 +99,12 @@ public Output clusterId() { public Output> clusterLogConf() { return Codegen.optional(this.clusterLogConf); } + /** + * @deprecated + * cluster_mount_info block is deprecated due the Clusters API changes. + * + */ + @Deprecated /* cluster_mount_info block is deprecated due the Clusters API changes. */ @Export(name="clusterMountInfos", type=List.class, parameters={ClusterClusterMountInfo.class}) private Output> clusterMountInfos; diff --git a/sdk/java/src/main/java/com/pulumi/databricks/ClusterArgs.java b/sdk/java/src/main/java/com/pulumi/databricks/ClusterArgs.java index 146b9846..2e5ab9bd 100644 --- a/sdk/java/src/main/java/com/pulumi/databricks/ClusterArgs.java +++ b/sdk/java/src/main/java/com/pulumi/databricks/ClusterArgs.java @@ -95,9 +95,21 @@ public Optional> clusterLogConf() { return Optional.ofNullable(this.clusterLogConf); } + /** + * @deprecated + * cluster_mount_info block is deprecated due the Clusters API changes. + * + */ + @Deprecated /* cluster_mount_info block is deprecated due the Clusters API changes. */ @Import(name="clusterMountInfos") private @Nullable Output> clusterMountInfos; + /** + * @deprecated + * cluster_mount_info block is deprecated due the Clusters API changes. + * + */ + @Deprecated /* cluster_mount_info block is deprecated due the Clusters API changes. */ public Optional>> clusterMountInfos() { return Optional.ofNullable(this.clusterMountInfos); } @@ -664,15 +676,39 @@ public Builder clusterLogConf(ClusterClusterLogConfArgs clusterLogConf) { return clusterLogConf(Output.of(clusterLogConf)); } + /** + * @return builder + * + * @deprecated + * cluster_mount_info block is deprecated due the Clusters API changes. + * + */ + @Deprecated /* cluster_mount_info block is deprecated due the Clusters API changes. */ public Builder clusterMountInfos(@Nullable Output> clusterMountInfos) { $.clusterMountInfos = clusterMountInfos; return this; } + /** + * @return builder + * + * @deprecated + * cluster_mount_info block is deprecated due the Clusters API changes. + * + */ + @Deprecated /* cluster_mount_info block is deprecated due the Clusters API changes. 
*/ public Builder clusterMountInfos(List clusterMountInfos) { return clusterMountInfos(Output.of(clusterMountInfos)); } + /** + * @return builder + * + * @deprecated + * cluster_mount_info block is deprecated due the Clusters API changes. + * + */ + @Deprecated /* cluster_mount_info block is deprecated due the Clusters API changes. */ public Builder clusterMountInfos(ClusterClusterMountInfoArgs... clusterMountInfos) { return clusterMountInfos(List.of(clusterMountInfos)); } diff --git a/sdk/java/src/main/java/com/pulumi/databricks/MetastoreDataAccess.java b/sdk/java/src/main/java/com/pulumi/databricks/MetastoreDataAccess.java index 004370dd..86688246 100644 --- a/sdk/java/src/main/java/com/pulumi/databricks/MetastoreDataAccess.java +++ b/sdk/java/src/main/java/com/pulumi/databricks/MetastoreDataAccess.java @@ -52,17 +52,35 @@ public Output> azureManagedIde public Output> azureServicePrincipal() { return Codegen.optional(this.azureServicePrincipal); } - @Export(name="configurationType", type=String.class, parameters={}) - private Output configurationType; + @Export(name="comment", type=String.class, parameters={}) + private Output comment; - public Output configurationType() { - return this.configurationType; + public Output> comment() { + return Codegen.optional(this.comment); } @Export(name="databricksGcpServiceAccount", type=MetastoreDataAccessDatabricksGcpServiceAccount.class, parameters={}) - private Output databricksGcpServiceAccount; + private Output databricksGcpServiceAccount; - public Output> databricksGcpServiceAccount() { - return Codegen.optional(this.databricksGcpServiceAccount); + public Output databricksGcpServiceAccount() { + return this.databricksGcpServiceAccount; + } + /** + * Delete the data access configuration regardless of its dependencies. + * + * `aws_iam_role` optional configuration block for credential details for AWS: + * + */ + @Export(name="forceDestroy", type=Boolean.class, parameters={}) + private Output forceDestroy; + + /** + * @return Delete the data access configuration regardless of its dependencies. + * + * `aws_iam_role` optional configuration block for credential details for AWS: + * + */ + public Output> forceDestroy() { + return Codegen.optional(this.forceDestroy); } @Export(name="gcpServiceAccountKey", type=MetastoreDataAccessGcpServiceAccountKey.class, parameters={}) private Output gcpServiceAccountKey; @@ -79,8 +97,6 @@ public Output> isDefault() { /** * Unique identifier of the parent Metastore * - * `aws_iam_role` optional configuration block for credential details for AWS: - * */ @Export(name="metastoreId", type=String.class, parameters={}) private Output metastoreId; @@ -88,8 +104,6 @@ public Output> isDefault() { /** * @return Unique identifier of the parent Metastore * - * `aws_iam_role` optional configuration block for credential details for AWS: - * */ public Output metastoreId() { return this.metastoreId; @@ -108,6 +122,26 @@ public Output metastoreId() { public Output name() { return this.name; } + /** + * Username/groupname/sp application_id of the data access configuration owner. + * + */ + @Export(name="owner", type=String.class, parameters={}) + private Output owner; + + /** + * @return Username/groupname/sp application_id of the data access configuration owner. 
+ * + */ + public Output owner() { + return this.owner; + } + @Export(name="readOnly", type=Boolean.class, parameters={}) + private Output readOnly; + + public Output> readOnly() { + return Codegen.optional(this.readOnly); + } /** * diff --git a/sdk/java/src/main/java/com/pulumi/databricks/MetastoreDataAccessArgs.java b/sdk/java/src/main/java/com/pulumi/databricks/MetastoreDataAccessArgs.java index 6c59b493..49be4778 100644 --- a/sdk/java/src/main/java/com/pulumi/databricks/MetastoreDataAccessArgs.java +++ b/sdk/java/src/main/java/com/pulumi/databricks/MetastoreDataAccessArgs.java @@ -42,11 +42,11 @@ public Optional> azureServi return Optional.ofNullable(this.azureServicePrincipal); } - @Import(name="configurationType") - private @Nullable Output configurationType; + @Import(name="comment") + private @Nullable Output comment; - public Optional> configurationType() { - return Optional.ofNullable(this.configurationType); + public Optional> comment() { + return Optional.ofNullable(this.comment); } @Import(name="databricksGcpServiceAccount") @@ -56,6 +56,25 @@ public Optional> data return Optional.ofNullable(this.databricksGcpServiceAccount); } + /** + * Delete the data access configuration regardless of its dependencies. + * + * `aws_iam_role` optional configuration block for credential details for AWS: + * + */ + @Import(name="forceDestroy") + private @Nullable Output forceDestroy; + + /** + * @return Delete the data access configuration regardless of its dependencies. + * + * `aws_iam_role` optional configuration block for credential details for AWS: + * + */ + public Optional> forceDestroy() { + return Optional.ofNullable(this.forceDestroy); + } + @Import(name="gcpServiceAccountKey") private @Nullable Output gcpServiceAccountKey; @@ -73,8 +92,6 @@ public Optional> isDefault() { /** * Unique identifier of the parent Metastore * - * `aws_iam_role` optional configuration block for credential details for AWS: - * */ @Import(name="metastoreId", required=true) private Output metastoreId; @@ -82,8 +99,6 @@ public Optional> isDefault() { /** * @return Unique identifier of the parent Metastore * - * `aws_iam_role` optional configuration block for credential details for AWS: - * */ public Output metastoreId() { return this.metastoreId; @@ -104,18 +119,43 @@ public Optional> name() { return Optional.ofNullable(this.name); } + /** + * Username/groupname/sp application_id of the data access configuration owner. + * + */ + @Import(name="owner") + private @Nullable Output owner; + + /** + * @return Username/groupname/sp application_id of the data access configuration owner. 
+ * + */ + public Optional> owner() { + return Optional.ofNullable(this.owner); + } + + @Import(name="readOnly") + private @Nullable Output readOnly; + + public Optional> readOnly() { + return Optional.ofNullable(this.readOnly); + } + private MetastoreDataAccessArgs() {} private MetastoreDataAccessArgs(MetastoreDataAccessArgs $) { this.awsIamRole = $.awsIamRole; this.azureManagedIdentity = $.azureManagedIdentity; this.azureServicePrincipal = $.azureServicePrincipal; - this.configurationType = $.configurationType; + this.comment = $.comment; this.databricksGcpServiceAccount = $.databricksGcpServiceAccount; + this.forceDestroy = $.forceDestroy; this.gcpServiceAccountKey = $.gcpServiceAccountKey; this.isDefault = $.isDefault; this.metastoreId = $.metastoreId; this.name = $.name; + this.owner = $.owner; + this.readOnly = $.readOnly; } public static Builder builder() { @@ -163,13 +203,13 @@ public Builder azureServicePrincipal(MetastoreDataAccessAzureServicePrincipalArg return azureServicePrincipal(Output.of(azureServicePrincipal)); } - public Builder configurationType(@Nullable Output configurationType) { - $.configurationType = configurationType; + public Builder comment(@Nullable Output comment) { + $.comment = comment; return this; } - public Builder configurationType(String configurationType) { - return configurationType(Output.of(configurationType)); + public Builder comment(String comment) { + return comment(Output.of(comment)); } public Builder databricksGcpServiceAccount(@Nullable Output databricksGcpServiceAccount) { @@ -181,6 +221,31 @@ public Builder databricksGcpServiceAccount(MetastoreDataAccessDatabricksGcpServi return databricksGcpServiceAccount(Output.of(databricksGcpServiceAccount)); } + /** + * @param forceDestroy Delete the data access configuration regardless of its dependencies. + * + * `aws_iam_role` optional configuration block for credential details for AWS: + * + * @return builder + * + */ + public Builder forceDestroy(@Nullable Output forceDestroy) { + $.forceDestroy = forceDestroy; + return this; + } + + /** + * @param forceDestroy Delete the data access configuration regardless of its dependencies. + * + * `aws_iam_role` optional configuration block for credential details for AWS: + * + * @return builder + * + */ + public Builder forceDestroy(Boolean forceDestroy) { + return forceDestroy(Output.of(forceDestroy)); + } + public Builder gcpServiceAccountKey(@Nullable Output gcpServiceAccountKey) { $.gcpServiceAccountKey = gcpServiceAccountKey; return this; @@ -202,8 +267,6 @@ public Builder isDefault(Boolean isDefault) { /** * @param metastoreId Unique identifier of the parent Metastore * - * `aws_iam_role` optional configuration block for credential details for AWS: - * * @return builder * */ @@ -215,8 +278,6 @@ public Builder metastoreId(Output metastoreId) { /** * @param metastoreId Unique identifier of the parent Metastore * - * `aws_iam_role` optional configuration block for credential details for AWS: - * * @return builder * */ @@ -245,6 +306,36 @@ public Builder name(String name) { return name(Output.of(name)); } + /** + * @param owner Username/groupname/sp application_id of the data access configuration owner. + * + * @return builder + * + */ + public Builder owner(@Nullable Output owner) { + $.owner = owner; + return this; + } + + /** + * @param owner Username/groupname/sp application_id of the data access configuration owner. 
+ * + * @return builder + * + */ + public Builder owner(String owner) { + return owner(Output.of(owner)); + } + + public Builder readOnly(@Nullable Output readOnly) { + $.readOnly = readOnly; + return this; + } + + public Builder readOnly(Boolean readOnly) { + return readOnly(Output.of(readOnly)); + } + public MetastoreDataAccessArgs build() { $.metastoreId = Objects.requireNonNull($.metastoreId, "expected parameter 'metastoreId' to be non-null"); return $; diff --git a/sdk/java/src/main/java/com/pulumi/databricks/MlflowModel.java b/sdk/java/src/main/java/com/pulumi/databricks/MlflowModel.java index 4b6ee19c..d1fe4c0e 100644 --- a/sdk/java/src/main/java/com/pulumi/databricks/MlflowModel.java +++ b/sdk/java/src/main/java/com/pulumi/databricks/MlflowModel.java @@ -126,6 +126,12 @@ public Output> lastUpdatedTimestamp() { public Output name() { return this.name; } + @Export(name="registeredModelId", type=String.class, parameters={}) + private Output registeredModelId; + + public Output registeredModelId() { + return this.registeredModelId; + } /** * Tags for the MLflow model. * diff --git a/sdk/java/src/main/java/com/pulumi/databricks/ModelServing.java b/sdk/java/src/main/java/com/pulumi/databricks/ModelServing.java index 06b57fea..af12e4fe 100644 --- a/sdk/java/src/main/java/com/pulumi/databricks/ModelServing.java +++ b/sdk/java/src/main/java/com/pulumi/databricks/ModelServing.java @@ -11,7 +11,10 @@ import com.pulumi.databricks.Utilities; import com.pulumi.databricks.inputs.ModelServingState; import com.pulumi.databricks.outputs.ModelServingConfig; +import com.pulumi.databricks.outputs.ModelServingTag; import java.lang.String; +import java.util.List; +import java.util.Optional; import javax.annotation.Nullable; /** @@ -143,6 +146,12 @@ public Output name() { public Output servingEndpointId() { return this.servingEndpointId; } + @Export(name="tags", type=List.class, parameters={ModelServingTag.class}) + private Output> tags; + + public Output>> tags() { + return Codegen.optional(this.tags); + } /** * diff --git a/sdk/java/src/main/java/com/pulumi/databricks/ModelServingArgs.java b/sdk/java/src/main/java/com/pulumi/databricks/ModelServingArgs.java index ab3bf595..a0797c93 100644 --- a/sdk/java/src/main/java/com/pulumi/databricks/ModelServingArgs.java +++ b/sdk/java/src/main/java/com/pulumi/databricks/ModelServingArgs.java @@ -6,7 +6,9 @@ import com.pulumi.core.Output; import com.pulumi.core.annotations.Import; import com.pulumi.databricks.inputs.ModelServingConfigArgs; +import com.pulumi.databricks.inputs.ModelServingTagArgs; import java.lang.String; +import java.util.List; import java.util.Objects; import java.util.Optional; import javax.annotation.Nullable; @@ -46,11 +48,19 @@ public Optional> name() { return Optional.ofNullable(this.name); } + @Import(name="tags") + private @Nullable Output> tags; + + public Optional>> tags() { + return Optional.ofNullable(this.tags); + } + private ModelServingArgs() {} private ModelServingArgs(ModelServingArgs $) { this.config = $.config; this.name = $.name; + this.tags = $.tags; } public static Builder builder() { @@ -113,6 +123,19 @@ public Builder name(String name) { return name(Output.of(name)); } + public Builder tags(@Nullable Output> tags) { + $.tags = tags; + return this; + } + + public Builder tags(List tags) { + return tags(Output.of(tags)); + } + + public Builder tags(ModelServingTagArgs... 
tags) { + return tags(List.of(tags)); + } + public ModelServingArgs build() { $.config = Objects.requireNonNull($.config, "expected parameter 'config' to be non-null"); return $; diff --git a/sdk/java/src/main/java/com/pulumi/databricks/StorageCredential.java b/sdk/java/src/main/java/com/pulumi/databricks/StorageCredential.java index a54ee043..f24fec10 100644 --- a/sdk/java/src/main/java/com/pulumi/databricks/StorageCredential.java +++ b/sdk/java/src/main/java/com/pulumi/databricks/StorageCredential.java @@ -201,9 +201,21 @@ public Output> comment() { public Output databricksGcpServiceAccount() { return this.databricksGcpServiceAccount; } + /** + * Delete storage credential regardless of its dependencies. + * + * `aws_iam_role` optional configuration block for credential details for AWS: + * + */ @Export(name="forceDestroy", type=Boolean.class, parameters={}) private Output forceDestroy; + /** + * @return Delete storage credential regardless of its dependencies. + * + * `aws_iam_role` optional configuration block for credential details for AWS: + * + */ public Output> forceDestroy() { return Codegen.optional(this.forceDestroy); } @@ -244,8 +256,6 @@ public Output name() { /** * Username/groupname/sp application_id of the storage credential owner. * - * `aws_iam_role` optional configuration block for credential details for AWS: - * */ @Export(name="owner", type=String.class, parameters={}) private Output owner; @@ -253,8 +263,6 @@ public Output name() { /** * @return Username/groupname/sp application_id of the storage credential owner. * - * `aws_iam_role` optional configuration block for credential details for AWS: - * */ public Output owner() { return this.owner; @@ -262,6 +270,8 @@ public Output owner() { /** * Indicates whether the storage credential is only usable for read operations. * + * `azure_service_principal` optional configuration block to use service principal as credential details for Azure (Legacy): + * */ @Export(name="readOnly", type=Boolean.class, parameters={}) private Output readOnly; @@ -269,6 +279,8 @@ public Output owner() { /** * @return Indicates whether the storage credential is only usable for read operations. * + * `azure_service_principal` optional configuration block to use service principal as credential details for Azure (Legacy): + * */ public Output> readOnly() { return Codegen.optional(this.readOnly); diff --git a/sdk/java/src/main/java/com/pulumi/databricks/StorageCredentialArgs.java b/sdk/java/src/main/java/com/pulumi/databricks/StorageCredentialArgs.java index 90615cdb..77c24e97 100644 --- a/sdk/java/src/main/java/com/pulumi/databricks/StorageCredentialArgs.java +++ b/sdk/java/src/main/java/com/pulumi/databricks/StorageCredentialArgs.java @@ -56,9 +56,21 @@ public Optional> databr return Optional.ofNullable(this.databricksGcpServiceAccount); } + /** + * Delete storage credential regardless of its dependencies. + * + * `aws_iam_role` optional configuration block for credential details for AWS: + * + */ @Import(name="forceDestroy") private @Nullable Output forceDestroy; + /** + * @return Delete storage credential regardless of its dependencies. + * + * `aws_iam_role` optional configuration block for credential details for AWS: + * + */ public Optional> forceDestroy() { return Optional.ofNullable(this.forceDestroy); } @@ -103,8 +115,6 @@ public Optional> name() { /** * Username/groupname/sp application_id of the storage credential owner. 
* - * `aws_iam_role` optional configuration block for credential details for AWS: - * */ @Import(name="owner") private @Nullable Output owner; @@ -112,8 +122,6 @@ public Optional> name() { /** * @return Username/groupname/sp application_id of the storage credential owner. * - * `aws_iam_role` optional configuration block for credential details for AWS: - * */ public Optional> owner() { return Optional.ofNullable(this.owner); @@ -122,6 +130,8 @@ public Optional> owner() { /** * Indicates whether the storage credential is only usable for read operations. * + * `azure_service_principal` optional configuration block to use service principal as credential details for Azure (Legacy): + * */ @Import(name="readOnly") private @Nullable Output readOnly; @@ -129,6 +139,8 @@ public Optional> owner() { /** * @return Indicates whether the storage credential is only usable for read operations. * + * `azure_service_principal` optional configuration block to use service principal as credential details for Azure (Legacy): + * */ public Optional> readOnly() { return Optional.ofNullable(this.readOnly); @@ -213,11 +225,27 @@ public Builder databricksGcpServiceAccount(StorageCredentialDatabricksGcpService return databricksGcpServiceAccount(Output.of(databricksGcpServiceAccount)); } + /** + * @param forceDestroy Delete storage credential regardless of its dependencies. + * + * `aws_iam_role` optional configuration block for credential details for AWS: + * + * @return builder + * + */ public Builder forceDestroy(@Nullable Output forceDestroy) { $.forceDestroy = forceDestroy; return this; } + /** + * @param forceDestroy Delete storage credential regardless of its dependencies. + * + * `aws_iam_role` optional configuration block for credential details for AWS: + * + * @return builder + * + */ public Builder forceDestroy(Boolean forceDestroy) { return forceDestroy(Output.of(forceDestroy)); } @@ -276,8 +304,6 @@ public Builder name(String name) { /** * @param owner Username/groupname/sp application_id of the storage credential owner. * - * `aws_iam_role` optional configuration block for credential details for AWS: - * * @return builder * */ @@ -289,8 +315,6 @@ public Builder owner(@Nullable Output owner) { /** * @param owner Username/groupname/sp application_id of the storage credential owner. * - * `aws_iam_role` optional configuration block for credential details for AWS: - * * @return builder * */ @@ -301,6 +325,8 @@ public Builder owner(String owner) { /** * @param readOnly Indicates whether the storage credential is only usable for read operations. * + * `azure_service_principal` optional configuration block to use service principal as credential details for Azure (Legacy): + * * @return builder * */ @@ -312,6 +338,8 @@ public Builder readOnly(@Nullable Output readOnly) { /** * @param readOnly Indicates whether the storage credential is only usable for read operations. 
* + * `azure_service_principal` optional configuration block to use service principal as credential details for Azure (Legacy): + * * @return builder * */ diff --git a/sdk/java/src/main/java/com/pulumi/databricks/inputs/ClusterState.java b/sdk/java/src/main/java/com/pulumi/databricks/inputs/ClusterState.java index 1837a074..6f362125 100644 --- a/sdk/java/src/main/java/com/pulumi/databricks/inputs/ClusterState.java +++ b/sdk/java/src/main/java/com/pulumi/databricks/inputs/ClusterState.java @@ -95,9 +95,21 @@ public Optional> clusterLogConf() { return Optional.ofNullable(this.clusterLogConf); } + /** + * @deprecated + * cluster_mount_info block is deprecated due the Clusters API changes. + * + */ + @Deprecated /* cluster_mount_info block is deprecated due the Clusters API changes. */ @Import(name="clusterMountInfos") private @Nullable Output> clusterMountInfos; + /** + * @deprecated + * cluster_mount_info block is deprecated due the Clusters API changes. + * + */ + @Deprecated /* cluster_mount_info block is deprecated due the Clusters API changes. */ public Optional>> clusterMountInfos() { return Optional.ofNullable(this.clusterMountInfos); } @@ -704,15 +716,39 @@ public Builder clusterLogConf(ClusterClusterLogConfArgs clusterLogConf) { return clusterLogConf(Output.of(clusterLogConf)); } + /** + * @return builder + * + * @deprecated + * cluster_mount_info block is deprecated due the Clusters API changes. + * + */ + @Deprecated /* cluster_mount_info block is deprecated due the Clusters API changes. */ public Builder clusterMountInfos(@Nullable Output> clusterMountInfos) { $.clusterMountInfos = clusterMountInfos; return this; } + /** + * @return builder + * + * @deprecated + * cluster_mount_info block is deprecated due the Clusters API changes. + * + */ + @Deprecated /* cluster_mount_info block is deprecated due the Clusters API changes. */ public Builder clusterMountInfos(List clusterMountInfos) { return clusterMountInfos(Output.of(clusterMountInfos)); } + /** + * @return builder + * + * @deprecated + * cluster_mount_info block is deprecated due the Clusters API changes. + * + */ + @Deprecated /* cluster_mount_info block is deprecated due the Clusters API changes. */ public Builder clusterMountInfos(ClusterClusterMountInfoArgs... clusterMountInfos) { return clusterMountInfos(List.of(clusterMountInfos)); } diff --git a/sdk/java/src/main/java/com/pulumi/databricks/inputs/MetastoreDataAccessDatabricksGcpServiceAccountArgs.java b/sdk/java/src/main/java/com/pulumi/databricks/inputs/MetastoreDataAccessDatabricksGcpServiceAccountArgs.java index 4efa2fbc..9bfd32f5 100644 --- a/sdk/java/src/main/java/com/pulumi/databricks/inputs/MetastoreDataAccessDatabricksGcpServiceAccountArgs.java +++ b/sdk/java/src/main/java/com/pulumi/databricks/inputs/MetastoreDataAccessDatabricksGcpServiceAccountArgs.java @@ -15,6 +15,13 @@ public final class MetastoreDataAccessDatabricksGcpServiceAccountArgs extends co public static final MetastoreDataAccessDatabricksGcpServiceAccountArgs Empty = new MetastoreDataAccessDatabricksGcpServiceAccountArgs(); + @Import(name="credentialId") + private @Nullable Output credentialId; + + public Optional> credentialId() { + return Optional.ofNullable(this.credentialId); + } + /** * The email of the GCP service account created, to be granted access to relevant buckets. 
* @@ -37,6 +44,7 @@ public Optional> email() { private MetastoreDataAccessDatabricksGcpServiceAccountArgs() {} private MetastoreDataAccessDatabricksGcpServiceAccountArgs(MetastoreDataAccessDatabricksGcpServiceAccountArgs $) { + this.credentialId = $.credentialId; this.email = $.email; } @@ -58,6 +66,15 @@ public Builder(MetastoreDataAccessDatabricksGcpServiceAccountArgs defaults) { $ = new MetastoreDataAccessDatabricksGcpServiceAccountArgs(Objects.requireNonNull(defaults)); } + public Builder credentialId(@Nullable Output credentialId) { + $.credentialId = credentialId; + return this; + } + + public Builder credentialId(String credentialId) { + return credentialId(Output.of(credentialId)); + } + /** * @param email The email of the GCP service account created, to be granted access to relevant buckets. * diff --git a/sdk/java/src/main/java/com/pulumi/databricks/inputs/MetastoreDataAccessState.java b/sdk/java/src/main/java/com/pulumi/databricks/inputs/MetastoreDataAccessState.java index 4a3c238b..25ddd071 100644 --- a/sdk/java/src/main/java/com/pulumi/databricks/inputs/MetastoreDataAccessState.java +++ b/sdk/java/src/main/java/com/pulumi/databricks/inputs/MetastoreDataAccessState.java @@ -42,11 +42,11 @@ public Optional> azureServi return Optional.ofNullable(this.azureServicePrincipal); } - @Import(name="configurationType") - private @Nullable Output configurationType; + @Import(name="comment") + private @Nullable Output comment; - public Optional> configurationType() { - return Optional.ofNullable(this.configurationType); + public Optional> comment() { + return Optional.ofNullable(this.comment); } @Import(name="databricksGcpServiceAccount") @@ -56,6 +56,25 @@ public Optional> data return Optional.ofNullable(this.databricksGcpServiceAccount); } + /** + * Delete the data access configuration regardless of its dependencies. + * + * `aws_iam_role` optional configuration block for credential details for AWS: + * + */ + @Import(name="forceDestroy") + private @Nullable Output forceDestroy; + + /** + * @return Delete the data access configuration regardless of its dependencies. + * + * `aws_iam_role` optional configuration block for credential details for AWS: + * + */ + public Optional> forceDestroy() { + return Optional.ofNullable(this.forceDestroy); + } + @Import(name="gcpServiceAccountKey") private @Nullable Output gcpServiceAccountKey; @@ -73,8 +92,6 @@ public Optional> isDefault() { /** * Unique identifier of the parent Metastore * - * `aws_iam_role` optional configuration block for credential details for AWS: - * */ @Import(name="metastoreId") private @Nullable Output metastoreId; @@ -82,8 +99,6 @@ public Optional> isDefault() { /** * @return Unique identifier of the parent Metastore * - * `aws_iam_role` optional configuration block for credential details for AWS: - * */ public Optional> metastoreId() { return Optional.ofNullable(this.metastoreId); @@ -104,18 +119,43 @@ public Optional> name() { return Optional.ofNullable(this.name); } + /** + * Username/groupname/sp application_id of the data access configuration owner. + * + */ + @Import(name="owner") + private @Nullable Output owner; + + /** + * @return Username/groupname/sp application_id of the data access configuration owner. 
+ * + */ + public Optional> owner() { + return Optional.ofNullable(this.owner); + } + + @Import(name="readOnly") + private @Nullable Output readOnly; + + public Optional> readOnly() { + return Optional.ofNullable(this.readOnly); + } + private MetastoreDataAccessState() {} private MetastoreDataAccessState(MetastoreDataAccessState $) { this.awsIamRole = $.awsIamRole; this.azureManagedIdentity = $.azureManagedIdentity; this.azureServicePrincipal = $.azureServicePrincipal; - this.configurationType = $.configurationType; + this.comment = $.comment; this.databricksGcpServiceAccount = $.databricksGcpServiceAccount; + this.forceDestroy = $.forceDestroy; this.gcpServiceAccountKey = $.gcpServiceAccountKey; this.isDefault = $.isDefault; this.metastoreId = $.metastoreId; this.name = $.name; + this.owner = $.owner; + this.readOnly = $.readOnly; } public static Builder builder() { @@ -163,13 +203,13 @@ public Builder azureServicePrincipal(MetastoreDataAccessAzureServicePrincipalArg return azureServicePrincipal(Output.of(azureServicePrincipal)); } - public Builder configurationType(@Nullable Output configurationType) { - $.configurationType = configurationType; + public Builder comment(@Nullable Output comment) { + $.comment = comment; return this; } - public Builder configurationType(String configurationType) { - return configurationType(Output.of(configurationType)); + public Builder comment(String comment) { + return comment(Output.of(comment)); } public Builder databricksGcpServiceAccount(@Nullable Output databricksGcpServiceAccount) { @@ -181,6 +221,31 @@ public Builder databricksGcpServiceAccount(MetastoreDataAccessDatabricksGcpServi return databricksGcpServiceAccount(Output.of(databricksGcpServiceAccount)); } + /** + * @param forceDestroy Delete the data access configuration regardless of its dependencies. + * + * `aws_iam_role` optional configuration block for credential details for AWS: + * + * @return builder + * + */ + public Builder forceDestroy(@Nullable Output forceDestroy) { + $.forceDestroy = forceDestroy; + return this; + } + + /** + * @param forceDestroy Delete the data access configuration regardless of its dependencies. + * + * `aws_iam_role` optional configuration block for credential details for AWS: + * + * @return builder + * + */ + public Builder forceDestroy(Boolean forceDestroy) { + return forceDestroy(Output.of(forceDestroy)); + } + public Builder gcpServiceAccountKey(@Nullable Output gcpServiceAccountKey) { $.gcpServiceAccountKey = gcpServiceAccountKey; return this; @@ -202,8 +267,6 @@ public Builder isDefault(Boolean isDefault) { /** * @param metastoreId Unique identifier of the parent Metastore * - * `aws_iam_role` optional configuration block for credential details for AWS: - * * @return builder * */ @@ -215,8 +278,6 @@ public Builder metastoreId(@Nullable Output metastoreId) { /** * @param metastoreId Unique identifier of the parent Metastore * - * `aws_iam_role` optional configuration block for credential details for AWS: - * * @return builder * */ @@ -245,6 +306,36 @@ public Builder name(String name) { return name(Output.of(name)); } + /** + * @param owner Username/groupname/sp application_id of the data access configuration owner. + * + * @return builder + * + */ + public Builder owner(@Nullable Output owner) { + $.owner = owner; + return this; + } + + /** + * @param owner Username/groupname/sp application_id of the data access configuration owner. 
+ * + * @return builder + * + */ + public Builder owner(String owner) { + return owner(Output.of(owner)); + } + + public Builder readOnly(@Nullable Output readOnly) { + $.readOnly = readOnly; + return this; + } + + public Builder readOnly(Boolean readOnly) { + return readOnly(Output.of(readOnly)); + } + public MetastoreDataAccessState build() { return $; } diff --git a/sdk/java/src/main/java/com/pulumi/databricks/inputs/MlflowModelState.java b/sdk/java/src/main/java/com/pulumi/databricks/inputs/MlflowModelState.java index 2880183b..9c34cb8a 100644 --- a/sdk/java/src/main/java/com/pulumi/databricks/inputs/MlflowModelState.java +++ b/sdk/java/src/main/java/com/pulumi/databricks/inputs/MlflowModelState.java @@ -62,6 +62,13 @@ public Optional> name() { return Optional.ofNullable(this.name); } + @Import(name="registeredModelId") + private @Nullable Output registeredModelId; + + public Optional> registeredModelId() { + return Optional.ofNullable(this.registeredModelId); + } + /** * Tags for the MLflow model. * @@ -91,6 +98,7 @@ private MlflowModelState(MlflowModelState $) { this.description = $.description; this.lastUpdatedTimestamp = $.lastUpdatedTimestamp; this.name = $.name; + this.registeredModelId = $.registeredModelId; this.tags = $.tags; this.userId = $.userId; } @@ -173,6 +181,15 @@ public Builder name(String name) { return name(Output.of(name)); } + public Builder registeredModelId(@Nullable Output registeredModelId) { + $.registeredModelId = registeredModelId; + return this; + } + + public Builder registeredModelId(String registeredModelId) { + return registeredModelId(Output.of(registeredModelId)); + } + /** * @param tags Tags for the MLflow model. * diff --git a/sdk/java/src/main/java/com/pulumi/databricks/inputs/ModelServingState.java b/sdk/java/src/main/java/com/pulumi/databricks/inputs/ModelServingState.java index db96735d..f80dff19 100644 --- a/sdk/java/src/main/java/com/pulumi/databricks/inputs/ModelServingState.java +++ b/sdk/java/src/main/java/com/pulumi/databricks/inputs/ModelServingState.java @@ -6,7 +6,9 @@ import com.pulumi.core.Output; import com.pulumi.core.annotations.Import; import com.pulumi.databricks.inputs.ModelServingConfigArgs; +import com.pulumi.databricks.inputs.ModelServingTagArgs; import java.lang.String; +import java.util.List; import java.util.Objects; import java.util.Optional; import javax.annotation.Nullable; @@ -61,12 +63,20 @@ public Optional> servingEndpointId() { return Optional.ofNullable(this.servingEndpointId); } + @Import(name="tags") + private @Nullable Output> tags; + + public Optional>> tags() { + return Optional.ofNullable(this.tags); + } + private ModelServingState() {} private ModelServingState(ModelServingState $) { this.config = $.config; this.name = $.name; this.servingEndpointId = $.servingEndpointId; + this.tags = $.tags; } public static Builder builder() { @@ -150,6 +160,19 @@ public Builder servingEndpointId(String servingEndpointId) { return servingEndpointId(Output.of(servingEndpointId)); } + public Builder tags(@Nullable Output> tags) { + $.tags = tags; + return this; + } + + public Builder tags(List tags) { + return tags(Output.of(tags)); + } + + public Builder tags(ModelServingTagArgs... 
tags) { + return tags(List.of(tags)); + } + public ModelServingState build() { return $; } diff --git a/sdk/java/src/main/java/com/pulumi/databricks/inputs/ModelServingTagArgs.java b/sdk/java/src/main/java/com/pulumi/databricks/inputs/ModelServingTagArgs.java new file mode 100644 index 00000000..79452d9c --- /dev/null +++ b/sdk/java/src/main/java/com/pulumi/databricks/inputs/ModelServingTagArgs.java @@ -0,0 +1,81 @@ +// *** WARNING: this file was generated by pulumi-java-gen. *** +// *** Do not edit by hand unless you're certain you know what you are doing! *** + +package com.pulumi.databricks.inputs; + +import com.pulumi.core.Output; +import com.pulumi.core.annotations.Import; +import java.lang.String; +import java.util.Objects; +import java.util.Optional; +import javax.annotation.Nullable; + + +public final class ModelServingTagArgs extends com.pulumi.resources.ResourceArgs { + + public static final ModelServingTagArgs Empty = new ModelServingTagArgs(); + + @Import(name="key", required=true) + private Output key; + + public Output key() { + return this.key; + } + + @Import(name="value") + private @Nullable Output value; + + public Optional> value() { + return Optional.ofNullable(this.value); + } + + private ModelServingTagArgs() {} + + private ModelServingTagArgs(ModelServingTagArgs $) { + this.key = $.key; + this.value = $.value; + } + + public static Builder builder() { + return new Builder(); + } + public static Builder builder(ModelServingTagArgs defaults) { + return new Builder(defaults); + } + + public static final class Builder { + private ModelServingTagArgs $; + + public Builder() { + $ = new ModelServingTagArgs(); + } + + public Builder(ModelServingTagArgs defaults) { + $ = new ModelServingTagArgs(Objects.requireNonNull(defaults)); + } + + public Builder key(Output key) { + $.key = key; + return this; + } + + public Builder key(String key) { + return key(Output.of(key)); + } + + public Builder value(@Nullable Output value) { + $.value = value; + return this; + } + + public Builder value(String value) { + return value(Output.of(value)); + } + + public ModelServingTagArgs build() { + $.key = Objects.requireNonNull($.key, "expected parameter 'key' to be non-null"); + return $; + } + } + +} diff --git a/sdk/java/src/main/java/com/pulumi/databricks/inputs/StorageCredentialAzureManagedIdentityArgs.java b/sdk/java/src/main/java/com/pulumi/databricks/inputs/StorageCredentialAzureManagedIdentityArgs.java index 8717887a..acb86460 100644 --- a/sdk/java/src/main/java/com/pulumi/databricks/inputs/StorageCredentialAzureManagedIdentityArgs.java +++ b/sdk/java/src/main/java/com/pulumi/databricks/inputs/StorageCredentialAzureManagedIdentityArgs.java @@ -40,7 +40,7 @@ public Optional> credentialId() { /** * The Resource ID of the Azure User Assigned Managed Identity associated with Azure Databricks Access Connector, of the form `/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg-name/providers/Microsoft.ManagedIdentity/userAssignedIdentities/user-managed-identity-name`. 
* - * `azure_service_principal` optional configuration block to use service principal as credential details for Azure: + * `databricks_gcp_service_account` optional configuration block for creating a Databricks-managed GCP Service Account: * */ @Import(name="managedIdentityId") @@ -49,7 +49,7 @@ public Optional> credentialId() { /** * @return The Resource ID of the Azure User Assigned Managed Identity associated with Azure Databricks Access Connector, of the form `/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg-name/providers/Microsoft.ManagedIdentity/userAssignedIdentities/user-managed-identity-name`. * - * `azure_service_principal` optional configuration block to use service principal as credential details for Azure: + * `databricks_gcp_service_account` optional configuration block for creating a Databricks-managed GCP Service Account: * */ public Optional> managedIdentityId() { @@ -115,7 +115,7 @@ public Builder credentialId(String credentialId) { /** * @param managedIdentityId The Resource ID of the Azure User Assigned Managed Identity associated with Azure Databricks Access Connector, of the form `/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg-name/providers/Microsoft.ManagedIdentity/userAssignedIdentities/user-managed-identity-name`. * - * `azure_service_principal` optional configuration block to use service principal as credential details for Azure: + * `databricks_gcp_service_account` optional configuration block for creating a Databricks-managed GCP Service Account: * * @return builder * @@ -128,7 +128,7 @@ public Builder managedIdentityId(@Nullable Output managedIdentityId) { /** * @param managedIdentityId The Resource ID of the Azure User Assigned Managed Identity associated with Azure Databricks Access Connector, of the form `/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg-name/providers/Microsoft.ManagedIdentity/userAssignedIdentities/user-managed-identity-name`. * - * `azure_service_principal` optional configuration block to use service principal as credential details for Azure: + * `databricks_gcp_service_account` optional configuration block for creating a Databricks-managed GCP Service Account: * * @return builder * diff --git a/sdk/java/src/main/java/com/pulumi/databricks/inputs/StorageCredentialAzureServicePrincipalArgs.java b/sdk/java/src/main/java/com/pulumi/databricks/inputs/StorageCredentialAzureServicePrincipalArgs.java index f72ecc20..8c99fcb6 100644 --- a/sdk/java/src/main/java/com/pulumi/databricks/inputs/StorageCredentialAzureServicePrincipalArgs.java +++ b/sdk/java/src/main/java/com/pulumi/databricks/inputs/StorageCredentialAzureServicePrincipalArgs.java @@ -31,8 +31,6 @@ public Output applicationId() { /** * The client secret generated for the above app ID in AAD. **This field is redacted on output** * - * `databricks_gcp_service_account` optional configuration block for creating a Databricks-managed GCP Service Account: - * */ @Import(name="clientSecret", required=true) private Output clientSecret; @@ -40,8 +38,6 @@ public Output applicationId() { /** * @return The client secret generated for the above app ID in AAD. **This field is redacted on output** * - * `databricks_gcp_service_account` optional configuration block for creating a Databricks-managed GCP Service Account: - * */ public Output clientSecret() { return this.clientSecret; @@ -112,8 +108,6 @@ public Builder applicationId(String applicationId) { /** * @param clientSecret The client secret generated for the above app ID in AAD. 
**This field is redacted on output** * - * `databricks_gcp_service_account` optional configuration block for creating a Databricks-managed GCP Service Account: - * * @return builder * */ @@ -125,8 +119,6 @@ public Builder clientSecret(Output clientSecret) { /** * @param clientSecret The client secret generated for the above app ID in AAD. **This field is redacted on output** * - * `databricks_gcp_service_account` optional configuration block for creating a Databricks-managed GCP Service Account: - * * @return builder * */ diff --git a/sdk/java/src/main/java/com/pulumi/databricks/inputs/StorageCredentialDatabricksGcpServiceAccountArgs.java b/sdk/java/src/main/java/com/pulumi/databricks/inputs/StorageCredentialDatabricksGcpServiceAccountArgs.java index f34b0d74..361066c3 100644 --- a/sdk/java/src/main/java/com/pulumi/databricks/inputs/StorageCredentialDatabricksGcpServiceAccountArgs.java +++ b/sdk/java/src/main/java/com/pulumi/databricks/inputs/StorageCredentialDatabricksGcpServiceAccountArgs.java @@ -15,6 +15,13 @@ public final class StorageCredentialDatabricksGcpServiceAccountArgs extends com. public static final StorageCredentialDatabricksGcpServiceAccountArgs Empty = new StorageCredentialDatabricksGcpServiceAccountArgs(); + @Import(name="credentialId") + private @Nullable Output credentialId; + + public Optional> credentialId() { + return Optional.ofNullable(this.credentialId); + } + /** * The email of the GCP service account created, to be granted access to relevant buckets. * @@ -33,6 +40,7 @@ public Optional> email() { private StorageCredentialDatabricksGcpServiceAccountArgs() {} private StorageCredentialDatabricksGcpServiceAccountArgs(StorageCredentialDatabricksGcpServiceAccountArgs $) { + this.credentialId = $.credentialId; this.email = $.email; } @@ -54,6 +62,15 @@ public Builder(StorageCredentialDatabricksGcpServiceAccountArgs defaults) { $ = new StorageCredentialDatabricksGcpServiceAccountArgs(Objects.requireNonNull(defaults)); } + public Builder credentialId(@Nullable Output credentialId) { + $.credentialId = credentialId; + return this; + } + + public Builder credentialId(String credentialId) { + return credentialId(Output.of(credentialId)); + } + /** * @param email The email of the GCP service account created, to be granted access to relevant buckets. * diff --git a/sdk/java/src/main/java/com/pulumi/databricks/inputs/StorageCredentialState.java b/sdk/java/src/main/java/com/pulumi/databricks/inputs/StorageCredentialState.java index 4694a265..ab1e5bb3 100644 --- a/sdk/java/src/main/java/com/pulumi/databricks/inputs/StorageCredentialState.java +++ b/sdk/java/src/main/java/com/pulumi/databricks/inputs/StorageCredentialState.java @@ -56,9 +56,21 @@ public Optional> databr return Optional.ofNullable(this.databricksGcpServiceAccount); } + /** + * Delete storage credential regardless of its dependencies. + * + * `aws_iam_role` optional configuration block for credential details for AWS: + * + */ @Import(name="forceDestroy") private @Nullable Output forceDestroy; + /** + * @return Delete storage credential regardless of its dependencies. + * + * `aws_iam_role` optional configuration block for credential details for AWS: + * + */ public Optional> forceDestroy() { return Optional.ofNullable(this.forceDestroy); } @@ -103,8 +115,6 @@ public Optional> name() { /** * Username/groupname/sp application_id of the storage credential owner. 
* - * `aws_iam_role` optional configuration block for credential details for AWS: - * */ @Import(name="owner") private @Nullable Output owner; @@ -112,8 +122,6 @@ public Optional> name() { /** * @return Username/groupname/sp application_id of the storage credential owner. * - * `aws_iam_role` optional configuration block for credential details for AWS: - * */ public Optional> owner() { return Optional.ofNullable(this.owner); @@ -122,6 +130,8 @@ public Optional> owner() { /** * Indicates whether the storage credential is only usable for read operations. * + * `azure_service_principal` optional configuration block to use service principal as credential details for Azure (Legacy): + * */ @Import(name="readOnly") private @Nullable Output readOnly; @@ -129,6 +139,8 @@ public Optional> owner() { /** * @return Indicates whether the storage credential is only usable for read operations. * + * `azure_service_principal` optional configuration block to use service principal as credential details for Azure (Legacy): + * */ public Optional> readOnly() { return Optional.ofNullable(this.readOnly); @@ -213,11 +225,27 @@ public Builder databricksGcpServiceAccount(StorageCredentialDatabricksGcpService return databricksGcpServiceAccount(Output.of(databricksGcpServiceAccount)); } + /** + * @param forceDestroy Delete storage credential regardless of its dependencies. + * + * `aws_iam_role` optional configuration block for credential details for AWS: + * + * @return builder + * + */ public Builder forceDestroy(@Nullable Output forceDestroy) { $.forceDestroy = forceDestroy; return this; } + /** + * @param forceDestroy Delete storage credential regardless of its dependencies. + * + * `aws_iam_role` optional configuration block for credential details for AWS: + * + * @return builder + * + */ public Builder forceDestroy(Boolean forceDestroy) { return forceDestroy(Output.of(forceDestroy)); } @@ -276,8 +304,6 @@ public Builder name(String name) { /** * @param owner Username/groupname/sp application_id of the storage credential owner. * - * `aws_iam_role` optional configuration block for credential details for AWS: - * * @return builder * */ @@ -289,8 +315,6 @@ public Builder owner(@Nullable Output owner) { /** * @param owner Username/groupname/sp application_id of the storage credential owner. * - * `aws_iam_role` optional configuration block for credential details for AWS: - * * @return builder * */ @@ -301,6 +325,8 @@ public Builder owner(String owner) { /** * @param readOnly Indicates whether the storage credential is only usable for read operations. * + * `azure_service_principal` optional configuration block to use service principal as credential details for Azure (Legacy): + * * @return builder * */ @@ -312,6 +338,8 @@ public Builder readOnly(@Nullable Output readOnly) { /** * @param readOnly Indicates whether the storage credential is only usable for read operations. 
* + * `azure_service_principal` optional configuration block to use service principal as credential details for Azure (Legacy): + * * @return builder * */ diff --git a/sdk/java/src/main/java/com/pulumi/databricks/outputs/MetastoreDataAccessDatabricksGcpServiceAccount.java b/sdk/java/src/main/java/com/pulumi/databricks/outputs/MetastoreDataAccessDatabricksGcpServiceAccount.java index 9dea9d2b..97e8ff0c 100644 --- a/sdk/java/src/main/java/com/pulumi/databricks/outputs/MetastoreDataAccessDatabricksGcpServiceAccount.java +++ b/sdk/java/src/main/java/com/pulumi/databricks/outputs/MetastoreDataAccessDatabricksGcpServiceAccount.java @@ -11,6 +11,7 @@ @CustomType public final class MetastoreDataAccessDatabricksGcpServiceAccount { + private @Nullable String credentialId; /** * @return The email of the GCP service account created, to be granted access to relevant buckets. * @@ -20,6 +21,9 @@ public final class MetastoreDataAccessDatabricksGcpServiceAccount { private @Nullable String email; private MetastoreDataAccessDatabricksGcpServiceAccount() {} + public Optional credentialId() { + return Optional.ofNullable(this.credentialId); + } /** * @return The email of the GCP service account created, to be granted access to relevant buckets. * @@ -39,13 +43,20 @@ public static Builder builder(MetastoreDataAccessDatabricksGcpServiceAccount def } @CustomType.Builder public static final class Builder { + private @Nullable String credentialId; private @Nullable String email; public Builder() {} public Builder(MetastoreDataAccessDatabricksGcpServiceAccount defaults) { Objects.requireNonNull(defaults); + this.credentialId = defaults.credentialId; this.email = defaults.email; } + @CustomType.Setter + public Builder credentialId(@Nullable String credentialId) { + this.credentialId = credentialId; + return this; + } @CustomType.Setter public Builder email(@Nullable String email) { this.email = email; @@ -53,6 +64,7 @@ public Builder email(@Nullable String email) { } public MetastoreDataAccessDatabricksGcpServiceAccount build() { final var o = new MetastoreDataAccessDatabricksGcpServiceAccount(); + o.credentialId = credentialId; o.email = email; return o; } diff --git a/sdk/java/src/main/java/com/pulumi/databricks/outputs/ModelServingTag.java b/sdk/java/src/main/java/com/pulumi/databricks/outputs/ModelServingTag.java new file mode 100644 index 00000000..179186da --- /dev/null +++ b/sdk/java/src/main/java/com/pulumi/databricks/outputs/ModelServingTag.java @@ -0,0 +1,60 @@ +// *** WARNING: this file was generated by pulumi-java-gen. *** +// *** Do not edit by hand unless you're certain you know what you are doing! 
*** + +package com.pulumi.databricks.outputs; + +import com.pulumi.core.annotations.CustomType; +import java.lang.String; +import java.util.Objects; +import java.util.Optional; +import javax.annotation.Nullable; + +@CustomType +public final class ModelServingTag { + private String key; + private @Nullable String value; + + private ModelServingTag() {} + public String key() { + return this.key; + } + public Optional value() { + return Optional.ofNullable(this.value); + } + + public static Builder builder() { + return new Builder(); + } + + public static Builder builder(ModelServingTag defaults) { + return new Builder(defaults); + } + @CustomType.Builder + public static final class Builder { + private String key; + private @Nullable String value; + public Builder() {} + public Builder(ModelServingTag defaults) { + Objects.requireNonNull(defaults); + this.key = defaults.key; + this.value = defaults.value; + } + + @CustomType.Setter + public Builder key(String key) { + this.key = Objects.requireNonNull(key); + return this; + } + @CustomType.Setter + public Builder value(@Nullable String value) { + this.value = value; + return this; + } + public ModelServingTag build() { + final var o = new ModelServingTag(); + o.key = key; + o.value = value; + return o; + } + } +} diff --git a/sdk/java/src/main/java/com/pulumi/databricks/outputs/StorageCredentialAzureManagedIdentity.java b/sdk/java/src/main/java/com/pulumi/databricks/outputs/StorageCredentialAzureManagedIdentity.java index 7d3c62fd..c9af0558 100644 --- a/sdk/java/src/main/java/com/pulumi/databricks/outputs/StorageCredentialAzureManagedIdentity.java +++ b/sdk/java/src/main/java/com/pulumi/databricks/outputs/StorageCredentialAzureManagedIdentity.java @@ -20,7 +20,7 @@ public final class StorageCredentialAzureManagedIdentity { /** * @return The Resource ID of the Azure User Assigned Managed Identity associated with Azure Databricks Access Connector, of the form `/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg-name/providers/Microsoft.ManagedIdentity/userAssignedIdentities/user-managed-identity-name`. * - * `azure_service_principal` optional configuration block to use service principal as credential details for Azure: + * `databricks_gcp_service_account` optional configuration block for creating a Databricks-managed GCP Service Account: * */ private @Nullable String managedIdentityId; @@ -39,7 +39,7 @@ public Optional credentialId() { /** * @return The Resource ID of the Azure User Assigned Managed Identity associated with Azure Databricks Access Connector, of the form `/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg-name/providers/Microsoft.ManagedIdentity/userAssignedIdentities/user-managed-identity-name`. 
* - * `azure_service_principal` optional configuration block to use service principal as credential details for Azure: + * `databricks_gcp_service_account` optional configuration block for creating a Databricks-managed GCP Service Account: * */ public Optional managedIdentityId() { diff --git a/sdk/java/src/main/java/com/pulumi/databricks/outputs/StorageCredentialAzureServicePrincipal.java b/sdk/java/src/main/java/com/pulumi/databricks/outputs/StorageCredentialAzureServicePrincipal.java index 03fedda0..fa5dcd6f 100644 --- a/sdk/java/src/main/java/com/pulumi/databricks/outputs/StorageCredentialAzureServicePrincipal.java +++ b/sdk/java/src/main/java/com/pulumi/databricks/outputs/StorageCredentialAzureServicePrincipal.java @@ -17,8 +17,6 @@ public final class StorageCredentialAzureServicePrincipal { /** * @return The client secret generated for the above app ID in AAD. **This field is redacted on output** * - * `databricks_gcp_service_account` optional configuration block for creating a Databricks-managed GCP Service Account: - * */ private String clientSecret; /** @@ -38,8 +36,6 @@ public String applicationId() { /** * @return The client secret generated for the above app ID in AAD. **This field is redacted on output** * - * `databricks_gcp_service_account` optional configuration block for creating a Databricks-managed GCP Service Account: - * */ public String clientSecret() { return this.clientSecret; diff --git a/sdk/java/src/main/java/com/pulumi/databricks/outputs/StorageCredentialDatabricksGcpServiceAccount.java b/sdk/java/src/main/java/com/pulumi/databricks/outputs/StorageCredentialDatabricksGcpServiceAccount.java index 473e9a76..1402c9cb 100644 --- a/sdk/java/src/main/java/com/pulumi/databricks/outputs/StorageCredentialDatabricksGcpServiceAccount.java +++ b/sdk/java/src/main/java/com/pulumi/databricks/outputs/StorageCredentialDatabricksGcpServiceAccount.java @@ -11,6 +11,7 @@ @CustomType public final class StorageCredentialDatabricksGcpServiceAccount { + private @Nullable String credentialId; /** * @return The email of the GCP service account created, to be granted access to relevant buckets. * @@ -18,6 +19,9 @@ public final class StorageCredentialDatabricksGcpServiceAccount { private @Nullable String email; private StorageCredentialDatabricksGcpServiceAccount() {} + public Optional credentialId() { + return Optional.ofNullable(this.credentialId); + } /** * @return The email of the GCP service account created, to be granted access to relevant buckets. 
* @@ -35,13 +39,20 @@ public static Builder builder(StorageCredentialDatabricksGcpServiceAccount defau } @CustomType.Builder public static final class Builder { + private @Nullable String credentialId; private @Nullable String email; public Builder() {} public Builder(StorageCredentialDatabricksGcpServiceAccount defaults) { Objects.requireNonNull(defaults); + this.credentialId = defaults.credentialId; this.email = defaults.email; } + @CustomType.Setter + public Builder credentialId(@Nullable String credentialId) { + this.credentialId = credentialId; + return this; + } @CustomType.Setter public Builder email(@Nullable String email) { this.email = email; @@ -49,6 +60,7 @@ public Builder email(@Nullable String email) { } public StorageCredentialDatabricksGcpServiceAccount build() { final var o = new StorageCredentialDatabricksGcpServiceAccount(); + o.credentialId = credentialId; o.email = email; return o; } diff --git a/sdk/nodejs/cluster.ts b/sdk/nodejs/cluster.ts index 5e51a46e..c859ad7d 100644 --- a/sdk/nodejs/cluster.ts +++ b/sdk/nodejs/cluster.ts @@ -56,6 +56,9 @@ export class Cluster extends pulumi.CustomResource { public readonly azureAttributes!: pulumi.Output; public readonly clusterId!: pulumi.Output; public readonly clusterLogConf!: pulumi.Output; + /** + * @deprecated cluster_mount_info block is deprecated due the Clusters API changes. + */ public readonly clusterMountInfos!: pulumi.Output; /** * Cluster name, which doesn’t have to be unique. If not specified at creation, the cluster name will be an empty string. @@ -285,6 +288,9 @@ export interface ClusterState { azureAttributes?: pulumi.Input; clusterId?: pulumi.Input; clusterLogConf?: pulumi.Input; + /** + * @deprecated cluster_mount_info block is deprecated due the Clusters API changes. + */ clusterMountInfos?: pulumi.Input[]>; /** * Cluster name, which doesn’t have to be unique. If not specified at creation, the cluster name will be an empty string. @@ -421,6 +427,9 @@ export interface ClusterArgs { azureAttributes?: pulumi.Input; clusterId?: pulumi.Input; clusterLogConf?: pulumi.Input; + /** + * @deprecated cluster_mount_info block is deprecated due the Clusters API changes. + */ clusterMountInfos?: pulumi.Input[]>; /** * Cluster name, which doesn’t have to be unique. If not specified at creation, the cluster name will be an empty string. diff --git a/sdk/nodejs/metastoreDataAccess.ts b/sdk/nodejs/metastoreDataAccess.ts index fe39bbca..f5a07b2f 100644 --- a/sdk/nodejs/metastoreDataAccess.ts +++ b/sdk/nodejs/metastoreDataAccess.ts @@ -48,20 +48,29 @@ export class MetastoreDataAccess extends pulumi.CustomResource { public readonly awsIamRole!: pulumi.Output; public readonly azureManagedIdentity!: pulumi.Output; public readonly azureServicePrincipal!: pulumi.Output; - public readonly configurationType!: pulumi.Output; - public readonly databricksGcpServiceAccount!: pulumi.Output; + public readonly comment!: pulumi.Output; + public readonly databricksGcpServiceAccount!: pulumi.Output; + /** + * Delete the data access configuration regardless of its dependencies. 
+ * + * `awsIamRole` optional configuration block for credential details for AWS: + */ + public readonly forceDestroy!: pulumi.Output; public readonly gcpServiceAccountKey!: pulumi.Output; public readonly isDefault!: pulumi.Output; /** * Unique identifier of the parent Metastore - * - * `awsIamRole` optional configuration block for credential details for AWS: */ public readonly metastoreId!: pulumi.Output; /** * Name of Data Access Configuration, which must be unique within the databricks_metastore. Change forces creation of a new resource. */ public readonly name!: pulumi.Output; + /** + * Username/groupname/sp applicationId of the data access configuration owner. + */ + public readonly owner!: pulumi.Output; + public readonly readOnly!: pulumi.Output; /** * Create a MetastoreDataAccess resource with the given unique name, arguments, and options. @@ -79,12 +88,15 @@ export class MetastoreDataAccess extends pulumi.CustomResource { resourceInputs["awsIamRole"] = state ? state.awsIamRole : undefined; resourceInputs["azureManagedIdentity"] = state ? state.azureManagedIdentity : undefined; resourceInputs["azureServicePrincipal"] = state ? state.azureServicePrincipal : undefined; - resourceInputs["configurationType"] = state ? state.configurationType : undefined; + resourceInputs["comment"] = state ? state.comment : undefined; resourceInputs["databricksGcpServiceAccount"] = state ? state.databricksGcpServiceAccount : undefined; + resourceInputs["forceDestroy"] = state ? state.forceDestroy : undefined; resourceInputs["gcpServiceAccountKey"] = state ? state.gcpServiceAccountKey : undefined; resourceInputs["isDefault"] = state ? state.isDefault : undefined; resourceInputs["metastoreId"] = state ? state.metastoreId : undefined; resourceInputs["name"] = state ? state.name : undefined; + resourceInputs["owner"] = state ? state.owner : undefined; + resourceInputs["readOnly"] = state ? state.readOnly : undefined; } else { const args = argsOrState as MetastoreDataAccessArgs | undefined; if ((!args || args.metastoreId === undefined) && !opts.urn) { @@ -93,12 +105,15 @@ export class MetastoreDataAccess extends pulumi.CustomResource { resourceInputs["awsIamRole"] = args ? args.awsIamRole : undefined; resourceInputs["azureManagedIdentity"] = args ? args.azureManagedIdentity : undefined; resourceInputs["azureServicePrincipal"] = args ? args.azureServicePrincipal : undefined; - resourceInputs["configurationType"] = args ? args.configurationType : undefined; + resourceInputs["comment"] = args ? args.comment : undefined; resourceInputs["databricksGcpServiceAccount"] = args ? args.databricksGcpServiceAccount : undefined; + resourceInputs["forceDestroy"] = args ? args.forceDestroy : undefined; resourceInputs["gcpServiceAccountKey"] = args ? args.gcpServiceAccountKey : undefined; resourceInputs["isDefault"] = args ? args.isDefault : undefined; resourceInputs["metastoreId"] = args ? args.metastoreId : undefined; resourceInputs["name"] = args ? args.name : undefined; + resourceInputs["owner"] = args ? args.owner : undefined; + resourceInputs["readOnly"] = args ? 
args.readOnly : undefined; } opts = pulumi.mergeOptions(utilities.resourceOptsDefaults(), opts); super(MetastoreDataAccess.__pulumiType, name, resourceInputs, opts); @@ -112,20 +127,29 @@ export interface MetastoreDataAccessState { awsIamRole?: pulumi.Input; azureManagedIdentity?: pulumi.Input; azureServicePrincipal?: pulumi.Input; - configurationType?: pulumi.Input; + comment?: pulumi.Input; databricksGcpServiceAccount?: pulumi.Input; + /** + * Delete the data access configuration regardless of its dependencies. + * + * `awsIamRole` optional configuration block for credential details for AWS: + */ + forceDestroy?: pulumi.Input; gcpServiceAccountKey?: pulumi.Input; isDefault?: pulumi.Input; /** * Unique identifier of the parent Metastore - * - * `awsIamRole` optional configuration block for credential details for AWS: */ metastoreId?: pulumi.Input; /** * Name of Data Access Configuration, which must be unique within the databricks_metastore. Change forces creation of a new resource. */ name?: pulumi.Input; + /** + * Username/groupname/sp applicationId of the data access configuration owner. + */ + owner?: pulumi.Input; + readOnly?: pulumi.Input; } /** @@ -135,18 +159,27 @@ export interface MetastoreDataAccessArgs { awsIamRole?: pulumi.Input; azureManagedIdentity?: pulumi.Input; azureServicePrincipal?: pulumi.Input; - configurationType?: pulumi.Input; + comment?: pulumi.Input; databricksGcpServiceAccount?: pulumi.Input; + /** + * Delete the data access configuration regardless of its dependencies. + * + * `awsIamRole` optional configuration block for credential details for AWS: + */ + forceDestroy?: pulumi.Input; gcpServiceAccountKey?: pulumi.Input; isDefault?: pulumi.Input; /** * Unique identifier of the parent Metastore - * - * `awsIamRole` optional configuration block for credential details for AWS: */ metastoreId: pulumi.Input; /** * Name of Data Access Configuration, which must be unique within the databricks_metastore. Change forces creation of a new resource. */ name?: pulumi.Input; + /** + * Username/groupname/sp applicationId of the data access configuration owner. + */ + owner?: pulumi.Input; + readOnly?: pulumi.Input; } diff --git a/sdk/nodejs/mlflowModel.ts b/sdk/nodejs/mlflowModel.ts index b34f4da0..a8589381 100644 --- a/sdk/nodejs/mlflowModel.ts +++ b/sdk/nodejs/mlflowModel.ts @@ -91,6 +91,7 @@ export class MlflowModel extends pulumi.CustomResource { * Name of MLflow model. Change of name triggers new resource. */ public readonly name!: pulumi.Output; + public /*out*/ readonly registeredModelId!: pulumi.Output; /** * Tags for the MLflow model. */ @@ -114,6 +115,7 @@ export class MlflowModel extends pulumi.CustomResource { resourceInputs["description"] = state ? state.description : undefined; resourceInputs["lastUpdatedTimestamp"] = state ? state.lastUpdatedTimestamp : undefined; resourceInputs["name"] = state ? state.name : undefined; + resourceInputs["registeredModelId"] = state ? state.registeredModelId : undefined; resourceInputs["tags"] = state ? state.tags : undefined; resourceInputs["userId"] = state ? state.userId : undefined; } else { @@ -124,6 +126,7 @@ export class MlflowModel extends pulumi.CustomResource { resourceInputs["name"] = args ? args.name : undefined; resourceInputs["tags"] = args ? args.tags : undefined; resourceInputs["userId"] = args ? 
args.userId : undefined; + resourceInputs["registeredModelId"] = undefined /*out*/; } opts = pulumi.mergeOptions(utilities.resourceOptsDefaults(), opts); super(MlflowModel.__pulumiType, name, resourceInputs, opts); @@ -144,6 +147,7 @@ export interface MlflowModelState { * Name of MLflow model. Change of name triggers new resource. */ name?: pulumi.Input; + registeredModelId?: pulumi.Input; /** * Tags for the MLflow model. */ diff --git a/sdk/nodejs/modelServing.ts b/sdk/nodejs/modelServing.ts index 0e21f044..fdcaf93d 100644 --- a/sdk/nodejs/modelServing.ts +++ b/sdk/nodejs/modelServing.ts @@ -109,6 +109,7 @@ export class ModelServing extends pulumi.CustomResource { * Unique identifier of the serving endpoint primarily used to set permissions and refer to this instance for other operations. */ public /*out*/ readonly servingEndpointId!: pulumi.Output; + public readonly tags!: pulumi.Output; /** * Create a ModelServing resource with the given unique name, arguments, and options. @@ -126,6 +127,7 @@ export class ModelServing extends pulumi.CustomResource { resourceInputs["config"] = state ? state.config : undefined; resourceInputs["name"] = state ? state.name : undefined; resourceInputs["servingEndpointId"] = state ? state.servingEndpointId : undefined; + resourceInputs["tags"] = state ? state.tags : undefined; } else { const args = argsOrState as ModelServingArgs | undefined; if ((!args || args.config === undefined) && !opts.urn) { @@ -133,6 +135,7 @@ export class ModelServing extends pulumi.CustomResource { } resourceInputs["config"] = args ? args.config : undefined; resourceInputs["name"] = args ? args.name : undefined; + resourceInputs["tags"] = args ? args.tags : undefined; resourceInputs["servingEndpointId"] = undefined /*out*/; } opts = pulumi.mergeOptions(utilities.resourceOptsDefaults(), opts); @@ -156,6 +159,7 @@ export interface ModelServingState { * Unique identifier of the serving endpoint primarily used to set permissions and refer to this instance for other operations. */ servingEndpointId?: pulumi.Input; + tags?: pulumi.Input[]>; } /** @@ -170,4 +174,5 @@ export interface ModelServingArgs { * The name of the model serving endpoint. This field is required and must be unique across a workspace. An endpoint name can consist of alphanumeric characters, dashes, and underscores. NOTE: Changing this name will delete the existing endpoint and create a new endpoint with the update name. */ name?: pulumi.Input; + tags?: pulumi.Input[]>; } diff --git a/sdk/nodejs/storageCredential.ts b/sdk/nodejs/storageCredential.ts index 1b2e7189..b7041712 100644 --- a/sdk/nodejs/storageCredential.ts +++ b/sdk/nodejs/storageCredential.ts @@ -113,6 +113,11 @@ export class StorageCredential extends pulumi.CustomResource { public readonly azureServicePrincipal!: pulumi.Output; public readonly comment!: pulumi.Output; public readonly databricksGcpServiceAccount!: pulumi.Output; + /** + * Delete storage credential regardless of its dependencies. + * + * `awsIamRole` optional configuration block for credential details for AWS: + */ public readonly forceDestroy!: pulumi.Output; public readonly gcpServiceAccountKey!: pulumi.Output; /** @@ -125,12 +130,12 @@ export class StorageCredential extends pulumi.CustomResource { public readonly name!: pulumi.Output; /** * Username/groupname/sp applicationId of the storage credential owner. 
- * - * `awsIamRole` optional configuration block for credential details for AWS: */ public readonly owner!: pulumi.Output; /** * Indicates whether the storage credential is only usable for read operations. + * + * `azureServicePrincipal` optional configuration block to use service principal as credential details for Azure (Legacy): */ public readonly readOnly!: pulumi.Output; @@ -186,6 +191,11 @@ export interface StorageCredentialState { azureServicePrincipal?: pulumi.Input; comment?: pulumi.Input; databricksGcpServiceAccount?: pulumi.Input; + /** + * Delete storage credential regardless of its dependencies. + * + * `awsIamRole` optional configuration block for credential details for AWS: + */ forceDestroy?: pulumi.Input; gcpServiceAccountKey?: pulumi.Input; /** @@ -198,12 +208,12 @@ export interface StorageCredentialState { name?: pulumi.Input; /** * Username/groupname/sp applicationId of the storage credential owner. - * - * `awsIamRole` optional configuration block for credential details for AWS: */ owner?: pulumi.Input; /** * Indicates whether the storage credential is only usable for read operations. + * + * `azureServicePrincipal` optional configuration block to use service principal as credential details for Azure (Legacy): */ readOnly?: pulumi.Input; } @@ -217,6 +227,11 @@ export interface StorageCredentialArgs { azureServicePrincipal?: pulumi.Input; comment?: pulumi.Input; databricksGcpServiceAccount?: pulumi.Input; + /** + * Delete storage credential regardless of its dependencies. + * + * `awsIamRole` optional configuration block for credential details for AWS: + */ forceDestroy?: pulumi.Input; gcpServiceAccountKey?: pulumi.Input; /** @@ -229,12 +244,12 @@ export interface StorageCredentialArgs { name?: pulumi.Input; /** * Username/groupname/sp applicationId of the storage credential owner. - * - * `awsIamRole` optional configuration block for credential details for AWS: */ owner?: pulumi.Input; /** * Indicates whether the storage credential is only usable for read operations. + * + * `azureServicePrincipal` optional configuration block to use service principal as credential details for Azure (Legacy): */ readOnly?: pulumi.Input; } diff --git a/sdk/nodejs/types/input.ts b/sdk/nodejs/types/input.ts index 2c59d021..2ca64569 100644 --- a/sdk/nodejs/types/input.ts +++ b/sdk/nodejs/types/input.ts @@ -4438,6 +4438,7 @@ export interface MetastoreDataAccessAzureServicePrincipal { } export interface MetastoreDataAccessDatabricksGcpServiceAccount { + credentialId?: pulumi.Input; /** * The email of the GCP service account created, to be granted access to relevant buckets. * @@ -4556,6 +4557,11 @@ export interface ModelServingConfigTrafficConfigRoute { trafficPercentage: pulumi.Input; } +export interface ModelServingTag { + key: pulumi.Input; + value?: pulumi.Input; +} + export interface MountAbfs { clientId: pulumi.Input; clientSecretKey: pulumi.Input; @@ -5249,7 +5255,7 @@ export interface StorageCredentialAzureManagedIdentity { /** * The Resource ID of the Azure User Assigned Managed Identity associated with Azure Databricks Access Connector, of the form `/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg-name/providers/Microsoft.ManagedIdentity/userAssignedIdentities/user-managed-identity-name`. 
* - * `azureServicePrincipal` optional configuration block to use service principal as credential details for Azure: + * `databricksGcpServiceAccount` optional configuration block for creating a Databricks-managed GCP Service Account: */ managedIdentityId?: pulumi.Input; } @@ -5261,8 +5267,6 @@ export interface StorageCredentialAzureServicePrincipal { applicationId: pulumi.Input; /** * The client secret generated for the above app ID in AAD. **This field is redacted on output** - * - * `databricksGcpServiceAccount` optional configuration block for creating a Databricks-managed GCP Service Account: */ clientSecret: pulumi.Input; /** @@ -5272,6 +5276,7 @@ export interface StorageCredentialAzureServicePrincipal { } export interface StorageCredentialDatabricksGcpServiceAccount { + credentialId?: pulumi.Input; /** * The email of the GCP service account created, to be granted access to relevant buckets. */ diff --git a/sdk/nodejs/types/output.ts b/sdk/nodejs/types/output.ts index d62cf3d7..0924a15c 100644 --- a/sdk/nodejs/types/output.ts +++ b/sdk/nodejs/types/output.ts @@ -3128,12 +3128,13 @@ export interface MetastoreDataAccessAzureServicePrincipal { } export interface MetastoreDataAccessDatabricksGcpServiceAccount { + credentialId?: string; /** * The email of the GCP service account created, to be granted access to relevant buckets. * * `azureServicePrincipal` optional configuration block for credential details for Azure (Legacy): */ - email: string; + email?: string; } export interface MetastoreDataAccessGcpServiceAccountKey { @@ -3246,6 +3247,11 @@ export interface ModelServingConfigTrafficConfigRoute { trafficPercentage: number; } +export interface ModelServingTag { + key: string; + value?: string; +} + export interface MountAbfs { clientId: string; clientSecretKey: string; @@ -3939,7 +3945,7 @@ export interface StorageCredentialAzureManagedIdentity { /** * The Resource ID of the Azure User Assigned Managed Identity associated with Azure Databricks Access Connector, of the form `/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg-name/providers/Microsoft.ManagedIdentity/userAssignedIdentities/user-managed-identity-name`. * - * `azureServicePrincipal` optional configuration block to use service principal as credential details for Azure: + * `databricksGcpServiceAccount` optional configuration block for creating a Databricks-managed GCP Service Account: */ managedIdentityId?: string; } @@ -3951,8 +3957,6 @@ export interface StorageCredentialAzureServicePrincipal { applicationId: string; /** * The client secret generated for the above app ID in AAD. **This field is redacted on output** - * - * `databricksGcpServiceAccount` optional configuration block for creating a Databricks-managed GCP Service Account: */ clientSecret: string; /** @@ -3962,10 +3966,11 @@ export interface StorageCredentialAzureServicePrincipal { } export interface StorageCredentialDatabricksGcpServiceAccount { + credentialId?: string; /** * The email of the GCP service account created, to be granted access to relevant buckets. 
*/ - email: string; + email?: string; } export interface StorageCredentialGcpServiceAccountKey { diff --git a/sdk/python/pulumi_databricks/_inputs.py b/sdk/python/pulumi_databricks/_inputs.py index 0f200b2a..f19db595 100644 --- a/sdk/python/pulumi_databricks/_inputs.py +++ b/sdk/python/pulumi_databricks/_inputs.py @@ -190,6 +190,7 @@ 'ModelServingConfigServedModelArgs', 'ModelServingConfigTrafficConfigArgs', 'ModelServingConfigTrafficConfigRouteArgs', + 'ModelServingTagArgs', 'MountAbfsArgs', 'MountAdlArgs', 'MountGsArgs', @@ -9319,15 +9320,27 @@ def directory_id(self, value: pulumi.Input[str]): @pulumi.input_type class MetastoreDataAccessDatabricksGcpServiceAccountArgs: def __init__(__self__, *, + credential_id: Optional[pulumi.Input[str]] = None, email: Optional[pulumi.Input[str]] = None): """ :param pulumi.Input[str] email: The email of the GCP service account created, to be granted access to relevant buckets. `azure_service_principal` optional configuration block for credential details for Azure (Legacy): """ + if credential_id is not None: + pulumi.set(__self__, "credential_id", credential_id) if email is not None: pulumi.set(__self__, "email", email) + @property + @pulumi.getter(name="credentialId") + def credential_id(self) -> Optional[pulumi.Input[str]]: + return pulumi.get(self, "credential_id") + + @credential_id.setter + def credential_id(self, value: Optional[pulumi.Input[str]]): + pulumi.set(self, "credential_id", value) + @property @pulumi.getter def email(self) -> Optional[pulumi.Input[str]]: @@ -9757,6 +9770,34 @@ def traffic_percentage(self, value: pulumi.Input[int]): pulumi.set(self, "traffic_percentage", value) +@pulumi.input_type +class ModelServingTagArgs: + def __init__(__self__, *, + key: pulumi.Input[str], + value: Optional[pulumi.Input[str]] = None): + pulumi.set(__self__, "key", key) + if value is not None: + pulumi.set(__self__, "value", value) + + @property + @pulumi.getter + def key(self) -> pulumi.Input[str]: + return pulumi.get(self, "key") + + @key.setter + def key(self, value: pulumi.Input[str]): + pulumi.set(self, "key", value) + + @property + @pulumi.getter + def value(self) -> Optional[pulumi.Input[str]]: + return pulumi.get(self, "value") + + @value.setter + def value(self, value: Optional[pulumi.Input[str]]): + pulumi.set(self, "value", value) + + @pulumi.input_type class MountAbfsArgs: def __init__(__self__, *, @@ -13458,7 +13499,7 @@ def __init__(__self__, *, :param pulumi.Input[str] access_connector_id: The Resource ID of the Azure Databricks Access Connector resource, of the form `/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg-name/providers/Microsoft.Databricks/accessConnectors/connector-name`. :param pulumi.Input[str] managed_identity_id: The Resource ID of the Azure User Assigned Managed Identity associated with Azure Databricks Access Connector, of the form `/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg-name/providers/Microsoft.ManagedIdentity/userAssignedIdentities/user-managed-identity-name`. 
- `azure_service_principal` optional configuration block to use service principal as credential details for Azure: + `databricks_gcp_service_account` optional configuration block for creating a Databricks-managed GCP Service Account: """ pulumi.set(__self__, "access_connector_id", access_connector_id) if credential_id is not None: @@ -13493,7 +13534,7 @@ def managed_identity_id(self) -> Optional[pulumi.Input[str]]: """ The Resource ID of the Azure User Assigned Managed Identity associated with Azure Databricks Access Connector, of the form `/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg-name/providers/Microsoft.ManagedIdentity/userAssignedIdentities/user-managed-identity-name`. - `azure_service_principal` optional configuration block to use service principal as credential details for Azure: + `databricks_gcp_service_account` optional configuration block for creating a Databricks-managed GCP Service Account: """ return pulumi.get(self, "managed_identity_id") @@ -13511,8 +13552,6 @@ def __init__(__self__, *, """ :param pulumi.Input[str] application_id: The application ID of the application registration within the referenced AAD tenant :param pulumi.Input[str] client_secret: The client secret generated for the above app ID in AAD. **This field is redacted on output** - - `databricks_gcp_service_account` optional configuration block for creating a Databricks-managed GCP Service Account: :param pulumi.Input[str] directory_id: The directory ID corresponding to the Azure Active Directory (AAD) tenant of the application """ pulumi.set(__self__, "application_id", application_id) @@ -13536,8 +13575,6 @@ def application_id(self, value: pulumi.Input[str]): def client_secret(self) -> pulumi.Input[str]: """ The client secret generated for the above app ID in AAD. **This field is redacted on output** - - `databricks_gcp_service_account` optional configuration block for creating a Databricks-managed GCP Service Account: """ return pulumi.get(self, "client_secret") @@ -13561,13 +13598,25 @@ def directory_id(self, value: pulumi.Input[str]): @pulumi.input_type class StorageCredentialDatabricksGcpServiceAccountArgs: def __init__(__self__, *, + credential_id: Optional[pulumi.Input[str]] = None, email: Optional[pulumi.Input[str]] = None): """ :param pulumi.Input[str] email: The email of the GCP service account created, to be granted access to relevant buckets. 
""" + if credential_id is not None: + pulumi.set(__self__, "credential_id", credential_id) if email is not None: pulumi.set(__self__, "email", email) + @property + @pulumi.getter(name="credentialId") + def credential_id(self) -> Optional[pulumi.Input[str]]: + return pulumi.get(self, "credential_id") + + @credential_id.setter + def credential_id(self, value: Optional[pulumi.Input[str]]): + pulumi.set(self, "credential_id", value) + @property @pulumi.getter def email(self) -> Optional[pulumi.Input[str]]: diff --git a/sdk/python/pulumi_databricks/cluster.py b/sdk/python/pulumi_databricks/cluster.py index f2d5de6a..56598ab1 100644 --- a/sdk/python/pulumi_databricks/cluster.py +++ b/sdk/python/pulumi_databricks/cluster.py @@ -110,6 +110,9 @@ def __init__(__self__, *, pulumi.set(__self__, "cluster_id", cluster_id) if cluster_log_conf is not None: pulumi.set(__self__, "cluster_log_conf", cluster_log_conf) + if cluster_mount_infos is not None: + warnings.warn("""cluster_mount_info block is deprecated due the Clusters API changes.""", DeprecationWarning) + pulumi.log.warn("""cluster_mount_infos is deprecated: cluster_mount_info block is deprecated due the Clusters API changes.""") if cluster_mount_infos is not None: pulumi.set(__self__, "cluster_mount_infos", cluster_mount_infos) if cluster_name is not None: @@ -243,6 +246,9 @@ def cluster_log_conf(self, value: Optional[pulumi.Input['ClusterClusterLogConfAr @property @pulumi.getter(name="clusterMountInfos") def cluster_mount_infos(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['ClusterClusterMountInfoArgs']]]]: + warnings.warn("""cluster_mount_info block is deprecated due the Clusters API changes.""", DeprecationWarning) + pulumi.log.warn("""cluster_mount_infos is deprecated: cluster_mount_info block is deprecated due the Clusters API changes.""") + return pulumi.get(self, "cluster_mount_infos") @cluster_mount_infos.setter @@ -633,6 +639,9 @@ def __init__(__self__, *, pulumi.set(__self__, "cluster_id", cluster_id) if cluster_log_conf is not None: pulumi.set(__self__, "cluster_log_conf", cluster_log_conf) + if cluster_mount_infos is not None: + warnings.warn("""cluster_mount_info block is deprecated due the Clusters API changes.""", DeprecationWarning) + pulumi.log.warn("""cluster_mount_infos is deprecated: cluster_mount_info block is deprecated due the Clusters API changes.""") if cluster_mount_infos is not None: pulumi.set(__self__, "cluster_mount_infos", cluster_mount_infos) if cluster_name is not None: @@ -762,6 +771,9 @@ def cluster_log_conf(self, value: Optional[pulumi.Input['ClusterClusterLogConfAr @property @pulumi.getter(name="clusterMountInfos") def cluster_mount_infos(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['ClusterClusterMountInfoArgs']]]]: + warnings.warn("""cluster_mount_info block is deprecated due the Clusters API changes.""", DeprecationWarning) + pulumi.log.warn("""cluster_mount_infos is deprecated: cluster_mount_info block is deprecated due the Clusters API changes.""") + return pulumi.get(self, "cluster_mount_infos") @cluster_mount_infos.setter @@ -1268,6 +1280,9 @@ def _internal_init(__self__, __props__.__dict__["azure_attributes"] = azure_attributes __props__.__dict__["cluster_id"] = cluster_id __props__.__dict__["cluster_log_conf"] = cluster_log_conf + if cluster_mount_infos is not None and not opts.urn: + warnings.warn("""cluster_mount_info block is deprecated due the Clusters API changes.""", DeprecationWarning) + pulumi.log.warn("""cluster_mount_infos is deprecated: cluster_mount_info block is deprecated 
due the Clusters API changes.""") __props__.__dict__["cluster_mount_infos"] = cluster_mount_infos __props__.__dict__["cluster_name"] = cluster_name __props__.__dict__["custom_tags"] = custom_tags @@ -1482,6 +1497,9 @@ def cluster_log_conf(self) -> pulumi.Output[Optional['outputs.ClusterClusterLogC @property @pulumi.getter(name="clusterMountInfos") def cluster_mount_infos(self) -> pulumi.Output[Optional[Sequence['outputs.ClusterClusterMountInfo']]]: + warnings.warn("""cluster_mount_info block is deprecated due the Clusters API changes.""", DeprecationWarning) + pulumi.log.warn("""cluster_mount_infos is deprecated: cluster_mount_info block is deprecated due the Clusters API changes.""") + return pulumi.get(self, "cluster_mount_infos") @property diff --git a/sdk/python/pulumi_databricks/metastore_data_access.py b/sdk/python/pulumi_databricks/metastore_data_access.py index ed249c45..fbed4572 100644 --- a/sdk/python/pulumi_databricks/metastore_data_access.py +++ b/sdk/python/pulumi_databricks/metastore_data_access.py @@ -20,17 +20,22 @@ def __init__(__self__, *, aws_iam_role: Optional[pulumi.Input['MetastoreDataAccessAwsIamRoleArgs']] = None, azure_managed_identity: Optional[pulumi.Input['MetastoreDataAccessAzureManagedIdentityArgs']] = None, azure_service_principal: Optional[pulumi.Input['MetastoreDataAccessAzureServicePrincipalArgs']] = None, - configuration_type: Optional[pulumi.Input[str]] = None, + comment: Optional[pulumi.Input[str]] = None, databricks_gcp_service_account: Optional[pulumi.Input['MetastoreDataAccessDatabricksGcpServiceAccountArgs']] = None, + force_destroy: Optional[pulumi.Input[bool]] = None, gcp_service_account_key: Optional[pulumi.Input['MetastoreDataAccessGcpServiceAccountKeyArgs']] = None, is_default: Optional[pulumi.Input[bool]] = None, - name: Optional[pulumi.Input[str]] = None): + name: Optional[pulumi.Input[str]] = None, + owner: Optional[pulumi.Input[str]] = None, + read_only: Optional[pulumi.Input[bool]] = None): """ The set of arguments for constructing a MetastoreDataAccess resource. :param pulumi.Input[str] metastore_id: Unique identifier of the parent Metastore + :param pulumi.Input[bool] force_destroy: Delete the data access configuration regardless of its dependencies. `aws_iam_role` optional configuration block for credential details for AWS: :param pulumi.Input[str] name: Name of Data Access Configuration, which must be unique within the databricks_metastore. Change forces creation of a new resource. + :param pulumi.Input[str] owner: Username/groupname/sp application_id of the data access configuration owner. 
""" pulumi.set(__self__, "metastore_id", metastore_id) if aws_iam_role is not None: @@ -39,24 +44,28 @@ def __init__(__self__, *, pulumi.set(__self__, "azure_managed_identity", azure_managed_identity) if azure_service_principal is not None: pulumi.set(__self__, "azure_service_principal", azure_service_principal) - if configuration_type is not None: - pulumi.set(__self__, "configuration_type", configuration_type) + if comment is not None: + pulumi.set(__self__, "comment", comment) if databricks_gcp_service_account is not None: pulumi.set(__self__, "databricks_gcp_service_account", databricks_gcp_service_account) + if force_destroy is not None: + pulumi.set(__self__, "force_destroy", force_destroy) if gcp_service_account_key is not None: pulumi.set(__self__, "gcp_service_account_key", gcp_service_account_key) if is_default is not None: pulumi.set(__self__, "is_default", is_default) if name is not None: pulumi.set(__self__, "name", name) + if owner is not None: + pulumi.set(__self__, "owner", owner) + if read_only is not None: + pulumi.set(__self__, "read_only", read_only) @property @pulumi.getter(name="metastoreId") def metastore_id(self) -> pulumi.Input[str]: """ Unique identifier of the parent Metastore - - `aws_iam_role` optional configuration block for credential details for AWS: """ return pulumi.get(self, "metastore_id") @@ -92,13 +101,13 @@ def azure_service_principal(self, value: Optional[pulumi.Input['MetastoreDataAcc pulumi.set(self, "azure_service_principal", value) @property - @pulumi.getter(name="configurationType") - def configuration_type(self) -> Optional[pulumi.Input[str]]: - return pulumi.get(self, "configuration_type") + @pulumi.getter + def comment(self) -> Optional[pulumi.Input[str]]: + return pulumi.get(self, "comment") - @configuration_type.setter - def configuration_type(self, value: Optional[pulumi.Input[str]]): - pulumi.set(self, "configuration_type", value) + @comment.setter + def comment(self, value: Optional[pulumi.Input[str]]): + pulumi.set(self, "comment", value) @property @pulumi.getter(name="databricksGcpServiceAccount") @@ -109,6 +118,20 @@ def databricks_gcp_service_account(self) -> Optional[pulumi.Input['MetastoreData def databricks_gcp_service_account(self, value: Optional[pulumi.Input['MetastoreDataAccessDatabricksGcpServiceAccountArgs']]): pulumi.set(self, "databricks_gcp_service_account", value) + @property + @pulumi.getter(name="forceDestroy") + def force_destroy(self) -> Optional[pulumi.Input[bool]]: + """ + Delete the data access configuration regardless of its dependencies. + + `aws_iam_role` optional configuration block for credential details for AWS: + """ + return pulumi.get(self, "force_destroy") + + @force_destroy.setter + def force_destroy(self, value: Optional[pulumi.Input[bool]]): + pulumi.set(self, "force_destroy", value) + @property @pulumi.getter(name="gcpServiceAccountKey") def gcp_service_account_key(self) -> Optional[pulumi.Input['MetastoreDataAccessGcpServiceAccountKeyArgs']]: @@ -139,6 +162,27 @@ def name(self) -> Optional[pulumi.Input[str]]: def name(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "name", value) + @property + @pulumi.getter + def owner(self) -> Optional[pulumi.Input[str]]: + """ + Username/groupname/sp application_id of the data access configuration owner. 
+ """ + return pulumi.get(self, "owner") + + @owner.setter + def owner(self, value: Optional[pulumi.Input[str]]): + pulumi.set(self, "owner", value) + + @property + @pulumi.getter(name="readOnly") + def read_only(self) -> Optional[pulumi.Input[bool]]: + return pulumi.get(self, "read_only") + + @read_only.setter + def read_only(self, value: Optional[pulumi.Input[bool]]): + pulumi.set(self, "read_only", value) + @pulumi.input_type class _MetastoreDataAccessState: @@ -146,18 +190,23 @@ def __init__(__self__, *, aws_iam_role: Optional[pulumi.Input['MetastoreDataAccessAwsIamRoleArgs']] = None, azure_managed_identity: Optional[pulumi.Input['MetastoreDataAccessAzureManagedIdentityArgs']] = None, azure_service_principal: Optional[pulumi.Input['MetastoreDataAccessAzureServicePrincipalArgs']] = None, - configuration_type: Optional[pulumi.Input[str]] = None, + comment: Optional[pulumi.Input[str]] = None, databricks_gcp_service_account: Optional[pulumi.Input['MetastoreDataAccessDatabricksGcpServiceAccountArgs']] = None, + force_destroy: Optional[pulumi.Input[bool]] = None, gcp_service_account_key: Optional[pulumi.Input['MetastoreDataAccessGcpServiceAccountKeyArgs']] = None, is_default: Optional[pulumi.Input[bool]] = None, metastore_id: Optional[pulumi.Input[str]] = None, - name: Optional[pulumi.Input[str]] = None): + name: Optional[pulumi.Input[str]] = None, + owner: Optional[pulumi.Input[str]] = None, + read_only: Optional[pulumi.Input[bool]] = None): """ Input properties used for looking up and filtering MetastoreDataAccess resources. - :param pulumi.Input[str] metastore_id: Unique identifier of the parent Metastore + :param pulumi.Input[bool] force_destroy: Delete the data access configuration regardless of its dependencies. `aws_iam_role` optional configuration block for credential details for AWS: + :param pulumi.Input[str] metastore_id: Unique identifier of the parent Metastore :param pulumi.Input[str] name: Name of Data Access Configuration, which must be unique within the databricks_metastore. Change forces creation of a new resource. + :param pulumi.Input[str] owner: Username/groupname/sp application_id of the data access configuration owner. 
""" if aws_iam_role is not None: pulumi.set(__self__, "aws_iam_role", aws_iam_role) @@ -165,10 +214,12 @@ def __init__(__self__, *, pulumi.set(__self__, "azure_managed_identity", azure_managed_identity) if azure_service_principal is not None: pulumi.set(__self__, "azure_service_principal", azure_service_principal) - if configuration_type is not None: - pulumi.set(__self__, "configuration_type", configuration_type) + if comment is not None: + pulumi.set(__self__, "comment", comment) if databricks_gcp_service_account is not None: pulumi.set(__self__, "databricks_gcp_service_account", databricks_gcp_service_account) + if force_destroy is not None: + pulumi.set(__self__, "force_destroy", force_destroy) if gcp_service_account_key is not None: pulumi.set(__self__, "gcp_service_account_key", gcp_service_account_key) if is_default is not None: @@ -177,6 +228,10 @@ def __init__(__self__, *, pulumi.set(__self__, "metastore_id", metastore_id) if name is not None: pulumi.set(__self__, "name", name) + if owner is not None: + pulumi.set(__self__, "owner", owner) + if read_only is not None: + pulumi.set(__self__, "read_only", read_only) @property @pulumi.getter(name="awsIamRole") @@ -206,13 +261,13 @@ def azure_service_principal(self, value: Optional[pulumi.Input['MetastoreDataAcc pulumi.set(self, "azure_service_principal", value) @property - @pulumi.getter(name="configurationType") - def configuration_type(self) -> Optional[pulumi.Input[str]]: - return pulumi.get(self, "configuration_type") + @pulumi.getter + def comment(self) -> Optional[pulumi.Input[str]]: + return pulumi.get(self, "comment") - @configuration_type.setter - def configuration_type(self, value: Optional[pulumi.Input[str]]): - pulumi.set(self, "configuration_type", value) + @comment.setter + def comment(self, value: Optional[pulumi.Input[str]]): + pulumi.set(self, "comment", value) @property @pulumi.getter(name="databricksGcpServiceAccount") @@ -223,6 +278,20 @@ def databricks_gcp_service_account(self) -> Optional[pulumi.Input['MetastoreData def databricks_gcp_service_account(self, value: Optional[pulumi.Input['MetastoreDataAccessDatabricksGcpServiceAccountArgs']]): pulumi.set(self, "databricks_gcp_service_account", value) + @property + @pulumi.getter(name="forceDestroy") + def force_destroy(self) -> Optional[pulumi.Input[bool]]: + """ + Delete the data access configuration regardless of its dependencies. + + `aws_iam_role` optional configuration block for credential details for AWS: + """ + return pulumi.get(self, "force_destroy") + + @force_destroy.setter + def force_destroy(self, value: Optional[pulumi.Input[bool]]): + pulumi.set(self, "force_destroy", value) + @property @pulumi.getter(name="gcpServiceAccountKey") def gcp_service_account_key(self) -> Optional[pulumi.Input['MetastoreDataAccessGcpServiceAccountKeyArgs']]: @@ -246,8 +315,6 @@ def is_default(self, value: Optional[pulumi.Input[bool]]): def metastore_id(self) -> Optional[pulumi.Input[str]]: """ Unique identifier of the parent Metastore - - `aws_iam_role` optional configuration block for credential details for AWS: """ return pulumi.get(self, "metastore_id") @@ -267,6 +334,27 @@ def name(self) -> Optional[pulumi.Input[str]]: def name(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "name", value) + @property + @pulumi.getter + def owner(self) -> Optional[pulumi.Input[str]]: + """ + Username/groupname/sp application_id of the data access configuration owner. 
+ """ + return pulumi.get(self, "owner") + + @owner.setter + def owner(self, value: Optional[pulumi.Input[str]]): + pulumi.set(self, "owner", value) + + @property + @pulumi.getter(name="readOnly") + def read_only(self) -> Optional[pulumi.Input[bool]]: + return pulumi.get(self, "read_only") + + @read_only.setter + def read_only(self, value: Optional[pulumi.Input[bool]]): + pulumi.set(self, "read_only", value) + class MetastoreDataAccess(pulumi.CustomResource): @overload @@ -276,12 +364,15 @@ def __init__(__self__, aws_iam_role: Optional[pulumi.Input[pulumi.InputType['MetastoreDataAccessAwsIamRoleArgs']]] = None, azure_managed_identity: Optional[pulumi.Input[pulumi.InputType['MetastoreDataAccessAzureManagedIdentityArgs']]] = None, azure_service_principal: Optional[pulumi.Input[pulumi.InputType['MetastoreDataAccessAzureServicePrincipalArgs']]] = None, - configuration_type: Optional[pulumi.Input[str]] = None, + comment: Optional[pulumi.Input[str]] = None, databricks_gcp_service_account: Optional[pulumi.Input[pulumi.InputType['MetastoreDataAccessDatabricksGcpServiceAccountArgs']]] = None, + force_destroy: Optional[pulumi.Input[bool]] = None, gcp_service_account_key: Optional[pulumi.Input[pulumi.InputType['MetastoreDataAccessGcpServiceAccountKeyArgs']]] = None, is_default: Optional[pulumi.Input[bool]] = None, metastore_id: Optional[pulumi.Input[str]] = None, name: Optional[pulumi.Input[str]] = None, + owner: Optional[pulumi.Input[str]] = None, + read_only: Optional[pulumi.Input[bool]] = None, __props__=None): """ Each Metastore requires an IAM role that will be assumed by Unity Catalog to access data. `MetastoreDataAccess` defines this @@ -296,10 +387,12 @@ def __init__(__self__, :param str resource_name: The name of the resource. :param pulumi.ResourceOptions opts: Options for the resource. - :param pulumi.Input[str] metastore_id: Unique identifier of the parent Metastore + :param pulumi.Input[bool] force_destroy: Delete the data access configuration regardless of its dependencies. `aws_iam_role` optional configuration block for credential details for AWS: + :param pulumi.Input[str] metastore_id: Unique identifier of the parent Metastore :param pulumi.Input[str] name: Name of Data Access Configuration, which must be unique within the databricks_metastore. Change forces creation of a new resource. + :param pulumi.Input[str] owner: Username/groupname/sp application_id of the data access configuration owner. """ ... 
@overload @@ -336,12 +429,15 @@ def _internal_init(__self__, aws_iam_role: Optional[pulumi.Input[pulumi.InputType['MetastoreDataAccessAwsIamRoleArgs']]] = None, azure_managed_identity: Optional[pulumi.Input[pulumi.InputType['MetastoreDataAccessAzureManagedIdentityArgs']]] = None, azure_service_principal: Optional[pulumi.Input[pulumi.InputType['MetastoreDataAccessAzureServicePrincipalArgs']]] = None, - configuration_type: Optional[pulumi.Input[str]] = None, + comment: Optional[pulumi.Input[str]] = None, databricks_gcp_service_account: Optional[pulumi.Input[pulumi.InputType['MetastoreDataAccessDatabricksGcpServiceAccountArgs']]] = None, + force_destroy: Optional[pulumi.Input[bool]] = None, gcp_service_account_key: Optional[pulumi.Input[pulumi.InputType['MetastoreDataAccessGcpServiceAccountKeyArgs']]] = None, is_default: Optional[pulumi.Input[bool]] = None, metastore_id: Optional[pulumi.Input[str]] = None, name: Optional[pulumi.Input[str]] = None, + owner: Optional[pulumi.Input[str]] = None, + read_only: Optional[pulumi.Input[bool]] = None, __props__=None): opts = pulumi.ResourceOptions.merge(_utilities.get_resource_opts_defaults(), opts) if not isinstance(opts, pulumi.ResourceOptions): @@ -354,14 +450,17 @@ def _internal_init(__self__, __props__.__dict__["aws_iam_role"] = aws_iam_role __props__.__dict__["azure_managed_identity"] = azure_managed_identity __props__.__dict__["azure_service_principal"] = azure_service_principal - __props__.__dict__["configuration_type"] = configuration_type + __props__.__dict__["comment"] = comment __props__.__dict__["databricks_gcp_service_account"] = databricks_gcp_service_account + __props__.__dict__["force_destroy"] = force_destroy __props__.__dict__["gcp_service_account_key"] = gcp_service_account_key __props__.__dict__["is_default"] = is_default if metastore_id is None and not opts.urn: raise TypeError("Missing required property 'metastore_id'") __props__.__dict__["metastore_id"] = metastore_id __props__.__dict__["name"] = name + __props__.__dict__["owner"] = owner + __props__.__dict__["read_only"] = read_only super(MetastoreDataAccess, __self__).__init__( 'databricks:index/metastoreDataAccess:MetastoreDataAccess', resource_name, @@ -375,12 +474,15 @@ def get(resource_name: str, aws_iam_role: Optional[pulumi.Input[pulumi.InputType['MetastoreDataAccessAwsIamRoleArgs']]] = None, azure_managed_identity: Optional[pulumi.Input[pulumi.InputType['MetastoreDataAccessAzureManagedIdentityArgs']]] = None, azure_service_principal: Optional[pulumi.Input[pulumi.InputType['MetastoreDataAccessAzureServicePrincipalArgs']]] = None, - configuration_type: Optional[pulumi.Input[str]] = None, + comment: Optional[pulumi.Input[str]] = None, databricks_gcp_service_account: Optional[pulumi.Input[pulumi.InputType['MetastoreDataAccessDatabricksGcpServiceAccountArgs']]] = None, + force_destroy: Optional[pulumi.Input[bool]] = None, gcp_service_account_key: Optional[pulumi.Input[pulumi.InputType['MetastoreDataAccessGcpServiceAccountKeyArgs']]] = None, is_default: Optional[pulumi.Input[bool]] = None, metastore_id: Optional[pulumi.Input[str]] = None, - name: Optional[pulumi.Input[str]] = None) -> 'MetastoreDataAccess': + name: Optional[pulumi.Input[str]] = None, + owner: Optional[pulumi.Input[str]] = None, + read_only: Optional[pulumi.Input[bool]] = None) -> 'MetastoreDataAccess': """ Get an existing MetastoreDataAccess resource's state with the given name, id, and optional extra properties used to qualify the lookup. 
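A minimal usage sketch of the reshaped MetastoreDataAccess arguments, inserted here for illustration and not part of the generated SDK: `configuration_type` is removed, while `comment`, `force_destroy`, `owner`, and `read_only` are now accepted alongside the existing inputs. The metastore UUID, IAM role ARN, and owner group below are placeholders, not values from this change.

import pulumi_databricks as databricks

dac = databricks.MetastoreDataAccess(
    "uc-data-access",
    metastore_id="11111111-2222-3333-4444-555555555555",  # placeholder metastore UUID
    name="uc-data-access",
    aws_iam_role=databricks.MetastoreDataAccessAwsIamRoleArgs(
        role_arn="arn:aws:iam::123456789012:role/uc-access",  # hypothetical IAM role
    ),
    comment="Managed by Pulumi",        # new optional input
    owner="data-platform-admins",       # new optional input: owning user/group/SP
    read_only=False,                    # new optional input
    force_destroy=True,                 # new optional input: delete despite dependencies
    is_default=True,
)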
@@ -388,10 +490,12 @@ def get(resource_name: str, :param str resource_name: The unique name of the resulting resource. :param pulumi.Input[str] id: The unique provider ID of the resource to lookup. :param pulumi.ResourceOptions opts: Options for the resource. - :param pulumi.Input[str] metastore_id: Unique identifier of the parent Metastore + :param pulumi.Input[bool] force_destroy: Delete the data access configuration regardless of its dependencies. `aws_iam_role` optional configuration block for credential details for AWS: + :param pulumi.Input[str] metastore_id: Unique identifier of the parent Metastore :param pulumi.Input[str] name: Name of Data Access Configuration, which must be unique within the databricks_metastore. Change forces creation of a new resource. + :param pulumi.Input[str] owner: Username/groupname/sp application_id of the data access configuration owner. """ opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id)) @@ -400,12 +504,15 @@ def get(resource_name: str, __props__.__dict__["aws_iam_role"] = aws_iam_role __props__.__dict__["azure_managed_identity"] = azure_managed_identity __props__.__dict__["azure_service_principal"] = azure_service_principal - __props__.__dict__["configuration_type"] = configuration_type + __props__.__dict__["comment"] = comment __props__.__dict__["databricks_gcp_service_account"] = databricks_gcp_service_account + __props__.__dict__["force_destroy"] = force_destroy __props__.__dict__["gcp_service_account_key"] = gcp_service_account_key __props__.__dict__["is_default"] = is_default __props__.__dict__["metastore_id"] = metastore_id __props__.__dict__["name"] = name + __props__.__dict__["owner"] = owner + __props__.__dict__["read_only"] = read_only return MetastoreDataAccess(resource_name, opts=opts, __props__=__props__) @property @@ -424,15 +531,25 @@ def azure_service_principal(self) -> pulumi.Output[Optional['outputs.MetastoreDa return pulumi.get(self, "azure_service_principal") @property - @pulumi.getter(name="configurationType") - def configuration_type(self) -> pulumi.Output[str]: - return pulumi.get(self, "configuration_type") + @pulumi.getter + def comment(self) -> pulumi.Output[Optional[str]]: + return pulumi.get(self, "comment") @property @pulumi.getter(name="databricksGcpServiceAccount") - def databricks_gcp_service_account(self) -> pulumi.Output[Optional['outputs.MetastoreDataAccessDatabricksGcpServiceAccount']]: + def databricks_gcp_service_account(self) -> pulumi.Output['outputs.MetastoreDataAccessDatabricksGcpServiceAccount']: return pulumi.get(self, "databricks_gcp_service_account") + @property + @pulumi.getter(name="forceDestroy") + def force_destroy(self) -> pulumi.Output[Optional[bool]]: + """ + Delete the data access configuration regardless of its dependencies. 
+ + `aws_iam_role` optional configuration block for credential details for AWS: + """ + return pulumi.get(self, "force_destroy") + @property @pulumi.getter(name="gcpServiceAccountKey") def gcp_service_account_key(self) -> pulumi.Output[Optional['outputs.MetastoreDataAccessGcpServiceAccountKey']]: @@ -448,8 +565,6 @@ def is_default(self) -> pulumi.Output[Optional[bool]]: def metastore_id(self) -> pulumi.Output[str]: """ Unique identifier of the parent Metastore - - `aws_iam_role` optional configuration block for credential details for AWS: """ return pulumi.get(self, "metastore_id") @@ -461,3 +576,16 @@ def name(self) -> pulumi.Output[str]: """ return pulumi.get(self, "name") + @property + @pulumi.getter + def owner(self) -> pulumi.Output[str]: + """ + Username/groupname/sp application_id of the data access configuration owner. + """ + return pulumi.get(self, "owner") + + @property + @pulumi.getter(name="readOnly") + def read_only(self) -> pulumi.Output[Optional[bool]]: + return pulumi.get(self, "read_only") + diff --git a/sdk/python/pulumi_databricks/mlflow_model.py b/sdk/python/pulumi_databricks/mlflow_model.py index f27c4cb5..390983aa 100644 --- a/sdk/python/pulumi_databricks/mlflow_model.py +++ b/sdk/python/pulumi_databricks/mlflow_model.py @@ -112,6 +112,7 @@ def __init__(__self__, *, description: Optional[pulumi.Input[str]] = None, last_updated_timestamp: Optional[pulumi.Input[int]] = None, name: Optional[pulumi.Input[str]] = None, + registered_model_id: Optional[pulumi.Input[str]] = None, tags: Optional[pulumi.Input[Sequence[pulumi.Input['MlflowModelTagArgs']]]] = None, user_id: Optional[pulumi.Input[str]] = None): """ @@ -128,6 +129,8 @@ def __init__(__self__, *, pulumi.set(__self__, "last_updated_timestamp", last_updated_timestamp) if name is not None: pulumi.set(__self__, "name", name) + if registered_model_id is not None: + pulumi.set(__self__, "registered_model_id", registered_model_id) if tags is not None: pulumi.set(__self__, "tags", tags) if user_id is not None: @@ -175,6 +178,15 @@ def name(self) -> Optional[pulumi.Input[str]]: def name(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "name", value) + @property + @pulumi.getter(name="registeredModelId") + def registered_model_id(self) -> Optional[pulumi.Input[str]]: + return pulumi.get(self, "registered_model_id") + + @registered_model_id.setter + def registered_model_id(self, value: Optional[pulumi.Input[str]]): + pulumi.set(self, "registered_model_id", value) + @property @pulumi.getter def tags(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['MlflowModelTagArgs']]]]: @@ -349,6 +361,7 @@ def _internal_init(__self__, __props__.__dict__["name"] = name __props__.__dict__["tags"] = tags __props__.__dict__["user_id"] = user_id + __props__.__dict__["registered_model_id"] = None super(MlflowModel, __self__).__init__( 'databricks:index/mlflowModel:MlflowModel', resource_name, @@ -363,6 +376,7 @@ def get(resource_name: str, description: Optional[pulumi.Input[str]] = None, last_updated_timestamp: Optional[pulumi.Input[int]] = None, name: Optional[pulumi.Input[str]] = None, + registered_model_id: Optional[pulumi.Input[str]] = None, tags: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MlflowModelTagArgs']]]]] = None, user_id: Optional[pulumi.Input[str]] = None) -> 'MlflowModel': """ @@ -384,6 +398,7 @@ def get(resource_name: str, __props__.__dict__["description"] = description __props__.__dict__["last_updated_timestamp"] = last_updated_timestamp __props__.__dict__["name"] = name + 
__props__.__dict__["registered_model_id"] = registered_model_id __props__.__dict__["tags"] = tags __props__.__dict__["user_id"] = user_id return MlflowModel(resource_name, opts=opts, __props__=__props__) @@ -414,6 +429,11 @@ def name(self) -> pulumi.Output[str]: """ return pulumi.get(self, "name") + @property + @pulumi.getter(name="registeredModelId") + def registered_model_id(self) -> pulumi.Output[str]: + return pulumi.get(self, "registered_model_id") + @property @pulumi.getter def tags(self) -> pulumi.Output[Optional[Sequence['outputs.MlflowModelTag']]]: diff --git a/sdk/python/pulumi_databricks/model_serving.py b/sdk/python/pulumi_databricks/model_serving.py index 7fb689f1..7f72f217 100644 --- a/sdk/python/pulumi_databricks/model_serving.py +++ b/sdk/python/pulumi_databricks/model_serving.py @@ -17,7 +17,8 @@ class ModelServingArgs: def __init__(__self__, *, config: pulumi.Input['ModelServingConfigArgs'], - name: Optional[pulumi.Input[str]] = None): + name: Optional[pulumi.Input[str]] = None, + tags: Optional[pulumi.Input[Sequence[pulumi.Input['ModelServingTagArgs']]]] = None): """ The set of arguments for constructing a ModelServing resource. :param pulumi.Input['ModelServingConfigArgs'] config: The model serving endpoint configuration. @@ -26,6 +27,8 @@ def __init__(__self__, *, pulumi.set(__self__, "config", config) if name is not None: pulumi.set(__self__, "name", name) + if tags is not None: + pulumi.set(__self__, "tags", tags) @property @pulumi.getter @@ -51,13 +54,23 @@ def name(self) -> Optional[pulumi.Input[str]]: def name(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "name", value) + @property + @pulumi.getter + def tags(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['ModelServingTagArgs']]]]: + return pulumi.get(self, "tags") + + @tags.setter + def tags(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['ModelServingTagArgs']]]]): + pulumi.set(self, "tags", value) + @pulumi.input_type class _ModelServingState: def __init__(__self__, *, config: Optional[pulumi.Input['ModelServingConfigArgs']] = None, name: Optional[pulumi.Input[str]] = None, - serving_endpoint_id: Optional[pulumi.Input[str]] = None): + serving_endpoint_id: Optional[pulumi.Input[str]] = None, + tags: Optional[pulumi.Input[Sequence[pulumi.Input['ModelServingTagArgs']]]] = None): """ Input properties used for looking up and filtering ModelServing resources. :param pulumi.Input['ModelServingConfigArgs'] config: The model serving endpoint configuration. 
@@ -70,6 +83,8 @@ def __init__(__self__, *, pulumi.set(__self__, "name", name) if serving_endpoint_id is not None: pulumi.set(__self__, "serving_endpoint_id", serving_endpoint_id) + if tags is not None: + pulumi.set(__self__, "tags", tags) @property @pulumi.getter @@ -107,6 +122,15 @@ def serving_endpoint_id(self) -> Optional[pulumi.Input[str]]: def serving_endpoint_id(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "serving_endpoint_id", value) + @property + @pulumi.getter + def tags(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['ModelServingTagArgs']]]]: + return pulumi.get(self, "tags") + + @tags.setter + def tags(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['ModelServingTagArgs']]]]): + pulumi.set(self, "tags", value) + class ModelServing(pulumi.CustomResource): @overload @@ -115,6 +139,7 @@ def __init__(__self__, opts: Optional[pulumi.ResourceOptions] = None, config: Optional[pulumi.Input[pulumi.InputType['ModelServingConfigArgs']]] = None, name: Optional[pulumi.Input[str]] = None, + tags: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ModelServingTagArgs']]]]] = None, __props__=None): """ This resource allows you to manage [Model Serving](https://docs.databricks.com/machine-learning/model-serving/index.html) endpoints in Databricks. @@ -270,6 +295,7 @@ def _internal_init(__self__, opts: Optional[pulumi.ResourceOptions] = None, config: Optional[pulumi.Input[pulumi.InputType['ModelServingConfigArgs']]] = None, name: Optional[pulumi.Input[str]] = None, + tags: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ModelServingTagArgs']]]]] = None, __props__=None): opts = pulumi.ResourceOptions.merge(_utilities.get_resource_opts_defaults(), opts) if not isinstance(opts, pulumi.ResourceOptions): @@ -283,6 +309,7 @@ def _internal_init(__self__, raise TypeError("Missing required property 'config'") __props__.__dict__["config"] = config __props__.__dict__["name"] = name + __props__.__dict__["tags"] = tags __props__.__dict__["serving_endpoint_id"] = None super(ModelServing, __self__).__init__( 'databricks:index/modelServing:ModelServing', @@ -296,7 +323,8 @@ def get(resource_name: str, opts: Optional[pulumi.ResourceOptions] = None, config: Optional[pulumi.Input[pulumi.InputType['ModelServingConfigArgs']]] = None, name: Optional[pulumi.Input[str]] = None, - serving_endpoint_id: Optional[pulumi.Input[str]] = None) -> 'ModelServing': + serving_endpoint_id: Optional[pulumi.Input[str]] = None, + tags: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ModelServingTagArgs']]]]] = None) -> 'ModelServing': """ Get an existing ModelServing resource's state with the given name, id, and optional extra properties used to qualify the lookup. 
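For the new `tags` input on ModelServing, a hedged sketch of how the generated args compose; the endpoint name, model name and version, and tag values are assumptions, and the served-model fields shown are the commonly used ones rather than an exhaustive set. Per the added `ModelServingTagArgs` type, `key` is required and `value` may be omitted.

import pulumi_databricks as databricks

endpoint = databricks.ModelServing(
    "reviews",
    name="reviews-endpoint",            # placeholder endpoint name
    config=databricks.ModelServingConfigArgs(
        served_models=[
            databricks.ModelServingConfigServedModelArgs(
                model_name="reviews-model",   # hypothetical registered model
                model_version="2",
                workload_size="Small",
                scale_to_zero_enabled=True,
            ),
        ],
    ),
    tags=[                               # new in this change
        databricks.ModelServingTagArgs(key="team", value="ml-platform"),
        databricks.ModelServingTagArgs(key="experimental"),  # value stays unset
    ],
)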
@@ -315,6 +343,7 @@ def get(resource_name: str, __props__.__dict__["config"] = config __props__.__dict__["name"] = name __props__.__dict__["serving_endpoint_id"] = serving_endpoint_id + __props__.__dict__["tags"] = tags return ModelServing(resource_name, opts=opts, __props__=__props__) @property @@ -341,3 +370,8 @@ def serving_endpoint_id(self) -> pulumi.Output[str]: """ return pulumi.get(self, "serving_endpoint_id") + @property + @pulumi.getter + def tags(self) -> pulumi.Output[Optional[Sequence['outputs.ModelServingTag']]]: + return pulumi.get(self, "tags") + diff --git a/sdk/python/pulumi_databricks/outputs.py b/sdk/python/pulumi_databricks/outputs.py index 70b6af68..3e15c949 100644 --- a/sdk/python/pulumi_databricks/outputs.py +++ b/sdk/python/pulumi_databricks/outputs.py @@ -191,6 +191,7 @@ 'ModelServingConfigServedModel', 'ModelServingConfigTrafficConfig', 'ModelServingConfigTrafficConfigRoute', + 'ModelServingTag', 'MountAbfs', 'MountAdl', 'MountGs', @@ -9026,16 +9027,41 @@ def directory_id(self) -> str: @pulumi.output_type class MetastoreDataAccessDatabricksGcpServiceAccount(dict): + @staticmethod + def __key_warning(key: str): + suggest = None + if key == "credentialId": + suggest = "credential_id" + + if suggest: + pulumi.log.warn(f"Key '{key}' not found in MetastoreDataAccessDatabricksGcpServiceAccount. Access the value via the '{suggest}' property getter instead.") + + def __getitem__(self, key: str) -> Any: + MetastoreDataAccessDatabricksGcpServiceAccount.__key_warning(key) + return super().__getitem__(key) + + def get(self, key: str, default = None) -> Any: + MetastoreDataAccessDatabricksGcpServiceAccount.__key_warning(key) + return super().get(key, default) + def __init__(__self__, *, + credential_id: Optional[str] = None, email: Optional[str] = None): """ :param str email: The email of the GCP service account created, to be granted access to relevant buckets. `azure_service_principal` optional configuration block for credential details for Azure (Legacy): """ + if credential_id is not None: + pulumi.set(__self__, "credential_id", credential_id) if email is not None: pulumi.set(__self__, "email", email) + @property + @pulumi.getter(name="credentialId") + def credential_id(self) -> Optional[str]: + return pulumi.get(self, "credential_id") + @property @pulumi.getter def email(self) -> Optional[str]: @@ -9487,6 +9513,26 @@ def traffic_percentage(self) -> int: return pulumi.get(self, "traffic_percentage") +@pulumi.output_type +class ModelServingTag(dict): + def __init__(__self__, *, + key: str, + value: Optional[str] = None): + pulumi.set(__self__, "key", key) + if value is not None: + pulumi.set(__self__, "value", value) + + @property + @pulumi.getter + def key(self) -> str: + return pulumi.get(self, "key") + + @property + @pulumi.getter + def value(self) -> Optional[str]: + return pulumi.get(self, "value") + + @pulumi.output_type class MountAbfs(dict): @staticmethod @@ -13048,7 +13094,7 @@ def __init__(__self__, *, :param str access_connector_id: The Resource ID of the Azure Databricks Access Connector resource, of the form `/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg-name/providers/Microsoft.Databricks/accessConnectors/connector-name`. :param str managed_identity_id: The Resource ID of the Azure User Assigned Managed Identity associated with Azure Databricks Access Connector, of the form `/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg-name/providers/Microsoft.ManagedIdentity/userAssignedIdentities/user-managed-identity-name`. 
- `azure_service_principal` optional configuration block to use service principal as credential details for Azure: + `databricks_gcp_service_account` optional configuration block for creating a Databricks-managed GCP Service Account: """ pulumi.set(__self__, "access_connector_id", access_connector_id) if credential_id is not None: @@ -13075,7 +13121,7 @@ def managed_identity_id(self) -> Optional[str]: """ The Resource ID of the Azure User Assigned Managed Identity associated with Azure Databricks Access Connector, of the form `/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg-name/providers/Microsoft.ManagedIdentity/userAssignedIdentities/user-managed-identity-name`. - `azure_service_principal` optional configuration block to use service principal as credential details for Azure: + `databricks_gcp_service_account` optional configuration block for creating a Databricks-managed GCP Service Account: """ return pulumi.get(self, "managed_identity_id") @@ -13110,8 +13156,6 @@ def __init__(__self__, *, """ :param str application_id: The application ID of the application registration within the referenced AAD tenant :param str client_secret: The client secret generated for the above app ID in AAD. **This field is redacted on output** - - `databricks_gcp_service_account` optional configuration block for creating a Databricks-managed GCP Service Account: :param str directory_id: The directory ID corresponding to the Azure Active Directory (AAD) tenant of the application """ pulumi.set(__self__, "application_id", application_id) @@ -13131,8 +13175,6 @@ def application_id(self) -> str: def client_secret(self) -> str: """ The client secret generated for the above app ID in AAD. **This field is redacted on output** - - `databricks_gcp_service_account` optional configuration block for creating a Databricks-managed GCP Service Account: """ return pulumi.get(self, "client_secret") @@ -13147,14 +13189,39 @@ def directory_id(self) -> str: @pulumi.output_type class StorageCredentialDatabricksGcpServiceAccount(dict): + @staticmethod + def __key_warning(key: str): + suggest = None + if key == "credentialId": + suggest = "credential_id" + + if suggest: + pulumi.log.warn(f"Key '{key}' not found in StorageCredentialDatabricksGcpServiceAccount. Access the value via the '{suggest}' property getter instead.") + + def __getitem__(self, key: str) -> Any: + StorageCredentialDatabricksGcpServiceAccount.__key_warning(key) + return super().__getitem__(key) + + def get(self, key: str, default = None) -> Any: + StorageCredentialDatabricksGcpServiceAccount.__key_warning(key) + return super().get(key, default) + def __init__(__self__, *, + credential_id: Optional[str] = None, email: Optional[str] = None): """ :param str email: The email of the GCP service account created, to be granted access to relevant buckets. 
""" + if credential_id is not None: + pulumi.set(__self__, "credential_id", credential_id) if email is not None: pulumi.set(__self__, "email", email) + @property + @pulumi.getter(name="credentialId") + def credential_id(self) -> Optional[str]: + return pulumi.get(self, "credential_id") + @property @pulumi.getter def email(self) -> Optional[str]: diff --git a/sdk/python/pulumi_databricks/storage_credential.py b/sdk/python/pulumi_databricks/storage_credential.py index fdbdd837..5224bc6c 100644 --- a/sdk/python/pulumi_databricks/storage_credential.py +++ b/sdk/python/pulumi_databricks/storage_credential.py @@ -29,12 +29,15 @@ def __init__(__self__, *, read_only: Optional[pulumi.Input[bool]] = None): """ The set of arguments for constructing a StorageCredential resource. + :param pulumi.Input[bool] force_destroy: Delete storage credential regardless of its dependencies. + + `aws_iam_role` optional configuration block for credential details for AWS: :param pulumi.Input[str] metastore_id: Unique identifier of the parent Metastore :param pulumi.Input[str] name: Name of Storage Credentials, which must be unique within the databricks_metastore. Change forces creation of a new resource. :param pulumi.Input[str] owner: Username/groupname/sp application_id of the storage credential owner. - - `aws_iam_role` optional configuration block for credential details for AWS: :param pulumi.Input[bool] read_only: Indicates whether the storage credential is only usable for read operations. + + `azure_service_principal` optional configuration block to use service principal as credential details for Azure (Legacy): """ if aws_iam_role is not None: pulumi.set(__self__, "aws_iam_role", aws_iam_role) @@ -107,6 +110,11 @@ def databricks_gcp_service_account(self, value: Optional[pulumi.Input['StorageCr @property @pulumi.getter(name="forceDestroy") def force_destroy(self) -> Optional[pulumi.Input[bool]]: + """ + Delete storage credential regardless of its dependencies. + + `aws_iam_role` optional configuration block for credential details for AWS: + """ return pulumi.get(self, "force_destroy") @force_destroy.setter @@ -151,8 +159,6 @@ def name(self, value: Optional[pulumi.Input[str]]): def owner(self) -> Optional[pulumi.Input[str]]: """ Username/groupname/sp application_id of the storage credential owner. - - `aws_iam_role` optional configuration block for credential details for AWS: """ return pulumi.get(self, "owner") @@ -165,6 +171,8 @@ def owner(self, value: Optional[pulumi.Input[str]]): def read_only(self) -> Optional[pulumi.Input[bool]]: """ Indicates whether the storage credential is only usable for read operations. + + `azure_service_principal` optional configuration block to use service principal as credential details for Azure (Legacy): """ return pulumi.get(self, "read_only") @@ -189,12 +197,15 @@ def __init__(__self__, *, read_only: Optional[pulumi.Input[bool]] = None): """ Input properties used for looking up and filtering StorageCredential resources. + :param pulumi.Input[bool] force_destroy: Delete storage credential regardless of its dependencies. + + `aws_iam_role` optional configuration block for credential details for AWS: :param pulumi.Input[str] metastore_id: Unique identifier of the parent Metastore :param pulumi.Input[str] name: Name of Storage Credentials, which must be unique within the databricks_metastore. Change forces creation of a new resource. :param pulumi.Input[str] owner: Username/groupname/sp application_id of the storage credential owner. 
- - `aws_iam_role` optional configuration block for credential details for AWS: :param pulumi.Input[bool] read_only: Indicates whether the storage credential is only usable for read operations. + + `azure_service_principal` optional configuration block to use service principal as credential details for Azure (Legacy): """ if aws_iam_role is not None: pulumi.set(__self__, "aws_iam_role", aws_iam_role) @@ -267,6 +278,11 @@ def databricks_gcp_service_account(self, value: Optional[pulumi.Input['StorageCr @property @pulumi.getter(name="forceDestroy") def force_destroy(self) -> Optional[pulumi.Input[bool]]: + """ + Delete storage credential regardless of its dependencies. + + `aws_iam_role` optional configuration block for credential details for AWS: + """ return pulumi.get(self, "force_destroy") @force_destroy.setter @@ -311,8 +327,6 @@ def name(self, value: Optional[pulumi.Input[str]]): def owner(self) -> Optional[pulumi.Input[str]]: """ Username/groupname/sp application_id of the storage credential owner. - - `aws_iam_role` optional configuration block for credential details for AWS: """ return pulumi.get(self, "owner") @@ -325,6 +339,8 @@ def owner(self, value: Optional[pulumi.Input[str]]): def read_only(self) -> Optional[pulumi.Input[bool]]: """ Indicates whether the storage credential is only usable for read operations. + + `azure_service_principal` optional configuration block to use service principal as credential details for Azure (Legacy): """ return pulumi.get(self, "read_only") @@ -421,12 +437,15 @@ def __init__(__self__, :param str resource_name: The name of the resource. :param pulumi.ResourceOptions opts: Options for the resource. + :param pulumi.Input[bool] force_destroy: Delete storage credential regardless of its dependencies. + + `aws_iam_role` optional configuration block for credential details for AWS: :param pulumi.Input[str] metastore_id: Unique identifier of the parent Metastore :param pulumi.Input[str] name: Name of Storage Credentials, which must be unique within the databricks_metastore. Change forces creation of a new resource. :param pulumi.Input[str] owner: Username/groupname/sp application_id of the storage credential owner. - - `aws_iam_role` optional configuration block for credential details for AWS: :param pulumi.Input[bool] read_only: Indicates whether the storage credential is only usable for read operations. + + `azure_service_principal` optional configuration block to use service principal as credential details for Azure (Legacy): """ ... @overload @@ -577,12 +596,15 @@ def get(resource_name: str, :param str resource_name: The unique name of the resulting resource. :param pulumi.Input[str] id: The unique provider ID of the resource to lookup. :param pulumi.ResourceOptions opts: Options for the resource. + :param pulumi.Input[bool] force_destroy: Delete storage credential regardless of its dependencies. + + `aws_iam_role` optional configuration block for credential details for AWS: :param pulumi.Input[str] metastore_id: Unique identifier of the parent Metastore :param pulumi.Input[str] name: Name of Storage Credentials, which must be unique within the databricks_metastore. Change forces creation of a new resource. :param pulumi.Input[str] owner: Username/groupname/sp application_id of the storage credential owner. - - `aws_iam_role` optional configuration block for credential details for AWS: :param pulumi.Input[bool] read_only: Indicates whether the storage credential is only usable for read operations. 
+ + `azure_service_principal` optional configuration block to use service principal as credential details for Azure (Legacy): """ opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id)) @@ -629,6 +651,11 @@ def databricks_gcp_service_account(self) -> pulumi.Output['outputs.StorageCreden @property @pulumi.getter(name="forceDestroy") def force_destroy(self) -> pulumi.Output[Optional[bool]]: + """ + Delete storage credential regardless of its dependencies. + + `aws_iam_role` optional configuration block for credential details for AWS: + """ return pulumi.get(self, "force_destroy") @property @@ -657,8 +684,6 @@ def name(self) -> pulumi.Output[str]: def owner(self) -> pulumi.Output[str]: """ Username/groupname/sp application_id of the storage credential owner. - - `aws_iam_role` optional configuration block for credential details for AWS: """ return pulumi.get(self, "owner") @@ -667,6 +692,8 @@ def owner(self) -> pulumi.Output[str]: def read_only(self) -> pulumi.Output[Optional[bool]]: """ Indicates whether the storage credential is only usable for read operations. + + `azure_service_principal` optional configuration block to use service principal as credential details for Azure (Legacy): """ return pulumi.get(self, "read_only")
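Taken together with the new `credential_id` attribute on the Databricks-managed GCP service account block, a sketch of how a caller might use the updated StorageCredential surface; the resource and credential names are placeholders, and the empty `databricks_gcp_service_account` block assumes a GCP workspace where Databricks provisions the service account.

import pulumi
import pulumi_databricks as databricks

gcs_credential = databricks.StorageCredential(
    "external-gcs",
    name="gcs-credential",              # placeholder credential name
    databricks_gcp_service_account=databricks.StorageCredentialDatabricksGcpServiceAccountArgs(),
    read_only=True,                     # credential usable for reads only
    force_destroy=True,                 # delete despite dependencies
)

# Both fields of the Databricks-managed service account are now surfaced as outputs.
pulumi.export("sa_email", gcs_credential.databricks_gcp_service_account.apply(lambda sa: sa.email))
pulumi.export("sa_credential_id", gcs_credential.databricks_gcp_service_account.apply(lambda sa: sa.credential_id))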