diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha index a2ba58aa..68cd2f4b 100644 --- a/.codegen/_openapi_sha +++ b/.codegen/_openapi_sha @@ -1 +1 @@ -f2385add116e3716c8a90a0b68e204deb40f996c \ No newline at end of file +7016dcbf2e011459416cf408ce21143bcc4b3a25 \ No newline at end of file diff --git a/.gitattributes b/.gitattributes index bee3c7e6..7b5558d0 100755 --- a/.gitattributes +++ b/.gitattributes @@ -210,6 +210,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/Credentials databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialsService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CurrentWorkspaceBindings.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DataSourceFormat.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabricksGcpServiceAccount.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabricksGcpServiceAccountRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabricksGcpServiceAccountResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteAccountMetastoreAssignmentRequest.java linguist-generated=true @@ -269,8 +270,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionsAP databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionsImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionsService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GcpOauthToken.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GcpServiceAccountKey.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryServiceCredentialAzureOptions.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryServiceCredentialGcpOptions.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryServiceCredentialRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryTableCredentialRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryTableCredentialResponse.java linguist-generated=true @@ -484,6 +485,54 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBi databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBindingsImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBindingsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBindingsService.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoom.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAccessRestricted.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAsset.java linguist-generated=true 
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetAssetType.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetForeignTable.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetForeignTableLocalDetails.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetNotebook.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetStatusEnum.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetTable.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetTableLocalDetails.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetView.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetViewLocalDetails.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetVolumeLocalDetails.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetsAPI.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetsImpl.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetsService.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomCollaborator.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomNotebookTaskRun.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomOutputCatalog.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomOutputCatalogOutputCatalogStatus.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomRemoteDetail.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomStatusEnum.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomTaskRunsAPI.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomTaskRunsImpl.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomTaskRunsService.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomsAPI.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomsImpl.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomsService.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CollaboratorJobRunInfo.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/ComplianceSecurityProfile.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CreateCleanRoomAssetRequest.java linguist-generated=true 
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CreateCleanRoomOutputCatalogRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CreateCleanRoomOutputCatalogResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CreateCleanRoomRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/DeleteCleanRoomAssetRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/DeleteCleanRoomAssetResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/DeleteCleanRoomRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/DeleteResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/GetCleanRoomAssetRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/GetCleanRoomRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/ListCleanRoomAssetsRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/ListCleanRoomAssetsResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/ListCleanRoomNotebookTaskRunsRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/ListCleanRoomNotebookTaskRunsResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/ListCleanRoomsRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/ListCleanRoomsResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/UpdateCleanRoomAssetRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/UpdateCleanRoomRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/AddInstanceProfile.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/AddResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Adlsgen2Info.java linguist-generated=true @@ -729,6 +778,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CreateSu databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CronSchedule.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Dashboard.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/DashboardView.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/DataType.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/DeleteScheduleRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/DeleteScheduleResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/DeleteSubscriptionRequest.java linguist-generated=true @@ -767,6 +817,8 @@ 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/MigrateD databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PublishRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PublishedDashboard.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryAttachment.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QuerySchema.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QuerySchemaColumn.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Result.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Schedule.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/SchedulePauseStatus.java linguist-generated=true @@ -944,6 +996,9 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CancelAllRuns. databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CancelAllRunsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CancelRun.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CancelRunResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CleanRoomTaskRunLifeCycleState.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CleanRoomTaskRunResultState.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CleanRoomTaskRunState.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ClusterInstance.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ClusterSpec.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Condition.java linguist-generated=true @@ -1808,6 +1863,10 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DefaultNam databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DefaultNamespaceService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DefaultNamespaceSetting.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteAccountIpAccessListRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteAibiDashboardEmbeddingAccessPolicySettingRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteAibiDashboardEmbeddingAccessPolicySettingResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteAibiDashboardEmbeddingApprovedDomainsSettingRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDefaultNamespaceSettingRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDefaultNamespaceSettingResponse.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDisableLegacyAccessRequest.java linguist-generated=true @@ -1840,6 +1899,17 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLeg databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyFeaturesAPI.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyFeaturesImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyFeaturesService.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicy.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyInternetAccessPolicy.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyInternetAccessPolicyInternetDestination.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyInternetAccessPolicyInternetDestinationInternetDestinationFilteringProtocol.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyInternetAccessPolicyInternetDestinationInternetDestinationType.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyInternetAccessPolicyLogOnlyMode.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyInternetAccessPolicyLogOnlyModeLogOnlyModeType.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyInternetAccessPolicyLogOnlyModeWorkloadType.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyInternetAccessPolicyRestrictionMode.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyInternetAccessPolicyStorageDestination.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyInternetAccessPolicyStorageDestinationStorageDestinationType.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EmailConfig.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/Empty.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnhancedSecurityMonitoring.java linguist-generated=true @@ -1996,6 +2066,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListRecipie databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListSharesRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListSharesResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/Partition.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/PartitionSpecificationPartition.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/PartitionValue.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/PartitionValueOp.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/Privilege.java linguist-generated=true diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java index 655080d9..ab237555 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java @@ -52,6 +52,12 @@ import com.databricks.sdk.service.catalog.VolumesService; import com.databricks.sdk.service.catalog.WorkspaceBindingsAPI; import com.databricks.sdk.service.catalog.WorkspaceBindingsService; +import com.databricks.sdk.service.cleanrooms.CleanRoomAssetsAPI; +import com.databricks.sdk.service.cleanrooms.CleanRoomAssetsService; +import com.databricks.sdk.service.cleanrooms.CleanRoomTaskRunsAPI; +import com.databricks.sdk.service.cleanrooms.CleanRoomTaskRunsService; +import com.databricks.sdk.service.cleanrooms.CleanRoomsAPI; +import com.databricks.sdk.service.cleanrooms.CleanRoomsService; import com.databricks.sdk.service.compute.ClusterPoliciesAPI; import com.databricks.sdk.service.compute.ClusterPoliciesService; import com.databricks.sdk.service.compute.ClustersService; @@ -199,6 +205,9 @@ public class WorkspaceClient { private AppsAPI appsAPI; private ArtifactAllowlistsAPI artifactAllowlistsAPI; private CatalogsAPI catalogsAPI; + private CleanRoomAssetsAPI cleanRoomAssetsAPI; + private CleanRoomTaskRunsAPI cleanRoomTaskRunsAPI; + private CleanRoomsAPI cleanRoomsAPI; private ClusterPoliciesAPI clusterPoliciesAPI; private ClustersExt clustersAPI; private CommandExecutionAPI commandExecutionAPI; @@ -298,6 +307,9 @@ public WorkspaceClient(DatabricksConfig config) { appsAPI = new AppsAPI(apiClient); artifactAllowlistsAPI = new ArtifactAllowlistsAPI(apiClient); catalogsAPI = new CatalogsAPI(apiClient); + cleanRoomAssetsAPI = new CleanRoomAssetsAPI(apiClient); + cleanRoomTaskRunsAPI = new CleanRoomTaskRunsAPI(apiClient); + cleanRoomsAPI = new CleanRoomsAPI(apiClient); clusterPoliciesAPI = new ClusterPoliciesAPI(apiClient); clustersAPI = new ClustersExt(apiClient); commandExecutionAPI = new CommandExecutionAPI(apiClient); @@ -459,6 +471,28 @@ public CatalogsAPI catalogs() { return catalogsAPI; } + /** + * Clean room assets are data and code objects — Tables, volumes, and notebooks that are shared + * with the clean room. + */ + public CleanRoomAssetsAPI cleanRoomAssets() { + return cleanRoomAssetsAPI; + } + + /** Clean room task runs are the executions of notebooks in a clean room. */ + public CleanRoomTaskRunsAPI cleanRoomTaskRuns() { + return cleanRoomTaskRunsAPI; + } + + /** + * A clean room uses Delta Sharing and serverless compute to provide a secure and + * privacy-protecting environment where multiple parties can work together on sensitive enterprise + * data without direct access to each other’s data. + */ + public CleanRoomsAPI cleanRooms() { + return cleanRoomsAPI; + } + /** * You can use cluster policies to control users' ability to configure clusters based on a set of * rules. These rules specify which attributes or attribute values can be used during cluster @@ -574,8 +608,8 @@ public ConsumerProvidersAPI consumerProviders() { * control which users and groups can access the credential. * *
To create credentials, you must be a Databricks account admin or have the `CREATE SERVICE - * CREDENTIAL privilege. The user who creates the credential can delegate ownership to another - * user or group to manage permissions on it + * CREDENTIAL` privilege. The user who creates the credential can delegate ownership to another + * user or group to manage permissions on it. */ public CredentialsAPI credentials() { return credentialsAPI; @@ -1722,6 +1756,39 @@ public WorkspaceClient withCatalogsAPI(CatalogsAPI catalogs) { return this; } + /** Replace the default CleanRoomAssetsService with a custom implementation. */ + public WorkspaceClient withCleanRoomAssetsImpl(CleanRoomAssetsService cleanRoomAssets) { + return this.withCleanRoomAssetsAPI(new CleanRoomAssetsAPI(cleanRoomAssets)); + } + + /** Replace the default CleanRoomAssetsAPI with a custom implementation. */ + public WorkspaceClient withCleanRoomAssetsAPI(CleanRoomAssetsAPI cleanRoomAssets) { + this.cleanRoomAssetsAPI = cleanRoomAssets; + return this; + } + + /** Replace the default CleanRoomTaskRunsService with a custom implementation. */ + public WorkspaceClient withCleanRoomTaskRunsImpl(CleanRoomTaskRunsService cleanRoomTaskRuns) { + return this.withCleanRoomTaskRunsAPI(new CleanRoomTaskRunsAPI(cleanRoomTaskRuns)); + } + + /** Replace the default CleanRoomTaskRunsAPI with a custom implementation. */ + public WorkspaceClient withCleanRoomTaskRunsAPI(CleanRoomTaskRunsAPI cleanRoomTaskRuns) { + this.cleanRoomTaskRunsAPI = cleanRoomTaskRuns; + return this; + } + + /** Replace the default CleanRoomsService with a custom implementation. */ + public WorkspaceClient withCleanRoomsImpl(CleanRoomsService cleanRooms) { + return this.withCleanRoomsAPI(new CleanRoomsAPI(cleanRooms)); + } + + /** Replace the default CleanRoomsAPI with a custom implementation. */ + public WorkspaceClient withCleanRoomsAPI(CleanRoomsAPI cleanRooms) { + this.cleanRoomsAPI = cleanRooms; + return this; + } + /** Replace the default ClusterPoliciesService with a custom implementation. */ public WorkspaceClient withClusterPoliciesImpl(ClusterPoliciesService clusterPolicies) { return this.withClusterPoliciesAPI(new ClusterPoliciesAPI(clusterPolicies)); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureServicePrincipal.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureServicePrincipal.java index e7f1d07e..e9d410aa 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureServicePrincipal.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureServicePrincipal.java @@ -7,7 +7,7 @@ import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Objects; -/** The Azure service principal configuration. */ +/** The Azure service principal configuration. Only applicable when purpose is **STORAGE**. */ @Generated public class AzureServicePrincipal { /** The application ID of the application registration within the referenced AAD tenant. 
*/ diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ColumnInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ColumnInfo.java index 8d1052e3..bb8ced06 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ColumnInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ColumnInfo.java @@ -41,7 +41,7 @@ public class ColumnInfo { @JsonProperty("type_json") private String typeJson; - /** Name of type (INT, STRUCT, MAP, etc.). */ + /** */ @JsonProperty("type_name") private ColumnTypeName typeName; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ColumnTypeName.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ColumnTypeName.java index ff2e8f11..54d72d2f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ColumnTypeName.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ColumnTypeName.java @@ -4,7 +4,6 @@ import com.databricks.sdk.support.Generated; -/** Name of type (INT, STRUCT, MAP, etc.). */ @Generated public enum ColumnTypeName { ARRAY, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateCredentialRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateCredentialRequest.java index 0fe91d00..023a3e68 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateCredentialRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateCredentialRequest.java @@ -17,7 +17,7 @@ public class CreateCredentialRequest { @JsonProperty("azure_managed_identity") private AzureManagedIdentity azureManagedIdentity; - /** The Azure service principal configuration. */ + /** The Azure service principal configuration. Only applicable when purpose is **STORAGE**. */ @JsonProperty("azure_service_principal") private AzureServicePrincipal azureServicePrincipal; @@ -25,8 +25,9 @@ public class CreateCredentialRequest { @JsonProperty("comment") private String comment; - @JsonProperty("gcp_service_account_key") - private GcpServiceAccountKey gcpServiceAccountKey; + /** GCP long-lived credential. Databricks-created Google Cloud Storage service account. */ + @JsonProperty("databricks_gcp_service_account") + private DatabricksGcpServiceAccount databricksGcpServiceAccount; /** * The credential name. 
The name must be unique among storage and service credentials within the @@ -90,14 +91,14 @@ public String getComment() { return comment; } - public CreateCredentialRequest setGcpServiceAccountKey( - GcpServiceAccountKey gcpServiceAccountKey) { - this.gcpServiceAccountKey = gcpServiceAccountKey; + public CreateCredentialRequest setDatabricksGcpServiceAccount( + DatabricksGcpServiceAccount databricksGcpServiceAccount) { + this.databricksGcpServiceAccount = databricksGcpServiceAccount; return this; } - public GcpServiceAccountKey getGcpServiceAccountKey() { - return gcpServiceAccountKey; + public DatabricksGcpServiceAccount getDatabricksGcpServiceAccount() { + return databricksGcpServiceAccount; } public CreateCredentialRequest setName(String name) { @@ -145,7 +146,7 @@ public boolean equals(Object o) { && Objects.equals(azureManagedIdentity, that.azureManagedIdentity) && Objects.equals(azureServicePrincipal, that.azureServicePrincipal) && Objects.equals(comment, that.comment) - && Objects.equals(gcpServiceAccountKey, that.gcpServiceAccountKey) + && Objects.equals(databricksGcpServiceAccount, that.databricksGcpServiceAccount) && Objects.equals(name, that.name) && Objects.equals(purpose, that.purpose) && Objects.equals(readOnly, that.readOnly) @@ -159,7 +160,7 @@ public int hashCode() { azureManagedIdentity, azureServicePrincipal, comment, - gcpServiceAccountKey, + databricksGcpServiceAccount, name, purpose, readOnly, @@ -173,7 +174,7 @@ public String toString() { .add("azureManagedIdentity", azureManagedIdentity) .add("azureServicePrincipal", azureServicePrincipal) .add("comment", comment) - .add("gcpServiceAccountKey", gcpServiceAccountKey) + .add("databricksGcpServiceAccount", databricksGcpServiceAccount) .add("name", name) .add("purpose", purpose) .add("readOnly", readOnly) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialInfo.java index 1e737acd..629f271e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialInfo.java @@ -17,7 +17,7 @@ public class CredentialInfo { @JsonProperty("azure_managed_identity") private AzureManagedIdentity azureManagedIdentity; - /** The Azure service principal configuration. */ + /** The Azure service principal configuration. Only applicable when purpose is **STORAGE**. */ @JsonProperty("azure_service_principal") private AzureServicePrincipal azureServicePrincipal; @@ -33,6 +33,10 @@ public class CredentialInfo { @JsonProperty("created_by") private String createdBy; + /** GCP long-lived credential. Databricks-created Google Cloud Storage service account. */ + @JsonProperty("databricks_gcp_service_account") + private DatabricksGcpServiceAccount databricksGcpServiceAccount; + /** The full name of the credential. 
*/ @JsonProperty("full_name") private String fullName; @@ -143,6 +147,16 @@ public String getCreatedBy() { return createdBy; } + public CredentialInfo setDatabricksGcpServiceAccount( + DatabricksGcpServiceAccount databricksGcpServiceAccount) { + this.databricksGcpServiceAccount = databricksGcpServiceAccount; + return this; + } + + public DatabricksGcpServiceAccount getDatabricksGcpServiceAccount() { + return databricksGcpServiceAccount; + } + public CredentialInfo setFullName(String fullName) { this.fullName = fullName; return this; @@ -253,6 +267,7 @@ public boolean equals(Object o) { && Objects.equals(comment, that.comment) && Objects.equals(createdAt, that.createdAt) && Objects.equals(createdBy, that.createdBy) + && Objects.equals(databricksGcpServiceAccount, that.databricksGcpServiceAccount) && Objects.equals(fullName, that.fullName) && Objects.equals(id, that.id) && Objects.equals(isolationMode, that.isolationMode) @@ -275,6 +290,7 @@ public int hashCode() { comment, createdAt, createdBy, + databricksGcpServiceAccount, fullName, id, isolationMode, @@ -297,6 +313,7 @@ public String toString() { .add("comment", comment) .add("createdAt", createdAt) .add("createdBy", createdBy) + .add("databricksGcpServiceAccount", databricksGcpServiceAccount) .add("fullName", fullName) .add("id", id) .add("isolationMode", isolationMode) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialsAPI.java index 132fd9c8..0cfc214b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialsAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialsAPI.java @@ -13,8 +13,8 @@ * control which users and groups can access the credential. * *
To create credentials, you must be a Databricks account admin or have the `CREATE SERVICE - * CREDENTIAL privilege. The user who creates the credential can delegate ownership to another user - * or group to manage permissions on it + * CREDENTIAL` privilege. The user who creates the credential can delegate ownership to another user + * or group to manage permissions on it. */ @Generated public class CredentialsAPI { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialsService.java index 40c72204..906445cf 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialsService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialsService.java @@ -9,8 +9,8 @@ * control which users and groups can access the credential. * *
To create credentials, you must be a Databricks account admin or have the `CREATE SERVICE - * CREDENTIAL privilege. The user who creates the credential can delegate ownership to another user - * or group to manage permissions on it + * CREDENTIAL` privilege. The user who creates the credential can delegate ownership to another user + * or group to manage permissions on it. * *
This is the high-level interface, that contains generated methods.
*
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabricksGcpServiceAccount.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabricksGcpServiceAccount.java
new file mode 100755
index 00000000..39083337
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabricksGcpServiceAccount.java
@@ -0,0 +1,79 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+/** GCP long-lived credential. Databricks-created Google Cloud Storage service account. */
+@Generated
+public class DatabricksGcpServiceAccount {
+ /**
+ * The Databricks internal ID that represents this managed identity. This field is only used to
+ * persist the credential_id once it is fetched from the credentials manager - as we only use the
+ * protobuf serializer to store credentials, this ID gets persisted to the database
+ */
+ @JsonProperty("credential_id")
+ private String credentialId;
+
+ /** The email of the service account. */
+ @JsonProperty("email")
+ private String email;
+
+ /** The ID that represents the private key for this Service Account */
+ @JsonProperty("private_key_id")
+ private String privateKeyId;
+
+ public DatabricksGcpServiceAccount setCredentialId(String credentialId) {
+ this.credentialId = credentialId;
+ return this;
+ }
+
+ public String getCredentialId() {
+ return credentialId;
+ }
+
+ public DatabricksGcpServiceAccount setEmail(String email) {
+ this.email = email;
+ return this;
+ }
+
+ public String getEmail() {
+ return email;
+ }
+
+ public DatabricksGcpServiceAccount setPrivateKeyId(String privateKeyId) {
+ this.privateKeyId = privateKeyId;
+ return this;
+ }
+
+ public String getPrivateKeyId() {
+ return privateKeyId;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ DatabricksGcpServiceAccount that = (DatabricksGcpServiceAccount) o;
+ return Objects.equals(credentialId, that.credentialId)
+ && Objects.equals(email, that.email)
+ && Objects.equals(privateKeyId, that.privateKeyId);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(credentialId, email, privateKeyId);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(DatabricksGcpServiceAccount.class)
+ .add("credentialId", credentialId)
+ .add("email", email)
+ .add("privateKeyId", privateKeyId)
+ .toString();
+ }
+}
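The new model is a plain bean, so wiring it into a credential request only touches the setters added in this diff. A minimal sketch, assuming the usual generated `createCredential` method on `CredentialsAPI` and ambient workspace authentication (neither is shown in this hunk):

```java
import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.catalog.CreateCredentialRequest;
import com.databricks.sdk.service.catalog.CredentialInfo;
import com.databricks.sdk.service.catalog.DatabricksGcpServiceAccount;

public class CreateGcpServiceCredential {
  public static void main(String[] args) {
    // Assumes auth is configured via environment variables or a config profile.
    WorkspaceClient w = new WorkspaceClient();

    // databricks_gcp_service_account replaces the removed gcp_service_account_key:
    // the service account is Databricks-created, so the object is sent empty here.
    CreateCredentialRequest request =
        new CreateCredentialRequest()
            .setName("my_gcp_service_credential")
            .setComment("Databricks-managed GCP service account")
            .setDatabricksGcpServiceAccount(new DatabricksGcpServiceAccount());

    // createCredential is assumed from the generator's naming conventions.
    CredentialInfo info = w.credentials().createCredential(request);
    System.out.println(info.getDatabricksGcpServiceAccount().getEmail());
  }
}
```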
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionParameterInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionParameterInfo.java
index d0f8f552..794ac824 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionParameterInfo.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionParameterInfo.java
@@ -41,7 +41,7 @@ public class FunctionParameterInfo {
@JsonProperty("type_json")
private String typeJson;
- /** Name of type (INT, STRUCT, MAP, etc.). */
+ /** */
@JsonProperty("type_name")
private ColumnTypeName typeName;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GcpServiceAccountKey.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GcpServiceAccountKey.java
deleted file mode 100755
index 85ebb49b..00000000
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GcpServiceAccountKey.java
+++ /dev/null
@@ -1,75 +0,0 @@
-// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-package com.databricks.sdk.service.catalog;
-
-import com.databricks.sdk.support.Generated;
-import com.databricks.sdk.support.ToStringer;
-import com.fasterxml.jackson.annotation.JsonProperty;
-import java.util.Objects;
-
-/** GCP long-lived credential. GCP Service Account. */
-@Generated
-public class GcpServiceAccountKey {
- /** The email of the service account. */
- @JsonProperty("email")
- private String email;
-
- /** The service account's RSA private key. */
- @JsonProperty("private_key")
- private String privateKey;
-
- /** The ID of the service account's private key. */
- @JsonProperty("private_key_id")
- private String privateKeyId;
-
- public GcpServiceAccountKey setEmail(String email) {
- this.email = email;
- return this;
- }
-
- public String getEmail() {
- return email;
- }
-
- public GcpServiceAccountKey setPrivateKey(String privateKey) {
- this.privateKey = privateKey;
- return this;
- }
-
- public String getPrivateKey() {
- return privateKey;
- }
-
- public GcpServiceAccountKey setPrivateKeyId(String privateKeyId) {
- this.privateKeyId = privateKeyId;
- return this;
- }
-
- public String getPrivateKeyId() {
- return privateKeyId;
- }
-
- @Override
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
- GcpServiceAccountKey that = (GcpServiceAccountKey) o;
- return Objects.equals(email, that.email)
- && Objects.equals(privateKey, that.privateKey)
- && Objects.equals(privateKeyId, that.privateKeyId);
- }
-
- @Override
- public int hashCode() {
- return Objects.hash(email, privateKey, privateKeyId);
- }
-
- @Override
- public String toString() {
- return new ToStringer(GcpServiceAccountKey.class)
- .add("email", email)
- .add("privateKey", privateKey)
- .add("privateKeyId", privateKeyId)
- .toString();
- }
-}
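For existing callers, the removal above amounts to a field swap on `CreateCredentialRequest`; a hedged migration sketch (the old `setGcpServiceAccountKey` call no longer compiles after this change, and the helper name below is illustrative):

```java
import com.databricks.sdk.service.catalog.CreateCredentialRequest;
import com.databricks.sdk.service.catalog.DatabricksGcpServiceAccount;

public class GcpCredentialMigration {
  /**
   * Before this release the request carried a user-supplied key via
   * setGcpServiceAccountKey(new GcpServiceAccountKey().setEmail(...).setPrivateKeyId(...)
   * .setPrivateKey(...)). With the key type removed, only the Databricks-created service
   * account wrapper is attached; its fields are filled in by the platform.
   */
  public static CreateCredentialRequest toDatabricksManaged(CreateCredentialRequest request) {
    return request.setDatabricksGcpServiceAccount(new DatabricksGcpServiceAccount());
  }
}
```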
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryServiceCredentialAzureOptions.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryServiceCredentialAzureOptions.java
index 31dca4b9..7ca99910 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryServiceCredentialAzureOptions.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryServiceCredentialAzureOptions.java
@@ -8,7 +8,7 @@
import java.util.Collection;
import java.util.Objects;
-/** Options to customize the requested temporary credential */
+/** The Azure cloud options to customize the requested temporary credential */
@Generated
public class GenerateTemporaryServiceCredentialAzureOptions {
/**
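The GCP options type added in the next file plugs into temporary service-credential generation. A sketch under stated assumptions: the `generateTemporaryServiceCredential` method on `CredentialsAPI` and the `setCredentialName`/`setGcpOptions` setters on the request follow the generator's usual naming and are not shown in this section.

```java
import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.catalog.GenerateTemporaryServiceCredentialGcpOptions;
import com.databricks.sdk.service.catalog.GenerateTemporaryServiceCredentialRequest;
import java.util.Arrays;

public class TemporaryGcpServiceCredential {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();

    // Scopes are forwarded to the GCP token provider, per the field's javadoc.
    GenerateTemporaryServiceCredentialGcpOptions gcpOptions =
        new GenerateTemporaryServiceCredentialGcpOptions()
            .setScopes(Arrays.asList("https://www.googleapis.com/auth/cloud-platform"));

    // Assumed request shape and API method name.
    w.credentials()
        .generateTemporaryServiceCredential(
            new GenerateTemporaryServiceCredentialRequest()
                .setCredentialName("my_gcp_service_credential")
                .setGcpOptions(gcpOptions));
  }
}
```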
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryServiceCredentialGcpOptions.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryServiceCredentialGcpOptions.java
new file mode 100755
index 00000000..b1739a0a
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryServiceCredentialGcpOptions.java
@@ -0,0 +1,51 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+/** The GCP cloud options to customize the requested temporary credential */
+@Generated
+public class GenerateTemporaryServiceCredentialGcpOptions {
+ /**
+ * The scopes to which the temporary GCP credential should apply. These resources are the scopes
+ * that are passed to the token provider (see
+ * https://google-auth.readthedocs.io/en/latest/reference/google.auth.html#google.auth.credentials.Credentials)
+ */
+ @JsonProperty("scopes")
+ private Collection<String> scopes;
+ * [CSP]: https://docs.databricks.com/en/security/privacy/security-profile.html
+ */
+ @JsonProperty("access_restricted")
+ private CleanRoomAccessRestricted accessRestricted;
+
+ /** */
+ @JsonProperty("comment")
+ private String comment;
+
+ /** When the clean room was created, in epoch milliseconds. */
+ @JsonProperty("created_at")
+ private Long createdAt;
+
+ /** The alias of the collaborator tied to the local clean room. */
+ @JsonProperty("local_collaborator_alias")
+ private String localCollaboratorAlias;
+
+ /**
+ * The name of the clean room. It should follow [UC securable naming requirements].
+ *
+ * [UC securable naming requirements]:
+ * https://docs.databricks.com/en/data-governance/unity-catalog/index.html#securable-object-naming-requirements
+ */
+ @JsonProperty("name")
+ private String name;
+
+ /**
+ * Output catalog of the clean room. It is an output-only field. Output catalog is manipulated
+ * using the separate CreateCleanRoomOutputCatalog API.
+ */
+ @JsonProperty("output_catalog")
+ private CleanRoomOutputCatalog outputCatalog;
+
+ /**
+ * This is the Databricks username of the owner of the local clean room securable for permission
+ * management.
+ */
+ @JsonProperty("owner")
+ private String owner;
+
+ /**
+ * Central clean room details. During creation, users need to specify cloud_vendor, region, and
+ * collaborators.global_metastore_id. This field will not be filled in the ListCleanRooms call.
+ */
+ @JsonProperty("remote_detailed_info")
+ private CleanRoomRemoteDetail remoteDetailedInfo;
+
+ /** Clean room status. */
+ @JsonProperty("status")
+ private CleanRoomStatusEnum status;
+
+ /** When the clean room was last updated, in epoch milliseconds. */
+ @JsonProperty("updated_at")
+ private Long updatedAt;
+
+ public CleanRoom setAccessRestricted(CleanRoomAccessRestricted accessRestricted) {
+ this.accessRestricted = accessRestricted;
+ return this;
+ }
+
+ public CleanRoomAccessRestricted getAccessRestricted() {
+ return accessRestricted;
+ }
+
+ public CleanRoom setComment(String comment) {
+ this.comment = comment;
+ return this;
+ }
+
+ public String getComment() {
+ return comment;
+ }
+
+ public CleanRoom setCreatedAt(Long createdAt) {
+ this.createdAt = createdAt;
+ return this;
+ }
+
+ public Long getCreatedAt() {
+ return createdAt;
+ }
+
+ public CleanRoom setLocalCollaboratorAlias(String localCollaboratorAlias) {
+ this.localCollaboratorAlias = localCollaboratorAlias;
+ return this;
+ }
+
+ public String getLocalCollaboratorAlias() {
+ return localCollaboratorAlias;
+ }
+
+ public CleanRoom setName(String name) {
+ this.name = name;
+ return this;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public CleanRoom setOutputCatalog(CleanRoomOutputCatalog outputCatalog) {
+ this.outputCatalog = outputCatalog;
+ return this;
+ }
+
+ public CleanRoomOutputCatalog getOutputCatalog() {
+ return outputCatalog;
+ }
+
+ public CleanRoom setOwner(String owner) {
+ this.owner = owner;
+ return this;
+ }
+
+ public String getOwner() {
+ return owner;
+ }
+
+ public CleanRoom setRemoteDetailedInfo(CleanRoomRemoteDetail remoteDetailedInfo) {
+ this.remoteDetailedInfo = remoteDetailedInfo;
+ return this;
+ }
+
+ public CleanRoomRemoteDetail getRemoteDetailedInfo() {
+ return remoteDetailedInfo;
+ }
+
+ public CleanRoom setStatus(CleanRoomStatusEnum status) {
+ this.status = status;
+ return this;
+ }
+
+ public CleanRoomStatusEnum getStatus() {
+ return status;
+ }
+
+ public CleanRoom setUpdatedAt(Long updatedAt) {
+ this.updatedAt = updatedAt;
+ return this;
+ }
+
+ public Long getUpdatedAt() {
+ return updatedAt;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ CleanRoom that = (CleanRoom) o;
+ return Objects.equals(accessRestricted, that.accessRestricted)
+ && Objects.equals(comment, that.comment)
+ && Objects.equals(createdAt, that.createdAt)
+ && Objects.equals(localCollaboratorAlias, that.localCollaboratorAlias)
+ && Objects.equals(name, that.name)
+ && Objects.equals(outputCatalog, that.outputCatalog)
+ && Objects.equals(owner, that.owner)
+ && Objects.equals(remoteDetailedInfo, that.remoteDetailedInfo)
+ && Objects.equals(status, that.status)
+ && Objects.equals(updatedAt, that.updatedAt);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(
+ accessRestricted,
+ comment,
+ createdAt,
+ localCollaboratorAlias,
+ name,
+ outputCatalog,
+ owner,
+ remoteDetailedInfo,
+ status,
+ updatedAt);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(CleanRoom.class)
+ .add("accessRestricted", accessRestricted)
+ .add("comment", comment)
+ .add("createdAt", createdAt)
+ .add("localCollaboratorAlias", localCollaboratorAlias)
+ .add("name", name)
+ .add("outputCatalog", outputCatalog)
+ .add("owner", owner)
+ .add("remoteDetailedInfo", remoteDetailedInfo)
+ .add("status", status)
+ .add("updatedAt", updatedAt)
+ .toString();
+ }
+}
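As a usage sketch against the model above: only the setters defined in this file are guaranteed here; the `create` call on `CleanRoomsAPI` and the `CreateCleanRoomRequest.setCleanRoom` setter are assumptions based on the generator's conventions.

```java
import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.cleanrooms.CleanRoom;
import com.databricks.sdk.service.cleanrooms.CleanRoomRemoteDetail;
import com.databricks.sdk.service.cleanrooms.CreateCleanRoomRequest;

public class CreateCleanRoomExample {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();

    // Per the remote_detailed_info javadoc, cloud_vendor, region and the collaborators'
    // global_metastore_id must be supplied at creation time; those setters are not shown
    // in this diff, so the detail object is left empty in this sketch.
    CleanRoom cleanRoom =
        new CleanRoom()
            .setName("demo_clean_room")
            .setComment("Shared analysis with a partner organization")
            .setOwner("clean-room-admins") // a group, per the ownership guidance in the API docs
            .setRemoteDetailedInfo(new CleanRoomRemoteDetail());

    // Assumed request wrapper and method name.
    CleanRoom created = w.cleanRooms().create(new CreateCleanRoomRequest().setCleanRoom(cleanRoom));
    System.out.println(created.getStatus());
  }
}
```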
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAccessRestricted.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAccessRestricted.java
new file mode 100755
index 00000000..e8831a6b
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAccessRestricted.java
@@ -0,0 +1,11 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.cleanrooms;
+
+import com.databricks.sdk.support.Generated;
+
+@Generated
+public enum CleanRoomAccessRestricted {
+ CSP_MISMATCH,
+ NO_RESTRICTION,
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAsset.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAsset.java
new file mode 100755
index 00000000..367a7360
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAsset.java
@@ -0,0 +1,271 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.cleanrooms;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+/** Metadata of the clean room asset */
+@Generated
+public class CleanRoomAsset {
+ /** When the asset is added to the clean room, in epoch milliseconds. */
+ @JsonProperty("added_at")
+ private Long addedAt;
+
+ /** The type of the asset. */
+ @JsonProperty("asset_type")
+ private CleanRoomAssetAssetType assetType;
+
+ /**
+ * Foreign table details available to all collaborators of the clean room. Present if and only if
+ * **asset_type** is **FOREIGN_TABLE**
+ */
+ @JsonProperty("foreign_table")
+ private CleanRoomAssetForeignTable foreignTable;
+
+ /**
+ * Local details for a foreign table that are only available to its owner. Present if and only if
+ * **asset_type** is **FOREIGN_TABLE**
+ */
+ @JsonProperty("foreign_table_local_details")
+ private CleanRoomAssetForeignTableLocalDetails foreignTableLocalDetails;
+
+ /**
+ * A fully qualified name that uniquely identifies the asset within the clean room. This is also
+ * the name displayed in the clean room UI.
+ *
+ * For UC securable assets (tables, volumes, etc.), the format is
+ * *shared_catalog*.*shared_schema*.*asset_name*
+ *
+ * For notebooks, the name is the notebook file name.
+ */
+ @JsonProperty("name")
+ private String name;
+
+ /**
+ * Notebook details available to all collaborators of the clean room. Present if and only if
+ * **asset_type** is **NOTEBOOK_FILE**
+ */
+ @JsonProperty("notebook")
+ private CleanRoomAssetNotebook notebook;
+
+ /** The alias of the collaborator who owns this asset */
+ @JsonProperty("owner_collaborator_alias")
+ private String ownerCollaboratorAlias;
+
+ /** Status of the asset */
+ @JsonProperty("status")
+ private CleanRoomAssetStatusEnum status;
+
+ /**
+ * Table details available to all collaborators of the clean room. Present if and only if
+ * **asset_type** is **TABLE**
+ */
+ @JsonProperty("table")
+ private CleanRoomAssetTable table;
+
+ /**
+ * Local details for a table that are only available to its owner. Present if and only if
+ * **asset_type** is **TABLE**
+ */
+ @JsonProperty("table_local_details")
+ private CleanRoomAssetTableLocalDetails tableLocalDetails;
+
+ /**
+ * View details available to all collaborators of the clean room. Present if and only if
+ * **asset_type** is **VIEW**
+ */
+ @JsonProperty("view")
+ private CleanRoomAssetView view;
+
+ /**
+ * Local details for a view that are only available to its owner. Present if and only if
+ * **asset_type** is **VIEW**
+ */
+ @JsonProperty("view_local_details")
+ private CleanRoomAssetViewLocalDetails viewLocalDetails;
+
+ /**
+ * Local details for a volume that are only available to its owner. Present if and only if
+ * **asset_type** is **VOLUME**
+ */
+ @JsonProperty("volume_local_details")
+ private CleanRoomAssetVolumeLocalDetails volumeLocalDetails;
+
+ public CleanRoomAsset setAddedAt(Long addedAt) {
+ this.addedAt = addedAt;
+ return this;
+ }
+
+ public Long getAddedAt() {
+ return addedAt;
+ }
+
+ public CleanRoomAsset setAssetType(CleanRoomAssetAssetType assetType) {
+ this.assetType = assetType;
+ return this;
+ }
+
+ public CleanRoomAssetAssetType getAssetType() {
+ return assetType;
+ }
+
+ public CleanRoomAsset setForeignTable(CleanRoomAssetForeignTable foreignTable) {
+ this.foreignTable = foreignTable;
+ return this;
+ }
+
+ public CleanRoomAssetForeignTable getForeignTable() {
+ return foreignTable;
+ }
+
+ public CleanRoomAsset setForeignTableLocalDetails(
+ CleanRoomAssetForeignTableLocalDetails foreignTableLocalDetails) {
+ this.foreignTableLocalDetails = foreignTableLocalDetails;
+ return this;
+ }
+
+ public CleanRoomAssetForeignTableLocalDetails getForeignTableLocalDetails() {
+ return foreignTableLocalDetails;
+ }
+
+ public CleanRoomAsset setName(String name) {
+ this.name = name;
+ return this;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public CleanRoomAsset setNotebook(CleanRoomAssetNotebook notebook) {
+ this.notebook = notebook;
+ return this;
+ }
+
+ public CleanRoomAssetNotebook getNotebook() {
+ return notebook;
+ }
+
+ public CleanRoomAsset setOwnerCollaboratorAlias(String ownerCollaboratorAlias) {
+ this.ownerCollaboratorAlias = ownerCollaboratorAlias;
+ return this;
+ }
+
+ public String getOwnerCollaboratorAlias() {
+ return ownerCollaboratorAlias;
+ }
+
+ public CleanRoomAsset setStatus(CleanRoomAssetStatusEnum status) {
+ this.status = status;
+ return this;
+ }
+
+ public CleanRoomAssetStatusEnum getStatus() {
+ return status;
+ }
+
+ public CleanRoomAsset setTable(CleanRoomAssetTable table) {
+ this.table = table;
+ return this;
+ }
+
+ public CleanRoomAssetTable getTable() {
+ return table;
+ }
+
+ public CleanRoomAsset setTableLocalDetails(CleanRoomAssetTableLocalDetails tableLocalDetails) {
+ this.tableLocalDetails = tableLocalDetails;
+ return this;
+ }
+
+ public CleanRoomAssetTableLocalDetails getTableLocalDetails() {
+ return tableLocalDetails;
+ }
+
+ public CleanRoomAsset setView(CleanRoomAssetView view) {
+ this.view = view;
+ return this;
+ }
+
+ public CleanRoomAssetView getView() {
+ return view;
+ }
+
+ public CleanRoomAsset setViewLocalDetails(CleanRoomAssetViewLocalDetails viewLocalDetails) {
+ this.viewLocalDetails = viewLocalDetails;
+ return this;
+ }
+
+ public CleanRoomAssetViewLocalDetails getViewLocalDetails() {
+ return viewLocalDetails;
+ }
+
+ public CleanRoomAsset setVolumeLocalDetails(CleanRoomAssetVolumeLocalDetails volumeLocalDetails) {
+ this.volumeLocalDetails = volumeLocalDetails;
+ return this;
+ }
+
+ public CleanRoomAssetVolumeLocalDetails getVolumeLocalDetails() {
+ return volumeLocalDetails;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ CleanRoomAsset that = (CleanRoomAsset) o;
+ return Objects.equals(addedAt, that.addedAt)
+ && Objects.equals(assetType, that.assetType)
+ && Objects.equals(foreignTable, that.foreignTable)
+ && Objects.equals(foreignTableLocalDetails, that.foreignTableLocalDetails)
+ && Objects.equals(name, that.name)
+ && Objects.equals(notebook, that.notebook)
+ && Objects.equals(ownerCollaboratorAlias, that.ownerCollaboratorAlias)
+ && Objects.equals(status, that.status)
+ && Objects.equals(table, that.table)
+ && Objects.equals(tableLocalDetails, that.tableLocalDetails)
+ && Objects.equals(view, that.view)
+ && Objects.equals(viewLocalDetails, that.viewLocalDetails)
+ && Objects.equals(volumeLocalDetails, that.volumeLocalDetails);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(
+ addedAt,
+ assetType,
+ foreignTable,
+ foreignTableLocalDetails,
+ name,
+ notebook,
+ ownerCollaboratorAlias,
+ status,
+ table,
+ tableLocalDetails,
+ view,
+ viewLocalDetails,
+ volumeLocalDetails);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(CleanRoomAsset.class)
+ .add("addedAt", addedAt)
+ .add("assetType", assetType)
+ .add("foreignTable", foreignTable)
+ .add("foreignTableLocalDetails", foreignTableLocalDetails)
+ .add("name", name)
+ .add("notebook", notebook)
+ .add("ownerCollaboratorAlias", ownerCollaboratorAlias)
+ .add("status", status)
+ .add("table", table)
+ .add("tableLocalDetails", tableLocalDetails)
+ .add("view", view)
+ .add("viewLocalDetails", viewLocalDetails)
+ .add("volumeLocalDetails", volumeLocalDetails)
+ .toString();
+ }
+}
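A short sketch of populating this model for a table asset, using only the setters defined above; `CleanRoomAssetTable` is assumed to be a no-arg bean like the other generated types, and its column metadata is omitted here.

```java
import com.databricks.sdk.service.cleanrooms.CleanRoomAsset;
import com.databricks.sdk.service.cleanrooms.CleanRoomAssetAssetType;
import com.databricks.sdk.service.cleanrooms.CleanRoomAssetTable;

public class CleanRoomAssetExample {
  public static void main(String[] args) {
    // For UC securables, the asset name is its fully qualified name inside the clean
    // room: shared_catalog.shared_schema.asset_name (see the name javadoc above).
    CleanRoomAsset tableAsset =
        new CleanRoomAsset()
            .setAssetType(CleanRoomAssetAssetType.TABLE)
            .setName("shared_catalog.shared_schema.transactions")
            .setTable(new CleanRoomAssetTable());

    System.out.println(tableAsset);
  }
}
```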
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetAssetType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetAssetType.java
new file mode 100755
index 00000000..a344b76b
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetAssetType.java
@@ -0,0 +1,14 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.cleanrooms;
+
+import com.databricks.sdk.support.Generated;
+
+@Generated
+public enum CleanRoomAssetAssetType {
+ FOREIGN_TABLE,
+ NOTEBOOK_FILE,
+ TABLE,
+ VIEW,
+ VOLUME,
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetForeignTable.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetForeignTable.java
new file mode 100755
index 00000000..0e21a2f9
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetForeignTable.java
@@ -0,0 +1,44 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.cleanrooms;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+@Generated
+public class CleanRoomAssetForeignTable {
+ /** The metadata information of the columns in the foreign table */
+ @JsonProperty("columns")
+ private Collection<com.databricks.sdk.service.catalog.ColumnInfo> columns;
+ * Create a clean room asset —share an asset like a notebook or table into the clean room. For
+ * each UC asset that is added through this method, the clean room owner must also have enough
+ * privilege on the asset to consume it. The privilege must be maintained indefinitely for the
+ * clean room to be able to access the asset. Typically, you should use a group as the clean room
+ * owner.
+ */
+ public CleanRoomAsset create(CreateCleanRoomAssetRequest request) {
+ return impl.create(request);
+ }
+
+ public void delete(
+ String cleanRoomName, CleanRoomAssetAssetType assetType, String assetFullName) {
+ delete(
+ new DeleteCleanRoomAssetRequest()
+ .setCleanRoomName(cleanRoomName)
+ .setAssetType(assetType)
+ .setAssetFullName(assetFullName));
+ }
+
+ /**
+ * Delete an asset.
+ *
+ * Delete a clean room asset - unshare/remove the asset from the clean room
+ */
+ public void delete(DeleteCleanRoomAssetRequest request) {
+ impl.delete(request);
+ }
+
+ public CleanRoomAsset get(
+ String cleanRoomName, CleanRoomAssetAssetType assetType, String assetFullName) {
+ return get(
+ new GetCleanRoomAssetRequest()
+ .setCleanRoomName(cleanRoomName)
+ .setAssetType(assetType)
+ .setAssetFullName(assetFullName));
+ }
+
+ /**
+ * Get an asset.
+ *
+ * Get the details of a clean room asset by its type and full name.
+ */
+ public CleanRoomAsset get(GetCleanRoomAssetRequest request) {
+ return impl.get(request);
+ }
+
+ public Iterable<CleanRoomAsset> list(String cleanRoomName) {
+ * Update a clean room asset. For example, updating the content of a notebook; changing the
+ * shared partitions of a table; etc.
+ */
+ public CleanRoomAsset update(UpdateCleanRoomAssetRequest request) {
+ return impl.update(request);
+ }
+
+ public CleanRoomAssetsService impl() {
+ return impl;
+ }
+}
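
For orientation, a minimal usage sketch of the new CleanRoomAssetsAPI: sharing an existing Unity Catalog table into a clean room. The `cleanRoomAssets()` accessor on `WorkspaceClient`, the fluent setters on `CreateCleanRoomAssetRequest` and `CleanRoomAsset`, and the clean room and table names are assumptions for illustration, not part of this diff.

```java
import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.cleanrooms.CleanRoomAsset;
import com.databricks.sdk.service.cleanrooms.CleanRoomAssetAssetType;
import com.databricks.sdk.service.cleanrooms.CreateCleanRoomAssetRequest;

public class ShareTableIntoCleanRoom {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();

    // Describe the UC table to share; the clean room owner must keep enough
    // privilege on this table for the clean room to consume it.
    CleanRoomAsset asset =
        new CleanRoomAsset()
            .setName("main.sales.transactions") // hypothetical table
            .setAssetType(CleanRoomAssetAssetType.TABLE);

    // Share it into the clean room (accessor and setter names assumed).
    CleanRoomAsset created =
        w.cleanRoomAssets()
            .create(
                new CreateCleanRoomAssetRequest()
                    .setCleanRoomName("demo-room") // hypothetical clean room
                    .setAsset(asset));

    System.out.println(created);
  }
}
```
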
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetsImpl.java
new file mode 100755
index 00000000..fb91ae40
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetsImpl.java
@@ -0,0 +1,68 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.cleanrooms;
+
+import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.support.Generated;
+import java.util.HashMap;
+import java.util.Map;
+
+/** Package-local implementation of CleanRoomAssets */
+@Generated
+class CleanRoomAssetsImpl implements CleanRoomAssetsService {
+ private final ApiClient apiClient;
+
+ public CleanRoomAssetsImpl(ApiClient apiClient) {
+ this.apiClient = apiClient;
+ }
+
+ @Override
+ public CleanRoomAsset create(CreateCleanRoomAssetRequest request) {
+ String path = String.format("/api/2.0/clean-rooms/%s/assets", request.getCleanRoomName());
+ Map<String, String> headers = new HashMap<>();
+ * This is the high-level interface, that contains generated methods.
+ *
+ * Evolving: this interface is under development. Method signatures may change.
+ */
+@Generated
+public interface CleanRoomAssetsService {
+ /**
+ * Create an asset.
+ *
+ * Create a clean room asset —share an asset like a notebook or table into the clean room. For
+ * each UC asset that is added through this method, the clean room owner must also have enough
+ * privilege on the asset to consume it. The privilege must be maintained indefinitely for the
+ * clean room to be able to access the asset. Typically, you should use a group as the clean room
+ * owner.
+ */
+ CleanRoomAsset create(CreateCleanRoomAssetRequest createCleanRoomAssetRequest);
+
+ /**
+ * Delete an asset.
+ *
+ * Delete a clean room asset - unshare/remove the asset from the clean room
+ */
+ void delete(DeleteCleanRoomAssetRequest deleteCleanRoomAssetRequest);
+
+ /**
+ * Get an asset.
+ *
+ * Get the details of a clean room asset by its type and full name.
+ */
+ CleanRoomAsset get(GetCleanRoomAssetRequest getCleanRoomAssetRequest);
+
+ /** List assets. */
+ ListCleanRoomAssetsResponse list(ListCleanRoomAssetsRequest listCleanRoomAssetsRequest);
+
+ /**
+ * Update an asset.
+ *
+ * Update a clean room asset. For example, updating the content of a notebook; changing the
+ * shared partitions of a table; etc.
+ */
+ CleanRoomAsset update(UpdateCleanRoomAssetRequest updateCleanRoomAssetRequest);
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomCollaborator.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomCollaborator.java
new file mode 100755
index 00000000..88dae550
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomCollaborator.java
@@ -0,0 +1,152 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.cleanrooms;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+/** Publicly visible clean room collaborator. */
+@Generated
+public class CleanRoomCollaborator {
+ /**
+ * Collaborator alias specified by the clean room creator. It is unique across all collaborators
+ * of this clean room, and used to derive multiple values internally such as catalog alias and
+ * clean room name for single metastore clean rooms. It should follow [UC securable naming
+ * requirements].
+ *
+ * [UC securable naming requirements]:
+ * https://docs.databricks.com/en/data-governance/unity-catalog/index.html#securable-object-naming-requirements
+ */
+ @JsonProperty("collaborator_alias")
+ private String collaboratorAlias;
+
+ /**
+ * Generated display name for the collaborator. In the case of a single metastore clean room, it
+ * is the clean room name. For x-metastore clean rooms, it is the organization name of the
+ * metastore. It is not restricted to these values and could change in the future
+ */
+ @JsonProperty("display_name")
+ private String displayName;
+
+ /**
+ * The global Unity Catalog metastore id of the collaborator. The identifier is of format
+ * cloud:region:metastore-uuid.
+ */
+ @JsonProperty("global_metastore_id")
+ private String globalMetastoreId;
+
+ /**
+ * Email of the user who is receiving the clean room "invitation". It should be empty for the
+ * creator of the clean room, and non-empty for the invitees of the clean room. It is only
+ * returned in the output when the clean room creator calls GET.
+ */
+ @JsonProperty("invite_recipient_email")
+ private String inviteRecipientEmail;
+
+ /**
+ * Workspace ID of the user who is receiving the clean room "invitation". Must be specified if
+ * invite_recipient_email is specified. It should be empty when the collaborator is the creator of
+ * the clean room.
+ */
+ @JsonProperty("invite_recipient_workspace_id")
+ private Long inviteRecipientWorkspaceId;
+
+ /**
+ * [Organization name](:method:metastores/list#metastores-delta_sharing_organization_name)
+ * configured in the metastore
+ */
+ @JsonProperty("organization_name")
+ private String organizationName;
+
+ public CleanRoomCollaborator setCollaboratorAlias(String collaboratorAlias) {
+ this.collaboratorAlias = collaboratorAlias;
+ return this;
+ }
+
+ public String getCollaboratorAlias() {
+ return collaboratorAlias;
+ }
+
+ public CleanRoomCollaborator setDisplayName(String displayName) {
+ this.displayName = displayName;
+ return this;
+ }
+
+ public String getDisplayName() {
+ return displayName;
+ }
+
+ public CleanRoomCollaborator setGlobalMetastoreId(String globalMetastoreId) {
+ this.globalMetastoreId = globalMetastoreId;
+ return this;
+ }
+
+ public String getGlobalMetastoreId() {
+ return globalMetastoreId;
+ }
+
+ public CleanRoomCollaborator setInviteRecipientEmail(String inviteRecipientEmail) {
+ this.inviteRecipientEmail = inviteRecipientEmail;
+ return this;
+ }
+
+ public String getInviteRecipientEmail() {
+ return inviteRecipientEmail;
+ }
+
+ public CleanRoomCollaborator setInviteRecipientWorkspaceId(Long inviteRecipientWorkspaceId) {
+ this.inviteRecipientWorkspaceId = inviteRecipientWorkspaceId;
+ return this;
+ }
+
+ public Long getInviteRecipientWorkspaceId() {
+ return inviteRecipientWorkspaceId;
+ }
+
+ public CleanRoomCollaborator setOrganizationName(String organizationName) {
+ this.organizationName = organizationName;
+ return this;
+ }
+
+ public String getOrganizationName() {
+ return organizationName;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ CleanRoomCollaborator that = (CleanRoomCollaborator) o;
+ return Objects.equals(collaboratorAlias, that.collaboratorAlias)
+ && Objects.equals(displayName, that.displayName)
+ && Objects.equals(globalMetastoreId, that.globalMetastoreId)
+ && Objects.equals(inviteRecipientEmail, that.inviteRecipientEmail)
+ && Objects.equals(inviteRecipientWorkspaceId, that.inviteRecipientWorkspaceId)
+ && Objects.equals(organizationName, that.organizationName);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(
+ collaboratorAlias,
+ displayName,
+ globalMetastoreId,
+ inviteRecipientEmail,
+ inviteRecipientWorkspaceId,
+ organizationName);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(CleanRoomCollaborator.class)
+ .add("collaboratorAlias", collaboratorAlias)
+ .add("displayName", displayName)
+ .add("globalMetastoreId", globalMetastoreId)
+ .add("inviteRecipientEmail", inviteRecipientEmail)
+ .add("inviteRecipientWorkspaceId", inviteRecipientWorkspaceId)
+ .add("organizationName", organizationName)
+ .toString();
+ }
+}
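
A short sketch of how the collaborator list for a new clean room might be assembled with the setters added above; the aliases, email, and workspace ID are placeholders.

```java
import com.databricks.sdk.service.cleanrooms.CleanRoomCollaborator;

public class CollaboratorSketch {
  public static void main(String[] args) {
    // The creator's entry carries an alias but no invite_recipient_email.
    CleanRoomCollaborator creator =
        new CleanRoomCollaborator().setCollaboratorAlias("creator");

    // An invitee's entry sets the recipient email; the recipient workspace ID
    // must be specified whenever the email is specified.
    CleanRoomCollaborator invitee =
        new CleanRoomCollaborator()
            .setCollaboratorAlias("partner_a")
            .setInviteRecipientEmail("admin@partner-a.example.com")
            .setInviteRecipientWorkspaceId(1234567890123456L);

    System.out.println(creator);
    System.out.println(invitee);
  }
}
```
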
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomNotebookTaskRun.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomNotebookTaskRun.java
new file mode 100755
index 00000000..0097643e
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomNotebookTaskRun.java
@@ -0,0 +1,148 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.cleanrooms;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+/** Stores information about a single task run. */
+@Generated
+public class CleanRoomNotebookTaskRun {
+ /**
+ * Job run info of the task in the runner's local workspace. This field is only included in the
+ * LIST API if the task was run within the same workspace the API is being called from. If the task
+ * run was in a different workspace under the same metastore, only the workspace_id is included.
+ */
+ @JsonProperty("collaborator_job_run_info")
+ private CollaboratorJobRunInfo collaboratorJobRunInfo;
+
+ /** State of the task run. */
+ @JsonProperty("notebook_job_run_state")
+ private com.databricks.sdk.service.jobs.CleanRoomTaskRunState notebookJobRunState;
+
+ /** Asset name of the notebook executed in this task run. */
+ @JsonProperty("notebook_name")
+ private String notebookName;
+
+ /** Expiration time of the output schema of the task run (if any), in epoch milliseconds. */
+ @JsonProperty("output_schema_expiration_time")
+ private Long outputSchemaExpirationTime;
+
+ /** Name of the output schema associated with the clean rooms notebook task run. */
+ @JsonProperty("output_schema_name")
+ private String outputSchemaName;
+
+ /** Duration of the task run, in milliseconds. */
+ @JsonProperty("run_duration")
+ private Long runDuration;
+
+ /** When the task run started, in epoch milliseconds. */
+ @JsonProperty("start_time")
+ private Long startTime;
+
+ public CleanRoomNotebookTaskRun setCollaboratorJobRunInfo(
+ CollaboratorJobRunInfo collaboratorJobRunInfo) {
+ this.collaboratorJobRunInfo = collaboratorJobRunInfo;
+ return this;
+ }
+
+ public CollaboratorJobRunInfo getCollaboratorJobRunInfo() {
+ return collaboratorJobRunInfo;
+ }
+
+ public CleanRoomNotebookTaskRun setNotebookJobRunState(
+ com.databricks.sdk.service.jobs.CleanRoomTaskRunState notebookJobRunState) {
+ this.notebookJobRunState = notebookJobRunState;
+ return this;
+ }
+
+ public com.databricks.sdk.service.jobs.CleanRoomTaskRunState getNotebookJobRunState() {
+ return notebookJobRunState;
+ }
+
+ public CleanRoomNotebookTaskRun setNotebookName(String notebookName) {
+ this.notebookName = notebookName;
+ return this;
+ }
+
+ public String getNotebookName() {
+ return notebookName;
+ }
+
+ public CleanRoomNotebookTaskRun setOutputSchemaExpirationTime(Long outputSchemaExpirationTime) {
+ this.outputSchemaExpirationTime = outputSchemaExpirationTime;
+ return this;
+ }
+
+ public Long getOutputSchemaExpirationTime() {
+ return outputSchemaExpirationTime;
+ }
+
+ public CleanRoomNotebookTaskRun setOutputSchemaName(String outputSchemaName) {
+ this.outputSchemaName = outputSchemaName;
+ return this;
+ }
+
+ public String getOutputSchemaName() {
+ return outputSchemaName;
+ }
+
+ public CleanRoomNotebookTaskRun setRunDuration(Long runDuration) {
+ this.runDuration = runDuration;
+ return this;
+ }
+
+ public Long getRunDuration() {
+ return runDuration;
+ }
+
+ public CleanRoomNotebookTaskRun setStartTime(Long startTime) {
+ this.startTime = startTime;
+ return this;
+ }
+
+ public Long getStartTime() {
+ return startTime;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ CleanRoomNotebookTaskRun that = (CleanRoomNotebookTaskRun) o;
+ return Objects.equals(collaboratorJobRunInfo, that.collaboratorJobRunInfo)
+ && Objects.equals(notebookJobRunState, that.notebookJobRunState)
+ && Objects.equals(notebookName, that.notebookName)
+ && Objects.equals(outputSchemaExpirationTime, that.outputSchemaExpirationTime)
+ && Objects.equals(outputSchemaName, that.outputSchemaName)
+ && Objects.equals(runDuration, that.runDuration)
+ && Objects.equals(startTime, that.startTime);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(
+ collaboratorJobRunInfo,
+ notebookJobRunState,
+ notebookName,
+ outputSchemaExpirationTime,
+ outputSchemaName,
+ runDuration,
+ startTime);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(CleanRoomNotebookTaskRun.class)
+ .add("collaboratorJobRunInfo", collaboratorJobRunInfo)
+ .add("notebookJobRunState", notebookJobRunState)
+ .add("notebookName", notebookName)
+ .add("outputSchemaExpirationTime", outputSchemaExpirationTime)
+ .add("outputSchemaName", outputSchemaName)
+ .add("runDuration", runDuration)
+ .add("startTime", startTime)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomOutputCatalog.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomOutputCatalog.java
new file mode 100755
index 00000000..eb864a9e
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomOutputCatalog.java
@@ -0,0 +1,64 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.cleanrooms;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class CleanRoomOutputCatalog {
+ /**
+ * The name of the output catalog in UC. It should follow [UC securable naming requirements]. The
+ * field will always exist if status is CREATED.
+ *
+ * [UC securable naming requirements]:
+ * https://docs.databricks.com/en/data-governance/unity-catalog/index.html#securable-object-naming-requirements
+ */
+ @JsonProperty("catalog_name")
+ private String catalogName;
+
+ /** */
+ @JsonProperty("status")
+ private CleanRoomOutputCatalogOutputCatalogStatus status;
+
+ public CleanRoomOutputCatalog setCatalogName(String catalogName) {
+ this.catalogName = catalogName;
+ return this;
+ }
+
+ public String getCatalogName() {
+ return catalogName;
+ }
+
+ public CleanRoomOutputCatalog setStatus(CleanRoomOutputCatalogOutputCatalogStatus status) {
+ this.status = status;
+ return this;
+ }
+
+ public CleanRoomOutputCatalogOutputCatalogStatus getStatus() {
+ return status;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ CleanRoomOutputCatalog that = (CleanRoomOutputCatalog) o;
+ return Objects.equals(catalogName, that.catalogName) && Objects.equals(status, that.status);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(catalogName, status);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(CleanRoomOutputCatalog.class)
+ .add("catalogName", catalogName)
+ .add("status", status)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomOutputCatalogOutputCatalogStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomOutputCatalogOutputCatalogStatus.java
new file mode 100755
index 00000000..4ba76e74
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomOutputCatalogOutputCatalogStatus.java
@@ -0,0 +1,12 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.cleanrooms;
+
+import com.databricks.sdk.support.Generated;
+
+@Generated
+public enum CleanRoomOutputCatalogOutputCatalogStatus {
+ CREATED,
+ NOT_CREATED,
+ NOT_ELIGIBLE,
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomRemoteDetail.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomRemoteDetail.java
new file mode 100755
index 00000000..afb1ee35
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomRemoteDetail.java
@@ -0,0 +1,154 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.cleanrooms;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+/** Publicly visible central clean room details. */
+@Generated
+public class CleanRoomRemoteDetail {
+ /** Central clean room ID. */
+ @JsonProperty("central_clean_room_id")
+ private String centralCleanRoomId;
+
+ /** Cloud vendor (aws,azure,gcp) of the central clean room. */
+ @JsonProperty("cloud_vendor")
+ private String cloudVendor;
+
+ /**
+ * Collaborators in the central clean room. There should be one and only one collaborator in the list
+ * that satisfies the owner condition:
+ *
+ * 1. It has the creator's global_metastore_id (determined by caller of CreateCleanRoom).
+ *
+ * 2. Its invite_recipient_email is empty.
+ */
+ @JsonProperty("collaborators")
+ private Collection<CleanRoomCollaborator> collaborators;
+ * List all the historical notebook task runs in a clean room.
+ */
+ public Iterable<CleanRoomNotebookTaskRun> list(ListCleanRoomNotebookTaskRunsRequest request) {
+ * This is the high-level interface, that contains generated methods.
+ *
+ * Evolving: this interface is under development. Method signatures may change.
+ */
+@Generated
+public interface CleanRoomTaskRunsService {
+ /**
+ * List notebook task runs.
+ *
+ * List all the historical notebook task runs in a clean room.
+ */
+ ListCleanRoomNotebookTaskRunsResponse list(
+ ListCleanRoomNotebookTaskRunsRequest listCleanRoomNotebookTaskRunsRequest);
+}
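
A hedged sketch of listing historical notebook task runs with the new service; the `cleanRoomTaskRuns()` accessor, the `setCleanRoomName` setter, and the clean room name are assumptions for illustration.

```java
import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.cleanrooms.CleanRoomNotebookTaskRun;
import com.databricks.sdk.service.cleanrooms.ListCleanRoomNotebookTaskRunsRequest;

public class ListNotebookTaskRuns {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();

    // Iterate over the historical notebook task runs of one clean room.
    for (CleanRoomNotebookTaskRun run :
        w.cleanRoomTaskRuns()
            .list(new ListCleanRoomNotebookTaskRunsRequest().setCleanRoomName("demo-room"))) {
      System.out.println(run.getNotebookName() + " -> " + run.getNotebookJobRunState());
    }
  }
}
```
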
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomsAPI.java
new file mode 100755
index 00000000..b1754d4c
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomsAPI.java
@@ -0,0 +1,129 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.cleanrooms;
+
+import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.Paginator;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * A clean room uses Delta Sharing and serverless compute to provide a secure and privacy-protecting
+ * environment where multiple parties can work together on sensitive enterprise data without direct
+ * access to each other’s data.
+ */
+@Generated
+public class CleanRoomsAPI {
+ private static final Logger LOG = LoggerFactory.getLogger(CleanRoomsAPI.class);
+
+ private final CleanRoomsService impl;
+
+ /** Regular-use constructor */
+ public CleanRoomsAPI(ApiClient apiClient) {
+ impl = new CleanRoomsImpl(apiClient);
+ }
+
+ /** Constructor for mocks */
+ public CleanRoomsAPI(CleanRoomsService mock) {
+ impl = mock;
+ }
+
+ /**
+ * Create a clean room.
+ *
+ * Create a new clean room with the specified collaborators. This method is asynchronous; the
+ * returned name field inside the clean_room field can be used to poll the clean room status,
+ * using the :method:cleanrooms/get method. When this method returns, the cluster will be in a
+ * PROVISIONING state. The cluster will be usable once it enters an ACTIVE state.
+ *
+ * The caller must be a metastore admin or have the **CREATE_CLEAN_ROOM** privilege on the
+ * metastore.
+ */
+ public CleanRoom create(CreateCleanRoomRequest request) {
+ return impl.create(request);
+ }
+
+ public CreateCleanRoomOutputCatalogResponse createOutputCatalog(String cleanRoomName) {
+ return createOutputCatalog(
+ new CreateCleanRoomOutputCatalogRequest().setCleanRoomName(cleanRoomName));
+ }
+
+ /**
+ * Create an output catalog.
+ *
+ * Create the output catalog of the clean room.
+ */
+ public CreateCleanRoomOutputCatalogResponse createOutputCatalog(
+ CreateCleanRoomOutputCatalogRequest request) {
+ return impl.createOutputCatalog(request);
+ }
+
+ public void delete(String name) {
+ delete(new DeleteCleanRoomRequest().setName(name));
+ }
+
+ /**
+ * Delete a clean room.
+ *
+ * Delete a clean room. After deletion, the clean room will be removed from the metastore. If
+ * the other collaborators have not deleted the clean room, they will still have the clean room in
+ * their metastore, but it will be in a DELETED state and no operations other than deletion can be
+ * performed on it.
+ */
+ public void delete(DeleteCleanRoomRequest request) {
+ impl.delete(request);
+ }
+
+ public CleanRoom get(String name) {
+ return get(new GetCleanRoomRequest().setName(name));
+ }
+
+ /**
+ * Get a clean room.
+ *
+ * Get the details of a clean room given its name.
+ */
+ public CleanRoom get(GetCleanRoomRequest request) {
+ return impl.get(request);
+ }
+
+ /**
+ * List clean rooms.
+ *
+ * Get a list of all clean rooms of the metastore. Only clean rooms the caller has access to
+ * are returned.
+ */
+ public Iterable<CleanRoom> list(ListCleanRoomsRequest request) {
+ * Update a clean room. The caller must be the owner of the clean room, have
+ * **MODIFY_CLEAN_ROOM** privilege, or be metastore admin.
+ *
+ * When the caller is a metastore admin, only the __owner__ field can be updated.
+ */
+ public CleanRoom update(UpdateCleanRoomRequest request) {
+ return impl.update(request);
+ }
+
+ public CleanRoomsService impl() {
+ return impl;
+ }
+}
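
Since create() is documented as asynchronous, a typical pattern is to poll get() until the clean room leaves the PROVISIONING state. The `cleanRooms()` accessor and the `getStatus()` getter on `CleanRoom` are assumed here; the clean room name is a placeholder.

```java
import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.cleanrooms.CleanRoom;

public class WaitForCleanRoom {
  public static void main(String[] args) throws InterruptedException {
    WorkspaceClient w = new WorkspaceClient();

    // Poll until the clean room is no longer provisioning.
    CleanRoom room = w.cleanRooms().get("demo-room");
    while ("PROVISIONING".equals(String.valueOf(room.getStatus()))) {
      Thread.sleep(10_000);
      room = w.cleanRooms().get("demo-room");
    }
    System.out.println("Clean room status: " + room.getStatus());
  }
}
```
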
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomsImpl.java
new file mode 100755
index 00000000..60cc9e29
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomsImpl.java
@@ -0,0 +1,71 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.cleanrooms;
+
+import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.support.Generated;
+import java.util.HashMap;
+import java.util.Map;
+
+/** Package-local implementation of CleanRooms */
+@Generated
+class CleanRoomsImpl implements CleanRoomsService {
+ private final ApiClient apiClient;
+
+ public CleanRoomsImpl(ApiClient apiClient) {
+ this.apiClient = apiClient;
+ }
+
+ @Override
+ public CleanRoom create(CreateCleanRoomRequest request) {
+ String path = "/api/2.0/clean-rooms";
+ Map<String, String> headers = new HashMap<>();
+ * This is the high-level interface, that contains generated methods.
+ *
+ * Evolving: this interface is under development. Method signatures may change.
+ */
+@Generated
+public interface CleanRoomsService {
+ /**
+ * Create a clean room.
+ *
+ * Create a new clean room with the specified collaborators. This method is asynchronous; the
+ * returned name field inside the clean_room field can be used to poll the clean room status,
+ * using the :method:cleanrooms/get method. When this method returns, the cluster will be in a
+ * PROVISIONING state. The cluster will be usable once it enters an ACTIVE state.
+ *
+ * The caller must be a metastore admin or have the **CREATE_CLEAN_ROOM** privilege on the
+ * metastore.
+ */
+ CleanRoom create(CreateCleanRoomRequest createCleanRoomRequest);
+
+ /**
+ * Create an output catalog.
+ *
+ * Create the output catalog of the clean room.
+ */
+ CreateCleanRoomOutputCatalogResponse createOutputCatalog(
+ CreateCleanRoomOutputCatalogRequest createCleanRoomOutputCatalogRequest);
+
+ /**
+ * Delete a clean room.
+ *
+ * Delete a clean room. After deletion, the clean room will be removed from the metastore. If
+ * the other collaborators have not deleted the clean room, they will still have the clean room in
+ * their metastore, but it will be in a DELETED state and no operations other than deletion can be
+ * performed on it.
+ */
+ void delete(DeleteCleanRoomRequest deleteCleanRoomRequest);
+
+ /**
+ * Get a clean room.
+ *
+ * Get the details of a clean room given its name.
+ */
+ CleanRoom get(GetCleanRoomRequest getCleanRoomRequest);
+
+ /**
+ * List clean rooms.
+ *
+ * Get a list of all clean rooms of the metastore. Only clean rooms the caller has access to
+ * are returned.
+ */
+ ListCleanRoomsResponse list(ListCleanRoomsRequest listCleanRoomsRequest);
+
+ /**
+ * Update a clean room.
+ *
+ * Update a clean room. The caller must be the owner of the clean room, have
+ * **MODIFY_CLEAN_ROOM** privilege, or be metastore admin.
+ *
+ * When the caller is a metastore admin, only the __owner__ field can be updated.
+ */
+ CleanRoom update(UpdateCleanRoomRequest updateCleanRoomRequest);
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CollaboratorJobRunInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CollaboratorJobRunInfo.java
new file mode 100755
index 00000000..ba03b012
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CollaboratorJobRunInfo.java
@@ -0,0 +1,109 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.cleanrooms;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class CollaboratorJobRunInfo {
+ /** Alias of the collaborator that triggered the task run. */
+ @JsonProperty("collaborator_alias")
+ private String collaboratorAlias;
+
+ /** Job ID of the task run in the collaborator's workspace. */
+ @JsonProperty("collaborator_job_id")
+ private Long collaboratorJobId;
+
+ /** Job run ID of the task run in the collaborator's workspace. */
+ @JsonProperty("collaborator_job_run_id")
+ private Long collaboratorJobRunId;
+
+ /** Task run ID of the task run in the collaborator's workspace. */
+ @JsonProperty("collaborator_task_run_id")
+ private Long collaboratorTaskRunId;
+
+ /** ID of the collaborator's workspace that triggered the task run. */
+ @JsonProperty("collaborator_workspace_id")
+ private Long collaboratorWorkspaceId;
+
+ public CollaboratorJobRunInfo setCollaboratorAlias(String collaboratorAlias) {
+ this.collaboratorAlias = collaboratorAlias;
+ return this;
+ }
+
+ public String getCollaboratorAlias() {
+ return collaboratorAlias;
+ }
+
+ public CollaboratorJobRunInfo setCollaboratorJobId(Long collaboratorJobId) {
+ this.collaboratorJobId = collaboratorJobId;
+ return this;
+ }
+
+ public Long getCollaboratorJobId() {
+ return collaboratorJobId;
+ }
+
+ public CollaboratorJobRunInfo setCollaboratorJobRunId(Long collaboratorJobRunId) {
+ this.collaboratorJobRunId = collaboratorJobRunId;
+ return this;
+ }
+
+ public Long getCollaboratorJobRunId() {
+ return collaboratorJobRunId;
+ }
+
+ public CollaboratorJobRunInfo setCollaboratorTaskRunId(Long collaboratorTaskRunId) {
+ this.collaboratorTaskRunId = collaboratorTaskRunId;
+ return this;
+ }
+
+ public Long getCollaboratorTaskRunId() {
+ return collaboratorTaskRunId;
+ }
+
+ public CollaboratorJobRunInfo setCollaboratorWorkspaceId(Long collaboratorWorkspaceId) {
+ this.collaboratorWorkspaceId = collaboratorWorkspaceId;
+ return this;
+ }
+
+ public Long getCollaboratorWorkspaceId() {
+ return collaboratorWorkspaceId;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ CollaboratorJobRunInfo that = (CollaboratorJobRunInfo) o;
+ return Objects.equals(collaboratorAlias, that.collaboratorAlias)
+ && Objects.equals(collaboratorJobId, that.collaboratorJobId)
+ && Objects.equals(collaboratorJobRunId, that.collaboratorJobRunId)
+ && Objects.equals(collaboratorTaskRunId, that.collaboratorTaskRunId)
+ && Objects.equals(collaboratorWorkspaceId, that.collaboratorWorkspaceId);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(
+ collaboratorAlias,
+ collaboratorJobId,
+ collaboratorJobRunId,
+ collaboratorTaskRunId,
+ collaboratorWorkspaceId);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(CollaboratorJobRunInfo.class)
+ .add("collaboratorAlias", collaboratorAlias)
+ .add("collaboratorJobId", collaboratorJobId)
+ .add("collaboratorJobRunId", collaboratorJobRunId)
+ .add("collaboratorTaskRunId", collaboratorTaskRunId)
+ .add("collaboratorWorkspaceId", collaboratorWorkspaceId)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/ComplianceSecurityProfile.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/ComplianceSecurityProfile.java
new file mode 100755
index 00000000..813e6bf5
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/ComplianceSecurityProfile.java
@@ -0,0 +1,67 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.cleanrooms;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+/**
+ * The compliance security profile used to process regulated data following compliance standards.
+ */
+@Generated
+public class ComplianceSecurityProfile {
+ /**
+ * The list of compliance standards that the compliance security profile is configured to enforce.
+ */
+ @JsonProperty("compliance_standards")
+ private Collection<com.databricks.sdk.service.settings.ComplianceStandard> complianceStandards;
+ * For UC securable assets (tables, volumes, etc.), the format is
+ * *shared_catalog*.*shared_schema*.*asset_name*
+ *
+ * For notebooks, the name is the notebook file name.
+ */
+ @JsonIgnore private String name;
+
+ public UpdateCleanRoomAssetRequest setAsset(CleanRoomAsset asset) {
+ this.asset = asset;
+ return this;
+ }
+
+ public CleanRoomAsset getAsset() {
+ return asset;
+ }
+
+ public UpdateCleanRoomAssetRequest setAssetType(CleanRoomAssetAssetType assetType) {
+ this.assetType = assetType;
+ return this;
+ }
+
+ public CleanRoomAssetAssetType getAssetType() {
+ return assetType;
+ }
+
+ public UpdateCleanRoomAssetRequest setCleanRoomName(String cleanRoomName) {
+ this.cleanRoomName = cleanRoomName;
+ return this;
+ }
+
+ public String getCleanRoomName() {
+ return cleanRoomName;
+ }
+
+ public UpdateCleanRoomAssetRequest setName(String name) {
+ this.name = name;
+ return this;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ UpdateCleanRoomAssetRequest that = (UpdateCleanRoomAssetRequest) o;
+ return Objects.equals(asset, that.asset)
+ && Objects.equals(assetType, that.assetType)
+ && Objects.equals(cleanRoomName, that.cleanRoomName)
+ && Objects.equals(name, that.name);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(asset, assetType, cleanRoomName, name);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(UpdateCleanRoomAssetRequest.class)
+ .add("asset", asset)
+ .add("assetType", assetType)
+ .add("cleanRoomName", cleanRoomName)
+ .add("name", name)
+ .toString();
+ }
+}
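
An illustrative update call built from the setters above, addressing a shared table by its `shared_catalog.shared_schema.asset_name` form. The `cleanRoomAssets()` accessor and the contents of the replacement `CleanRoomAsset` are assumptions.

```java
import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.cleanrooms.CleanRoomAsset;
import com.databricks.sdk.service.cleanrooms.CleanRoomAssetAssetType;
import com.databricks.sdk.service.cleanrooms.UpdateCleanRoomAssetRequest;

public class UpdateSharedTable {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();

    // The asset is addressed by clean room name, asset type, and its name inside
    // the clean room; the asset payload carries the updated definition.
    CleanRoomAsset updated =
        w.cleanRoomAssets()
            .update(
                new UpdateCleanRoomAssetRequest()
                    .setCleanRoomName("demo-room")
                    .setAssetType(CleanRoomAssetAssetType.TABLE)
                    .setName("shared_catalog.shared_schema.transactions")
                    .setAsset(new CleanRoomAsset())); // fill with the new definition
    System.out.println(updated);
  }
}
```
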
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/UpdateCleanRoomRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/UpdateCleanRoomRequest.java
new file mode 100755
index 00000000..32097e13
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/UpdateCleanRoomRequest.java
@@ -0,0 +1,58 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.cleanrooms;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class UpdateCleanRoomRequest {
+ /** */
+ @JsonProperty("clean_room")
+ private CleanRoom cleanRoom;
+
+ /** Name of the clean room. */
+ @JsonIgnore private String name;
+
+ public UpdateCleanRoomRequest setCleanRoom(CleanRoom cleanRoom) {
+ this.cleanRoom = cleanRoom;
+ return this;
+ }
+
+ public CleanRoom getCleanRoom() {
+ return cleanRoom;
+ }
+
+ public UpdateCleanRoomRequest setName(String name) {
+ this.name = name;
+ return this;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ UpdateCleanRoomRequest that = (UpdateCleanRoomRequest) o;
+ return Objects.equals(cleanRoom, that.cleanRoom) && Objects.equals(name, that.name);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(cleanRoom, name);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(UpdateCleanRoomRequest.class)
+ .add("cleanRoom", cleanRoom)
+ .add("name", name)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/DataType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/DataType.java
new file mode 100755
index 00000000..4c6bf3c0
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/DataType.java
@@ -0,0 +1,26 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dashboards;
+
+import com.databricks.sdk.support.Generated;
+
+@Generated
+public enum DataType {
+ DATA_TYPE_ARRAY,
+ DATA_TYPE_BIG_INT,
+ DATA_TYPE_BINARY,
+ DATA_TYPE_BOOLEAN,
+ DATA_TYPE_DATE,
+ DATA_TYPE_DECIMAL,
+ DATA_TYPE_DOUBLE,
+ DATA_TYPE_FLOAT,
+ DATA_TYPE_INT,
+ DATA_TYPE_INTERVAL,
+ DATA_TYPE_MAP,
+ DATA_TYPE_SMALL_INT,
+ DATA_TYPE_STRING,
+ DATA_TYPE_STRUCT,
+ DATA_TYPE_TIMESTAMP,
+ DATA_TYPE_TINY_INT,
+ DATA_TYPE_VOID,
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryAttachment.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryAttachment.java
index e9975040..c09f42a1 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryAttachment.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryAttachment.java
@@ -9,6 +9,10 @@
@Generated
public class QueryAttachment {
+ /** */
+ @JsonProperty("cached_query_schema")
+ private QuerySchema cachedQuerySchema;
+
/** Description of the query */
@JsonProperty("description")
private String description;
@@ -40,6 +44,15 @@ public class QueryAttachment {
@JsonProperty("title")
private String title;
+ public QueryAttachment setCachedQuerySchema(QuerySchema cachedQuerySchema) {
+ this.cachedQuerySchema = cachedQuerySchema;
+ return this;
+ }
+
+ public QuerySchema getCachedQuerySchema() {
+ return cachedQuerySchema;
+ }
+
public QueryAttachment setDescription(String description) {
this.description = description;
return this;
@@ -108,7 +121,8 @@ public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
QueryAttachment that = (QueryAttachment) o;
- return Objects.equals(description, that.description)
+ return Objects.equals(cachedQuerySchema, that.cachedQuerySchema)
+ && Objects.equals(description, that.description)
&& Objects.equals(id, that.id)
&& Objects.equals(instructionId, that.instructionId)
&& Objects.equals(instructionTitle, that.instructionTitle)
@@ -120,12 +134,20 @@ public boolean equals(Object o) {
@Override
public int hashCode() {
return Objects.hash(
- description, id, instructionId, instructionTitle, lastUpdatedTimestamp, query, title);
+ cachedQuerySchema,
+ description,
+ id,
+ instructionId,
+ instructionTitle,
+ lastUpdatedTimestamp,
+ query,
+ title);
}
@Override
public String toString() {
return new ToStringer(QueryAttachment.class)
+ .add("cachedQuerySchema", cachedQuerySchema)
.add("description", description)
.add("id", id)
.add("instructionId", instructionId)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QuerySchema.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QuerySchema.java
new file mode 100755
index 00000000..700920df
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QuerySchema.java
@@ -0,0 +1,62 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dashboards;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+@Generated
+public class QuerySchema {
+ /** */
+ @JsonProperty("columns")
+ private Collection<QuerySchemaColumn> columns;
- * Downloads a file of up to 5 GiB. The file contents are the response body. This is a standard
- * HTTP file download, not a JSON RPC.
+ * Downloads a file. The file contents are the response body. This is a standard HTTP file
+ * download, not a JSON RPC. It supports the Range and If-Unmodified-Since HTTP headers.
*/
public DownloadResponse download(DownloadRequest request) {
return impl.download(request);
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/FilesService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/FilesService.java
index 1f2c2aa2..4354b81f 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/FilesService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/FilesService.java
@@ -55,8 +55,8 @@ public interface FilesService {
/**
* Download a file.
*
- * Downloads a file of up to 5 GiB. The file contents are the response body. This is a standard
- * HTTP file download, not a JSON RPC.
+ * Downloads a file. The file contents are the response body. This is a standard HTTP file
+ * download, not a JSON RPC. It supports the Range and If-Unmodified-Since HTTP headers.
*/
DownloadResponse download(DownloadRequest downloadRequest);
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CleanRoomTaskRunLifeCycleState.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CleanRoomTaskRunLifeCycleState.java
new file mode 100755
index 00000000..7abc50cb
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CleanRoomTaskRunLifeCycleState.java
@@ -0,0 +1,22 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.jobs;
+
+import com.databricks.sdk.support.Generated;
+
+/**
+ * Copied from elastic-spark-common/api/messages/runs.proto. Using the original definition to remove
+ * coupling with jobs API definition
+ */
+@Generated
+public enum CleanRoomTaskRunLifeCycleState {
+ BLOCKED,
+ INTERNAL_ERROR,
+ PENDING,
+ QUEUED,
+ RUNNING,
+ SKIPPED,
+ TERMINATED,
+ TERMINATING,
+ WAITING_FOR_RETRY,
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CleanRoomTaskRunResultState.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CleanRoomTaskRunResultState.java
new file mode 100755
index 00000000..197670f5
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CleanRoomTaskRunResultState.java
@@ -0,0 +1,25 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.jobs;
+
+import com.databricks.sdk.support.Generated;
+
+/**
+ * Copied from elastic-spark-common/api/messages/runs.proto. Using the original definition to avoid
+ * cyclic dependency.
+ */
+@Generated
+public enum CleanRoomTaskRunResultState {
+ CANCELED,
+ DISABLED,
+ EVICTED,
+ EXCLUDED,
+ FAILED,
+ MAXIMUM_CONCURRENT_RUNS_REACHED,
+ SUCCESS,
+ SUCCESS_WITH_FAILURES,
+ TIMEDOUT,
+ UPSTREAM_CANCELED,
+ UPSTREAM_EVICTED,
+ UPSTREAM_FAILED,
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CleanRoomTaskRunState.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CleanRoomTaskRunState.java
new file mode 100755
index 00000000..704f0a77
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CleanRoomTaskRunState.java
@@ -0,0 +1,66 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.jobs;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+/** Stores the run state of the clean room notebook V1 task. */
+@Generated
+public class CleanRoomTaskRunState {
+ /**
+ * A value indicating the run's current lifecycle state. This field is always available in the
+ * response.
+ */
+ @JsonProperty("life_cycle_state")
+ private CleanRoomTaskRunLifeCycleState lifeCycleState;
+
+ /**
+ * A value indicating the run's result. This field is only available for terminal lifecycle
+ * states.
+ */
+ @JsonProperty("result_state")
+ private CleanRoomTaskRunResultState resultState;
+
+ public CleanRoomTaskRunState setLifeCycleState(CleanRoomTaskRunLifeCycleState lifeCycleState) {
+ this.lifeCycleState = lifeCycleState;
+ return this;
+ }
+
+ public CleanRoomTaskRunLifeCycleState getLifeCycleState() {
+ return lifeCycleState;
+ }
+
+ public CleanRoomTaskRunState setResultState(CleanRoomTaskRunResultState resultState) {
+ this.resultState = resultState;
+ return this;
+ }
+
+ public CleanRoomTaskRunResultState getResultState() {
+ return resultState;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ CleanRoomTaskRunState that = (CleanRoomTaskRunState) o;
+ return Objects.equals(lifeCycleState, that.lifeCycleState)
+ && Objects.equals(resultState, that.resultState);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(lifeCycleState, resultState);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(CleanRoomTaskRunState.class)
+ .add("lifeCycleState", lifeCycleState)
+ .add("resultState", resultState)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CreateJob.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CreateJob.java
index 9a2bfc86..c065968f 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CreateJob.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CreateJob.java
@@ -132,8 +132,8 @@ public class CreateJob {
private QueueSettings queue;
/**
- * Write-only setting. Specifies the user, service principal or group that the job/pipeline runs
- * as. If not specified, the job/pipeline runs as the user who created the job/pipeline.
+ * Write-only setting. Specifies the user or service principal that the job runs as. If not
+ * specified, the job runs as the user who created the job.
*
* Either `user_name` or `service_principal_name` should be specified. If not, an error is
* thrown.
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobRunAs.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobRunAs.java
index dbe49479..53dc8389 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobRunAs.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobRunAs.java
@@ -8,8 +8,8 @@
import java.util.Objects;
/**
- * Write-only setting. Specifies the user, service principal or group that the job/pipeline runs as.
- * If not specified, the job/pipeline runs as the user who created the job/pipeline.
+ * Write-only setting. Specifies the user or service principal that the job runs as. If not
+ * specified, the job runs as the user who created the job.
*
* Either `user_name` or `service_principal_name` should be specified. If not, an error is
* thrown.
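
For context on the narrowed run-as semantics, a hedged sketch of configuring a job to run as a service principal. The fluent setters on `JobSettings` and `JobRunAs` follow the SDK's usual generated pattern but are not shown in this diff, and the job name and application ID are placeholders.

```java
import com.databricks.sdk.service.jobs.JobRunAs;
import com.databricks.sdk.service.jobs.JobSettings;

public class RunAsSketch {
  public static void main(String[] args) {
    // Exactly one of user_name or service_principal_name should be set.
    JobSettings settings =
        new JobSettings()
            .setName("nightly-etl")
            .setRunAs(
                new JobRunAs()
                    .setServicePrincipalName("00000000-0000-0000-0000-000000000000"));
    System.out.println(settings);
  }
}
```
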
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobSettings.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobSettings.java
index a3826b3b..6a593805 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobSettings.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobSettings.java
@@ -128,8 +128,8 @@ public class JobSettings {
private QueueSettings queue;
/**
- * Write-only setting. Specifies the user, service principal or group that the job/pipeline runs
- * as. If not specified, the job/pipeline runs as the user who created the job/pipeline.
+ * Write-only setting. Specifies the user or service principal that the job runs as. If not
+ * specified, the job runs as the user who created the job.
*
* Either `user_name` or `service_principal_name` should be specified. If not, an error is
* thrown.
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingAccessPolicyAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingAccessPolicyAPI.java
index 35af0c8f..0a12f2dc 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingAccessPolicyAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingAccessPolicyAPI.java
@@ -28,6 +28,16 @@ public AibiDashboardEmbeddingAccessPolicyAPI(AibiDashboardEmbeddingAccessPolicyS
impl = mock;
}
+ /**
+ * Delete the AI/BI dashboard embedding access policy.
+ *
+ * Delete the AI/BI dashboard embedding access policy, reverting back to the default.
+ */
+ public DeleteAibiDashboardEmbeddingAccessPolicySettingResponse delete(
+ DeleteAibiDashboardEmbeddingAccessPolicySettingRequest request) {
+ return impl.delete(request);
+ }
+
/**
* Retrieve the AI/BI dashboard embedding access policy.
*
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingAccessPolicyImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingAccessPolicyImpl.java
index b2736799..bca27cc4 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingAccessPolicyImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingAccessPolicyImpl.java
@@ -15,6 +15,16 @@ public AibiDashboardEmbeddingAccessPolicyImpl(ApiClient apiClient) {
this.apiClient = apiClient;
}
+ @Override
+ public DeleteAibiDashboardEmbeddingAccessPolicySettingResponse delete(
+ DeleteAibiDashboardEmbeddingAccessPolicySettingRequest request) {
+ String path = "/api/2.0/settings/types/aibi_dash_embed_ws_acc_policy/names/default";
+ Map<String, String> headers = new HashMap<>();
+ * Delete the AI/BI dashboard embedding access policy, reverting back to the default.
+ */
+ DeleteAibiDashboardEmbeddingAccessPolicySettingResponse delete(
+ DeleteAibiDashboardEmbeddingAccessPolicySettingRequest
+ deleteAibiDashboardEmbeddingAccessPolicySettingRequest);
+
/**
* Retrieve the AI/BI dashboard embedding access policy.
*
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingApprovedDomainsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingApprovedDomainsAPI.java
index 6a096aca..5a2ecc74 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingApprovedDomainsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingApprovedDomainsAPI.java
@@ -28,6 +28,17 @@ public AibiDashboardEmbeddingApprovedDomainsAPI(
impl = mock;
}
+ /**
+ * Delete AI/BI dashboard embedding approved domains.
+ *
+ * Delete the list of domains approved to host embedded AI/BI dashboards, reverting back to the
+ * default empty list.
+ */
+ public DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponse delete(
+ DeleteAibiDashboardEmbeddingApprovedDomainsSettingRequest request) {
+ return impl.delete(request);
+ }
+
/**
* Retrieve the list of domains approved to host embedded AI/BI dashboards.
*
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingApprovedDomainsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingApprovedDomainsImpl.java
index e026484e..c26b1f5e 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingApprovedDomainsImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingApprovedDomainsImpl.java
@@ -16,6 +16,16 @@ public AibiDashboardEmbeddingApprovedDomainsImpl(ApiClient apiClient) {
this.apiClient = apiClient;
}
+ @Override
+ public DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponse delete(
+ DeleteAibiDashboardEmbeddingApprovedDomainsSettingRequest request) {
+ String path = "/api/2.0/settings/types/aibi_dash_embed_ws_apprvd_domains/names/default";
+ Map<String, String> headers = new HashMap<>();
+ * Delete the list of domains approved to host embedded AI/BI dashboards, reverting back to the
+ * default empty list.
+ */
+ DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponse delete(
+ DeleteAibiDashboardEmbeddingApprovedDomainsSettingRequest
+ deleteAibiDashboardEmbeddingApprovedDomainsSettingRequest);
+
/**
* Retrieve the list of domains approved to host embedded AI/BI dashboards.
*
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteAibiDashboardEmbeddingAccessPolicySettingRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteAibiDashboardEmbeddingAccessPolicySettingRequest.java
new file mode 100755
index 00000000..468e464f
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteAibiDashboardEmbeddingAccessPolicySettingRequest.java
@@ -0,0 +1,55 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.settings;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+/** Delete the AI/BI dashboard embedding access policy */
+@Generated
+public class DeleteAibiDashboardEmbeddingAccessPolicySettingRequest {
+ /**
+ * etag used for versioning. The response is at least as fresh as the eTag provided. This is used
+ * for optimistic concurrency control as a way to help prevent simultaneous writes of a setting
+ * overwriting each other. It is strongly suggested that systems make use of the etag in the read
+ * -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get
+ * an etag from a GET request, and pass it with the DELETE request to identify the rule set
+ * version you are deleting.
+ */
+ @JsonIgnore
+ @QueryParam("etag")
+ private String etag;
+
+ public DeleteAibiDashboardEmbeddingAccessPolicySettingRequest setEtag(String etag) {
+ this.etag = etag;
+ return this;
+ }
+
+ public String getEtag() {
+ return etag;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ DeleteAibiDashboardEmbeddingAccessPolicySettingRequest that =
+ (DeleteAibiDashboardEmbeddingAccessPolicySettingRequest) o;
+ return Objects.equals(etag, that.etag);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(etag);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(DeleteAibiDashboardEmbeddingAccessPolicySettingRequest.class)
+ .add("etag", etag)
+ .toString();
+ }
+}
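
The etag field above describes a read-then-delete pattern; here is a minimal sketch of it, assuming the settings accessor chain `w.settings().aibiDashboardEmbeddingAccessPolicy()` and an etag already obtained from a prior GET of the same setting.

```java
import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.settings.DeleteAibiDashboardEmbeddingAccessPolicySettingRequest;
import com.databricks.sdk.service.settings.DeleteAibiDashboardEmbeddingAccessPolicySettingResponse;

public class DeleteEmbeddingAccessPolicy {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();

    // Etag captured from a previous GET of this setting (placeholder value).
    String etagFromGet = "etag-from-previous-get";

    // Passing the etag lets the service detect a concurrent write to the setting.
    DeleteAibiDashboardEmbeddingAccessPolicySettingResponse response =
        w.settings()
            .aibiDashboardEmbeddingAccessPolicy()
            .delete(
                new DeleteAibiDashboardEmbeddingAccessPolicySettingRequest()
                    .setEtag(etagFromGet));
    System.out.println("New etag: " + response.getEtag());
  }
}
```
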
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteAibiDashboardEmbeddingAccessPolicySettingResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteAibiDashboardEmbeddingAccessPolicySettingResponse.java
new file mode 100755
index 00000000..6acb42ae
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteAibiDashboardEmbeddingAccessPolicySettingResponse.java
@@ -0,0 +1,53 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.settings;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+/** The etag is returned. */
+@Generated
+public class DeleteAibiDashboardEmbeddingAccessPolicySettingResponse {
+ /**
+ * etag used for versioning. The response is at least as fresh as the eTag provided. This is used
+ * for optimistic concurrency control as a way to help prevent simultaneous writes of a setting
+ * overwriting each other. It is strongly suggested that systems make use of the etag in the read
+ * -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get
+ * an etag from a GET request, and pass it with the DELETE request to identify the rule set
+ * version you are deleting.
+ */
+ @JsonProperty("etag")
+ private String etag;
+
+ public DeleteAibiDashboardEmbeddingAccessPolicySettingResponse setEtag(String etag) {
+ this.etag = etag;
+ return this;
+ }
+
+ public String getEtag() {
+ return etag;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ DeleteAibiDashboardEmbeddingAccessPolicySettingResponse that =
+ (DeleteAibiDashboardEmbeddingAccessPolicySettingResponse) o;
+ return Objects.equals(etag, that.etag);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(etag);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(DeleteAibiDashboardEmbeddingAccessPolicySettingResponse.class)
+ .add("etag", etag)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteAibiDashboardEmbeddingApprovedDomainsSettingRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteAibiDashboardEmbeddingApprovedDomainsSettingRequest.java
new file mode 100755
index 00000000..686ba154
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteAibiDashboardEmbeddingApprovedDomainsSettingRequest.java
@@ -0,0 +1,55 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.settings;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+/** Delete AI/BI dashboard embedding approved domains */
+@Generated
+public class DeleteAibiDashboardEmbeddingApprovedDomainsSettingRequest {
+ /**
+ * etag used for versioning. The response is at least as fresh as the eTag provided. This is used
+ * for optimistic concurrency control as a way to help prevent simultaneous writes of a setting
+ * overwriting each other. It is strongly suggested that systems make use of the etag in the read
+ * -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get
+ * an etag from a GET request, and pass it with the DELETE request to identify the rule set
+ * version you are deleting.
+ */
+ @JsonIgnore
+ @QueryParam("etag")
+ private String etag;
+
+ public DeleteAibiDashboardEmbeddingApprovedDomainsSettingRequest setEtag(String etag) {
+ this.etag = etag;
+ return this;
+ }
+
+ public String getEtag() {
+ return etag;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ DeleteAibiDashboardEmbeddingApprovedDomainsSettingRequest that =
+ (DeleteAibiDashboardEmbeddingApprovedDomainsSettingRequest) o;
+ return Objects.equals(etag, that.etag);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(etag);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(DeleteAibiDashboardEmbeddingApprovedDomainsSettingRequest.class)
+ .add("etag", etag)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponse.java
new file mode 100755
index 00000000..8ee982c5
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponse.java
@@ -0,0 +1,53 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.settings;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+/** The etag is returned. */
+@Generated
+public class DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponse {
+ /**
+ * etag used for versioning. The response is at least as fresh as the eTag provided. This is used
+ * for optimistic concurrency control as a way to help prevent simultaneous writes of a setting
+ * overwriting each other. It is strongly suggested that systems make use of the etag in the read
+ * -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get
+ * an etag from a GET request, and pass it with the DELETE request to identify the rule set
+ * version you are deleting.
+ */
+ @JsonProperty("etag")
+ private String etag;
+
+ public DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponse setEtag(String etag) {
+ this.etag = etag;
+ return this;
+ }
+
+ public String getEtag() {
+ return etag;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponse that =
+ (DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponse) o;
+ return Objects.equals(etag, that.etag);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(etag);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponse.class)
+ .add("etag", etag)
+ .toString();
+ }
+}
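
The Javadoc on the request and response classes above describes a read -> delete pattern for optimistic concurrency: read the setting to obtain its current etag, pass that etag on the DELETE so a concurrent write can be detected, and keep the etag returned in the response for any follow-up call. The sketch below illustrates that flow using only the two ApprovedDomains classes from this diff (the same pattern applies to the AccessPolicySetting classes above); the `execute` function stands in for the generated settings service method, which is not part of this hunk, and `etagFromGet` is assumed to come from a prior GET of the setting.

import com.databricks.sdk.service.settings.DeleteAibiDashboardEmbeddingApprovedDomainsSettingRequest;
import com.databricks.sdk.service.settings.DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponse;
import java.util.function.Function;

final class EtagDeletePatternSketch {
  // Builds the delete request from an etag obtained by a prior GET, runs it through the
  // supplied service call, and returns the fresh etag carried by the response.
  static String deleteWithEtag(
      String etagFromGet,
      Function<
              DeleteAibiDashboardEmbeddingApprovedDomainsSettingRequest,
              DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponse>
          execute) {
    DeleteAibiDashboardEmbeddingApprovedDomainsSettingRequest request =
        new DeleteAibiDashboardEmbeddingApprovedDomainsSettingRequest().setEtag(etagFromGet);
    DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponse response = execute.apply(request);
    return response.getEtag();
  }
}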
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicy.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicy.java
new file mode 100755
index 00000000..a5eab386
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicy.java
@@ -0,0 +1,50 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.settings;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+/**
+ * The network policies applying for egress traffic. This message is used by the UI/REST API. We
+ * translate this message to the format expected by the dataplane in Lakehouse Network Manager (for
+ * the format expected by the dataplane, see networkconfig.textproto).
+ */
+@Generated
+public class EgressNetworkPolicy {
+ /** The access policy enforced for egress traffic to the internet. */
+ @JsonProperty("internet_access")
+ private EgressNetworkPolicyInternetAccessPolicy internetAccess;
+
+ public EgressNetworkPolicy setInternetAccess(
+ EgressNetworkPolicyInternetAccessPolicy internetAccess) {
+ this.internetAccess = internetAccess;
+ return this;
+ }
+
+ public EgressNetworkPolicyInternetAccessPolicy getInternetAccess() {
+ return internetAccess;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ EgressNetworkPolicy that = (EgressNetworkPolicy) o;
+ return Objects.equals(internetAccess, that.internetAccess);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(internetAccess);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(EgressNetworkPolicy.class)
+ .add("internetAccess", internetAccess)
+ .toString();
+ }
+}
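
As the Javadoc notes, EgressNetworkPolicy is the UI/REST-facing wrapper and currently carries a single field, the internet access policy. Below is a minimal construction sketch using only the setters visible in this diff; the nested policy's own fields (for example the allowed_internet_destinations collection shown in the next file) would be populated the same fluent way, but their setters fall outside this hunk.

import com.databricks.sdk.service.settings.EgressNetworkPolicy;
import com.databricks.sdk.service.settings.EgressNetworkPolicyInternetAccessPolicy;

final class EgressNetworkPolicySketch {
  // Wraps an (empty) internet-access policy in the top-level egress policy message.
  static EgressNetworkPolicy minimalPolicy() {
    return new EgressNetworkPolicy()
        .setInternetAccess(new EgressNetworkPolicyInternetAccessPolicy());
  }
}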
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyInternetAccessPolicy.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyInternetAccessPolicy.java
new file mode 100755
index 00000000..37029d2c
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyInternetAccessPolicy.java
@@ -0,0 +1,107 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.settings;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+@Generated
+public class EgressNetworkPolicyInternetAccessPolicy {
+ /** */
+ @JsonProperty("allowed_internet_destinations")
+ private Collection