diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha index 200f9513f..e5aff5d67 100644 --- a/.codegen/_openapi_sha +++ b/.codegen/_openapi_sha @@ -1 +1 @@ -633dafff1aa6f0198a576cf83bfa81b2b4f27d46 \ No newline at end of file +a0bc51d001ca139a81dd6d192ae12394a3ca0834 \ No newline at end of file diff --git a/.gitattributes b/.gitattributes index 06981143e..b9ccbe751 100755 --- a/.gitattributes +++ b/.gitattributes @@ -117,7 +117,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/CreateBudge databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/CreateBudgetPolicyRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/CreateLogDeliveryConfigurationParams.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/DeleteBudgetConfigurationRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/DeleteBudgetConfigurationResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/DeleteBudgetPolicyRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/DeleteResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/DeliveryStatus.java linguist-generated=true @@ -145,7 +144,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogDelivery databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogDeliveryStatus.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogType.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/OutputFormat.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/PatchStatusResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/SortSpec.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/SortSpecField.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UpdateBudgetConfigurationBudget.java linguist-generated=true @@ -184,7 +182,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ArtifactAll databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ArtifactAllowlistsService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ArtifactMatcher.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ArtifactType.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AssignResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AwsCredentials.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AwsIamRole.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AwsIamRoleRequest.java linguist-generated=true @@ -198,7 +195,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureQueueS databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureServicePrincipal.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureUserDelegationSas.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CancelRefreshRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CancelRefreshResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogInfo.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogIsolationMode.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogType.java linguist-generated=true @@ -208,6 +204,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogsSer databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CloudflareApiToken.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ColumnInfo.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ColumnMask.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ColumnRelationship.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ColumnTypeName.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionDependency.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionInfo.java linguist-generated=true @@ -219,7 +216,9 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ContinuousU databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateCatalog.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateConnection.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateCredentialRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateExternalLineageRelationshipRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateExternalLocation.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateExternalMetadataRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateFunction.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateFunctionParameterStyle.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateFunctionRequest.java linguist-generated=true @@ -231,7 +230,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateMetas databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateMonitor.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateOnlineTableRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateRegisteredModelRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateRequestExternalLineage.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateSchema.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateStorageCredential.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateTableConstraint.java linguist-generated=true @@ -252,19 +251,21 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteAccou databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteAccountMetastoreRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteAccountStorageCredentialRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteAliasRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteAliasResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteCatalogRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteConnectionRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteCredentialRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteCredentialResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteExternalLineageRelationshipRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteExternalLineageRelationshipResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteExternalLocationRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteExternalMetadataRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteExternalMetadataResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteFunctionRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteMetastoreRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteModelVersionRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteOnlineTableRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteQualityMonitorRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteRegisteredModelRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteRequestExternalLineage.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteSchemaRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteStorageCredentialRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteTableConstraintRequest.java linguist-generated=true @@ -275,7 +276,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeltaSharin databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/Dependency.java 
linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DependencyList.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DisableRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DisableResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EffectivePermissionsList.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EffectivePredictiveOptimizationFlag.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EffectivePredictiveOptimizationFlagInheritedFromType.java linguist-generated=true @@ -283,13 +283,31 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EffectivePr databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EffectivePrivilegeAssignment.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EnablePredictiveOptimization.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EnableRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EnableResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EncryptionDetails.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExistsRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLineageAPI.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLineageExternalMetadata.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLineageExternalMetadataInfo.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLineageFileInfo.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLineageImpl.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLineageInfo.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLineageModelVersion.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLineageModelVersionInfo.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLineageObject.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLineagePath.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLineageRelationship.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLineageRelationshipInfo.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLineageService.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLineageTable.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLineageTableInfo.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationInfo.java 
linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationsAPI.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationsImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationsService.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalMetadata.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalMetadataAPI.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalMetadataImpl.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalMetadataService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FailedStatus.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FileEventQueue.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ForeignKeyConstraint.java linguist-generated=true @@ -325,6 +343,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetConnecti databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetCredentialRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetEffectiveRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetExternalLocationRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetExternalMetadataRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetFunctionRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetGrantRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetMetastoreRequest.java linguist-generated=true @@ -346,6 +365,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GrantsAPI.j databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GrantsImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GrantsService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/IsolationMode.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/LineageDirection.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListAccountMetastoreAssignmentsRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListAccountMetastoreAssignmentsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListAccountStorageCredentialsRequest.java linguist-generated=true @@ -356,8 +376,12 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListConnect databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListConnectionsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListCredentialsRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListCredentialsResponse.java 
linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListExternalLineageRelationshipsRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListExternalLineageRelationshipsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListExternalLocationsRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListExternalLocationsResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListExternalMetadataRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListExternalMetadataResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListFunctionsRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListFunctionsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListMetastoresRequest.java linguist-generated=true @@ -466,6 +490,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SystemSchem databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SystemSchemasAPI.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SystemSchemasImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SystemSchemasService.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SystemType.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableConstraint.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableConstraintsAPI.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableConstraintsImpl.java linguist-generated=true @@ -487,13 +512,13 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryTa databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryTableCredentialsService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TriggeredUpdateStatus.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UnassignRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UnassignResponse.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateAssignmentResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateCatalog.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateCatalogWorkspaceBindingsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateConnection.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateCredentialRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateExternalLineageRelationshipRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateExternalLocation.java linguist-generated=true 
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateExternalMetadataRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateFunction.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateMetastore.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateMetastoreAssignment.java linguist-generated=true @@ -502,7 +527,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateMonit databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdatePermissions.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdatePermissionsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateRegisteredModelRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateRequestExternalLineage.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateSchema.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateStorageCredential.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateTableRequest.java linguist-generated=true @@ -566,7 +591,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CreateCl databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CreateCleanRoomOutputCatalogResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CreateCleanRoomRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/DeleteCleanRoomAssetRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/DeleteCleanRoomAssetResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/DeleteCleanRoomRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/DeleteResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/GetCleanRoomAssetRequest.java linguist-generated=true @@ -580,7 +604,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/ListClea databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/UpdateCleanRoomAssetRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/UpdateCleanRoomRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/AddInstanceProfile.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/AddResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Adlsgen2Info.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/AutoScale.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/AwsAttributes.java linguist-generated=true @@ -588,9 +611,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/AwsAvailabi 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/AzureAttributes.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/AzureAvailability.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CancelCommand.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CancelResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ChangeClusterOwner.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ChangeClusterOwnerResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClientsTypes.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CloneCluster.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CloudProviderNodeInfo.java linguist-generated=true @@ -651,15 +672,10 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DataPlaneEv databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DataSecurityMode.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DbfsStorageInfo.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeleteCluster.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeleteClusterResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeleteGlobalInitScriptRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeleteInstancePool.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeleteInstancePoolResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeletePolicy.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeletePolicyResponse.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeleteResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DestroyContext.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DestroyResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DiskSpec.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DiskType.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DiskTypeAzureDiskVolumeType.java linguist-generated=true @@ -668,12 +684,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DockerBasic databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DockerImage.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EbsVolumeType.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditCluster.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditClusterResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditInstancePool.java 
linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditInstancePoolResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditPolicy.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditPolicyResponse.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EnforceClusterComplianceRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EnforceClusterComplianceResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Environment.java linguist-generated=true @@ -716,7 +728,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InitScriptE databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InitScriptInfo.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InitScriptInfoAndExecutionDetails.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstallLibraries.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstallLibrariesResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolAccessControlRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolAccessControlResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolAndStats.java linguist-generated=true @@ -776,9 +787,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/NodeInstanc databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/NodeType.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PendingInstanceError.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PermanentDeleteCluster.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PermanentDeleteClusterResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PinCluster.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PinClusterResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Policy.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PolicyComplianceForClustersAPI.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PolicyComplianceForClustersImpl.java linguist-generated=true @@ -790,11 +799,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PolicyFamil databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PythonPyPiLibrary.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/RCranLibrary.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/RemoveInstanceProfile.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/RemoveResponse.java 
linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ResizeCluster.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ResizeClusterResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/RestartCluster.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/RestartClusterResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ResultType.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Results.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/RuntimeEngine.java linguist-generated=true @@ -803,19 +809,14 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/SparkNode.j databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/SparkNodeAwsAttributes.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/SparkVersion.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/StartCluster.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/StartClusterResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/State.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/TerminationReason.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/TerminationReasonCode.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/TerminationReasonType.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UninstallLibraries.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UninstallLibrariesResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UnpinCluster.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UnpinClusterResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UpdateCluster.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UpdateClusterResource.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UpdateClusterResponse.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UpdateResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/VolumesStorageInfo.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/WorkloadType.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/WorkspaceStorageInfo.java linguist-generated=true @@ -893,10 +894,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Subscrip databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/SubscriptionSubscriberUser.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/TextAttachment.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/TrashDashboardRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/TrashDashboardResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/TrashSpaceResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UnpublishDashboardRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UnpublishDashboardResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UpdateDashboardRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UpdateScheduleRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseCatalogRequest.java linguist-generated=true @@ -941,6 +940,9 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDataba databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseInstancesResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/NewPipelineSpec.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ProvisioningInfoState.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/RequestedClaims.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/RequestedClaimsPermissionSet.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/RequestedResource.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedDatabaseTable.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableContinuousUpdateStatus.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableFailedStatus.java linguist-generated=true @@ -954,21 +956,16 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTabl databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableTriggeredUpdateStatus.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/UpdateDatabaseInstanceRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/AddBlock.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/AddBlockResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/Close.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/CloseResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/Create.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/CreateDirectoryRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/CreateDirectoryResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/CreateResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DbfsAPI.java 
linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DbfsImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DbfsService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/Delete.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DeleteDirectoryRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DeleteDirectoryResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DeleteFileRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DeleteResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DirectoryEntry.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DownloadRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DownloadResponse.java linguist-generated=true @@ -977,7 +974,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/FilesAPI.java databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/FilesImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/FilesService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/GetDirectoryMetadataRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/GetDirectoryMetadataResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/GetMetadataRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/GetMetadataResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/GetStatusRequest.java linguist-generated=true @@ -986,15 +982,11 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/ListDirectory databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/ListDirectoryResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/ListStatusResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/MkDirs.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/MkDirsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/Move.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/MoveResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/Put.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/PutResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/ReadDbfsRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/ReadResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/UploadRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/UploadResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccessControlAPI.java 
linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccessControlImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccessControlRequest.java linguist-generated=true @@ -1027,11 +1019,9 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/DeleteAccountGr databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/DeleteAccountServicePrincipalRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/DeleteAccountUserRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/DeleteGroupRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/DeleteResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/DeleteServicePrincipalRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/DeleteUserRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/DeleteWorkspaceAssignmentRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/DeleteWorkspacePermissionAssignmentResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetAccountGroupRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetAccountServicePrincipalRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetAccountUserRequest.java linguist-generated=true @@ -1079,7 +1069,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PasswordPermiss databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PasswordPermissionsRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/Patch.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PatchOp.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PatchResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PatchSchema.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/Permission.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionAssignment.java linguist-generated=true @@ -1107,7 +1096,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ServicePrincipa databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ServicePrincipalsService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/SetObjectPermissions.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UpdateObjectPermissions.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UpdateResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UpdateRuleSetRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UpdateWorkspaceAssignments.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/User.java linguist-generated=true @@ -1124,9 +1112,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Authentication 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/BaseJob.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/BaseRun.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CancelAllRuns.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CancelAllRunsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CancelRun.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CancelRunResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CleanRoomTaskRunLifeCycleState.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CleanRoomTaskRunResultState.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CleanRoomTaskRunState.java linguist-generated=true @@ -1155,9 +1141,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DbtPlatformTas databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DbtPlatformTaskOutput.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DbtTask.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DeleteJob.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DeleteResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DeleteRun.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DeleteRunResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/EnforcePolicyComplianceForJobResponseJobClusterSettingsChange.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/EnforcePolicyComplianceRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/EnforcePolicyComplianceResponse.java linguist-generated=true @@ -1241,7 +1225,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RepairHistoryI databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RepairRun.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RepairRunResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ResetJob.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ResetResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ResolvedConditionTaskValues.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ResolvedDbtTaskValues.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ResolvedNotebookTaskValues.java linguist-generated=true @@ -1305,7 +1288,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TriggerSetting databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TriggerStateProto.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TriggerType.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/UpdateJob.java linguist-generated=true 
-databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/UpdateResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ViewItem.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ViewType.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ViewsToExport.java linguist-generated=true @@ -1354,17 +1336,11 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreateP databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DataRefresh.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DataRefreshInfo.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteExchangeFilterRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteExchangeFilterResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteExchangeRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteExchangeResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteFileRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteFileResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteInstallationRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteInstallationResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteListingRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteListingResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteProviderRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteProviderResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeltaSharingRecipientType.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/Exchange.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ExchangeFilter.java linguist-generated=true @@ -1454,7 +1430,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/Provide databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderProvidersService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/RegionInfo.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/RemoveExchangeForListingRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/RemoveExchangeForListingResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/RepoInfo.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/RepoInstallation.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/SearchListingsRequest.java linguist-generated=true @@ -1508,36 +1483,24 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateWebhookRes databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/Dataset.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DatasetInput.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteCommentRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteCommentResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteExperiment.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteExperimentResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteFeatureTagRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteFeatureTagResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteLoggedModelRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteLoggedModelResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteLoggedModelTagRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteLoggedModelTagResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteModelRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteModelResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteModelTagRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteModelTagResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteModelVersionRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteModelVersionResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteModelVersionTagRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteModelVersionTagResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteOnlineStoreRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteOnlineStoreResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteRun.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteRunResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteRuns.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteRunsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteTag.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteTagResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteTransitionRequestRequest.java 
linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteTransitionRequestResponse.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteTransitionRequestStage.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteWebhookRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteWebhookResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/Experiment.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentAccessControlRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentAccessControlResponse.java linguist-generated=true @@ -1550,10 +1513,12 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentTag.ja databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsAPI.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsService.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/Feature.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FeatureLineage.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FeatureLineageFeatureSpec.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FeatureLineageModel.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FeatureLineageOnlineFeature.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FeatureList.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FeatureStoreAPI.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FeatureStoreImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FeatureStoreService.java linguist-generated=true @@ -1614,19 +1579,12 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListTransitionRe databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListTransitionRequestsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListWebhooksRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogBatch.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogBatchResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogInputs.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogInputsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogLoggedModelParamsRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogLoggedModelParamsRequestResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogMetric.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogMetricResponse.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogModel.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogModelResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogOutputsRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogOutputsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogParam.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LogParamResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LoggedModel.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LoggedModelData.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LoggedModelInfo.java linguist-generated=true @@ -1664,6 +1622,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RegisteredModelP databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RegisteredModelPermissions.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RegisteredModelPermissionsDescription.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RegisteredModelPermissionsRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RegistryEmailSubscriptionType.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RegistryWebhook.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RegistryWebhookEvent.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RegistryWebhookStatus.java linguist-generated=true @@ -1672,9 +1631,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RejectTransition databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RenameModelRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RenameModelResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RestoreExperiment.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RestoreExperimentResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RestoreRun.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RestoreRunResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RestoreRuns.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RestoreRunsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/Run.java linguist-generated=true @@ -1696,18 +1653,11 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchModelsResp databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchRuns.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchRunsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetExperimentTag.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetExperimentTagResponse.java 
linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetLoggedModelTagsRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetLoggedModelTagsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetModelTagRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetModelTagResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetModelVersionTagRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetModelVersionTagResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetTag.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetTagResponse.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/Stage.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/Status.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/TestRegistryWebhook.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/TestRegistryWebhookRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/TestRegistryWebhookResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/TransitionModelVersionStageDatabricks.java linguist-generated=true @@ -1716,7 +1666,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/TransitionStageR databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateComment.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateCommentResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateExperiment.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateExperimentResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateFeatureTagRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateModelRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateModelResponse.java linguist-generated=true @@ -1744,11 +1693,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/CustomAppInt databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/CustomAppIntegrationImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/CustomAppIntegrationService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/DeleteAccountFederationPolicyRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/DeleteCustomAppIntegrationOutput.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/DeleteCustomAppIntegrationRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/DeletePublishedAppIntegrationOutput.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/DeletePublishedAppIntegrationRequest.java linguist-generated=true 
-databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/DeleteResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/DeleteServicePrincipalFederationPolicyRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/DeleteServicePrincipalSecretRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/FederationPolicy.java linguist-generated=true @@ -1787,9 +1733,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ServicePrinc databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/TokenAccessPolicy.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/UpdateAccountFederationPolicyRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/UpdateCustomAppIntegration.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/UpdateCustomAppIntegrationOutput.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/UpdatePublishedAppIntegration.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/UpdatePublishedAppIntegrationOutput.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/UpdateServicePrincipalFederationPolicyRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/CreatePipeline.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/CreatePipelineResponse.java linguist-generated=true @@ -1797,10 +1741,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/CronTrigg databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/DataPlaneId.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/DayOfWeek.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/DeletePipelineRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/DeletePipelineResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/DeploymentKind.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/EditPipeline.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/EditPipelineResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ErrorDetail.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/EventLevel.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/EventLogSpec.java linguist-generated=true @@ -1862,7 +1804,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/StackFram databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/StartUpdate.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/StartUpdateCause.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/StartUpdateResponse.java linguist-generated=true 
-databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/StopPipelineResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/StopRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/TableSpec.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/TableSpecificConfig.java linguist-generated=true @@ -1896,7 +1837,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/Delete databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/DeleteEncryptionKeyRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/DeleteNetworkRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/DeletePrivateAccesRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/DeleteResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/DeleteStorageRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/DeleteVpcEndpointRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/DeleteWorkspaceRequest.java linguist-generated=true @@ -1933,14 +1873,12 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/Privat databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/PrivateAccessLevel.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/PrivateAccessService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/PrivateAccessSettings.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/ReplaceResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/RootBucketInfo.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/StorageAPI.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/StorageConfiguration.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/StorageImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/StorageService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/StsRole.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/UpdateResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/UpdateWorkspaceRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/UpsertPrivateAccessSettingsRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/VpcEndpoint.java linguist-generated=true @@ -1997,7 +1935,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/CustomProvi databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/DataPlaneInfo.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/DatabricksModelServingConfig.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/DataframeSplitInput.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/DeleteResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/DeleteServingEndpointRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/EmbeddingsV1ResponseEmbeddingElement.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/EmbeddingsV1ResponseEmbeddingElementObject.java linguist-generated=true @@ -2145,6 +2082,10 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DefaultNam databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DefaultNamespaceImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DefaultNamespaceService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DefaultNamespaceSetting.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DefaultWarehouseId.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DefaultWarehouseIdAPI.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DefaultWarehouseIdImpl.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DefaultWarehouseIdService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteAccountIpAccessEnableRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteAccountIpAccessEnableResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteAccountIpAccessListRequest.java linguist-generated=true @@ -2156,6 +2097,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDash databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDashboardEmailSubscriptionsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDefaultNamespaceSettingRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDefaultNamespaceSettingResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDefaultWarehouseIdRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDefaultWarehouseIdResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDisableLegacyAccessRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDisableLegacyAccessResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDisableLegacyDbfsRequest.java linguist-generated=true @@ -2173,7 +2116,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteNoti databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeletePersonalComputeSettingRequest.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeletePersonalComputeSettingResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeletePrivateEndpointRuleRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteRestrictWorkspaceAdminsSettingRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteRestrictWorkspaceAdminsSettingResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteSqlResultsDownloadRequest.java linguist-generated=true @@ -2214,7 +2156,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetw databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyNetworkAccessPolicyStorageDestinationStorageDestinationType.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressResourceType.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EmailConfig.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/Empty.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableExportNotebook.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableExportNotebookAPI.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableExportNotebookImpl.java linguist-generated=true @@ -2254,6 +2195,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetComplia databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetCspEnablementAccountSettingRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetDashboardEmailSubscriptionsRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetDefaultNamespaceSettingRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetDefaultWarehouseIdRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetDisableLegacyAccessRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetDisableLegacyDbfsRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetDisableLegacyFeaturesRequest.java linguist-generated=true @@ -2339,7 +2281,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/PersonalCo databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/PersonalComputeSetting.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/PublicTokenInfo.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ReplaceIpAccessList.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ReplaceResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/RestrictWorkspaceAdminsAPI.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/RestrictWorkspaceAdminsImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/RestrictWorkspaceAdminsMessage.java linguist-generated=true @@ -2347,8 +2288,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/RestrictWo databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/RestrictWorkspaceAdminsService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/RestrictWorkspaceAdminsSetting.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/RevokeTokenRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/RevokeTokenResponse.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SetStatusResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SettingsAPI.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SettingsImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SettingsService.java linguist-generated=true @@ -2381,6 +2320,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateComp databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateCspEnablementAccountSettingRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateDashboardEmailSubscriptionsRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateDefaultNamespaceSettingRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateDefaultWarehouseIdRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateDisableLegacyAccessRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateDisableLegacyDbfsRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateDisableLegacyFeaturesRequest.java linguist-generated=true @@ -2398,7 +2338,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateNetw databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateNotificationDestinationRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdatePersonalComputeSettingRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdatePrivateEndpointRule.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateRestrictWorkspaceAdminsSettingRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateSqlResultsDownloadRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateWorkspaceNetworkOptionRequest.java linguist-generated=true @@ -2418,7 +2357,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CreateShare 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeleteFederationPolicyRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeleteProviderRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeleteRecipientRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeleteResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeleteShareRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeltaSharingDependency.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeltaSharingDependencyList.java linguist-generated=true @@ -2431,7 +2369,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/FunctionPar databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/FunctionParameterMode.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/FunctionParameterType.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/GetActivationUrlInfoRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/GetActivationUrlInfoResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/GetFederationPolicyRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/GetProviderRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/GetRecipientRequest.java linguist-generated=true @@ -2536,7 +2473,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertsV2Impl.ja databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertsV2Service.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/BaseChunkInfo.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CancelExecutionRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CancelExecutionResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/Channel.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ChannelInfo.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ChannelName.java linguist-generated=true @@ -2586,16 +2522,12 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DeleteDashboard databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DeleteDashboardWidgetRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DeleteQueriesLegacyRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DeleteQueryVisualizationsLegacyRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DeleteResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DeleteVisualizationRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DeleteWarehouseRequest.java linguist-generated=true 
-databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DeleteWarehouseResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/Disposition.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EditAlert.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EditWarehouseRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EditWarehouseRequestWarehouseType.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EditWarehouseResponse.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/Empty.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EndpointConfPair.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EndpointHealth.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EndpointInfo.java linguist-generated=true @@ -2693,7 +2625,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/RedashConfigSer databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/RepeatedEndpointConfPairs.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/RestoreDashboardRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/RestoreQueriesLegacyRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/RestoreResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ResultData.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ResultManifest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ResultSchema.java linguist-generated=true @@ -2706,10 +2637,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/SetRequest.java databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/SetResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/SetWorkspaceWarehouseConfigRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/SetWorkspaceWarehouseConfigRequestSecurityPolicy.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/SetWorkspaceWarehouseConfigResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/SpotInstancePolicy.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StartRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StartWarehouseResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/State.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StatementExecutionAPI.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StatementExecutionImpl.java linguist-generated=true @@ -2720,7 +2649,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StatementState. 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StatementStatus.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/Status.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StopRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StopWarehouseResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/Success.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/SuccessMessage.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/TaskTimeOverRange.java linguist-generated=true @@ -2740,7 +2668,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateAlertRequ databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateAlertV2Request.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateQueryRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateQueryRequestQuery.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateVisualizationRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateVisualizationRequestVisualization.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateWidgetRequest.java linguist-generated=true @@ -2770,9 +2697,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/Delete databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeleteDataVectorIndexRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeleteDataVectorIndexResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeleteEndpointRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeleteEndpointResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeleteIndexRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeleteIndexResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeltaSyncVectorIndexSpecRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeltaSyncVectorIndexSpecResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DirectAccessVectorIndexSpec.java linguist-generated=true @@ -2803,7 +2728,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ScanVe databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ScanVectorIndexResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/Struct.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/SyncIndexRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/SyncIndexResponse.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/UpdateEndpointCustomTagsRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/UpdateEndpointCustomTagsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/UpsertDataResult.java linguist-generated=true @@ -2828,20 +2752,13 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateCre databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateRepoRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateRepoResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateScope.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateScopeResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CredentialInfo.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/Delete.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteAcl.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteAclResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteCredentialsRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteCredentialsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteRepoRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteRepoResponse.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteScope.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteScopeResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteSecret.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteSecretResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ExportFormat.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ExportRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ExportResponse.java linguist-generated=true @@ -2864,7 +2781,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GitCreden databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GitCredentialsService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/Import.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ImportFormat.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ImportResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/Language.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ListAclsRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ListAclsResponse.java linguist-generated=true @@ -2877,13 +2793,10 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ListSecre databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ListSecretsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ListWorkspaceRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/Mkdirs.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/MkdirsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ObjectInfo.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ObjectType.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/PutAcl.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/PutAclResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/PutSecret.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/PutSecretResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/RepoAccessControlRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/RepoAccessControlResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/RepoInfo.java linguist-generated=true @@ -2904,9 +2817,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/SecretsSe databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/SparseCheckout.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/SparseCheckoutUpdate.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/UpdateCredentialsRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/UpdateCredentialsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/UpdateRepoRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/UpdateRepoResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceAPI.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceObjectAccessControlRequest.java linguist-generated=true diff --git a/NEXT_CHANGELOG.md b/NEXT_CHANGELOG.md index 23b1bfc2a..dbcc4e7b9 100644 --- a/NEXT_CHANGELOG.md +++ b/NEXT_CHANGELOG.md @@ -72,3 +72,42 @@ * [Breaking] Removed `HIVE_CUSTOM` and `HIVE_SERDE` enum values for `com.databricks.sdk.service.catalog.DataSourceFormat`. * [Breaking] Removed `UNKNOWN_SECURABLE_TYPE` enum value for `com.databricks.sdk.service.catalog.SecurableType`. 
* [Breaking] Removed `CANCELLED`, `ERROR`, `QUEUED`, `RUNNING`, `STARTING` and `SUCCESS` enum values for `com.databricks.sdk.service.jobs.DbtCloudRunStatus`. +* Added `workspaceClient.externalLineage()` service and `workspaceClient.externalMetadata()` service. +* Added `workspaceClient.defaultWarehouseId()` service. +* Added `claims` field for `com.databricks.sdk.service.database.GenerateDatabaseCredentialRequest`. +* Added `activity` field for `com.databricks.sdk.service.ml.DeleteTransitionRequestResponse`. +* Added `maxResults` field for `com.databricks.sdk.service.ml.ListWebhooksRequest`. +* Added `body` and `statusCode` fields for `com.databricks.sdk.service.ml.TestRegistryWebhookResponse`. +* Added `modelVersionDatabricks` field for `com.databricks.sdk.service.ml.TransitionStageResponse`. +* Added `registeredModel` field for `com.databricks.sdk.service.ml.UpdateModelResponse`. +* Added `modelVersion` field for `com.databricks.sdk.service.ml.UpdateModelVersionResponse`. +* Added `webhook` field for `com.databricks.sdk.service.ml.UpdateWebhookResponse`. +* Added `runAs` field for `com.databricks.sdk.service.pipelines.GetPipelineResponse`. +* Added `principal` field for `com.databricks.sdk.service.serving.AiGatewayRateLimit`. +* Added `ANY_STATIC_CREDENTIAL` enum value for `com.databricks.sdk.service.catalog.CredentialType`. +* Added `TABLE_DELTA_ICEBERG_DELTASHARING` enum value for `com.databricks.sdk.service.catalog.SecurableKind`. +* Added `SECURITY_AGENTS_FAILED_INITIAL_VERIFICATION` enum value for `com.databricks.sdk.service.compute.TerminationReasonCode`. +* Added `CAN_CREATE_REGISTERED_MODEL` enum value for `com.databricks.sdk.service.ml.PermissionLevel`. +* Added `BIGQUERY` enum value for `com.databricks.sdk.service.pipelines.IngestionSourceType`. +* Added `SERVICE_PRINCIPAL` and `USER_GROUP` enum values for `com.databricks.sdk.service.serving.AiGatewayRateLimitKey`. +* Added `DELTA_ICEBERG_TABLE` enum value for `com.databricks.sdk.service.sharing.TableInternalAttributesSharedTableType`. +* [Breaking] Changed `deleteTransitionRequest()`, `updateModel()`, `updateModelVersion()` and `updateWebhook()` methods for `workspaceClient.modelRegistry()` service return type to become non-empty. +* [Breaking] Changed `deleteWebhook()` method for `workspaceClient.modelRegistry()` service with new required argument order. +* [Breaking] Changed `fromStage` and `toStage` fields for `com.databricks.sdk.service.ml.Activity` to type `String` class. +* [Breaking] Changed `stage` field for `com.databricks.sdk.service.ml.ApproveTransitionRequest` to type `String` class. +* [Breaking] Changed `stage` field for `com.databricks.sdk.service.ml.CreateTransitionRequest` to type `String` class. +* [Breaking] Changed `stage` field for `com.databricks.sdk.service.ml.DeleteTransitionRequestRequest` to type `String` class. +* [Breaking] Changed `id` field for `com.databricks.sdk.service.ml.DeleteWebhookRequest` to be required. +* [Breaking] Changed `key` field for `com.databricks.sdk.service.ml.FeatureTag` to be required. +* Changed `key` field for `com.databricks.sdk.service.ml.FeatureTag` to be required. +* [Breaking] Changed `capacity` field for `com.databricks.sdk.service.ml.OnlineStore` to be required. +* Changed `capacity` field for `com.databricks.sdk.service.ml.OnlineStore` to be required. +* [Breaking] Changed `onlineTableName` field for `com.databricks.sdk.service.ml.PublishSpec` to be required. 
+* [Breaking] Changed `stage` field for `com.databricks.sdk.service.ml.RejectTransitionRequest` to type `String` class. +* [Breaking] Changed `stage` field for `com.databricks.sdk.service.ml.TransitionModelVersionStageDatabricks` to type `String` class. +* [Breaking] Changed `toStage` field for `com.databricks.sdk.service.ml.TransitionRequest` to type `String` class. +* [Breaking] Removed `allowedOptions` and `requiredOptions` fields for `com.databricks.sdk.service.catalog.SecurableKindManifest`. +* [Breaking] Removed `webhook` field for `com.databricks.sdk.service.ml.TestRegistryWebhookResponse`. +* [Breaking] Removed `modelVersion` field for `com.databricks.sdk.service.ml.TransitionStageResponse`. +* [Breaking] Removed `ARCHIVED`, `NONE`, `PRODUCTION` and `STAGING` enum values for `com.databricks.sdk.service.ml.DeleteTransitionRequestStage`. +* [Breaking] Removed `ARCHIVED`, `NONE`, `PRODUCTION` and `STAGING` enum values for `com.databricks.sdk.service.ml.Stage`. diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java index bf4f6e180..0f4053b8b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java @@ -20,8 +20,12 @@ import com.databricks.sdk.service.catalog.ConnectionsService; import com.databricks.sdk.service.catalog.CredentialsAPI; import com.databricks.sdk.service.catalog.CredentialsService; +import com.databricks.sdk.service.catalog.ExternalLineageAPI; +import com.databricks.sdk.service.catalog.ExternalLineageService; import com.databricks.sdk.service.catalog.ExternalLocationsAPI; import com.databricks.sdk.service.catalog.ExternalLocationsService; +import com.databricks.sdk.service.catalog.ExternalMetadataAPI; +import com.databricks.sdk.service.catalog.ExternalMetadataService; import com.databricks.sdk.service.catalog.FunctionsAPI; import com.databricks.sdk.service.catalog.FunctionsService; import com.databricks.sdk.service.catalog.GrantsAPI; @@ -254,7 +258,9 @@ public class WorkspaceClient { private DbfsExt dbfsAPI; private DbsqlPermissionsAPI dbsqlPermissionsAPI; private ExperimentsAPI experimentsAPI; + private ExternalLineageAPI externalLineageAPI; private ExternalLocationsAPI externalLocationsAPI; + private ExternalMetadataAPI externalMetadataAPI; private FeatureStoreAPI featureStoreAPI; private FilesAPI filesAPI; private FunctionsAPI functionsAPI; @@ -367,7 +373,9 @@ public WorkspaceClient(DatabricksConfig config) { dbfsAPI = new DbfsExt(apiClient); dbsqlPermissionsAPI = new DbsqlPermissionsAPI(apiClient); experimentsAPI = new ExperimentsAPI(apiClient); + externalLineageAPI = new ExternalLineageAPI(apiClient); externalLocationsAPI = new ExternalLocationsAPI(apiClient); + externalMetadataAPI = new ExternalMetadataAPI(apiClient); featureStoreAPI = new FeatureStoreAPI(apiClient); filesAPI = new FilesAPI(apiClient); functionsAPI = new FunctionsAPI(apiClient); @@ -550,7 +558,7 @@ public CleanRoomTaskRunsAPI cleanRoomTaskRuns() { /** * A clean room uses Delta Sharing and serverless compute to provide a secure and * privacy-protecting environment where multiple parties can work together on sensitive enterprise - * data without direct access to each other’s data. + * data without direct access to each other's data. 
*/ public CleanRoomsAPI cleanRooms() { return cleanRoomsAPI; @@ -778,6 +786,18 @@ public ExperimentsAPI experiments() { return experimentsAPI; } + /** + * External Lineage APIs enable defining and managing lineage relationships between Databricks + * objects and external systems. These APIs allow users to capture data flows connecting + * Databricks tables, models, and file paths with external metadata objects. + * + *
With these APIs, users can create, update, delete, and list lineage relationships with + * support for column-level mappings and custom properties. + */ + public ExternalLineageAPI externalLineage() { + return externalLineageAPI; + } + /** * An external location is an object that combines a cloud storage path with a storage credential * that authorizes access to the cloud storage path. Each external location is subject to Unity @@ -795,6 +815,18 @@ public ExternalLocationsAPI externalLocations() { return externalLocationsAPI; } + /** + * External Metadata objects enable customers to register and manage metadata about external + * systems within Unity Catalog. + * + *
+ * These APIs provide a standardized way to create, update, retrieve, list, and delete external
+ * metadata objects. Fine-grained authorization ensures that only users with appropriate
+ * permissions can view and manage external metadata objects.
+ */
+ public ExternalMetadataAPI externalMetadata() {
+ return externalMetadataAPI;
+ }
+
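// Editor's usage sketch (illustrative only, not part of the generated diff): the two accessors
// added above expose the new External Lineage and External Metadata services. The accessor and
// class names come from this change; the service-level method names hinted at in the trailing
// comments are assumptions inferred from the request classes added elsewhere in this diff
// (e.g. CreateExternalMetadataRequest) and are not confirmed here.
//
//   WorkspaceClient w = new WorkspaceClient(); // resolves auth from the environment
//   ExternalLineageAPI lineage = w.externalLineage();
//   ExternalMetadataAPI metadata = w.externalMetadata();
//   // Hypothetical calls, names not confirmed by this diff:
//   // lineage.createExternalLineageRelationship(new CreateExternalLineageRelationshipRequest());
//   // metadata.createExternalMetadata(new CreateExternalMetadataRequest());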
/**
* A feature store is a centralized repository that enables data scientists to find and share
* features. Using a feature store also ensures that the code used to compute feature values is
@@ -2162,6 +2194,17 @@ public WorkspaceClient withExperimentsAPI(ExperimentsAPI experiments) {
return this;
}
+ /** Replace the default ExternalLineageService with a custom implementation. */
+ public WorkspaceClient withExternalLineageImpl(ExternalLineageService externalLineage) {
+ return this.withExternalLineageAPI(new ExternalLineageAPI(externalLineage));
+ }
+
+ /** Replace the default ExternalLineageAPI with a custom implementation. */
+ public WorkspaceClient withExternalLineageAPI(ExternalLineageAPI externalLineage) {
+ this.externalLineageAPI = externalLineage;
+ return this;
+ }
+
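// Editor's usage sketch (illustrative only, not part of the generated diff): the with*Impl hooks
// above let callers swap the HTTP-backed implementation for a custom ExternalLineageService,
// e.g. a Mockito stub in unit tests. Mockito on the classpath and a resolvable DatabricksConfig
// are assumptions of this sketch.
//
//   ExternalLineageService fakeLineage = org.mockito.Mockito.mock(ExternalLineageService.class);
//   WorkspaceClient client =
//       new WorkspaceClient(new DatabricksConfig()).withExternalLineageImpl(fakeLineage);
//   // client.externalLineage() now delegates to fakeLineage instead of the REST backend;
//   // withExternalMetadataImpl(...) below offers the same hook for ExternalMetadataService.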
/** Replace the default ExternalLocationsService with a custom implementation. */
public WorkspaceClient withExternalLocationsImpl(ExternalLocationsService externalLocations) {
return this.withExternalLocationsAPI(new ExternalLocationsAPI(externalLocations));
@@ -2173,6 +2216,17 @@ public WorkspaceClient withExternalLocationsAPI(ExternalLocationsAPI externalLoc
return this;
}
+ /** Replace the default ExternalMetadataService with a custom implementation. */
+ public WorkspaceClient withExternalMetadataImpl(ExternalMetadataService externalMetadata) {
+ return this.withExternalMetadataAPI(new ExternalMetadataAPI(externalMetadata));
+ }
+
+ /** Replace the default ExternalMetadataAPI with a custom implementation. */
+ public WorkspaceClient withExternalMetadataAPI(ExternalMetadataAPI externalMetadata) {
+ this.externalMetadataAPI = externalMetadata;
+ return this;
+ }
+
/** Replace the default FeatureStoreService with a custom implementation. */
public WorkspaceClient withFeatureStoreImpl(FeatureStoreService featureStore) {
return this.withFeatureStoreAPI(new FeatureStoreAPI(featureStore));
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/AiBuilderImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/AiBuilderImpl.java
index 4b7e6f09c..112c28fb6 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/AiBuilderImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/AiBuilderImpl.java
@@ -24,7 +24,7 @@ public void cancelOptimize(CancelCustomLlmOptimizationRunRequest request) {
ApiClient.setQuery(req, request);
req.withHeader("Accept", "application/json");
req.withHeader("Content-Type", "application/json");
- apiClient.execute(req, CancelOptimizeResponse.class);
+ apiClient.execute(req, Void.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
@@ -51,7 +51,7 @@ public void deleteCustomLlm(DeleteCustomLlmRequest request) {
Request req = new Request("DELETE", path);
ApiClient.setQuery(req, request);
req.withHeader("Accept", "application/json");
- apiClient.execute(req, DeleteCustomLlmResponse.class);
+ apiClient.execute(req, Void.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppAccessControlRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppAccessControlRequest.java
index 40dc96f30..19ef6f88c 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppAccessControlRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppAccessControlRequest.java
@@ -13,7 +13,7 @@ public class AppAccessControlRequest {
@JsonProperty("group_name")
private String groupName;
- /** Permission level */
+ /** */
@JsonProperty("permission_level")
private AppPermissionLevel permissionLevel;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppPermission.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppPermission.java
index 39fcd6726..2388ba9d0 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppPermission.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppPermission.java
@@ -18,7 +18,7 @@ public class AppPermission {
@JsonProperty("inherited_from_object")
private Collection<String> inheritedFromObject;
[Learn more]: https://docs.databricks.com/aws/en/volumes/managed-vs-external
- */
+ /** */
@JsonProperty("volume_type")
private VolumeType volumeType;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialType.java
index b5f06caf4..6172c00db 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialType.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialType.java
@@ -4,9 +4,10 @@
import com.databricks.sdk.support.Generated;
-/** Next Id: 12 */
+/** Next Id: 13 */
@Generated
public enum CredentialType {
+ ANY_STATIC_CREDENTIAL,
BEARER_TOKEN,
OAUTH_ACCESS_TOKEN,
OAUTH_M2M,
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialsImpl.java
index 1557d0944..ebcce0da4 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialsImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialsImpl.java
@@ -37,7 +37,7 @@ public void deleteCredential(DeleteCredentialRequest request) {
Request req = new Request("DELETE", path);
ApiClient.setQuery(req, request);
req.withHeader("Accept", "application/json");
- apiClient.execute(req, DeleteCredentialResponse.class);
+ apiClient.execute(req, Void.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
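// Editor's note (illustrative only, not part of the generated diff): for SDK callers this
// cleanup is transparent. deleteCredential(...) already returns void; the implementation now
// simply discards the empty HTTP body via Void.class instead of deserializing it into the
// DeleteCredentialResponse class removed below. Sketch (the credentials() accessor name is
// assumed, and the request's field setters are omitted):
//
//   WorkspaceClient w = new WorkspaceClient();
//   w.credentials().deleteCredential(new DeleteCredentialRequest());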
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteCredentialResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteCredentialResponse.java
deleted file mode 100755
index 1ad278759..000000000
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteCredentialResponse.java
+++ /dev/null
@@ -1,28 +0,0 @@
-// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-package com.databricks.sdk.service.catalog;
-
-import com.databricks.sdk.support.Generated;
-import com.databricks.sdk.support.ToStringer;
-import java.util.Objects;
-
-@Generated
-public class DeleteCredentialResponse {
-
- @Override
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
- return true;
- }
-
- @Override
- public int hashCode() {
- return Objects.hash();
- }
-
- @Override
- public String toString() {
- return new ToStringer(DeleteCredentialResponse.class).toString();
- }
-}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteExternalLineageRelationshipRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteExternalLineageRelationshipRequest.java
new file mode 100755
index 000000000..d74cea315
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteExternalLineageRelationshipRequest.java
@@ -0,0 +1,47 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+@Generated
+public class DeleteExternalLineageRelationshipRequest {
+ /** */
+ @JsonIgnore
+ @QueryParam("external_lineage_relationship")
+ private DeleteRequestExternalLineage externalLineageRelationship;
+
+ public DeleteExternalLineageRelationshipRequest setExternalLineageRelationship(
+ DeleteRequestExternalLineage externalLineageRelationship) {
+ this.externalLineageRelationship = externalLineageRelationship;
+ return this;
+ }
+
+ public DeleteRequestExternalLineage getExternalLineageRelationship() {
+ return externalLineageRelationship;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ DeleteExternalLineageRelationshipRequest that = (DeleteExternalLineageRelationshipRequest) o;
+ return Objects.equals(externalLineageRelationship, that.externalLineageRelationship);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(externalLineageRelationship);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(DeleteExternalLineageRelationshipRequest.class)
+ .add("externalLineageRelationship", externalLineageRelationship)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteAliasResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteExternalLineageRelationshipResponse.java
similarity index 78%
rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteAliasResponse.java
rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteExternalLineageRelationshipResponse.java
index 9bb22645b..dcab20f54 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteAliasResponse.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteExternalLineageRelationshipResponse.java
@@ -7,7 +7,7 @@
import java.util.Objects;
@Generated
-public class DeleteAliasResponse {
+public class DeleteExternalLineageRelationshipResponse {
@Override
public boolean equals(Object o) {
@@ -23,6 +23,6 @@ public int hashCode() {
@Override
public String toString() {
- return new ToStringer(DeleteAliasResponse.class).toString();
+ return new ToStringer(DeleteExternalLineageRelationshipResponse.class).toString();
}
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteExternalMetadataRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteExternalMetadataRequest.java
new file mode 100755
index 000000000..f1055606d
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteExternalMetadataRequest.java
@@ -0,0 +1,41 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+@Generated
+public class DeleteExternalMetadataRequest {
+ /** */
+ @JsonIgnore private String name;
+
+ public DeleteExternalMetadataRequest setName(String name) {
+ this.name = name;
+ return this;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ DeleteExternalMetadataRequest that = (DeleteExternalMetadataRequest) o;
+ return Objects.equals(name, that.name);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(name);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(DeleteExternalMetadataRequest.class).add("name", name).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteExternalMetadataResponse.java
similarity index 81%
rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateResponse.java
rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteExternalMetadataResponse.java
index c2be0e3c2..836fc0483 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateResponse.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteExternalMetadataResponse.java
@@ -7,7 +7,7 @@
import java.util.Objects;
@Generated
-public class CreateResponse {
+public class DeleteExternalMetadataResponse {
@Override
public boolean equals(Object o) {
@@ -23,6 +23,6 @@ public int hashCode() {
@Override
public String toString() {
- return new ToStringer(CreateResponse.class).toString();
+ return new ToStringer(DeleteExternalMetadataResponse.class).toString();
}
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteRequestExternalLineage.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteRequestExternalLineage.java
new file mode 100755
index 000000000..4a6d348b8
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteRequestExternalLineage.java
@@ -0,0 +1,78 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class DeleteRequestExternalLineage {
+ /** Unique identifier of the external lineage relationship. */
+ @JsonProperty("id")
+ @QueryParam("id")
+ private String id;
+
+ /** Source object of the external lineage relationship. */
+ @JsonProperty("source")
+ @QueryParam("source")
+ private ExternalLineageObject source;
+
+ /** Target object of the external lineage relationship. */
+ @JsonProperty("target")
+ @QueryParam("target")
+ private ExternalLineageObject target;
+
+ public DeleteRequestExternalLineage setId(String id) {
+ this.id = id;
+ return this;
+ }
+
+ public String getId() {
+ return id;
+ }
+
+ public DeleteRequestExternalLineage setSource(ExternalLineageObject source) {
+ this.source = source;
+ return this;
+ }
+
+ public ExternalLineageObject getSource() {
+ return source;
+ }
+
+ public DeleteRequestExternalLineage setTarget(ExternalLineageObject target) {
+ this.target = target;
+ return this;
+ }
+
+ public ExternalLineageObject getTarget() {
+ return target;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ DeleteRequestExternalLineage that = (DeleteRequestExternalLineage) o;
+ return Objects.equals(id, that.id)
+ && Objects.equals(source, that.source)
+ && Objects.equals(target, that.target);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(id, source, target);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(DeleteRequestExternalLineage.class)
+ .add("id", id)
+ .add("source", source)
+ .add("target", target)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteResponse.java
deleted file mode 100755
index 72c77810c..000000000
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteResponse.java
+++ /dev/null
@@ -1,28 +0,0 @@
-// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-package com.databricks.sdk.service.catalog;
-
-import com.databricks.sdk.support.Generated;
-import com.databricks.sdk.support.ToStringer;
-import java.util.Objects;
-
-@Generated
-public class DeleteResponse {
-
- @Override
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
- return true;
- }
-
- @Override
- public int hashCode() {
- return Objects.hash();
- }
-
- @Override
- public String toString() {
- return new ToStringer(DeleteResponse.class).toString();
- }
-}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/Dependency.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/Dependency.java
index 5ace4876d..11a937f7d 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/Dependency.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/Dependency.java
@@ -13,19 +13,19 @@
*/
@Generated
public class Dependency {
- /** A connection that is dependent on a SQL object. */
+ /** */
@JsonProperty("connection")
private ConnectionDependency connection;
- /** A credential that is dependent on a SQL object. */
+ /** */
@JsonProperty("credential")
private CredentialDependency credential;
- /** A function that is dependent on a SQL object. */
+ /** */
@JsonProperty("function")
private FunctionDependency function;
- /** A table that is dependent on a SQL object. */
+ /** */
@JsonProperty("table")
private TableDependency table;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DisableResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DisableResponse.java
deleted file mode 100755
index 682492951..000000000
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DisableResponse.java
+++ /dev/null
@@ -1,28 +0,0 @@
-// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-package com.databricks.sdk.service.catalog;
-
-import com.databricks.sdk.support.Generated;
-import com.databricks.sdk.support.ToStringer;
-import java.util.Objects;
-
-@Generated
-public class DisableResponse {
-
- @Override
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
- return true;
- }
-
- @Override
- public int hashCode() {
- return Objects.hash();
- }
-
- @Override
- public String toString() {
- return new ToStringer(DisableResponse.class).toString();
- }
-}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EnableResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EnableResponse.java
deleted file mode 100755
index 72ccaf810..000000000
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EnableResponse.java
+++ /dev/null
@@ -1,28 +0,0 @@
-// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-package com.databricks.sdk.service.catalog;
-
-import com.databricks.sdk.support.Generated;
-import com.databricks.sdk.support.ToStringer;
-import java.util.Objects;
-
-@Generated
-public class EnableResponse {
-
- @Override
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
- return true;
- }
-
- @Override
- public int hashCode() {
- return Objects.hash();
- }
-
- @Override
- public String toString() {
- return new ToStringer(EnableResponse.class).toString();
- }
-}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLineageAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLineageAPI.java
new file mode 100755
index 000000000..4fc7f867b
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLineageAPI.java
@@ -0,0 +1,112 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.Paginator;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * External Lineage APIs enable defining and managing lineage relationships between Databricks
+ * objects and external systems. These APIs allow users to capture data flows connecting Databricks
+ * tables, models, and file paths with external metadata objects.
+ *
+ * With these APIs, users can create, update, delete, and list lineage relationships with support
+ * for column-level mappings and custom properties.
+ */
+@Generated
+public class ExternalLineageAPI {
+ private static final Logger LOG = LoggerFactory.getLogger(ExternalLineageAPI.class);
+
+ private final ExternalLineageService impl;
+
+ /** Regular-use constructor */
+ public ExternalLineageAPI(ApiClient apiClient) {
+ impl = new ExternalLineageImpl(apiClient);
+ }
+
+ /** Constructor for mocks */
+ public ExternalLineageAPI(ExternalLineageService mock) {
+ impl = mock;
+ }
+
+ public ExternalLineageRelationship createExternalLineageRelationship(
+ CreateRequestExternalLineage externalLineageRelationship) {
+ return createExternalLineageRelationship(
+ new CreateExternalLineageRelationshipRequest()
+ .setExternalLineageRelationship(externalLineageRelationship));
+ }
+
+ /**
+ * Creates an external lineage relationship between a Databricks or external metadata object and
+ * another external metadata object.
+ */
+ public ExternalLineageRelationship createExternalLineageRelationship(
+ CreateExternalLineageRelationshipRequest request) {
+ return impl.createExternalLineageRelationship(request);
+ }
+
+ public void deleteExternalLineageRelationship(
+ DeleteRequestExternalLineage externalLineageRelationship) {
+ deleteExternalLineageRelationship(
+ new DeleteExternalLineageRelationshipRequest()
+ .setExternalLineageRelationship(externalLineageRelationship));
+ }
+
+ /**
+ * Deletes an external lineage relationship between a Databricks or external metadata object and
+ * another external metadata object.
+ */
+ public void deleteExternalLineageRelationship(DeleteExternalLineageRelationshipRequest request) {
+ impl.deleteExternalLineageRelationship(request);
+ }
+
+  public Iterable<ExternalLineageRelationship> listExternalLineageRelationships(
+ * With these APIs, users can create, update, delete, and list lineage relationships with support
+ * for column-level mappings and custom properties.
+ *
+ * This is the high-level interface, that contains generated methods.
+ *
+ * Evolving: this interface is under development. Method signatures may change.
+ */
+@Generated
+public interface ExternalLineageService {
+ /**
+ * Creates an external lineage relationship between a Databricks or external metadata object and
+ * another external metadata object.
+ */
+ ExternalLineageRelationship createExternalLineageRelationship(
+ CreateExternalLineageRelationshipRequest createExternalLineageRelationshipRequest);
+
+ /**
+ * Deletes an external lineage relationship between a Databricks or external metadata object and
+ * another external metadata object.
+ */
+ void deleteExternalLineageRelationship(
+ DeleteExternalLineageRelationshipRequest deleteExternalLineageRelationshipRequest);
+
+ /**
+ * Lists external lineage relationships of a Databricks object or external metadata given a
+ * supplied direction.
+ */
+ ListExternalLineageRelationshipsResponse listExternalLineageRelationships(
+ ListExternalLineageRelationshipsRequest listExternalLineageRelationshipsRequest);
+
+ /**
+ * Updates an external lineage relationship between a Databricks or external metadata object and
+ * another external metadata object.
+ */
+ ExternalLineageRelationship updateExternalLineageRelationship(
+ UpdateExternalLineageRelationshipRequest updateExternalLineageRelationshipRequest);
+}
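
A minimal usage sketch of the External Lineage API defined above. This is not part of the generated sources: the externalLineage() accessor on WorkspaceClient and the empty ExternalLineageObject placeholder are assumptions for illustration; only the request classes, method names, and setters shown in this diff are confirmed.

// Hedged sketch, not generated code. externalLineage() on WorkspaceClient is an
// assumption; populate ExternalLineageObject with the object being queried.
import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.catalog.DeleteRequestExternalLineage;
import com.databricks.sdk.service.catalog.ExternalLineageObject;
import com.databricks.sdk.service.catalog.ExternalLineageRelationship;
import com.databricks.sdk.service.catalog.LineageDirection;
import com.databricks.sdk.service.catalog.ListExternalLineageRelationshipsRequest;

public class ExternalLineageExample {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();

    // List downstream lineage of an object; the returned Iterable pages through results.
    Iterable<ExternalLineageRelationship> downstream =
        w.externalLineage()
            .listExternalLineageRelationships(
                new ListExternalLineageRelationshipsRequest()
                    .setObjectInfo(new ExternalLineageObject()) // set the Databricks or external object here
                    .setLineageDirection(LineageDirection.DOWNSTREAM)
                    .setPageSize(50L));
    downstream.forEach(System.out::println);

    // Delete a relationship by its identifier.
    w.externalLineage()
        .deleteExternalLineageRelationship(new DeleteRequestExternalLineage().setId("relationship-id"));
  }
}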
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLineageTable.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLineageTable.java
new file mode 100755
index 000000000..329ca5930
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLineageTable.java
@@ -0,0 +1,42 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class ExternalLineageTable {
+ /** */
+ @JsonProperty("name")
+ private String name;
+
+ public ExternalLineageTable setName(String name) {
+ this.name = name;
+ return this;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ ExternalLineageTable that = (ExternalLineageTable) o;
+ return Objects.equals(name, that.name);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(name);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(ExternalLineageTable.class).add("name", name).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLineageTableInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLineageTableInfo.java
new file mode 100755
index 000000000..8d31a384f
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLineageTableInfo.java
@@ -0,0 +1,90 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+/** Represents the table information in the lineage event. */
+@Generated
+public class ExternalLineageTableInfo {
+ /** Name of Catalog. */
+ @JsonProperty("catalog_name")
+ private String catalogName;
+
+ /** Timestamp of the lineage event. */
+ @JsonProperty("event_time")
+ private String eventTime;
+
+ /** Name of Table. */
+ @JsonProperty("name")
+ private String name;
+
+ /** Name of Schema. */
+ @JsonProperty("schema_name")
+ private String schemaName;
+
+ public ExternalLineageTableInfo setCatalogName(String catalogName) {
+ this.catalogName = catalogName;
+ return this;
+ }
+
+ public String getCatalogName() {
+ return catalogName;
+ }
+
+ public ExternalLineageTableInfo setEventTime(String eventTime) {
+ this.eventTime = eventTime;
+ return this;
+ }
+
+ public String getEventTime() {
+ return eventTime;
+ }
+
+ public ExternalLineageTableInfo setName(String name) {
+ this.name = name;
+ return this;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public ExternalLineageTableInfo setSchemaName(String schemaName) {
+ this.schemaName = schemaName;
+ return this;
+ }
+
+ public String getSchemaName() {
+ return schemaName;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ ExternalLineageTableInfo that = (ExternalLineageTableInfo) o;
+ return Objects.equals(catalogName, that.catalogName)
+ && Objects.equals(eventTime, that.eventTime)
+ && Objects.equals(name, that.name)
+ && Objects.equals(schemaName, that.schemaName);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(catalogName, eventTime, name, schemaName);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(ExternalLineageTableInfo.class)
+ .add("catalogName", catalogName)
+ .add("eventTime", eventTime)
+ .add("name", name)
+ .add("schemaName", schemaName)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationInfo.java
index b8a003e2b..0827a081c 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationInfo.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationInfo.java
@@ -36,11 +36,11 @@ public class ExternalLocationInfo {
@JsonProperty("credential_name")
private String credentialName;
- /** [Create:OPT Update:OPT] Whether to enable file events on this external location. */
+ /** Whether to enable file events on this external location. */
@JsonProperty("enable_file_events")
private Boolean enableFileEvents;
- /** Encryption options that apply to clients connecting to cloud storage. */
+ /** */
@JsonProperty("encryption_details")
private EncryptionDetails encryptionDetails;
@@ -52,7 +52,7 @@ public class ExternalLocationInfo {
@JsonProperty("fallback")
private Boolean fallback;
- /** [Create:OPT Update:OPT] File event queue settings. */
+ /** File event queue settings. */
@JsonProperty("file_event_queue")
private FileEventQueue fileEventQueue;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationsImpl.java
index 8c3107d8c..323cfed47 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationsImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationsImpl.java
@@ -37,7 +37,7 @@ public void delete(DeleteExternalLocationRequest request) {
Request req = new Request("DELETE", path);
ApiClient.setQuery(req, request);
req.withHeader("Accept", "application/json");
- apiClient.execute(req, DeleteResponse.class);
+ apiClient.execute(req, Void.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalMetadata.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalMetadata.java
new file mode 100755
index 000000000..eb20f8f9a
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalMetadata.java
@@ -0,0 +1,255 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Map;
+import java.util.Objects;
+
+@Generated
+public class ExternalMetadata {
+ /** List of columns associated with the external metadata object. */
+ @JsonProperty("columns")
+  private Collection<String> columns;
+ * These APIs provide a standardized way to create, update, retrieve, list, and delete external
+ * metadata objects. Fine-grained authorization ensures that only users with appropriate permissions
+ * can view and manage external metadata objects.
+ */
+@Generated
+public class ExternalMetadataAPI {
+ private static final Logger LOG = LoggerFactory.getLogger(ExternalMetadataAPI.class);
+
+ private final ExternalMetadataService impl;
+
+ /** Regular-use constructor */
+ public ExternalMetadataAPI(ApiClient apiClient) {
+ impl = new ExternalMetadataImpl(apiClient);
+ }
+
+ /** Constructor for mocks */
+ public ExternalMetadataAPI(ExternalMetadataService mock) {
+ impl = mock;
+ }
+
+ public ExternalMetadata createExternalMetadata(ExternalMetadata externalMetadata) {
+ return createExternalMetadata(
+ new CreateExternalMetadataRequest().setExternalMetadata(externalMetadata));
+ }
+
+ /**
+ * Creates a new external metadata object in the parent metastore if the caller is a metastore
+ * admin or has the **CREATE_EXTERNAL_METADATA** privilege. Grants **BROWSE** to all account users
+ * upon creation by default.
+ */
+ public ExternalMetadata createExternalMetadata(CreateExternalMetadataRequest request) {
+ return impl.createExternalMetadata(request);
+ }
+
+ public void deleteExternalMetadata(String name) {
+ deleteExternalMetadata(new DeleteExternalMetadataRequest().setName(name));
+ }
+
+ /**
+ * Deletes the external metadata object that matches the supplied name. The caller must be a
+ * metastore admin, the owner of the external metadata object, or a user that has the **MANAGE**
+ * privilege.
+ */
+ public void deleteExternalMetadata(DeleteExternalMetadataRequest request) {
+ impl.deleteExternalMetadata(request);
+ }
+
+ public ExternalMetadata getExternalMetadata(String name) {
+ return getExternalMetadata(new GetExternalMetadataRequest().setName(name));
+ }
+
+ /**
+ * Gets the specified external metadata object in a metastore. The caller must be a metastore
+ * admin, the owner of the external metadata object, or a user that has the **BROWSE** privilege.
+ */
+ public ExternalMetadata getExternalMetadata(GetExternalMetadataRequest request) {
+ return impl.getExternalMetadata(request);
+ }
+
+ /**
+ * Gets an array of external metadata objects in the metastore. If the caller is the metastore
+ * admin, all external metadata objects will be retrieved. Otherwise, only external metadata
+ * objects that the caller has **BROWSE** on will be retrieved. There is no guarantee of a
+ * specific ordering of the elements in the array.
+ */
+  public Iterable<ExternalMetadata> listExternalMetadata(
+ * These APIs provide a standardized way to create, update, retrieve, list, and delete external
+ * metadata objects. Fine-grained authorization ensures that only users with appropriate permissions
+ * can view and manage external metadata objects.
+ *
+ * This is the high-level interface, that contains generated methods.
+ *
+ * Evolving: this interface is under development. Method signatures may change.
+ */
+@Generated
+public interface ExternalMetadataService {
+ /**
+ * Creates a new external metadata object in the parent metastore if the caller is a metastore
+ * admin or has the **CREATE_EXTERNAL_METADATA** privilege. Grants **BROWSE** to all account users
+ * upon creation by default.
+ */
+ ExternalMetadata createExternalMetadata(
+ CreateExternalMetadataRequest createExternalMetadataRequest);
+
+ /**
+ * Deletes the external metadata object that matches the supplied name. The caller must be a
+ * metastore admin, the owner of the external metadata object, or a user that has the **MANAGE**
+ * privilege.
+ */
+ void deleteExternalMetadata(DeleteExternalMetadataRequest deleteExternalMetadataRequest);
+
+ /**
+ * Gets the specified external metadata object in a metastore. The caller must be a metastore
+ * admin, the owner of the external metadata object, or a user that has the **BROWSE** privilege.
+ */
+ ExternalMetadata getExternalMetadata(GetExternalMetadataRequest getExternalMetadataRequest);
+
+ /**
+ * Gets an array of external metadata objects in the metastore. If the caller is the metastore
+ * admin, all external metadata objects will be retrieved. Otherwise, only external metadata
+ * objects that the caller has **BROWSE** on will be retrieved. There is no guarantee of a
+ * specific ordering of the elements in the array.
+ */
+ ListExternalMetadataResponse listExternalMetadata(
+ ListExternalMetadataRequest listExternalMetadataRequest);
+
+ /**
+ * Updates the external metadata object that matches the supplied name. The caller can only update
+ * either the owner or other metadata fields in one request. The caller must be a metastore admin,
+ * the owner of the external metadata object, or a user that has the **MODIFY** privilege. If the
+ * caller is updating the owner, they must also have the **MANAGE** privilege.
+ */
+ ExternalMetadata updateExternalMetadata(
+ UpdateExternalMetadataRequest updateExternalMetadataRequest);
+}
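
A short sketch of the External Metadata API described above. It assumes the WorkspaceClient accessor is named externalMetadata() and that ExternalMetadata has a setName(...) setter; neither appears in this diff.

// Hedged sketch, not generated code. externalMetadata() and setName(...) are assumptions.
import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.catalog.ExternalMetadata;

public class ExternalMetadataExample {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();

    // Register an external metadata object; requires CREATE_EXTERNAL_METADATA or metastore admin.
    w.externalMetadata().createExternalMetadata(new ExternalMetadata().setName("kafka_topic_orders"));

    // Fetch it by name (BROWSE is sufficient), then delete it (MANAGE or ownership required).
    ExternalMetadata fetched = w.externalMetadata().getExternalMetadata("kafka_topic_orders");
    System.out.println(fetched);
    w.externalMetadata().deleteExternalMetadata("kafka_topic_orders");
  }
}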
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionParameterInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionParameterInfo.java
index 794ac8243..7e41e1dc0 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionParameterInfo.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionParameterInfo.java
@@ -21,11 +21,11 @@ public class FunctionParameterInfo {
@JsonProperty("parameter_default")
private String parameterDefault;
- /** The mode of the function parameter. */
+ /** */
@JsonProperty("parameter_mode")
private FunctionParameterMode parameterMode;
- /** The type of function parameter. */
+ /** */
@JsonProperty("parameter_type")
private FunctionParameterType parameterType;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionsImpl.java
index 422449786..387db0b64 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionsImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionsImpl.java
@@ -37,7 +37,7 @@ public void delete(DeleteFunctionRequest request) {
Request req = new Request("DELETE", path);
ApiClient.setQuery(req, request);
req.withHeader("Accept", "application/json");
- apiClient.execute(req, DeleteResponse.class);
+ apiClient.execute(req, Void.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GcpPubsub.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GcpPubsub.java
index b834a9ff0..226e0f45c 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GcpPubsub.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GcpPubsub.java
@@ -15,7 +15,7 @@ public class GcpPubsub {
/**
* The Pub/Sub subscription name in the format projects/{project}/subscriptions/{subscription
- * name} REQUIRED for provided_pubsub.
+ * name} Required for provided_pubsub.
*/
@JsonProperty("subscription_name")
private String subscriptionName;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryServiceCredentialRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryServiceCredentialRequest.java
index f768675fe..3a67980a0 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryServiceCredentialRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryServiceCredentialRequest.java
@@ -9,7 +9,7 @@
@Generated
public class GenerateTemporaryServiceCredentialRequest {
- /** The Azure cloud options to customize the requested temporary credential */
+ /** */
@JsonProperty("azure_options")
private GenerateTemporaryServiceCredentialAzureOptions azureOptions;
@@ -17,7 +17,7 @@ public class GenerateTemporaryServiceCredentialRequest {
@JsonProperty("credential_name")
private String credentialName;
- /** The GCP cloud options to customize the requested temporary credential */
+ /** */
@JsonProperty("gcp_options")
private GenerateTemporaryServiceCredentialGcpOptions gcpOptions;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryTableCredentialResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryTableCredentialResponse.java
index be752eec7..3f1f2cc48 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryTableCredentialResponse.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryTableCredentialResponse.java
@@ -9,25 +9,15 @@
@Generated
public class GenerateTemporaryTableCredentialResponse {
- /**
- * AWS temporary credentials for API authentication. Read more at
- * https://docs.aws.amazon.com/STS/latest/APIReference/API_Credentials.html.
- */
+ /** */
@JsonProperty("aws_temp_credentials")
private AwsCredentials awsTempCredentials;
- /**
- * Azure Active Directory token, essentially the Oauth token for Azure Service Principal or
- * Managed Identity. Read more at
- * https://learn.microsoft.com/en-us/azure/databricks/dev-tools/api/latest/aad/service-prin-aad-token
- */
+ /** */
@JsonProperty("azure_aad")
private AzureActiveDirectoryToken azureAad;
- /**
- * Azure temporary credentials for API authentication. Read more at
- * https://docs.microsoft.com/en-us/rest/api/storageservices/create-user-delegation-sas
- */
+ /** */
@JsonProperty("azure_user_delegation_sas")
private AzureUserDelegationSas azureUserDelegationSas;
@@ -38,17 +28,11 @@ public class GenerateTemporaryTableCredentialResponse {
@JsonProperty("expiration_time")
private Long expirationTime;
- /**
- * GCP temporary credentials for API authentication. Read more at
- * https://developers.google.com/identity/protocols/oauth2/service-account
- */
+ /** */
@JsonProperty("gcp_oauth_token")
private GcpOauthToken gcpOauthToken;
- /**
- * R2 temporary credentials for API authentication. Read more at
- * https://developers.cloudflare.com/r2/api/s3/tokens/.
- */
+ /** */
@JsonProperty("r2_temp_credentials")
private R2Credentials r2TempCredentials;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetExternalMetadataRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetExternalMetadataRequest.java
new file mode 100755
index 000000000..b797fe594
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetExternalMetadataRequest.java
@@ -0,0 +1,41 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+@Generated
+public class GetExternalMetadataRequest {
+ /** */
+ @JsonIgnore private String name;
+
+ public GetExternalMetadataRequest setName(String name) {
+ this.name = name;
+ return this;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ GetExternalMetadataRequest that = (GetExternalMetadataRequest) o;
+ return Objects.equals(name, that.name);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(name);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(GetExternalMetadataRequest.class).add("name", name).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/LineageDirection.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/LineageDirection.java
new file mode 100755
index 000000000..cbd8f066d
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/LineageDirection.java
@@ -0,0 +1,11 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+
+@Generated
+public enum LineageDirection {
+ DOWNSTREAM,
+ UPSTREAM,
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListExternalLineageRelationshipsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListExternalLineageRelationshipsRequest.java
new file mode 100755
index 000000000..9db004efc
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListExternalLineageRelationshipsRequest.java
@@ -0,0 +1,95 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+@Generated
+public class ListExternalLineageRelationshipsRequest {
+ /** The lineage direction to filter on. */
+ @JsonIgnore
+ @QueryParam("lineage_direction")
+ private LineageDirection lineageDirection;
+
+ /** The object to query external lineage relationship on. */
+ @JsonIgnore
+ @QueryParam("object_info")
+ private ExternalLineageObject objectInfo;
+
+ /** */
+ @JsonIgnore
+ @QueryParam("page_size")
+ private Long pageSize;
+
+ /** */
+ @JsonIgnore
+ @QueryParam("page_token")
+ private String pageToken;
+
+ public ListExternalLineageRelationshipsRequest setLineageDirection(
+ LineageDirection lineageDirection) {
+ this.lineageDirection = lineageDirection;
+ return this;
+ }
+
+ public LineageDirection getLineageDirection() {
+ return lineageDirection;
+ }
+
+ public ListExternalLineageRelationshipsRequest setObjectInfo(ExternalLineageObject objectInfo) {
+ this.objectInfo = objectInfo;
+ return this;
+ }
+
+ public ExternalLineageObject getObjectInfo() {
+ return objectInfo;
+ }
+
+ public ListExternalLineageRelationshipsRequest setPageSize(Long pageSize) {
+ this.pageSize = pageSize;
+ return this;
+ }
+
+ public Long getPageSize() {
+ return pageSize;
+ }
+
+ public ListExternalLineageRelationshipsRequest setPageToken(String pageToken) {
+ this.pageToken = pageToken;
+ return this;
+ }
+
+ public String getPageToken() {
+ return pageToken;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ ListExternalLineageRelationshipsRequest that = (ListExternalLineageRelationshipsRequest) o;
+ return Objects.equals(lineageDirection, that.lineageDirection)
+ && Objects.equals(objectInfo, that.objectInfo)
+ && Objects.equals(pageSize, that.pageSize)
+ && Objects.equals(pageToken, that.pageToken);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(lineageDirection, objectInfo, pageSize, pageToken);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(ListExternalLineageRelationshipsRequest.class)
+ .add("lineageDirection", lineageDirection)
+ .add("objectInfo", objectInfo)
+ .add("pageSize", pageSize)
+ .add("pageToken", pageToken)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListExternalLineageRelationshipsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListExternalLineageRelationshipsResponse.java
new file mode 100755
index 000000000..7bf328c52
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListExternalLineageRelationshipsResponse.java
@@ -0,0 +1,61 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+@Generated
+public class ListExternalLineageRelationshipsResponse {
+ /** */
+ @JsonProperty("external_lineage_relationships")
+  private Collection<ExternalLineageRelationship> externalLineageRelationships;
+   * A field mask of `*` indicates full replacement. It’s recommended to always explicitly list
+ * the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if
+ * the API changes in the future.
+ */
+ @JsonIgnore
+ @QueryParam("update_mask")
+ private String updateMask;
+
+ public UpdateExternalLineageRelationshipRequest setExternalLineageRelationship(
+ UpdateRequestExternalLineage externalLineageRelationship) {
+ this.externalLineageRelationship = externalLineageRelationship;
+ return this;
+ }
+
+ public UpdateRequestExternalLineage getExternalLineageRelationship() {
+ return externalLineageRelationship;
+ }
+
+ public UpdateExternalLineageRelationshipRequest setUpdateMask(String updateMask) {
+ this.updateMask = updateMask;
+ return this;
+ }
+
+ public String getUpdateMask() {
+ return updateMask;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ UpdateExternalLineageRelationshipRequest that = (UpdateExternalLineageRelationshipRequest) o;
+ return Objects.equals(externalLineageRelationship, that.externalLineageRelationship)
+ && Objects.equals(updateMask, that.updateMask);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(externalLineageRelationship, updateMask);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(UpdateExternalLineageRelationshipRequest.class)
+ .add("externalLineageRelationship", externalLineageRelationship)
+ .add("updateMask", updateMask)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateExternalLocation.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateExternalLocation.java
index d2a759d9f..719c6c4d0 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateExternalLocation.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateExternalLocation.java
@@ -18,11 +18,11 @@ public class UpdateExternalLocation {
@JsonProperty("credential_name")
private String credentialName;
- /** [Create:OPT Update:OPT] Whether to enable file events on this external location. */
+ /** Whether to enable file events on this external location. */
@JsonProperty("enable_file_events")
private Boolean enableFileEvents;
- /** Encryption options that apply to clients connecting to cloud storage. */
+ /** */
@JsonProperty("encryption_details")
private EncryptionDetails encryptionDetails;
@@ -34,7 +34,7 @@ public class UpdateExternalLocation {
@JsonProperty("fallback")
private Boolean fallback;
- /** [Create:OPT Update:OPT] File event queue settings. */
+ /** File event queue settings. */
@JsonProperty("file_event_queue")
private FileEventQueue fileEventQueue;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateExternalMetadataRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateExternalMetadataRequest.java
new file mode 100755
index 000000000..890161e0e
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateExternalMetadataRequest.java
@@ -0,0 +1,86 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class UpdateExternalMetadataRequest {
+ /** */
+ @JsonProperty("external_metadata")
+ private ExternalMetadata externalMetadata;
+
+ /** Name of the external metadata object. */
+ @JsonIgnore private String name;
+
+ /**
+ * The field mask must be a single string, with multiple fields separated by commas (no spaces).
+ * The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields
+ * (e.g., `author.given_name`). Specification of elements in sequence or map fields is not
+ * allowed, as only the entire collection field can be specified. Field names must exactly match
+ * the resource field names.
+ *
+ * A field mask of `*` indicates full replacement. It’s recommended to always explicitly list
+ * the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if
+ * the API changes in the future.
+ */
+ @JsonIgnore
+ @QueryParam("update_mask")
+ private String updateMask;
+
+ public UpdateExternalMetadataRequest setExternalMetadata(ExternalMetadata externalMetadata) {
+ this.externalMetadata = externalMetadata;
+ return this;
+ }
+
+ public ExternalMetadata getExternalMetadata() {
+ return externalMetadata;
+ }
+
+ public UpdateExternalMetadataRequest setName(String name) {
+ this.name = name;
+ return this;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public UpdateExternalMetadataRequest setUpdateMask(String updateMask) {
+ this.updateMask = updateMask;
+ return this;
+ }
+
+ public String getUpdateMask() {
+ return updateMask;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ UpdateExternalMetadataRequest that = (UpdateExternalMetadataRequest) o;
+ return Objects.equals(externalMetadata, that.externalMetadata)
+ && Objects.equals(name, that.name)
+ && Objects.equals(updateMask, that.updateMask);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(externalMetadata, name, updateMask);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(UpdateExternalMetadataRequest.class)
+ .add("externalMetadata", externalMetadata)
+ .add("name", name)
+ .add("updateMask", updateMask)
+ .toString();
+ }
+}
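
The update_mask documented above drives partial updates: only the comma-separated fields named in the mask are written, and `*` requests full replacement. A small sketch follows, assuming ExternalMetadata exposes a setColumns(...) setter (columns is the only field visible in this diff).

// Hedged sketch: update only the columns field of an existing external metadata object.
import com.databricks.sdk.service.catalog.ExternalMetadata;
import com.databricks.sdk.service.catalog.UpdateExternalMetadataRequest;
import java.util.Arrays;

public class UpdateMaskExample {
  public static void main(String[] args) {
    UpdateExternalMetadataRequest request =
        new UpdateExternalMetadataRequest()
            .setName("kafka_topic_orders")
            .setExternalMetadata(
                new ExternalMetadata().setColumns(Arrays.asList("order_id", "amount"))) // setColumns is assumed
            .setUpdateMask("columns"); // comma-separated field names, no spaces; "*" would replace everything
    System.out.println(request);
  }
}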
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateRequestExternalLineage.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateRequestExternalLineage.java
new file mode 100755
index 000000000..4ca0b7552
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateRequestExternalLineage.java
@@ -0,0 +1,106 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Map;
+import java.util.Objects;
+
+@Generated
+public class UpdateRequestExternalLineage {
+ /** List of column relationships between source and target objects. */
+ @JsonProperty("columns")
+  private Collection<ColumnRelationship> columns;
-   * [Learn more]: https://docs.databricks.com/aws/en/volumes/managed-vs-external
- */
+ /** */
@JsonProperty("volume_type")
private VolumeType volumeType;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/VolumesImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/VolumesImpl.java
index 00fe08a18..9462102a8 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/VolumesImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/VolumesImpl.java
@@ -36,7 +36,7 @@ public void delete(DeleteVolumeRequest request) {
try {
Request req = new Request("DELETE", path);
ApiClient.setQuery(req, request);
- apiClient.execute(req, DeleteResponse.class);
+ apiClient.execute(req, Void.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetsImpl.java
index 34d8b7b59..d2d695089 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetsImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetsImpl.java
@@ -40,7 +40,7 @@ public void delete(DeleteCleanRoomAssetRequest request) {
Request req = new Request("DELETE", path);
ApiClient.setQuery(req, request);
req.withHeader("Accept", "application/json");
- apiClient.execute(req, DeleteCleanRoomAssetResponse.class);
+ apiClient.execute(req, Void.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomRemoteDetail.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomRemoteDetail.java
index afb1ee357..46f713c85 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomRemoteDetail.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomRemoteDetail.java
@@ -30,9 +30,7 @@ public class CleanRoomRemoteDetail {
@JsonProperty("collaborators")
  private Collection<CleanRoomCollaborator> collaborators;
 * This is the high-level interface, that contains generated methods.
*
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CreateCleanRoomAssetRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CreateCleanRoomAssetRequest.java
index 5cc4c4842..6c0f39d60 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CreateCleanRoomAssetRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CreateCleanRoomAssetRequest.java
@@ -10,7 +10,7 @@
@Generated
public class CreateCleanRoomAssetRequest {
- /** Metadata of the clean room asset */
+ /** */
@JsonProperty("asset")
private CleanRoomAsset asset;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/DeleteCleanRoomAssetResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/DeleteCleanRoomAssetResponse.java
deleted file mode 100755
index 4efe5848d..000000000
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/DeleteCleanRoomAssetResponse.java
+++ /dev/null
@@ -1,32 +0,0 @@
-// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-package com.databricks.sdk.service.cleanrooms;
-
-import com.databricks.sdk.support.Generated;
-import com.databricks.sdk.support.ToStringer;
-import java.util.Objects;
-
-/**
- * Response for delete clean room request. Using an empty message since the generic Empty proto does
- * not externd UnshadedMessageMarker.
- */
-@Generated
-public class DeleteCleanRoomAssetResponse {
-
- @Override
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
- return true;
- }
-
- @Override
- public int hashCode() {
- return Objects.hash();
- }
-
- @Override
- public String toString() {
- return new ToStringer(DeleteCleanRoomAssetResponse.class).toString();
- }
-}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/UpdateCleanRoomAssetRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/UpdateCleanRoomAssetRequest.java
index bda1f2843..f5c2d54fa 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/UpdateCleanRoomAssetRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/UpdateCleanRoomAssetRequest.java
@@ -10,7 +10,10 @@
@Generated
public class UpdateCleanRoomAssetRequest {
- /** Metadata of the clean room asset */
+ /**
+ * The asset to update. The asset's `name` and `asset_type` fields are used to identify the asset
+ * to update.
+ */
@JsonProperty("asset")
private CleanRoomAsset asset;
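
Per the new doc comment, the nested asset's `name` and `asset_type` identify which asset to update. A hedged sketch follows; the cleanRoomAssets() accessor, the update(...) call, setCleanRoomName(...), and the CleanRoomAsset setters are assumptions not shown in this diff.

// Hedged sketch, not generated code. Accessor, update(...) signature, and setters are assumed.
import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.cleanrooms.CleanRoomAsset;
import com.databricks.sdk.service.cleanrooms.CleanRoomAssetAssetType;
import com.databricks.sdk.service.cleanrooms.UpdateCleanRoomAssetRequest;

public class CleanRoomAssetUpdateExample {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();
    CleanRoomAsset asset =
        new CleanRoomAsset()
            .setName("main.sales.orders") // identifies the asset to update
            .setAssetType(CleanRoomAssetAssetType.TABLE);
    w.cleanRoomAssets()
        .update(
            new UpdateCleanRoomAssetRequest()
                .setCleanRoomName("my_clean_room") // assumed to still be required
                .setAsset(asset));
  }
}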
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/AddResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/AddResponse.java
deleted file mode 100755
index 2c169a9d7..000000000
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/AddResponse.java
+++ /dev/null
@@ -1,28 +0,0 @@
-// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-package com.databricks.sdk.service.compute;
-
-import com.databricks.sdk.support.Generated;
-import com.databricks.sdk.support.ToStringer;
-import java.util.Objects;
-
-@Generated
-public class AddResponse {
-
- @Override
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
- return true;
- }
-
- @Override
- public int hashCode() {
- return Objects.hash();
- }
-
- @Override
- public String toString() {
- return new ToStringer(AddResponse.class).toString();
- }
-}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/AwsAttributes.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/AwsAttributes.java
index 825127bce..377cbeea7 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/AwsAttributes.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/AwsAttributes.java
@@ -10,12 +10,7 @@
/** Attributes set during cluster creation which are related to Amazon Web Services. */
@Generated
public class AwsAttributes {
- /**
- * Availability type used for all subsequent nodes past the `first_on_demand` ones.
- *
- * Note: If `first_on_demand` is zero, this availability type will be used for the entire
- * cluster.
- */
+ /** */
@JsonProperty("availability")
private AwsAvailability availability;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CancelResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CancelResponse.java
deleted file mode 100755
index 2109537fd..000000000
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CancelResponse.java
+++ /dev/null
@@ -1,28 +0,0 @@
-// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-package com.databricks.sdk.service.compute;
-
-import com.databricks.sdk.support.Generated;
-import com.databricks.sdk.support.ToStringer;
-import java.util.Objects;
-
-@Generated
-public class CancelResponse {
-
- @Override
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
- return true;
- }
-
- @Override
- public int hashCode() {
- return Objects.hash();
- }
-
- @Override
- public String toString() {
- return new ToStringer(CancelResponse.class).toString();
- }
-}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ChangeClusterOwnerResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ChangeClusterOwnerResponse.java
deleted file mode 100755
index 36b86fe5f..000000000
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ChangeClusterOwnerResponse.java
+++ /dev/null
@@ -1,28 +0,0 @@
-// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-package com.databricks.sdk.service.compute;
-
-import com.databricks.sdk.support.Generated;
-import com.databricks.sdk.support.ToStringer;
-import java.util.Objects;
-
-@Generated
-public class ChangeClusterOwnerResponse {
-
- @Override
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
- return true;
- }
-
- @Override
- public int hashCode() {
- return Objects.hash();
- }
-
- @Override
- public String toString() {
- return new ToStringer(ChangeClusterOwnerResponse.class).toString();
- }
-}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterAccessControlRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterAccessControlRequest.java
index c16994224..a123970d8 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterAccessControlRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterAccessControlRequest.java
@@ -13,7 +13,7 @@ public class ClusterAccessControlRequest {
@JsonProperty("group_name")
private String groupName;
- /** Permission level */
+ /** */
@JsonProperty("permission_level")
private ClusterPermissionLevel permissionLevel;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterAttributes.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterAttributes.java
index 17316f8c5..6cc04ec02 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterAttributes.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterAttributes.java
@@ -68,33 +68,7 @@ public class ClusterAttributes {
@JsonProperty("custom_tags")
private Map<String, String> customTags;
- /**
- * Data security mode decides what data governance model to use when accessing data from a
- * cluster.
- *
- * The following modes can only be used when `kind = CLASSIC_PREVIEW`. *
- * `DATA_SECURITY_MODE_AUTO`: Databricks will choose the most appropriate access mode depending on
- * your compute configuration. * `DATA_SECURITY_MODE_STANDARD`: Alias for `USER_ISOLATION`. *
- * `DATA_SECURITY_MODE_DEDICATED`: Alias for `SINGLE_USER`.
- *
- * The following modes can be used regardless of `kind`. * `NONE`: No security isolation for
- * multiple users sharing the cluster. Data governance features are not available in this mode. *
- * `SINGLE_USER`: A secure cluster that can only be exclusively used by a single user specified in
- * `single_user_name`. Most programming languages, cluster features and data governance features
- * are available in this mode. * `USER_ISOLATION`: A secure cluster that can be shared by multiple
- * users. Cluster users are fully isolated so that they cannot see each other's data and
- * credentials. Most data governance features are supported in this mode. But programming
- * languages and cluster features might be limited.
- *
- * The following modes are deprecated starting with Databricks Runtime 15.0 and will be removed
- * for future Databricks Runtime versions:
- *
- * * `LEGACY_TABLE_ACL`: This mode is for users migrating from legacy Table ACL clusters. *
- * `LEGACY_PASSTHROUGH`: This mode is for users migrating from legacy Passthrough on high
- * concurrency clusters. * `LEGACY_SINGLE_USER`: This mode is for users migrating from legacy
- * Passthrough on standard clusters. * `LEGACY_SINGLE_USER_STANDARD`: This mode provides a way
- * that doesn’t have UC nor passthrough enabled.
- */
+ /** */
@JsonProperty("data_security_mode")
private DataSecurityMode dataSecurityMode;
@@ -160,21 +134,7 @@ public class ClusterAttributes {
@JsonProperty("is_single_node")
private Boolean isSingleNode;
- /**
- * The kind of compute described by this compute specification.
- *
- * Depending on `kind`, different validations and default values will be applied.
- *
- * Clusters with `kind = CLASSIC_PREVIEW` support the following fields, whereas clusters with
- * no specified `kind` do not. * [is_single_node](/api/workspace/clusters/create#is_single_node) *
- * [use_ml_runtime](/api/workspace/clusters/create#use_ml_runtime) *
- * [data_security_mode](/api/workspace/clusters/create#data_security_mode) set to
- * `DATA_SECURITY_MODE_AUTO`, `DATA_SECURITY_MODE_DEDICATED`, or `DATA_SECURITY_MODE_STANDARD`
- *
- * By using the [simple form], your clusters are automatically using `kind = CLASSIC_PREVIEW`.
- *
- * [simple form]: https://docs.databricks.com/compute/simple-form.html
- */
+ /** */
@JsonProperty("kind")
private Kind kind;
@@ -269,7 +229,7 @@ public class ClusterAttributes {
@JsonProperty("use_ml_runtime")
private Boolean useMlRuntime;
- /** Cluster Attributes showing for clusters workload types. */
+ /** */
@JsonProperty("workload_type")
private WorkloadType workloadType;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterDetails.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterDetails.java
index 671640778..b9901dced 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterDetails.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterDetails.java
@@ -109,33 +109,7 @@ public class ClusterDetails {
@JsonProperty("custom_tags")
private Map<String, String> customTags;
- /**
- * Data security mode decides what data governance model to use when accessing data from a
- * cluster.
- *
- * The following modes can only be used when `kind = CLASSIC_PREVIEW`. *
- * `DATA_SECURITY_MODE_AUTO`: Databricks will choose the most appropriate access mode depending on
- * your compute configuration. * `DATA_SECURITY_MODE_STANDARD`: Alias for `USER_ISOLATION`. *
- * `DATA_SECURITY_MODE_DEDICATED`: Alias for `SINGLE_USER`.
- *
- * The following modes can be used regardless of `kind`. * `NONE`: No security isolation for
- * multiple users sharing the cluster. Data governance features are not available in this mode. *
- * `SINGLE_USER`: A secure cluster that can only be exclusively used by a single user specified in
- * `single_user_name`. Most programming languages, cluster features and data governance features
- * are available in this mode. * `USER_ISOLATION`: A secure cluster that can be shared by multiple
- * users. Cluster users are fully isolated so that they cannot see each other's data and
- * credentials. Most data governance features are supported in this mode. But programming
- * languages and cluster features might be limited.
- *
- * The following modes are deprecated starting with Databricks Runtime 15.0 and will be removed
- * for future Databricks Runtime versions:
- *
- * * `LEGACY_TABLE_ACL`: This mode is for users migrating from legacy Table ACL clusters. *
- * `LEGACY_PASSTHROUGH`: This mode is for users migrating from legacy Passthrough on high
- * concurrency clusters. * `LEGACY_SINGLE_USER`: This mode is for users migrating from legacy
- * Passthrough on standard clusters. * `LEGACY_SINGLE_USER_STANDARD`: This mode provides a way
- * that doesn’t have UC nor passthrough enabled.
- */
+ /** */
@JsonProperty("data_security_mode")
private DataSecurityMode dataSecurityMode;
@@ -235,21 +209,7 @@ public class ClusterDetails {
@JsonProperty("jdbc_port")
private Long jdbcPort;
- /**
- * The kind of compute described by this compute specification.
- *
- * Depending on `kind`, different validations and default values will be applied.
- *
- * Clusters with `kind = CLASSIC_PREVIEW` support the following fields, whereas clusters with
- * no specified `kind` do not. * [is_single_node](/api/workspace/clusters/create#is_single_node) *
- * [use_ml_runtime](/api/workspace/clusters/create#use_ml_runtime) *
- * [data_security_mode](/api/workspace/clusters/create#data_security_mode) set to
- * `DATA_SECURITY_MODE_AUTO`, `DATA_SECURITY_MODE_DEDICATED`, or `DATA_SECURITY_MODE_STANDARD`
- *
- * By using the [simple form], your clusters are automatically using `kind = CLASSIC_PREVIEW`.
- *
- * [simple form]: https://docs.databricks.com/compute/simple-form.html
- */
+ /** */
@JsonProperty("kind")
private Kind kind;
@@ -409,7 +369,7 @@ public class ClusterDetails {
@JsonProperty("use_ml_runtime")
private Boolean useMlRuntime;
- /** Cluster Attributes showing for clusters workload types. */
+ /** */
@JsonProperty("workload_type")
private WorkloadType workloadType;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPermission.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPermission.java
index a96fd99fb..ac0c8d30d 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPermission.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPermission.java
@@ -18,7 +18,7 @@ public class ClusterPermission {
@JsonProperty("inherited_from_object")
private Collection<String> inheritedFromObject;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterSpec.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterSpec.java
- /**
- * Data security mode decides what data governance model to use when accessing data from a
- * cluster.
- *
- * The following modes can only be used when `kind = CLASSIC_PREVIEW`. *
- * `DATA_SECURITY_MODE_AUTO`: Databricks will choose the most appropriate access mode depending on
- * your compute configuration. * `DATA_SECURITY_MODE_STANDARD`: Alias for `USER_ISOLATION`. *
- * `DATA_SECURITY_MODE_DEDICATED`: Alias for `SINGLE_USER`.
- *
- * The following modes can be used regardless of `kind`. * `NONE`: No security isolation for
- * multiple users sharing the cluster. Data governance features are not available in this mode. *
- * `SINGLE_USER`: A secure cluster that can only be exclusively used by a single user specified in
- * `single_user_name`. Most programming languages, cluster features and data governance features
- * are available in this mode. * `USER_ISOLATION`: A secure cluster that can be shared by multiple
- * users. Cluster users are fully isolated so that they cannot see each other's data and
- * credentials. Most data governance features are supported in this mode. But programming
- * languages and cluster features might be limited.
- *
- * The following modes are deprecated starting with Databricks Runtime 15.0 and will be removed
- * for future Databricks Runtime versions:
- *
- * * `LEGACY_TABLE_ACL`: This mode is for users migrating from legacy Table ACL clusters. *
- * `LEGACY_PASSTHROUGH`: This mode is for users migrating from legacy Passthrough on high
- * concurrency clusters. * `LEGACY_SINGLE_USER`: This mode is for users migrating from legacy
- * Passthrough on standard clusters. * `LEGACY_SINGLE_USER_STANDARD`: This mode provides a way
- * that doesn’t have UC nor passthrough enabled.
- */
+ /** */
@JsonProperty("data_security_mode")
private DataSecurityMode dataSecurityMode;
@@ -174,21 +148,7 @@ public class ClusterSpec {
@JsonProperty("is_single_node")
private Boolean isSingleNode;
- /**
- * The kind of compute described by this compute specification.
- *
- * Depending on `kind`, different validations and default values will be applied.
- *
- * Clusters with `kind = CLASSIC_PREVIEW` support the following fields, whereas clusters with
- * no specified `kind` do not. * [is_single_node](/api/workspace/clusters/create#is_single_node) *
- * [use_ml_runtime](/api/workspace/clusters/create#use_ml_runtime) *
- * [data_security_mode](/api/workspace/clusters/create#data_security_mode) set to
- * `DATA_SECURITY_MODE_AUTO`, `DATA_SECURITY_MODE_DEDICATED`, or `DATA_SECURITY_MODE_STANDARD`
- *
- * By using the [simple form], your clusters are automatically using `kind = CLASSIC_PREVIEW`.
- *
- * [simple form]: https://docs.databricks.com/compute/simple-form.html
- */
+ /** */
@JsonProperty("kind")
private Kind kind;
@@ -296,7 +256,7 @@ public class ClusterSpec {
@JsonProperty("use_ml_runtime")
private Boolean useMlRuntime;
- /** Cluster Attributes showing for clusters workload types. */
+ /** */
@JsonProperty("workload_type")
private WorkloadType workloadType;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClustersImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClustersImpl.java
index e847a4571..23c02c8d3 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClustersImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClustersImpl.java
@@ -24,7 +24,7 @@ public void changeOwner(ChangeClusterOwner request) {
ApiClient.setQuery(req, request);
req.withHeader("Accept", "application/json");
req.withHeader("Content-Type", "application/json");
- apiClient.execute(req, ChangeClusterOwnerResponse.class);
+ apiClient.execute(req, Void.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
@@ -52,7 +52,7 @@ public void delete(DeleteCluster request) {
ApiClient.setQuery(req, request);
req.withHeader("Accept", "application/json");
req.withHeader("Content-Type", "application/json");
- apiClient.execute(req, DeleteClusterResponse.class);
+ apiClient.execute(req, Void.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
@@ -66,7 +66,7 @@ public void edit(EditCluster request) {
ApiClient.setQuery(req, request);
req.withHeader("Accept", "application/json");
req.withHeader("Content-Type", "application/json");
- apiClient.execute(req, EditClusterResponse.class);
+ apiClient.execute(req, Void.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
@@ -172,7 +172,7 @@ public void permanentDelete(PermanentDeleteCluster request) {
ApiClient.setQuery(req, request);
req.withHeader("Accept", "application/json");
req.withHeader("Content-Type", "application/json");
- apiClient.execute(req, PermanentDeleteClusterResponse.class);
+ apiClient.execute(req, Void.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
@@ -186,7 +186,7 @@ public void pin(PinCluster request) {
ApiClient.setQuery(req, request);
req.withHeader("Accept", "application/json");
req.withHeader("Content-Type", "application/json");
- apiClient.execute(req, PinClusterResponse.class);
+ apiClient.execute(req, Void.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
@@ -200,7 +200,7 @@ public void resize(ResizeCluster request) {
ApiClient.setQuery(req, request);
req.withHeader("Accept", "application/json");
req.withHeader("Content-Type", "application/json");
- apiClient.execute(req, ResizeClusterResponse.class);
+ apiClient.execute(req, Void.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
@@ -214,7 +214,7 @@ public void restart(RestartCluster request) {
ApiClient.setQuery(req, request);
req.withHeader("Accept", "application/json");
req.withHeader("Content-Type", "application/json");
- apiClient.execute(req, RestartClusterResponse.class);
+ apiClient.execute(req, Void.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
@@ -254,7 +254,7 @@ public void start(StartCluster request) {
ApiClient.setQuery(req, request);
req.withHeader("Accept", "application/json");
req.withHeader("Content-Type", "application/json");
- apiClient.execute(req, StartClusterResponse.class);
+ apiClient.execute(req, Void.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
@@ -268,7 +268,7 @@ public void unpin(UnpinCluster request) {
ApiClient.setQuery(req, request);
req.withHeader("Accept", "application/json");
req.withHeader("Content-Type", "application/json");
- apiClient.execute(req, UnpinClusterResponse.class);
+ apiClient.execute(req, Void.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
@@ -282,7 +282,7 @@ public void update(UpdateCluster request) {
ApiClient.setQuery(req, request);
req.withHeader("Accept", "application/json");
req.withHeader("Content-Type", "application/json");
- apiClient.execute(req, UpdateClusterResponse.class);
+ apiClient.execute(req, Void.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CommandExecutionImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CommandExecutionImpl.java
index 3cd62c5cb..550dfe69a 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CommandExecutionImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CommandExecutionImpl.java
@@ -24,7 +24,7 @@ public void cancel(CancelCommand request) {
ApiClient.setQuery(req, request);
req.withHeader("Accept", "application/json");
req.withHeader("Content-Type", "application/json");
- apiClient.execute(req, CancelResponse.class);
+ apiClient.execute(req, Void.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
@@ -78,7 +78,7 @@ public void destroy(DestroyContext request) {
ApiClient.setQuery(req, request);
req.withHeader("Accept", "application/json");
req.withHeader("Content-Type", "application/json");
- apiClient.execute(req, DestroyResponse.class);
+ apiClient.execute(req, Void.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateCluster.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateCluster.java
index b860190d0..eb15d7964 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateCluster.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateCluster.java
@@ -85,33 +85,7 @@ public class CreateCluster {
@JsonProperty("custom_tags")
private Map<String, String> customTags;
- /**
- * Data security mode decides what data governance model to use when accessing data from a
- * cluster.
- *
- * The following modes can only be used when `kind = CLASSIC_PREVIEW`. *
- * `DATA_SECURITY_MODE_AUTO`: Databricks will choose the most appropriate access mode depending on
- * your compute configuration. * `DATA_SECURITY_MODE_STANDARD`: Alias for `USER_ISOLATION`. *
- * `DATA_SECURITY_MODE_DEDICATED`: Alias for `SINGLE_USER`.
- *
- * The following modes can be used regardless of `kind`. * `NONE`: No security isolation for
- * multiple users sharing the cluster. Data governance features are not available in this mode. *
- * `SINGLE_USER`: A secure cluster that can only be exclusively used by a single user specified in
- * `single_user_name`. Most programming languages, cluster features and data governance features
- * are available in this mode. * `USER_ISOLATION`: A secure cluster that can be shared by multiple
- * users. Cluster users are fully isolated so that they cannot see each other's data and
- * credentials. Most data governance features are supported in this mode. But programming
- * languages and cluster features might be limited.
- *
- * The following modes are deprecated starting with Databricks Runtime 15.0 and will be removed
- * for future Databricks Runtime versions:
- *
- * * `LEGACY_TABLE_ACL`: This mode is for users migrating from legacy Table ACL clusters. *
- * `LEGACY_PASSTHROUGH`: This mode is for users migrating from legacy Passthrough on high
- * concurrency clusters. * `LEGACY_SINGLE_USER`: This mode is for users migrating from legacy
- * Passthrough on standard clusters. * `LEGACY_SINGLE_USER_STANDARD`: This mode provides a way
- * that doesn’t have UC nor passthrough enabled.
- */
+ /** */
@JsonProperty("data_security_mode")
private DataSecurityMode dataSecurityMode;
@@ -177,21 +151,7 @@ public class CreateCluster {
@JsonProperty("is_single_node")
private Boolean isSingleNode;
- /**
- * The kind of compute described by this compute specification.
- *
- * Depending on `kind`, different validations and default values will be applied.
- *
- * Clusters with `kind = CLASSIC_PREVIEW` support the following fields, whereas clusters with
- * no specified `kind` do not. * [is_single_node](/api/workspace/clusters/create#is_single_node) *
- * [use_ml_runtime](/api/workspace/clusters/create#use_ml_runtime) *
- * [data_security_mode](/api/workspace/clusters/create#data_security_mode) set to
- * `DATA_SECURITY_MODE_AUTO`, `DATA_SECURITY_MODE_DEDICATED`, or `DATA_SECURITY_MODE_STANDARD`
- *
- * By using the [simple form], your clusters are automatically using `kind = CLASSIC_PREVIEW`.
- *
- * [simple form]: https://docs.databricks.com/compute/simple-form.html
- */
+ /** */
@JsonProperty("kind")
private Kind kind;
@@ -299,7 +259,7 @@ public class CreateCluster {
@JsonProperty("use_ml_runtime")
private Boolean useMlRuntime;
- /** Cluster Attributes showing for clusters workload types. */
+ /** */
@JsonProperty("workload_type")
private WorkloadType workloadType;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeleteClusterResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeleteClusterResponse.java
deleted file mode 100755
index 91fc276a6..000000000
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeleteClusterResponse.java
+++ /dev/null
@@ -1,28 +0,0 @@
-// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-package com.databricks.sdk.service.compute;
-
-import com.databricks.sdk.support.Generated;
-import com.databricks.sdk.support.ToStringer;
-import java.util.Objects;
-
-@Generated
-public class DeleteClusterResponse {
-
- @Override
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
- return true;
- }
-
- @Override
- public int hashCode() {
- return Objects.hash();
- }
-
- @Override
- public String toString() {
- return new ToStringer(DeleteClusterResponse.class).toString();
- }
-}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeleteInstancePoolResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeleteInstancePoolResponse.java
deleted file mode 100755
index 3a71cf38e..000000000
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeleteInstancePoolResponse.java
+++ /dev/null
@@ -1,28 +0,0 @@
-// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-package com.databricks.sdk.service.compute;
-
-import com.databricks.sdk.support.Generated;
-import com.databricks.sdk.support.ToStringer;
-import java.util.Objects;
-
-@Generated
-public class DeleteInstancePoolResponse {
-
- @Override
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
- return true;
- }
-
- @Override
- public int hashCode() {
- return Objects.hash();
- }
-
- @Override
- public String toString() {
- return new ToStringer(DeleteInstancePoolResponse.class).toString();
- }
-}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeletePolicyResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeletePolicyResponse.java
deleted file mode 100755
index 60aa4fcc7..000000000
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeletePolicyResponse.java
+++ /dev/null
@@ -1,28 +0,0 @@
-// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-package com.databricks.sdk.service.compute;
-
-import com.databricks.sdk.support.Generated;
-import com.databricks.sdk.support.ToStringer;
-import java.util.Objects;
-
-@Generated
-public class DeletePolicyResponse {
-
- @Override
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
- return true;
- }
-
- @Override
- public int hashCode() {
- return Objects.hash();
- }
-
- @Override
- public String toString() {
- return new ToStringer(DeletePolicyResponse.class).toString();
- }
-}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeleteResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeleteResponse.java
deleted file mode 100755
index 62648157e..000000000
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeleteResponse.java
+++ /dev/null
@@ -1,28 +0,0 @@
-// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-package com.databricks.sdk.service.compute;
-
-import com.databricks.sdk.support.Generated;
-import com.databricks.sdk.support.ToStringer;
-import java.util.Objects;
-
-@Generated
-public class DeleteResponse {
-
- @Override
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
- return true;
- }
-
- @Override
- public int hashCode() {
- return Objects.hash();
- }
-
- @Override
- public String toString() {
- return new ToStringer(DeleteResponse.class).toString();
- }
-}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DestroyResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DestroyResponse.java
deleted file mode 100755
index 2bf691176..000000000
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DestroyResponse.java
+++ /dev/null
@@ -1,28 +0,0 @@
-// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-package com.databricks.sdk.service.compute;
-
-import com.databricks.sdk.support.Generated;
-import com.databricks.sdk.support.ToStringer;
-import java.util.Objects;
-
-@Generated
-public class DestroyResponse {
-
- @Override
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
- return true;
- }
-
- @Override
- public int hashCode() {
- return Objects.hash();
- }
-
- @Override
- public String toString() {
- return new ToStringer(DestroyResponse.class).toString();
- }
-}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DiskType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DiskType.java
index 3e04994c6..b3ca16e07 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DiskType.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DiskType.java
@@ -10,17 +10,11 @@
/** Describes the disk type. */
@Generated
public class DiskType {
- /**
- * All Azure Disk types that Databricks supports. See
- * https://docs.microsoft.com/en-us/azure/storage/storage-about-disks-and-vhds-linux#types-of-disks
- */
+ /** */
@JsonProperty("azure_disk_volume_type")
private DiskTypeAzureDiskVolumeType azureDiskVolumeType;
- /**
- * All EBS volume types that Databricks supports. See https://aws.amazon.com/ebs/details/ for
- * details.
- */
+ /** */
@JsonProperty("ebs_volume_type")
private DiskTypeEbsVolumeType ebsVolumeType;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditCluster.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditCluster.java
index 7fe108450..f1b840e3a 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditCluster.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditCluster.java
@@ -82,33 +82,7 @@ public class EditCluster {
@JsonProperty("custom_tags")
private Map<String, String> customTags;
- /**
- * Data security mode decides what data governance model to use when accessing data from a
- * cluster.
- *
- * The following modes can only be used when `kind = CLASSIC_PREVIEW`. *
- * `DATA_SECURITY_MODE_AUTO`: Databricks will choose the most appropriate access mode depending on
- * your compute configuration. * `DATA_SECURITY_MODE_STANDARD`: Alias for `USER_ISOLATION`. *
- * `DATA_SECURITY_MODE_DEDICATED`: Alias for `SINGLE_USER`.
- *
- * The following modes can be used regardless of `kind`. * `NONE`: No security isolation for
- * multiple users sharing the cluster. Data governance features are not available in this mode. *
- * `SINGLE_USER`: A secure cluster that can only be exclusively used by a single user specified in
- * `single_user_name`. Most programming languages, cluster features and data governance features
- * are available in this mode. * `USER_ISOLATION`: A secure cluster that can be shared by multiple
- * users. Cluster users are fully isolated so that they cannot see each other's data and
- * credentials. Most data governance features are supported in this mode. But programming
- * languages and cluster features might be limited.
- *
- * The following modes are deprecated starting with Databricks Runtime 15.0 and will be removed
- * for future Databricks Runtime versions:
- *
- * * `LEGACY_TABLE_ACL`: This mode is for users migrating from legacy Table ACL clusters. *
- * `LEGACY_PASSTHROUGH`: This mode is for users migrating from legacy Passthrough on high
- * concurrency clusters. * `LEGACY_SINGLE_USER`: This mode is for users migrating from legacy
- * Passthrough on standard clusters. * `LEGACY_SINGLE_USER_STANDARD`: This mode provides a way
- * that doesn’t have UC nor passthrough enabled.
- */
+ /** */
@JsonProperty("data_security_mode")
private DataSecurityMode dataSecurityMode;
@@ -174,21 +148,7 @@ public class EditCluster {
@JsonProperty("is_single_node")
private Boolean isSingleNode;
- /**
- * The kind of compute described by this compute specification.
- *
- * Depending on `kind`, different validations and default values will be applied.
- *
- * Clusters with `kind = CLASSIC_PREVIEW` support the following fields, whereas clusters with
- * no specified `kind` do not. * [is_single_node](/api/workspace/clusters/create#is_single_node) *
- * [use_ml_runtime](/api/workspace/clusters/create#use_ml_runtime) *
- * [data_security_mode](/api/workspace/clusters/create#data_security_mode) set to
- * `DATA_SECURITY_MODE_AUTO`, `DATA_SECURITY_MODE_DEDICATED`, or `DATA_SECURITY_MODE_STANDARD`
- *
- * By using the [simple form], your clusters are automatically using `kind = CLASSIC_PREVIEW`.
- *
- * [simple form]: https://docs.databricks.com/compute/simple-form.html
- */
+ /** */
@JsonProperty("kind")
private Kind kind;
@@ -296,7 +256,7 @@ public class EditCluster {
@JsonProperty("use_ml_runtime")
private Boolean useMlRuntime;
- /** Cluster Attributes showing for clusters workload types. */
+ /** */
@JsonProperty("workload_type")
private WorkloadType workloadType;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditClusterResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditClusterResponse.java
deleted file mode 100755
index fe1297b0a..000000000
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditClusterResponse.java
+++ /dev/null
@@ -1,28 +0,0 @@
-// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-package com.databricks.sdk.service.compute;
-
-import com.databricks.sdk.support.Generated;
-import com.databricks.sdk.support.ToStringer;
-import java.util.Objects;
-
-@Generated
-public class EditClusterResponse {
-
- @Override
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
- return true;
- }
-
- @Override
- public int hashCode() {
- return Objects.hash();
- }
-
- @Override
- public String toString() {
- return new ToStringer(EditClusterResponse.class).toString();
- }
-}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditInstancePoolResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditInstancePoolResponse.java
deleted file mode 100755
index a2c4bbdd3..000000000
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditInstancePoolResponse.java
+++ /dev/null
@@ -1,28 +0,0 @@
-// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-package com.databricks.sdk.service.compute;
-
-import com.databricks.sdk.support.Generated;
-import com.databricks.sdk.support.ToStringer;
-import java.util.Objects;
-
-@Generated
-public class EditInstancePoolResponse {
-
- @Override
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
- return true;
- }
-
- @Override
- public int hashCode() {
- return Objects.hash();
- }
-
- @Override
- public String toString() {
- return new ToStringer(EditInstancePoolResponse.class).toString();
- }
-}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditPolicyResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditPolicyResponse.java
deleted file mode 100755
index e2931443b..000000000
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditPolicyResponse.java
+++ /dev/null
@@ -1,28 +0,0 @@
-// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-package com.databricks.sdk.service.compute;
-
-import com.databricks.sdk.support.Generated;
-import com.databricks.sdk.support.ToStringer;
-import java.util.Objects;
-
-@Generated
-public class EditPolicyResponse {
-
- @Override
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
- return true;
- }
-
- @Override
- public int hashCode() {
- return Objects.hash();
- }
-
- @Override
- public String toString() {
- return new ToStringer(EditPolicyResponse.class).toString();
- }
-}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditResponse.java
deleted file mode 100755
index 35a7dba24..000000000
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditResponse.java
+++ /dev/null
@@ -1,28 +0,0 @@
-// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-package com.databricks.sdk.service.compute;
-
-import com.databricks.sdk.support.Generated;
-import com.databricks.sdk.support.ToStringer;
-import java.util.Objects;
-
-@Generated
-public class EditResponse {
-
- @Override
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
- return true;
- }
-
- @Override
- public int hashCode() {
- return Objects.hash();
- }
-
- @Override
- public String toString() {
- return new ToStringer(EditResponse.class).toString();
- }
-}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GlobalInitScriptsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GlobalInitScriptsImpl.java
index 94689ee30..e038a880e 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GlobalInitScriptsImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GlobalInitScriptsImpl.java
@@ -37,7 +37,7 @@ public void delete(DeleteGlobalInitScriptRequest request) {
Request req = new Request("DELETE", path);
ApiClient.setQuery(req, request);
req.withHeader("Accept", "application/json");
- apiClient.execute(req, DeleteResponse.class);
+ apiClient.execute(req, Void.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
@@ -76,7 +76,7 @@ public void update(GlobalInitScriptUpdateRequest request) {
ApiClient.setQuery(req, request);
req.withHeader("Accept", "application/json");
req.withHeader("Content-Type", "application/json");
- apiClient.execute(req, UpdateResponse.class);
+ apiClient.execute(req, Void.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstallLibrariesResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstallLibrariesResponse.java
deleted file mode 100755
index 58d55bb76..000000000
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstallLibrariesResponse.java
+++ /dev/null
@@ -1,28 +0,0 @@
-// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-package com.databricks.sdk.service.compute;
-
-import com.databricks.sdk.support.Generated;
-import com.databricks.sdk.support.ToStringer;
-import java.util.Objects;
-
-@Generated
-public class InstallLibrariesResponse {
-
- @Override
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
- return true;
- }
-
- @Override
- public int hashCode() {
- return Objects.hash();
- }
-
- @Override
- public String toString() {
- return new ToStringer(InstallLibrariesResponse.class).toString();
- }
-}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolAccessControlRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolAccessControlRequest.java
index 94ea72be1..5883b80e6 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolAccessControlRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolAccessControlRequest.java
@@ -13,7 +13,7 @@ public class InstancePoolAccessControlRequest {
@JsonProperty("group_name")
private String groupName;
- /** Permission level */
+ /** */
@JsonProperty("permission_level")
private InstancePoolPermissionLevel permissionLevel;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolGcpAttributes.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolGcpAttributes.java
index a97e496ad..774dcaf5a 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolGcpAttributes.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolGcpAttributes.java
@@ -10,10 +10,7 @@
/** Attributes set during instance pool creation which are related to GCP. */
@Generated
public class InstancePoolGcpAttributes {
- /**
- * This field determines whether the instance pool will contain preemptible VMs, on-demand VMs, or
- * preemptible VMs with a fallback to on-demand VMs if the former is unavailable.
- */
+ /** */
@JsonProperty("gcp_availability")
private GcpAvailability gcpAvailability;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolPermission.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolPermission.java
index 7d00ebac0..e8aade399 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolPermission.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolPermission.java
@@ -18,7 +18,7 @@ public class InstancePoolPermission {
@JsonProperty("inherited_from_object")
private Collection<String> inheritedFromObject;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UpdateClusterResource.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UpdateClusterResource.java
- /**
- * Data security mode decides what data governance model to use when accessing data from a
- * cluster.
- *
- * The following modes can only be used when `kind = CLASSIC_PREVIEW`. *
- * `DATA_SECURITY_MODE_AUTO`: Databricks will choose the most appropriate access mode depending on
- * your compute configuration. * `DATA_SECURITY_MODE_STANDARD`: Alias for `USER_ISOLATION`. *
- * `DATA_SECURITY_MODE_DEDICATED`: Alias for `SINGLE_USER`.
- *
- * The following modes can be used regardless of `kind`. * `NONE`: No security isolation for
- * multiple users sharing the cluster. Data governance features are not available in this mode. *
- * `SINGLE_USER`: A secure cluster that can only be exclusively used by a single user specified in
- * `single_user_name`. Most programming languages, cluster features and data governance features
- * are available in this mode. * `USER_ISOLATION`: A secure cluster that can be shared by multiple
- * users. Cluster users are fully isolated so that they cannot see each other's data and
- * credentials. Most data governance features are supported in this mode. But programming
- * languages and cluster features might be limited.
- *
- * The following modes are deprecated starting with Databricks Runtime 15.0 and will be removed
- * for future Databricks Runtime versions:
- *
- * * `LEGACY_TABLE_ACL`: This mode is for users migrating from legacy Table ACL clusters. *
- * `LEGACY_PASSTHROUGH`: This mode is for users migrating from legacy Passthrough on high
- * concurrency clusters. * `LEGACY_SINGLE_USER`: This mode is for users migrating from legacy
- * Passthrough on standard clusters. * `LEGACY_SINGLE_USER_STANDARD`: This mode provides a way
- * that doesn’t have UC nor passthrough enabled.
- */
+ /** */
@JsonProperty("data_security_mode")
private DataSecurityMode dataSecurityMode;
@@ -163,21 +137,7 @@ public class UpdateClusterResource {
@JsonProperty("is_single_node")
private Boolean isSingleNode;
- /**
- * The kind of compute described by this compute specification.
- *
- * Depending on `kind`, different validations and default values will be applied.
- *
- * Clusters with `kind = CLASSIC_PREVIEW` support the following fields, whereas clusters with
- * no specified `kind` do not. * [is_single_node](/api/workspace/clusters/create#is_single_node) *
- * [use_ml_runtime](/api/workspace/clusters/create#use_ml_runtime) *
- * [data_security_mode](/api/workspace/clusters/create#data_security_mode) set to
- * `DATA_SECURITY_MODE_AUTO`, `DATA_SECURITY_MODE_DEDICATED`, or `DATA_SECURITY_MODE_STANDARD`
- *
- * By using the [simple form], your clusters are automatically using `kind = CLASSIC_PREVIEW`.
- *
- * [simple form]: https://docs.databricks.com/compute/simple-form.html
- */
+ /** */
@JsonProperty("kind")
private Kind kind;
@@ -285,7 +245,7 @@ public class UpdateClusterResource {
@JsonProperty("use_ml_runtime")
private Boolean useMlRuntime;
- /** Cluster Attributes showing for clusters workload types. */
+ /** */
@JsonProperty("workload_type")
private WorkloadType workloadType;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UpdateClusterResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UpdateClusterResponse.java
deleted file mode 100755
index 9647b99d1..000000000
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UpdateClusterResponse.java
+++ /dev/null
@@ -1,28 +0,0 @@
-// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-package com.databricks.sdk.service.compute;
-
-import com.databricks.sdk.support.Generated;
-import com.databricks.sdk.support.ToStringer;
-import java.util.Objects;
-
-@Generated
-public class UpdateClusterResponse {
-
- @Override
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
- return true;
- }
-
- @Override
- public int hashCode() {
- return Objects.hash();
- }
-
- @Override
- public String toString() {
- return new ToStringer(UpdateClusterResponse.class).toString();
- }
-}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UpdateResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UpdateResponse.java
deleted file mode 100755
index 62feccc35..000000000
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UpdateResponse.java
+++ /dev/null
@@ -1,28 +0,0 @@
-// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-package com.databricks.sdk.service.compute;
-
-import com.databricks.sdk.support.Generated;
-import com.databricks.sdk.support.ToStringer;
-import java.util.Objects;
-
-@Generated
-public class UpdateResponse {
-
- @Override
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
- return true;
- }
-
- @Override
- public int hashCode() {
- return Objects.hash();
- }
-
- @Override
- public String toString() {
- return new ToStringer(UpdateResponse.class).toString();
- }
-}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CreateScheduleRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CreateScheduleRequest.java
index 33e0c8889..60c0a7fc0 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CreateScheduleRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CreateScheduleRequest.java
@@ -13,7 +13,7 @@ public class CreateScheduleRequest {
/** UUID identifying the dashboard to which the schedule belongs. */
@JsonIgnore private String dashboardId;
- /** */
+ /** The schedule to create. A dashboard is limited to 10 schedules. */
@JsonProperty("schedule")
private Schedule schedule;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CreateSubscriptionRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CreateSubscriptionRequest.java
index 815f3fc8e..59d03eee8 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CreateSubscriptionRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CreateSubscriptionRequest.java
@@ -16,7 +16,7 @@ public class CreateSubscriptionRequest {
/** UUID identifying the schedule to which the subscription belongs. */
@JsonIgnore private String scheduleId;
- /** */
+ /** The subscription to create. A schedule is limited to 100 subscriptions. */
@JsonProperty("subscription")
private Subscription subscription;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieAPI.java
index c42dec495..2548416fd 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieAPI.java
@@ -280,7 +280,7 @@ public void trashSpace(String spaceId) {
trashSpace(new GenieTrashSpaceRequest().setSpaceId(spaceId));
}
- /** Trash a Genie Space. */
+ /** Move a Genie Space to the trash. */
public void trashSpace(GenieTrashSpaceRequest request) {
impl.trashSpace(request);
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieImpl.java
index feed6dd92..d055781dc 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieImpl.java
@@ -43,7 +43,7 @@ public void deleteConversation(GenieDeleteConversationRequest request) {
Request req = new Request("DELETE", path);
ApiClient.setQuery(req, request);
req.withHeader("Accept", "application/json");
- apiClient.execute(req, DeleteConversationResponse.class);
+ apiClient.execute(req, Void.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
@@ -221,7 +221,7 @@ public void trashSpace(GenieTrashSpaceRequest request) {
Request req = new Request("DELETE", path);
ApiClient.setQuery(req, request);
req.withHeader("Accept", "application/json");
- apiClient.execute(req, TrashSpaceResponse.class);
+ apiClient.execute(req, Void.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieListConversationsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieListConversationsRequest.java
index 67a45a2b9..7be6e4bea 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieListConversationsRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieListConversationsRequest.java
@@ -20,7 +20,7 @@ public class GenieListConversationsRequest {
@QueryParam("page_token")
private String pageToken;
- /** The ID associated with the Genie space to list conversations from. */
+ /** The ID of the Genie space to retrieve conversations from. */
@JsonIgnore private String spaceId;
public GenieListConversationsRequest setPageSize(Long pageSize) {
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieMessage.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieMessage.java
index db243673c..be97d6d5c 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieMessage.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieMessage.java
@@ -53,22 +53,7 @@ public class GenieMessage {
@JsonProperty("space_id")
private String spaceId;
- /**
- * MessageStatus. The possible values are: * `FETCHING_METADATA`: Fetching metadata from the data
- * sources. * `FILTERING_CONTEXT`: Running smart context step to determine relevant context. *
- * `ASKING_AI`: Waiting for the LLM to respond to the user's question. * `PENDING_WAREHOUSE`:
- * Waiting for warehouse before the SQL query can start executing. * `EXECUTING_QUERY`: Executing
- * a generated SQL query. Get the SQL query result by calling
- * [getMessageAttachmentQueryResult](:method:genie/getMessageAttachmentQueryResult) API. *
- * `FAILED`: The response generation or query execution failed. See `error` field. * `COMPLETED`:
- * Message processing is completed. Results are in the `attachments` field. Get the SQL query
- * result by calling
- * [getMessageAttachmentQueryResult](:method:genie/getMessageAttachmentQueryResult) API. *
- * `SUBMITTED`: Message has been submitted. * `QUERY_RESULT_EXPIRED`: SQL result is not available
- * anymore. The user needs to rerun the query. Rerun the SQL query result by calling
- * [executeMessageAttachmentQuery](:method:genie/executeMessageAttachmentQuery) API. *
- * `CANCELLED`: Message has been cancelled.
- */
+ /** */
@JsonProperty("status")
private MessageStatus status;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieService.java
index 02368b9cd..f0ac59161 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieService.java
@@ -74,6 +74,6 @@ GenieListConversationsResponse listConversations(
GenieStartConversationResponse startConversation(
GenieStartConversationMessageRequest genieStartConversationMessageRequest);
- /** Trash a Genie Space. */
+ /** Move a Genie Space to the trash. */
void trashSpace(GenieTrashSpaceRequest genieTrashSpaceRequest);
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieTrashSpaceRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieTrashSpaceRequest.java
index 5b27effad..d23cc055c 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieTrashSpaceRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieTrashSpaceRequest.java
@@ -9,7 +9,7 @@
@Generated
public class GenieTrashSpaceRequest {
- /** The ID associated with the Genie space to be trashed. */
+ /** The ID associated with the Genie space to be sent to the trash. */
@JsonIgnore private String spaceId;
public GenieTrashSpaceRequest setSpaceId(String spaceId) {
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewImpl.java
index ac931a0e7..3b744a103 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewImpl.java
@@ -72,7 +72,7 @@ public void deleteSchedule(DeleteScheduleRequest request) {
Request req = new Request("DELETE", path);
ApiClient.setQuery(req, request);
req.withHeader("Accept", "application/json");
- apiClient.execute(req, DeleteScheduleResponse.class);
+ apiClient.execute(req, Void.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
@@ -88,7 +88,7 @@ public void deleteSubscription(DeleteSubscriptionRequest request) {
Request req = new Request("DELETE", path);
ApiClient.setQuery(req, request);
req.withHeader("Accept", "application/json");
- apiClient.execute(req, DeleteSubscriptionResponse.class);
+ apiClient.execute(req, Void.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
@@ -232,7 +232,7 @@ public void trash(TrashDashboardRequest request) {
Request req = new Request("DELETE", path);
ApiClient.setQuery(req, request);
req.withHeader("Accept", "application/json");
- apiClient.execute(req, TrashDashboardResponse.class);
+ apiClient.execute(req, Void.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
@@ -246,7 +246,7 @@ public void unpublish(UnpublishDashboardRequest request) {
Request req = new Request("DELETE", path);
ApiClient.setQuery(req, request);
req.withHeader("Accept", "application/json");
- apiClient.execute(req, UnpublishDashboardResponse.class);
+ apiClient.execute(req, Void.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/TrashDashboardResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/TrashDashboardResponse.java
deleted file mode 100755
index 0f43a3a49..000000000
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/TrashDashboardResponse.java
+++ /dev/null
@@ -1,28 +0,0 @@
-// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-package com.databricks.sdk.service.dashboards;
-
-import com.databricks.sdk.support.Generated;
-import com.databricks.sdk.support.ToStringer;
-import java.util.Objects;
-
-@Generated
-public class TrashDashboardResponse {
-
- @Override
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
- return true;
- }
-
- @Override
- public int hashCode() {
- return Objects.hash();
- }
-
- @Override
- public String toString() {
- return new ToStringer(TrashDashboardResponse.class).toString();
- }
-}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UnpublishDashboardResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UnpublishDashboardResponse.java
deleted file mode 100755
index 211e9c010..000000000
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UnpublishDashboardResponse.java
+++ /dev/null
@@ -1,28 +0,0 @@
-// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-package com.databricks.sdk.service.dashboards;
-
-import com.databricks.sdk.support.Generated;
-import com.databricks.sdk.support.ToStringer;
-import java.util.Objects;
-
-@Generated
-public class UnpublishDashboardResponse {
-
- @Override
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
- return true;
- }
-
- @Override
- public int hashCode() {
- return Objects.hash();
- }
-
- @Override
- public String toString() {
- return new ToStringer(UnpublishDashboardResponse.class).toString();
- }
-}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UpdateScheduleRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UpdateScheduleRequest.java
index f3faa860d..b581278a9 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UpdateScheduleRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UpdateScheduleRequest.java
@@ -13,7 +13,7 @@ public class UpdateScheduleRequest {
/** UUID identifying the dashboard to which the schedule belongs. */
@JsonIgnore private String dashboardId;
- /** */
+ /** The schedule to update. */
@JsonProperty("schedule")
private Schedule schedule;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseInstanceRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseInstanceRequest.java
index 73af31b6c..59260918b 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseInstanceRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseInstanceRequest.java
@@ -9,10 +9,7 @@
@Generated
public class CreateDatabaseInstanceRequest {
- /**
- * A DatabaseInstance represents a logical Postgres instance, comprised of both compute and
- * storage.
- */
+ /** Instance to create. */
@JsonProperty("database_instance")
private DatabaseInstance databaseInstance;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseInstanceRoleRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseInstanceRoleRequest.java
index 28c177a98..af69b9394 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseInstanceRoleRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseInstanceRoleRequest.java
@@ -10,7 +10,7 @@
@Generated
public class CreateDatabaseInstanceRoleRequest {
- /** A DatabaseInstanceRole represents a Postgres role in a database instance. */
+ /** */
@JsonProperty("database_instance_role")
private DatabaseInstanceRole databaseInstanceRole;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseTableRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseTableRequest.java
index bfadcb9fc..934692059 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseTableRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseTableRequest.java
@@ -9,7 +9,7 @@
@Generated
public class CreateDatabaseTableRequest {
- /** Next field marker: 13 */
+ /** */
@JsonProperty("table")
private DatabaseTable table;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateSyncedDatabaseTableRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateSyncedDatabaseTableRequest.java
index 7dc427c0d..1e650387d 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateSyncedDatabaseTableRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateSyncedDatabaseTableRequest.java
@@ -9,7 +9,7 @@
@Generated
public class CreateSyncedDatabaseTableRequest {
- /** Next field marker: 12 */
+ /** */
@JsonProperty("synced_table")
private SyncedDatabaseTable syncedTable;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseImpl.java
index ee9b0801e..702b11a4c 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseImpl.java
@@ -95,7 +95,7 @@ public void deleteDatabaseCatalog(DeleteDatabaseCatalogRequest request) {
Request req = new Request("DELETE", path);
ApiClient.setQuery(req, request);
req.withHeader("Accept", "application/json");
- apiClient.execute(req, DeleteDatabaseCatalogResponse.class);
+ apiClient.execute(req, Void.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
@@ -108,7 +108,7 @@ public void deleteDatabaseInstance(DeleteDatabaseInstanceRequest request) {
Request req = new Request("DELETE", path);
ApiClient.setQuery(req, request);
req.withHeader("Accept", "application/json");
- apiClient.execute(req, DeleteDatabaseInstanceResponse.class);
+ apiClient.execute(req, Void.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
@@ -124,7 +124,7 @@ public void deleteDatabaseInstanceRole(DeleteDatabaseInstanceRoleRequest request
Request req = new Request("DELETE", path);
ApiClient.setQuery(req, request);
req.withHeader("Accept", "application/json");
- apiClient.execute(req, DeleteDatabaseInstanceRoleResponse.class);
+ apiClient.execute(req, Void.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
@@ -137,7 +137,7 @@ public void deleteDatabaseTable(DeleteDatabaseTableRequest request) {
Request req = new Request("DELETE", path);
ApiClient.setQuery(req, request);
req.withHeader("Accept", "application/json");
- apiClient.execute(req, DeleteDatabaseTableResponse.class);
+ apiClient.execute(req, Void.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
@@ -150,7 +150,7 @@ public void deleteSyncedDatabaseTable(DeleteSyncedDatabaseTableRequest request)
Request req = new Request("DELETE", path);
ApiClient.setQuery(req, request);
req.withHeader("Accept", "application/json");
- apiClient.execute(req, DeleteSyncedDatabaseTableResponse.class);
+ apiClient.execute(req, Void.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
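
Note: the DatabaseImpl hunks above swap the generated empty response classes for Void.class as the deserialization target of the DELETE calls; the public API methods still return void, so callers are unaffected. A minimal sketch of a caller, assuming the generated database() accessor on WorkspaceClient and a setName setter on DeleteDatabaseInstanceRequest (neither is shown in this diff):

    import com.databricks.sdk.WorkspaceClient;
    import com.databricks.sdk.service.database.DeleteDatabaseInstanceRequest;

    public class DeleteInstanceExample {
      public static void main(String[] args) {
        // Authenticates from the environment or ~/.databrickscfg, as usual for the SDK.
        WorkspaceClient w = new WorkspaceClient();

        // The Impl now deserializes the empty DELETE body into Void.class instead of
        // a dedicated response class; this call still returns void.
        w.database()
            .deleteDatabaseInstance(
                new DeleteDatabaseInstanceRequest().setName("my-instance")); // hypothetical instance name
      }
    }
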
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GenerateDatabaseCredentialRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GenerateDatabaseCredentialRequest.java
index ba727372f..4653f62dd 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GenerateDatabaseCredentialRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GenerateDatabaseCredentialRequest.java
@@ -11,6 +11,13 @@
/** Generates a credential that can be used to access database instances */
@Generated
public class GenerateDatabaseCredentialRequest {
+ /**
+ * The returned token will be scoped to the union of instance_names and instances containing the
+ * specified UC tables, so instance_names is allowed to be empty.
+ */
+ @JsonProperty("claims")
+ private Collection claims;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/BaseRun.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/BaseRun.java
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/BaseRun.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/BaseRun.java
- * [dbutils.notebook.run]:
- * https://docs.databricks.com/dev-tools/databricks-utils.html#dbutils-workflow
- */
+ /** */
@JsonProperty("run_type")
private RunType runType;
@@ -203,7 +196,7 @@ public class BaseRun {
@JsonProperty("state")
private RunState state;
- /** The current status of the run */
+ /** */
@JsonProperty("status")
private RunStatus status;
@@ -216,23 +209,11 @@ public class BaseRun {
@JsonProperty("tasks")
private Collection<RunTask> tasks;
- * * `PERIODIC`: Schedules that periodically trigger runs, such as a cron scheduler. *
- * `ONE_TIME`: One time triggers that fire a single run. This occurs you triggered a single run on
- * demand through the UI or the API. * `RETRY`: Indicates a run that is triggered as a retry of a
- * previously failed run. This occurs when you request to re-run the job in case of failures. *
- * `RUN_JOB_TASK`: Indicates a run that is triggered using a Run Job task. * `FILE_ARRIVAL`:
- * Indicates a run that is triggered by a file arrival. * `CONTINUOUS`: Indicates a run that is
- * triggered by a continuous job. * `TABLE`: Indicates a run that is triggered by a table update.
- * * `CONTINUOUS_RESTART`: Indicates a run created by user to manually restart a continuous job
- * run. * `MODEL`: Indicates a run that is triggered by a model update.
- */
+ /** */
@JsonProperty("trigger")
private TriggerType trigger;
- /** Additional details about what triggered the run */
+ /** */
@JsonProperty("trigger_info")
private TriggerInfo triggerInfo;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CancelAllRunsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CancelAllRunsResponse.java
deleted file mode 100755
index c0b570c3f..000000000
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CancelAllRunsResponse.java
+++ /dev/null
@@ -1,28 +0,0 @@
-// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-package com.databricks.sdk.service.jobs;
-
-import com.databricks.sdk.support.Generated;
-import com.databricks.sdk.support.ToStringer;
-import java.util.Objects;
-
-@Generated
-public class CancelAllRunsResponse {
-
- @Override
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
- return true;
- }
-
- @Override
- public int hashCode() {
- return Objects.hash();
- }
-
- @Override
- public String toString() {
- return new ToStringer(CancelAllRunsResponse.class).toString();
- }
-}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CancelRunResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CancelRunResponse.java
deleted file mode 100755
index 8c956bd00..000000000
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CancelRunResponse.java
+++ /dev/null
@@ -1,28 +0,0 @@
-// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-package com.databricks.sdk.service.jobs;
-
-import com.databricks.sdk.support.Generated;
-import com.databricks.sdk.support.ToStringer;
-import java.util.Objects;
-
-@Generated
-public class CancelRunResponse {
-
- @Override
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
- return true;
- }
-
- @Override
- public int hashCode() {
- return Objects.hash();
- }
-
- @Override
- public String toString() {
- return new ToStringer(CancelRunResponse.class).toString();
- }
-}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CreateJob.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CreateJob.java
index 527597bec..ae5c94e8c 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CreateJob.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CreateJob.java
@@ -87,7 +87,7 @@ public class CreateJob {
@JsonProperty("git_source")
private GitSource gitSource;
- /** An optional set of health rules that can be defined for this job. */
+ /** */
@JsonProperty("health")
private JobsHealthRules health;
@@ -142,13 +142,7 @@ public class CreateJob {
@JsonProperty("queue")
private QueueSettings queue;
- /**
- * Write-only setting. Specifies the user or service principal that the job runs as. If not
- * specified, the job runs as the user who created the job.
- *
- * Either `user_name` or `service_principal_name` should be specified. If not, an error is
- * thrown.
- */
+ /** */
@JsonProperty("run_as")
private JobRunAs runAs;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DeleteResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DeleteResponse.java
deleted file mode 100755
index 2fb1336ff..000000000
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DeleteResponse.java
+++ /dev/null
@@ -1,28 +0,0 @@
-// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-package com.databricks.sdk.service.jobs;
-
-import com.databricks.sdk.support.Generated;
-import com.databricks.sdk.support.ToStringer;
-import java.util.Objects;
-
-@Generated
-public class DeleteResponse {
-
- @Override
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
- return true;
- }
-
- @Override
- public int hashCode() {
- return Objects.hash();
- }
-
- @Override
- public String toString() {
- return new ToStringer(DeleteResponse.class).toString();
- }
-}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DeleteRunResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DeleteRunResponse.java
deleted file mode 100755
index 125c19ad2..000000000
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DeleteRunResponse.java
+++ /dev/null
@@ -1,28 +0,0 @@
-// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-package com.databricks.sdk.service.jobs;
-
-import com.databricks.sdk.support.Generated;
-import com.databricks.sdk.support.ToStringer;
-import java.util.Objects;
-
-@Generated
-public class DeleteRunResponse {
-
- @Override
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
- return true;
- }
-
- @Override
- public int hashCode() {
- return Objects.hash();
- }
-
- @Override
- public String toString() {
- return new ToStringer(DeleteRunResponse.class).toString();
- }
-}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/GitSource.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/GitSource.java
index e14ae5f66..88c0dae70 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/GitSource.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/GitSource.java
@@ -40,10 +40,7 @@ public class GitSource {
@JsonProperty("git_provider")
private GitProvider gitProvider;
- /**
- * Read-only state of the remote repository at the time the job was run. This field is only
- * included on job runs.
- */
+ /** */
@JsonProperty("git_snapshot")
private GitSnapshot gitSnapshot;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobAccessControlRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobAccessControlRequest.java
index d2e6553d1..4333dec1e 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobAccessControlRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobAccessControlRequest.java
@@ -13,7 +13,7 @@ public class JobAccessControlRequest {
@JsonProperty("group_name")
private String groupName;
- /** Permission level */
+ /** */
@JsonProperty("permission_level")
private JobPermissionLevel permissionLevel;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobEnvironment.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobEnvironment.java
index dc3dcccea..c54674370 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobEnvironment.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobEnvironment.java
@@ -13,11 +13,7 @@ public class JobEnvironment {
@JsonProperty("environment_key")
private String environmentKey;
- /**
- * The environment entity used to preserve serverless environment side panel, jobs' environment
- * for non-notebook task, and DLT's environment for classic and serverless pipelines. In this
- * minimal environment spec, only pip dependencies are supported.
- */
+ /** */
@JsonProperty("spec")
private com.databricks.sdk.service.compute.Environment spec;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobPermission.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobPermission.java
index 4efc5cb2a..faa69a980 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobPermission.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobPermission.java
@@ -18,7 +18,7 @@ public class JobPermission {
@JsonProperty("inherited_from_object")
private Collection<String> inheritedFromObject;
- * Either `user_name` or `service_principal_name` should be specified. If not, an error is
- * thrown.
- */
+ /** */
@JsonProperty("run_as")
private JobRunAs runAs;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsHealthRule.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsHealthRule.java
index c76e051bb..65f55a858 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsHealthRule.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsHealthRule.java
@@ -9,23 +9,11 @@
@Generated
public class JobsHealthRule {
- /**
- * Specifies the health metric that is being evaluated for a particular health rule.
- *
- * * `RUN_DURATION_SECONDS`: Expected total time for a run in seconds. *
- * `STREAMING_BACKLOG_BYTES`: An estimate of the maximum bytes of data waiting to be consumed
- * across all streams. This metric is in Public Preview. * `STREAMING_BACKLOG_RECORDS`: An
- * estimate of the maximum offset lag across all streams. This metric is in Public Preview. *
- * `STREAMING_BACKLOG_SECONDS`: An estimate of the maximum consumer delay across all streams. This
- * metric is in Public Preview. * `STREAMING_BACKLOG_FILES`: An estimate of the maximum number of
- * outstanding files across all streams. This metric is in Public Preview.
- */
+ /** */
@JsonProperty("metric")
private JobsHealthMetric metric;
- /**
- * Specifies the operator used to compare the health metric value with the specified threshold.
- */
+ /** */
@JsonProperty("op")
private JobsHealthOperator op;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsImpl.java
index 0902a4b5f..f40489960 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsImpl.java
@@ -23,7 +23,7 @@ public void cancelAllRuns(CancelAllRuns request) {
Request req = new Request("POST", path, apiClient.serialize(request));
ApiClient.setQuery(req, request);
req.withHeader("Content-Type", "application/json");
- apiClient.execute(req, CancelAllRunsResponse.class);
+ apiClient.execute(req, Void.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
@@ -36,7 +36,7 @@ public void cancelRun(CancelRun request) {
Request req = new Request("POST", path, apiClient.serialize(request));
ApiClient.setQuery(req, request);
req.withHeader("Content-Type", "application/json");
- apiClient.execute(req, CancelRunResponse.class);
+ apiClient.execute(req, Void.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
@@ -63,7 +63,7 @@ public void delete(DeleteJob request) {
Request req = new Request("POST", path, apiClient.serialize(request));
ApiClient.setQuery(req, request);
req.withHeader("Content-Type", "application/json");
- apiClient.execute(req, DeleteResponse.class);
+ apiClient.execute(req, Void.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
@@ -76,7 +76,7 @@ public void deleteRun(DeleteRun request) {
Request req = new Request("POST", path, apiClient.serialize(request));
ApiClient.setQuery(req, request);
req.withHeader("Content-Type", "application/json");
- apiClient.execute(req, DeleteRunResponse.class);
+ apiClient.execute(req, Void.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
@@ -208,7 +208,7 @@ public void reset(ResetJob request) {
Request req = new Request("POST", path, apiClient.serialize(request));
ApiClient.setQuery(req, request);
req.withHeader("Content-Type", "application/json");
- apiClient.execute(req, ResetResponse.class);
+ apiClient.execute(req, Void.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
@@ -263,7 +263,7 @@ public void update(UpdateJob request) {
Request req = new Request("POST", path, apiClient.serialize(request));
ApiClient.setQuery(req, request);
req.withHeader("Content-Type", "application/json");
- apiClient.execute(req, UpdateResponse.class);
+ apiClient.execute(req, Void.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
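
The same pattern applies to JobsImpl: cancelAllRuns, cancelRun, delete, deleteRun, reset, and update now deserialize into Void.class, and the corresponding empty response classes are removed elsewhere in this diff. Caller code is unchanged; a sketch, assuming the generated convenience overload delete(long) on JobsAPI and a placeholder job ID:

    import com.databricks.sdk.WorkspaceClient;

    public class DeleteJobExample {
      public static void main(String[] args) {
        WorkspaceClient w = new WorkspaceClient();
        long jobId = 123L; // hypothetical job ID

        // Still returns void; only the internal deserialization target changed.
        w.jobs().delete(jobId);
      }
    }
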
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/QueueDetails.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/QueueDetails.java
index f89c9be21..0729c1863 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/QueueDetails.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/QueueDetails.java
@@ -9,13 +9,7 @@
@Generated
public class QueueDetails {
- /**
- * The reason for queuing the run. * `ACTIVE_RUNS_LIMIT_REACHED`: The run was queued due to
- * reaching the workspace limit of active task runs. * `MAX_CONCURRENT_RUNS_REACHED`: The run was
- * queued due to reaching the per-job limit of concurrent job runs. *
- * `ACTIVE_RUN_JOB_TASKS_LIMIT_REACHED`: The run was queued due to reaching the workspace limit of
- * active run job tasks.
- */
+ /** */
@JsonProperty("code")
private QueueDetailsCodeCode code;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RepairHistoryItem.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RepairHistoryItem.java
index 24a8b911d..7a0e6d77f 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RepairHistoryItem.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RepairHistoryItem.java
@@ -40,7 +40,7 @@ public class RepairHistoryItem {
@JsonProperty("state")
private RunState state;
- /** The current status of the run */
+ /** */
@JsonProperty("status")
private RunStatus status;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ResetResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ResetResponse.java
deleted file mode 100755
index 34127047c..000000000
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ResetResponse.java
+++ /dev/null
@@ -1,28 +0,0 @@
-// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-package com.databricks.sdk.service.jobs;
-
-import com.databricks.sdk.support.Generated;
-import com.databricks.sdk.support.ToStringer;
-import java.util.Objects;
-
-@Generated
-public class ResetResponse {
-
- @Override
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
- return true;
- }
-
- @Override
- public int hashCode() {
- return Objects.hash();
- }
-
- @Override
- public String toString() {
- return new ToStringer(ResetResponse.class).toString();
- }
-}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Run.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Run.java
index 1e113851a..70bb1e68c 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Run.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Run.java
@@ -175,14 +175,7 @@ public class Run {
@JsonProperty("run_page_url")
private String runPageUrl;
- /**
- * The type of a run. * `JOB_RUN`: Normal job run. A run created with :method:jobs/runNow. *
- * `WORKFLOW_RUN`: Workflow run. A run created with [dbutils.notebook.run]. * `SUBMIT_RUN`: Submit
- * run. A run created with :method:jobs/submit.
- *
- * [dbutils.notebook.run]:
- * https://docs.databricks.com/dev-tools/databricks-utils.html#dbutils-workflow
- */
+ /** */
@JsonProperty("run_type")
private RunType runType;
@@ -212,7 +205,7 @@ public class Run {
@JsonProperty("state")
private RunState state;
- /** The current status of the run */
+ /** */
@JsonProperty("status")
private RunStatus status;
@@ -225,23 +218,11 @@ public class Run {
@JsonProperty("tasks")
private Collection<RunTask> tasks;
- * * `PERIODIC`: Schedules that periodically trigger runs, such as a cron scheduler. *
- * `ONE_TIME`: One time triggers that fire a single run. This occurs you triggered a single run on
- * demand through the UI or the API. * `RETRY`: Indicates a run that is triggered as a retry of a
- * previously failed run. This occurs when you request to re-run the job in case of failures. *
- * `RUN_JOB_TASK`: Indicates a run that is triggered using a Run Job task. * `FILE_ARRIVAL`:
- * Indicates a run that is triggered by a file arrival. * `CONTINUOUS`: Indicates a run that is
- * triggered by a continuous job. * `TABLE`: Indicates a run that is triggered by a table update.
- * * `CONTINUOUS_RESTART`: Indicates a run created by user to manually restart a continuous job
- * run. * `MODEL`: Indicates a run that is triggered by a model update.
- */
+ /** */
@JsonProperty("trigger")
private TriggerType trigger;
- /** Additional details about what triggered the run */
+ /** */
@JsonProperty("trigger_info")
private TriggerInfo triggerInfo;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunStatus.java
index e9219f045..9b882ec8b 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunStatus.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunStatus.java
@@ -14,7 +14,7 @@ public class RunStatus {
@JsonProperty("queue_details")
private QueueDetails queueDetails;
- /** The current state of the run. */
+ /** */
@JsonProperty("state")
private RunLifecycleStateV2State state;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunTask.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunTask.java
index 56ffe907b..268a3c4fd 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunTask.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunTask.java
@@ -296,7 +296,7 @@ public class RunTask {
@JsonProperty("state")
private RunState state;
- /** The current status of the run */
+ /** */
@JsonProperty("status")
private RunStatus status;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlAlertOutput.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlAlertOutput.java
index 1810d913e..a59931c1a 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlAlertOutput.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlAlertOutput.java
@@ -10,12 +10,7 @@
@Generated
public class SqlAlertOutput {
- /**
- * The state of the SQL alert.
- *
- * * UNKNOWN: alert yet to be evaluated * OK: alert evaluated and did not fulfill trigger
- * conditions * TRIGGERED: alert evaluated and fulfilled trigger conditions
- */
+ /** */
@JsonProperty("alert_state")
private SqlAlertState alertState;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitRun.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitRun.java
index 694a4df67..58c71b012 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitRun.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitRun.java
@@ -46,7 +46,7 @@ public class SubmitRun {
@JsonProperty("git_source")
private GitSource gitSource;
- /** An optional set of health rules that can be defined for this job. */
+ /** */
@JsonProperty("health")
private JobsHealthRules health;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitTask.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitTask.java
index e17c2d833..07677312a 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitTask.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitTask.java
@@ -90,7 +90,7 @@ public class SubmitTask {
@JsonProperty("gen_ai_compute_task")
private GenAiComputeTask genAiComputeTask;
- /** An optional set of health rules that can be defined for this job. */
+ /** */
@JsonProperty("health")
private JobsHealthRules health;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Task.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Task.java
index 9de9e54e8..21d0c3daf 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Task.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Task.java
@@ -95,7 +95,7 @@ public class Task {
@JsonProperty("gen_ai_compute_task")
private GenAiComputeTask genAiComputeTask;
- /** An optional set of health rules that can be defined for this job. */
+ /** */
@JsonProperty("health")
private JobsHealthRules health;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TerminationDetails.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TerminationDetails.java
index 73025923e..044b0866f 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TerminationDetails.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TerminationDetails.java
@@ -9,46 +9,7 @@
@Generated
public class TerminationDetails {
- /**
- * The code indicates why the run was terminated. Additional codes might be introduced in future
- * releases. * `SUCCESS`: The run was completed successfully. * `SUCCESS_WITH_FAILURES`: The run
- * was completed successfully but some child runs failed. * `USER_CANCELED`: The run was
- * successfully canceled during execution by a user. * `CANCELED`: The run was canceled during
- * execution by the Databricks platform; for example, if the maximum run duration was exceeded. *
- * `SKIPPED`: Run was never executed, for example, if the upstream task run failed, the dependency
- * type condition was not met, or there were no material tasks to execute. * `INTERNAL_ERROR`: The
- * run encountered an unexpected error. Refer to the state message for further details. *
- * `DRIVER_ERROR`: The run encountered an error while communicating with the Spark Driver. *
- * `CLUSTER_ERROR`: The run failed due to a cluster error. Refer to the state message for further
- * details. * `REPOSITORY_CHECKOUT_FAILED`: Failed to complete the checkout due to an error when
- * communicating with the third party service. * `INVALID_CLUSTER_REQUEST`: The run failed because
- * it issued an invalid request to start the cluster. * `WORKSPACE_RUN_LIMIT_EXCEEDED`: The
- * workspace has reached the quota for the maximum number of concurrent active runs. Consider
- * scheduling the runs over a larger time frame. * `FEATURE_DISABLED`: The run failed because it
- * tried to access a feature unavailable for the workspace. * `CLUSTER_REQUEST_LIMIT_EXCEEDED`:
- * The number of cluster creation, start, and upsize requests have exceeded the allotted rate
- * limit. Consider spreading the run execution over a larger time frame. * `STORAGE_ACCESS_ERROR`:
- * The run failed due to an error when accessing the customer blob storage. Refer to the state
- * message for further details. * `RUN_EXECUTION_ERROR`: The run was completed with task failures.
- * For more details, refer to the state message or run output. * `UNAUTHORIZED_ERROR`: The run
- * failed due to a permission issue while accessing a resource. Refer to the state message for
- * further details. * `LIBRARY_INSTALLATION_ERROR`: The run failed while installing the
- * user-requested library. Refer to the state message for further details. The causes might
- * include, but are not limited to: The provided library is invalid, there are insufficient
- * permissions to install the library, and so forth. * `MAX_CONCURRENT_RUNS_EXCEEDED`: The
- * scheduled run exceeds the limit of maximum concurrent runs set for the job. *
- * `MAX_SPARK_CONTEXTS_EXCEEDED`: The run is scheduled on a cluster that has already reached the
- * maximum number of contexts it is configured to create. See: [Link]. * `RESOURCE_NOT_FOUND`: A
- * resource necessary for run execution does not exist. Refer to the state message for further
- * details. * `INVALID_RUN_CONFIGURATION`: The run failed due to an invalid configuration. Refer
- * to the state message for further details. * `CLOUD_FAILURE`: The run failed due to a cloud
- * provider issue. Refer to the state message for further details. *
- * `MAX_JOB_QUEUE_SIZE_EXCEEDED`: The run was skipped due to reaching the job level queue size
- * limit. * `DISABLED`: The run was never executed because it was disabled explicitly by the user.
- *
- * [Link]:
- * https://kb.databricks.com/en_US/notebooks/too-many-execution-contexts-are-open-right-now
- */
+ /** */
@JsonProperty("code")
private TerminationCodeCode code;
@@ -59,15 +20,7 @@ public class TerminationDetails {
@JsonProperty("message")
private String message;
- /**
- * * `SUCCESS`: The run terminated without any issues * `INTERNAL_ERROR`: An error occurred in the
- * Databricks platform. Please look at the [status page] or contact support if the issue persists.
- * * `CLIENT_ERROR`: The run was terminated because of an error caused by user input or the job
- * configuration. * `CLOUD_FAILURE`: The run was terminated because of an issue with your cloud
- * provider.
- *
- * [status page]: https://status.databricks.com/
- */
+ /** */
@JsonProperty("type")
private TerminationTypeType typeValue;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/UpdateResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/UpdateResponse.java
deleted file mode 100755
index d53424f25..000000000
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/UpdateResponse.java
+++ /dev/null
@@ -1,28 +0,0 @@
-// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-package com.databricks.sdk.service.jobs;
-
-import com.databricks.sdk.support.Generated;
-import com.databricks.sdk.support.ToStringer;
-import java.util.Objects;
-
-@Generated
-public class UpdateResponse {
-
- @Override
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
- return true;
- }
-
- @Override
- public int hashCode() {
- return Objects.hash();
- }
-
- @Override
- public String toString() {
- return new ToStringer(UpdateResponse.class).toString();
- }
-}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerInstallationsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerInstallationsImpl.java
index c817eb350..919752faf 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerInstallationsImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerInstallationsImpl.java
@@ -42,7 +42,7 @@ public void delete(DeleteInstallationRequest request) {
Request req = new Request("DELETE", path);
ApiClient.setQuery(req, request);
req.withHeader("Accept", "application/json");
- apiClient.execute(req, DeleteInstallationResponse.class);
+ apiClient.execute(req, Void.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteExchangeFilterResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteExchangeFilterResponse.java
deleted file mode 100755
index b7cca36ab..000000000
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteExchangeFilterResponse.java
+++ /dev/null
@@ -1,28 +0,0 @@
-// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-package com.databricks.sdk.service.marketplace;
-
-import com.databricks.sdk.support.Generated;
-import com.databricks.sdk.support.ToStringer;
-import java.util.Objects;
-
-@Generated
-public class DeleteExchangeFilterResponse {
-
- @Override
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
- return true;
- }
-
- @Override
- public int hashCode() {
- return Objects.hash();
- }
-
- @Override
- public String toString() {
- return new ToStringer(DeleteExchangeFilterResponse.class).toString();
- }
-}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteExchangeResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteExchangeResponse.java
deleted file mode 100755
index be2eb59ea..000000000
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteExchangeResponse.java
+++ /dev/null
@@ -1,28 +0,0 @@
-// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-package com.databricks.sdk.service.marketplace;
-
-import com.databricks.sdk.support.Generated;
-import com.databricks.sdk.support.ToStringer;
-import java.util.Objects;
-
-@Generated
-public class DeleteExchangeResponse {
-
- @Override
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
- return true;
- }
-
- @Override
- public int hashCode() {
- return Objects.hash();
- }
-
- @Override
- public String toString() {
- return new ToStringer(DeleteExchangeResponse.class).toString();
- }
-}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteFileResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteFileResponse.java
deleted file mode 100755
index f799f3705..000000000
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteFileResponse.java
+++ /dev/null
@@ -1,28 +0,0 @@
-// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-package com.databricks.sdk.service.marketplace;
-
-import com.databricks.sdk.support.Generated;
-import com.databricks.sdk.support.ToStringer;
-import java.util.Objects;
-
-@Generated
-public class DeleteFileResponse {
-
- @Override
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
- return true;
- }
-
- @Override
- public int hashCode() {
- return Objects.hash();
- }
-
- @Override
- public String toString() {
- return new ToStringer(DeleteFileResponse.class).toString();
- }
-}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteInstallationResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteInstallationResponse.java
deleted file mode 100755
index 0928712ff..000000000
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteInstallationResponse.java
+++ /dev/null
@@ -1,28 +0,0 @@
-// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-package com.databricks.sdk.service.marketplace;
-
-import com.databricks.sdk.support.Generated;
-import com.databricks.sdk.support.ToStringer;
-import java.util.Objects;
-
-@Generated
-public class DeleteInstallationResponse {
-
- @Override
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
- return true;
- }
-
- @Override
- public int hashCode() {
- return Objects.hash();
- }
-
- @Override
- public String toString() {
- return new ToStringer(DeleteInstallationResponse.class).toString();
- }
-}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteListingResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteListingResponse.java
deleted file mode 100755
index 1250054c3..000000000
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteListingResponse.java
+++ /dev/null
@@ -1,28 +0,0 @@
-// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-package com.databricks.sdk.service.marketplace;
-
-import com.databricks.sdk.support.Generated;
-import com.databricks.sdk.support.ToStringer;
-import java.util.Objects;
-
-@Generated
-public class DeleteListingResponse {
-
- @Override
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
- return true;
- }
-
- @Override
- public int hashCode() {
- return Objects.hash();
- }
-
- @Override
- public String toString() {
- return new ToStringer(DeleteListingResponse.class).toString();
- }
-}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteProviderResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteProviderResponse.java
deleted file mode 100755
index 41f94447f..000000000
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteProviderResponse.java
+++ /dev/null
@@ -1,28 +0,0 @@
-// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-package com.databricks.sdk.service.marketplace;
-
-import com.databricks.sdk.support.Generated;
-import com.databricks.sdk.support.ToStringer;
-import java.util.Objects;
-
-@Generated
-public class DeleteProviderResponse {
-
- @Override
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
- return true;
- }
-
- @Override
- public int hashCode() {
- return Objects.hash();
- }
-
- @Override
- public String toString() {
- return new ToStringer(DeleteProviderResponse.class).toString();
- }
-}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListingSummary.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListingSummary.java
index 60e960e72..93cacffed 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListingSummary.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListingSummary.java
@@ -69,7 +69,7 @@ public class ListingSummary {
@JsonProperty("share")
private ShareInfo share;
- /** Enums */
+ /** */
@JsonProperty("status")
private ListingStatus status;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/PersonalizationRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/PersonalizationRequest.java
index 8765d123b..11e2e383e 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/PersonalizationRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/PersonalizationRequest.java
@@ -17,7 +17,7 @@ public class PersonalizationRequest {
@JsonProperty("consumer_region")
private RegionInfo consumerRegion;
- /** contact info for the consumer requesting data or performing a listing installation */
+ /** */
@JsonProperty("contact_info")
private ContactInfo contactInfo;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderExchangeFiltersImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderExchangeFiltersImpl.java
index 6baa7bce6..be01971e8 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderExchangeFiltersImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderExchangeFiltersImpl.java
@@ -37,7 +37,7 @@ public void delete(DeleteExchangeFilterRequest request) {
Request req = new Request("DELETE", path);
ApiClient.setQuery(req, request);
req.withHeader("Accept", "application/json");
- apiClient.execute(req, DeleteExchangeFilterResponse.class);
+ apiClient.execute(req, Void.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderExchangesImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderExchangesImpl.java
index 5ac2520b3..c40746d43 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderExchangesImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderExchangesImpl.java
@@ -51,7 +51,7 @@ public void delete(DeleteExchangeRequest request) {
Request req = new Request("DELETE", path);
ApiClient.setQuery(req, request);
req.withHeader("Accept", "application/json");
- apiClient.execute(req, DeleteExchangeResponse.class);
+ apiClient.execute(req, Void.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
@@ -65,7 +65,7 @@ public void deleteListingFromExchange(RemoveExchangeForListingRequest request) {
Request req = new Request("DELETE", path);
ApiClient.setQuery(req, request);
req.withHeader("Accept", "application/json");
- apiClient.execute(req, RemoveExchangeForListingResponse.class);
+ apiClient.execute(req, Void.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderFilesImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderFilesImpl.java
index e8fe25bac..4fc8f0927 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderFilesImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderFilesImpl.java
@@ -37,7 +37,7 @@ public void delete(DeleteFileRequest request) {
Request req = new Request("DELETE", path);
ApiClient.setQuery(req, request);
req.withHeader("Accept", "application/json");
- apiClient.execute(req, DeleteFileResponse.class);
+ apiClient.execute(req, Void.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderListingsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderListingsImpl.java
index ed54330ad..3cddc1b63 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderListingsImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderListingsImpl.java
@@ -37,7 +37,7 @@ public void delete(DeleteListingRequest request) {
Request req = new Request("DELETE", path);
ApiClient.setQuery(req, request);
req.withHeader("Accept", "application/json");
- apiClient.execute(req, DeleteListingResponse.class);
+ apiClient.execute(req, Void.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderProvidersImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderProvidersImpl.java
index 618decdf9..4189392dd 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderProvidersImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderProvidersImpl.java
@@ -37,7 +37,7 @@ public void delete(DeleteProviderRequest request) {
Request req = new Request("DELETE", path);
ApiClient.setQuery(req, request);
req.withHeader("Accept", "application/json");
- apiClient.execute(req, DeleteProviderResponse.class);
+ apiClient.execute(req, Void.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/RemoveExchangeForListingResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/RemoveExchangeForListingResponse.java
deleted file mode 100755
index 02a6e7364..000000000
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/RemoveExchangeForListingResponse.java
+++ /dev/null
@@ -1,28 +0,0 @@
-// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-package com.databricks.sdk.service.marketplace;
-
-import com.databricks.sdk.support.Generated;
-import com.databricks.sdk.support.ToStringer;
-import java.util.Objects;
-
-@Generated
-public class RemoveExchangeForListingResponse {
-
- @Override
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
- return true;
- }
-
- @Override
- public int hashCode() {
- return Objects.hash();
- }
-
- @Override
- public String toString() {
- return new ToStringer(RemoveExchangeForListingResponse.class).toString();
- }
-}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/Activity.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/Activity.java
index 5a370dd43..80ecebe7e 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/Activity.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/Activity.java
@@ -7,28 +7,17 @@
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;
-/** Activity recorded for the action. */
+/**
+ * For activities, this contains the activity recorded for the action. For comments, this contains
+ * the comment details. For transition requests, this contains the transition request details.
+ */
@Generated
public class Activity {
- /**
- * Type of activity. Valid values are: * `APPLIED_TRANSITION`: User applied the corresponding
- * stage transition.
- *
- * * `REQUESTED_TRANSITION`: User requested the corresponding stage transition.
- *
- * * `CANCELLED_REQUEST`: User cancelled an existing transition request.
- *
- * * `APPROVED_REQUEST`: User approved the corresponding stage transition.
- *
- * * `REJECTED_REQUEST`: User rejected the coressponding stage transition.
- *
- * * `SYSTEM_TRANSITION`: For events performed as a side effect, such as archiving existing
- * model versions in a stage.
- */
+ /** */
@JsonProperty("activity_type")
private ActivityType activityType;
- /** User-provided comment associated with the activity. */
+ /** User-provided comment associated with the activity, comment, or transition request. */
@JsonProperty("comment")
private String comment;
@@ -48,7 +37,7 @@ public class Activity {
* * `Archived`: Archived stage.
*/
@JsonProperty("from_stage")
- private Stage fromStage;
+ private String fromStage;
/** Unique identifier for the object. */
@JsonProperty("id")
@@ -78,7 +67,7 @@ public class Activity {
* * `Archived`: Archived stage.
*/
@JsonProperty("to_stage")
- private Stage toStage;
+ private String toStage;
/** The username of the user that created the object. */
@JsonProperty("user_id")
@@ -111,12 +100,12 @@ public Long getCreationTimestamp() {
return creationTimestamp;
}
- public Activity setFromStage(Stage fromStage) {
+ public Activity setFromStage(String fromStage) {
this.fromStage = fromStage;
return this;
}
- public Stage getFromStage() {
+ public String getFromStage() {
return fromStage;
}
@@ -147,12 +136,12 @@ public String getSystemComment() {
return systemComment;
}
- public Activity setToStage(Stage toStage) {
+ public Activity setToStage(String toStage) {
this.toStage = toStage;
return this;
}
- public Stage getToStage() {
+ public String getToStage() {
return toStage;
}
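
With from_stage and to_stage now typed as String, the setters and getters shown above take and return plain stage names. A minimal sketch of the new shape (the stage values are illustrative):

    import com.databricks.sdk.service.ml.Activity;

    public class ActivityStageExample {
      public static void main(String[] args) {
        // Stage names are plain strings such as "Staging" or "Production"
        // rather than values of the former Stage enum.
        Activity activity = new Activity()
            .setFromStage("Staging")
            .setToStage("Production");

        String target = activity.getToStage(); // "Production"
        System.out.println(target);
      }
    }
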
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ActivityAction.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ActivityAction.java
index 727e1b2c6..8d4436b4b 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ActivityAction.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ActivityAction.java
@@ -5,16 +5,23 @@
import com.databricks.sdk.support.Generated;
/**
- * An action that a user (with sufficient permissions) could take on an activity. Valid values are:
- * * `APPROVE_TRANSITION_REQUEST`: Approve a transition request
+ * An action that a user (with sufficient permissions) could take on an activity or comment.
+ *
+ * For activities, valid values are: * `APPROVE_TRANSITION_REQUEST`: Approve a transition request
*
* * `REJECT_TRANSITION_REQUEST`: Reject a transition request
*
* * `CANCEL_TRANSITION_REQUEST`: Cancel (delete) a transition request
+ *
+ * For comments, valid values are: * `EDIT_COMMENT`: Edit the comment
+ *
+ * * `DELETE_COMMENT`: Delete the comment
*/
@Generated
public enum ActivityAction {
APPROVE_TRANSITION_REQUEST, // Approve a transition request
CANCEL_TRANSITION_REQUEST, // Cancel (delete) a transition request
+ DELETE_COMMENT, // Delete the comment
+ EDIT_COMMENT, // Edit the comment
REJECT_TRANSITION_REQUEST, // Reject a transition request
}
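
Because ActivityAction now also carries the comment actions, code that switches exhaustively over the enum should handle the two new constants. A sketch:

    import com.databricks.sdk.service.ml.ActivityAction;

    public class ActivityActionExample {
      public static void main(String[] args) {
        ActivityAction action = ActivityAction.DELETE_COMMENT;

        switch (action) {
          case APPROVE_TRANSITION_REQUEST:
          case REJECT_TRANSITION_REQUEST:
          case CANCEL_TRANSITION_REQUEST:
            System.out.println("transition request action: " + action);
            break;
          case EDIT_COMMENT:
          case DELETE_COMMENT:
            // Constants added by this change.
            System.out.println("comment action: " + action);
            break;
        }
      }
    }
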
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ApproveTransitionRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ApproveTransitionRequest.java
index 2f1b3780c..f8384a434 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ApproveTransitionRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ApproveTransitionRequest.java
@@ -7,6 +7,7 @@
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;
+/** Details required to identify and approve a model version stage transition request. */
@Generated
public class ApproveTransitionRequest {
/** Specifies whether to archive all current model versions in the target stage. */
@@ -33,7 +34,7 @@ public class ApproveTransitionRequest {
* * `Archived`: Archived stage.
*/
@JsonProperty("stage")
- private Stage stage;
+ private String stage;
/** Version of the model. */
@JsonProperty("version")
@@ -66,12 +67,12 @@ public String getName() {
return name;
}
- public ApproveTransitionRequest setStage(Stage stage) {
+ public ApproveTransitionRequest setStage(String stage) {
this.stage = stage;
return this;
}
- public Stage getStage() {
+ public String getStage() {
return stage;
}
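
ApproveTransitionRequest likewise takes the target stage as a plain string now. A sketch of building the request, assuming the usual generated setters for the other fields and placeholder values:

    import com.databricks.sdk.service.ml.ApproveTransitionRequest;

    public class ApproveTransitionExample {
      public static void main(String[] args) {
        ApproveTransitionRequest request = new ApproveTransitionRequest()
            .setName("my-model")               // hypothetical model name
            .setVersion("1")                   // hypothetical version
            .setStage("Production")            // now a plain String, not the Stage enum
            .setArchiveExistingVersions(true);

        System.out.println(request);
      }
    }
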
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ApproveTransitionRequestResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ApproveTransitionRequestResponse.java
index 3bf4ef6b8..cd6e89a72 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ApproveTransitionRequestResponse.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ApproveTransitionRequestResponse.java
@@ -9,7 +9,7 @@
@Generated
public class ApproveTransitionRequestResponse {
- /** Activity recorded for the action. */
+ /** New activity generated as a result of this operation. */
@JsonProperty("activity")
private Activity activity;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CommentActivityAction.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CommentActivityAction.java
index 519f16283..2e05ec6da 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CommentActivityAction.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CommentActivityAction.java
@@ -5,13 +5,23 @@
import com.databricks.sdk.support.Generated;
/**
- * An action that a user (with sufficient permissions) could take on a comment. Valid values are: *
- * `EDIT_COMMENT`: Edit the comment
+ * An action that a user (with sufficient permissions) could take on an activity or comment.
+ *
+ * For activities, valid values are: * `APPROVE_TRANSITION_REQUEST`: Approve a transition request
+ *
+ * * `REJECT_TRANSITION_REQUEST`: Reject a transition request
+ *
+ * * `CANCEL_TRANSITION_REQUEST`: Cancel (delete) a transition request
+ *
+ * For comments, valid values are: * `EDIT_COMMENT`: Edit the comment
*
* * `DELETE_COMMENT`: Delete the comment
*/
@Generated
public enum CommentActivityAction {
+ APPROVE_TRANSITION_REQUEST, // Approve a transition request
+ CANCEL_TRANSITION_REQUEST, // Cancel (delete) a transition request
DELETE_COMMENT, // Delete the comment
EDIT_COMMENT, // Edit the comment
+ REJECT_TRANSITION_REQUEST, // Reject a transition request
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CommentObject.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CommentObject.java
index bc0cac89e..e203c6f82 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CommentObject.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CommentObject.java
@@ -8,14 +8,17 @@
import java.util.Collection;
import java.util.Objects;
-/** Comment details. */
+/**
+ * For activities, this contains the activity recorded for the action. For comments, this contains
+ * the comment details. For transition requests, this contains the transition request details.
+ */
@Generated
public class CommentObject {
/** Array of actions on the activity allowed for the current viewer. */
@JsonProperty("available_actions")
private Collection<CommentActivityAction> availableActions;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateTransitionRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateTransitionRequest.java
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateTransitionRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateTransitionRequest.java
* * `Archived`: Archived stage.
*/
@JsonProperty("stage")
- private Stage stage;
+ private String stage;
/** Version of the model. */
@JsonProperty("version")
@@ -53,12 +54,12 @@ public String getName() {
return name;
}
- public CreateTransitionRequest setStage(Stage stage) {
+ public CreateTransitionRequest setStage(String stage) {
this.stage = stage;
return this;
}
- public Stage getStage() {
+ public String getStage() {
return stage;
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateTransitionRequestResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateTransitionRequestResponse.java
index 78d011768..64fd988b5 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateTransitionRequestResponse.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateTransitionRequestResponse.java
@@ -9,7 +9,7 @@
@Generated
public class CreateTransitionRequestResponse {
- /** Transition request details. */
+ /** New activity generated for stage transition request. */
@JsonProperty("request")
private TransitionRequest request;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteCommentResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteCommentResponse.java
deleted file mode 100755
index eae7ebb02..000000000
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteCommentResponse.java
+++ /dev/null
@@ -1,28 +0,0 @@
-// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-package com.databricks.sdk.service.ml;
-
-import com.databricks.sdk.support.Generated;
-import com.databricks.sdk.support.ToStringer;
-import java.util.Objects;
-
-@Generated
-public class DeleteCommentResponse {
-
- @Override
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
- return true;
- }
-
- @Override
- public int hashCode() {
- return Objects.hash();
- }
-
- @Override
- public String toString() {
- return new ToStringer(DeleteCommentResponse.class).toString();
- }
-}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteExperimentResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteExperimentResponse.java
deleted file mode 100755
index 2ef42a8bd..000000000
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteExperimentResponse.java
+++ /dev/null
@@ -1,28 +0,0 @@
-// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-package com.databricks.sdk.service.ml;
-
-import com.databricks.sdk.support.Generated;
-import com.databricks.sdk.support.ToStringer;
-import java.util.Objects;
-
-@Generated
-public class DeleteExperimentResponse {
-
- @Override
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
- return true;
- }
-
- @Override
- public int hashCode() {
- return Objects.hash();
- }
-
- @Override
- public String toString() {
- return new ToStringer(DeleteExperimentResponse.class).toString();
- }
-}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteLoggedModelResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteLoggedModelResponse.java
deleted file mode 100755
index 4032513b7..000000000
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteLoggedModelResponse.java
+++ /dev/null
@@ -1,28 +0,0 @@
-// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-package com.databricks.sdk.service.ml;
-
-import com.databricks.sdk.support.Generated;
-import com.databricks.sdk.support.ToStringer;
-import java.util.Objects;
-
-@Generated
-public class DeleteLoggedModelResponse {
-
- @Override
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
- return true;
- }
-
- @Override
- public int hashCode() {
- return Objects.hash();
- }
-
- @Override
- public String toString() {
- return new ToStringer(DeleteLoggedModelResponse.class).toString();
- }
-}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteLoggedModelTagResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteLoggedModelTagResponse.java
deleted file mode 100755
index 5e9f53856..000000000
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteLoggedModelTagResponse.java
+++ /dev/null
@@ -1,28 +0,0 @@
-// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-package com.databricks.sdk.service.ml;
-
-import com.databricks.sdk.support.Generated;
-import com.databricks.sdk.support.ToStringer;
-import java.util.Objects;
-
-@Generated
-public class DeleteLoggedModelTagResponse {
-
- @Override
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
- return true;
- }
-
- @Override
- public int hashCode() {
- return Objects.hash();
- }
-
- @Override
- public String toString() {
- return new ToStringer(DeleteLoggedModelTagResponse.class).toString();
- }
-}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteModelResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteModelResponse.java
deleted file mode 100755
index 1b053c73f..000000000
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteModelResponse.java
+++ /dev/null
@@ -1,28 +0,0 @@
-// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-package com.databricks.sdk.service.ml;
-
-import com.databricks.sdk.support.Generated;
-import com.databricks.sdk.support.ToStringer;
-import java.util.Objects;
-
-@Generated
-public class DeleteModelResponse {
-
- @Override
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
- return true;
- }
-
- @Override
- public int hashCode() {
- return Objects.hash();
- }
-
- @Override
- public String toString() {
- return new ToStringer(DeleteModelResponse.class).toString();
- }
-}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteModelTagResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteModelTagResponse.java
deleted file mode 100755
index 983354a74..000000000
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteModelTagResponse.java
+++ /dev/null
@@ -1,28 +0,0 @@
-// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-package com.databricks.sdk.service.ml;
-
-import com.databricks.sdk.support.Generated;
-import com.databricks.sdk.support.ToStringer;
-import java.util.Objects;
-
-@Generated
-public class DeleteModelTagResponse {
-
- @Override
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
- return true;
- }
-
- @Override
- public int hashCode() {
- return Objects.hash();
- }
-
- @Override
- public String toString() {
- return new ToStringer(DeleteModelTagResponse.class).toString();
- }
-}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteModelVersionResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteModelVersionResponse.java
deleted file mode 100755
index 1988edda3..000000000
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteModelVersionResponse.java
+++ /dev/null
@@ -1,28 +0,0 @@
-// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-package com.databricks.sdk.service.ml;
-
-import com.databricks.sdk.support.Generated;
-import com.databricks.sdk.support.ToStringer;
-import java.util.Objects;
-
-@Generated
-public class DeleteModelVersionResponse {
-
- @Override
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
- return true;
- }
-
- @Override
- public int hashCode() {
- return Objects.hash();
- }
-
- @Override
- public String toString() {
- return new ToStringer(DeleteModelVersionResponse.class).toString();
- }
-}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteModelVersionTagResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteModelVersionTagResponse.java
deleted file mode 100755
index d359f15af..000000000
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteModelVersionTagResponse.java
+++ /dev/null
@@ -1,28 +0,0 @@
-// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-package com.databricks.sdk.service.ml;
-
-import com.databricks.sdk.support.Generated;
-import com.databricks.sdk.support.ToStringer;
-import java.util.Objects;
-
-@Generated
-public class DeleteModelVersionTagResponse {
-
- @Override
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
- return true;
- }
-
- @Override
- public int hashCode() {
- return Objects.hash();
- }
-
- @Override
- public String toString() {
- return new ToStringer(DeleteModelVersionTagResponse.class).toString();
- }
-}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteRunResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteRunResponse.java
deleted file mode 100755
index 8dfe844e7..000000000
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteRunResponse.java
+++ /dev/null
@@ -1,28 +0,0 @@
-// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-package com.databricks.sdk.service.ml;
-
-import com.databricks.sdk.support.Generated;
-import com.databricks.sdk.support.ToStringer;
-import java.util.Objects;
-
-@Generated
-public class DeleteRunResponse {
-
- @Override
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
- return true;
- }
-
- @Override
- public int hashCode() {
- return Objects.hash();
- }
-
- @Override
- public String toString() {
- return new ToStringer(DeleteRunResponse.class).toString();
- }
-}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteTagResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteTagResponse.java
deleted file mode 100755
index c40c832de..000000000
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteTagResponse.java
+++ /dev/null
@@ -1,28 +0,0 @@
-// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-package com.databricks.sdk.service.ml;
-
-import com.databricks.sdk.support.Generated;
-import com.databricks.sdk.support.ToStringer;
-import java.util.Objects;
-
-@Generated
-public class DeleteTagResponse {
-
- @Override
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
- return true;
- }
-
- @Override
- public int hashCode() {
- return Objects.hash();
- }
-
- @Override
- public String toString() {
- return new ToStringer(DeleteTagResponse.class).toString();
- }
-}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteTransitionRequestRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteTransitionRequestRequest.java
index a1977c2af..4170e18a5 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteTransitionRequestRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteTransitionRequestRequest.java
@@ -41,7 +41,7 @@ public class DeleteTransitionRequestRequest {
*/
@JsonIgnore
@QueryParam("stage")
- private DeleteTransitionRequestStage stage;
+ private String stage;
/** Version of the model. */
@JsonIgnore
@@ -75,12 +75,12 @@ public String getName() {
return name;
}
- public DeleteTransitionRequestRequest setStage(DeleteTransitionRequestStage stage) {
+ public DeleteTransitionRequestRequest setStage(String stage) {
this.stage = stage;
return this;
}
- public DeleteTransitionRequestStage getStage() {
+ public String getStage() {
return stage;
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteTransitionRequestResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteTransitionRequestResponse.java
index 78bab3b6c..234420b64 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteTransitionRequestResponse.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteTransitionRequestResponse.java
@@ -4,25 +4,41 @@
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;
@Generated
public class DeleteTransitionRequestResponse {
+ /** New activity generated as a result of this operation. */
+ @JsonProperty("activity")
+ private Activity activity;
+
+ public DeleteTransitionRequestResponse setActivity(Activity activity) {
+ this.activity = activity;
+ return this;
+ }
+
+ public Activity getActivity() {
+ return activity;
+ }
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
- return true;
+ DeleteTransitionRequestResponse that = (DeleteTransitionRequestResponse) o;
+ return Objects.equals(activity, that.activity);
}
@Override
public int hashCode() {
- return Objects.hash();
+ return Objects.hash(activity);
}
@Override
public String toString() {
- return new ToStringer(DeleteTransitionRequestResponse.class).toString();
+ return new ToStringer(DeleteTransitionRequestResponse.class)
+ .add("activity", activity)
+ .toString();
}
}
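
A minimal usage sketch (not part of the generated diff) of the new return type: deleteTransitionRequest now yields a DeleteTransitionRequestResponse whose activity field records the cancellation. The WorkspaceClient entry point and the model name, version, stage, and creator values below are placeholders assumed for illustration.

import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.ml.Activity;
import com.databricks.sdk.service.ml.DeleteTransitionRequestResponse;

public class CancelTransitionRequestExample {
  public static void main(String[] args) {
    // Assumes default authentication via environment or config profile.
    WorkspaceClient w = new WorkspaceClient();

    // Previously returned void; now the generated Activity can be inspected.
    DeleteTransitionRequestResponse resp =
        w.modelRegistry()
            .deleteTransitionRequest("example-model", "1", "Staging", "someone@example.com");

    Activity activity = resp.getActivity();
    System.out.println("Cancellation recorded as activity: " + activity);
  }
}
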
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteTransitionRequestStage.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteTransitionRequestStage.java
deleted file mode 100755
index d7325c6b4..000000000
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteTransitionRequestStage.java
+++ /dev/null
@@ -1,21 +0,0 @@
-// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-package com.databricks.sdk.service.ml;
-
-import com.databricks.sdk.support.Generated;
-import com.fasterxml.jackson.annotation.JsonProperty;
-
-@Generated
-public enum DeleteTransitionRequestStage {
- @JsonProperty("Archived")
- ARCHIVED,
-
- @JsonProperty("None")
- NONE,
-
- @JsonProperty("Production")
- PRODUCTION,
-
- @JsonProperty("Staging")
- STAGING,
-}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteWebhookResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteWebhookResponse.java
deleted file mode 100755
index 04056259e..000000000
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteWebhookResponse.java
+++ /dev/null
@@ -1,28 +0,0 @@
-// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-package com.databricks.sdk.service.ml;
-
-import com.databricks.sdk.support.Generated;
-import com.databricks.sdk.support.ToStringer;
-import java.util.Objects;
-
-@Generated
-public class DeleteWebhookResponse {
-
- @Override
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
- return true;
- }
-
- @Override
- public int hashCode() {
- return Objects.hash();
- }
-
- @Override
- public String toString() {
- return new ToStringer(DeleteWebhookResponse.class).toString();
- }
-}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentAccessControlRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentAccessControlRequest.java
index 250fa534d..3001c8954 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentAccessControlRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentAccessControlRequest.java
@@ -13,7 +13,7 @@ public class ExperimentAccessControlRequest {
@JsonProperty("group_name")
private String groupName;
- /** Permission level */
+ /** */
@JsonProperty("permission_level")
private ExperimentPermissionLevel permissionLevel;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentPermission.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentPermission.java
index 8988beb05..3eb9dabba 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentPermission.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentPermission.java
@@ -18,7 +18,7 @@ public class ExperimentPermission {
@JsonProperty("inherited_from_object")
  private Collection
[... remaining ExperimentPermission.java hunks and the ListWebhooksRequest.java diff header truncated ...]
+   * * `MODEL_VERSION_TRANSITIONED_STAGE`: A model version’s stage was changed.
+ *
+ * * `TRANSITION_REQUEST_CREATED`: A user requested a model version’s stage be transitioned.
+ *
+ * * `COMMENT_CREATED`: A user wrote a comment on a registered model.
+ *
+ * * `REGISTERED_MODEL_CREATED`: A new registered model was created. This event type can only
+ * be specified for a registry-wide webhook, which can be created by not specifying a model name
+ * in the create request.
+ *
+ * * `MODEL_VERSION_TAG_SET`: A user set a tag on the model version.
+ *
+ * * `MODEL_VERSION_TRANSITIONED_TO_STAGING`: A model version was transitioned to staging.
+ *
+ * * `MODEL_VERSION_TRANSITIONED_TO_PRODUCTION`: A model version was transitioned to
+ * production.
+ *
+ * * `MODEL_VERSION_TRANSITIONED_TO_ARCHIVED`: A model version was archived.
+ *
+ * * `TRANSITION_REQUEST_TO_STAGING_CREATED`: A user requested a model version be transitioned
+ * to staging.
+ *
+ * * `TRANSITION_REQUEST_TO_PRODUCTION_CREATED`: A user requested a model version be
+ * transitioned to production.
+ *
+ * * `TRANSITION_REQUEST_TO_ARCHIVED_CREATED`: A user requested a model version be archived.
+ *
+ * If `events` is specified, any webhook with one or more of the specified trigger events is
* included in the output. If `events` is not specified, webhooks of all event types are included
* in the output.
*/
@@ -20,9 +50,14 @@ public class ListWebhooksRequest {
@QueryParam("events")
  private Collection
[... remaining ListWebhooksRequest.java hunks and the ModelRegistryAPI.java diff header truncated ...]
-   * Throws `RESOURCE_ALREADY_EXISTS` if a registered model with the given name exists.
+ * Creates a new registered model with the name specified in the request body. Throws
+ * `RESOURCE_ALREADY_EXISTS` if a registered model with the given name exists.
*/
public CreateModelResponse createModel(CreateModelRequest request) {
return impl.createModel(request);
@@ -85,7 +84,7 @@ public CreateModelVersionResponse createModelVersion(CreateModelVersionRequest r
}
public CreateTransitionRequestResponse createTransitionRequest(
- String name, String version, Stage stage) {
+ String name, String version, String stage) {
return createTransitionRequest(
new CreateTransitionRequest().setName(name).setVersion(version).setStage(stage));
}
@@ -99,11 +98,7 @@ public CreateWebhookResponse createWebhook(Collection
[... intervening lines truncated ...]
-   * Creates a registry webhook.
- */
+ /** **NOTE:** This endpoint is in Public Preview. Creates a registry webhook. */
public CreateWebhookResponse createWebhook(CreateRegistryWebhook request) {
return impl.createWebhook(request);
}
@@ -154,9 +149,9 @@ public void deleteModelVersionTag(DeleteModelVersionTagRequest request) {
impl.deleteModelVersionTag(request);
}
- public void deleteTransitionRequest(
- String name, String version, DeleteTransitionRequestStage stage, String creator) {
- deleteTransitionRequest(
+ public DeleteTransitionRequestResponse deleteTransitionRequest(
+ String name, String version, String stage, String creator) {
+ return deleteTransitionRequest(
new DeleteTransitionRequestRequest()
.setName(name)
.setVersion(version)
@@ -165,15 +160,16 @@ public void deleteTransitionRequest(
}
/** Cancels a model version stage transition request. */
- public void deleteTransitionRequest(DeleteTransitionRequestRequest request) {
- impl.deleteTransitionRequest(request);
+ public DeleteTransitionRequestResponse deleteTransitionRequest(
+ DeleteTransitionRequestRequest request) {
+ return impl.deleteTransitionRequest(request);
}
- /**
- * **NOTE:** This endpoint is in Public Preview.
- *
- * Deletes a registry webhook.
- */
+ public void deleteWebhook(String id) {
+ deleteWebhook(new DeleteWebhookRequest().setId(id));
+ }
+
+ /** **NOTE:** This endpoint is in Public Preview. Deletes a registry webhook. */
public void deleteWebhook(DeleteWebhookRequest request) {
impl.deleteWebhook(request);
}
@@ -280,11 +276,7 @@ public Iterable
[... intervening lines truncated ...]
-   * Lists all registry webhooks.
- */
+ /** **NOTE:** This endpoint is in Public Preview. Lists all registry webhooks. */
  public Iterable
[... intervening lines truncated ...]
-   * Tests a registry webhook.
- */
+ /** **NOTE:** This endpoint is in Public Preview. Tests a registry webhook. */
public TestRegistryWebhookResponse testRegistryWebhook(TestRegistryWebhookRequest request) {
return impl.testRegistryWebhook(request);
}
public TransitionStageResponse transitionStage(
- String name, String version, Stage stage, boolean archiveExistingVersions) {
+ String name, String version, String stage, boolean archiveExistingVersions) {
return transitionStage(
new TransitionModelVersionStageDatabricks()
.setName(name)
@@ -410,7 +398,7 @@ public TransitionStageResponse transitionStage(
/**
* Transition a model version's stage. This is a Databricks workspace version of the [MLflow
- * endpoint] that also accepts a comment associated with the transition to be recorded.",
+ * endpoint] that also accepts a comment associated with the transition to be recorded.
*
* [MLflow endpoint]:
* https://www.mlflow.org/docs/latest/rest-api.html#transition-modelversion-stage
@@ -428,22 +416,22 @@ public UpdateCommentResponse updateComment(UpdateComment request) {
return impl.updateComment(request);
}
- public void updateModel(String name) {
- updateModel(new UpdateModelRequest().setName(name));
+ public UpdateModelResponse updateModel(String name) {
+ return updateModel(new UpdateModelRequest().setName(name));
}
/** Updates a registered model. */
- public void updateModel(UpdateModelRequest request) {
- impl.updateModel(request);
+ public UpdateModelResponse updateModel(UpdateModelRequest request) {
+ return impl.updateModel(request);
}
- public void updateModelVersion(String name, String version) {
- updateModelVersion(new UpdateModelVersionRequest().setName(name).setVersion(version));
+ public UpdateModelVersionResponse updateModelVersion(String name, String version) {
+ return updateModelVersion(new UpdateModelVersionRequest().setName(name).setVersion(version));
}
/** Updates the model version. */
- public void updateModelVersion(UpdateModelVersionRequest request) {
- impl.updateModelVersion(request);
+ public UpdateModelVersionResponse updateModelVersion(UpdateModelVersionRequest request) {
+ return impl.updateModelVersion(request);
}
public RegisteredModelPermissions updatePermissions(String registeredModelId) {
@@ -459,17 +447,13 @@ public RegisteredModelPermissions updatePermissions(RegisteredModelPermissionsRe
return impl.updatePermissions(request);
}
- public void updateWebhook(String id) {
- updateWebhook(new UpdateRegistryWebhook().setId(id));
+ public UpdateWebhookResponse updateWebhook(String id) {
+ return updateWebhook(new UpdateRegistryWebhook().setId(id));
}
- /**
- * **NOTE:** This endpoint is in Public Preview.
- *
- * Updates a registry webhook.
- */
- public void updateWebhook(UpdateRegistryWebhook request) {
- impl.updateWebhook(request);
+ /** **NOTE:** This endpoint is in Public Preview. Updates a registry webhook. */
+ public UpdateWebhookResponse updateWebhook(UpdateRegistryWebhook request) {
+ return impl.updateWebhook(request);
}
public ModelRegistryService impl() {
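
A hedged sketch of how callers can consume the new return values on the update methods shown above (updateModel, updateModelVersion, updateWebhook), which previously returned void. The WorkspaceClient entry point and the identifiers are placeholders, not taken from this diff.

import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.ml.UpdateModelRequest;
import com.databricks.sdk.service.ml.UpdateModelResponse;
import com.databricks.sdk.service.ml.UpdateModelVersionResponse;
import com.databricks.sdk.service.ml.UpdateWebhookResponse;

public class UpdateReturnValuesExample {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();

    // Registered model update now echoes the updated object back.
    UpdateModelResponse model =
        w.modelRegistry().updateModel(new UpdateModelRequest().setName("example-model"));

    // Model version update follows the same pattern via the convenience overload.
    UpdateModelVersionResponse version =
        w.modelRegistry().updateModelVersion("example-model", "1");

    // Webhook update returns UpdateWebhookResponse; "<webhook-id>" is a placeholder.
    UpdateWebhookResponse webhook = w.modelRegistry().updateWebhook("<webhook-id>");

    System.out.println(model);
    System.out.println(version);
    System.out.println(webhook);
  }
}
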
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelRegistryImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelRegistryImpl.java
index 357684e9c..f74777e4c 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelRegistryImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelRegistryImpl.java
@@ -108,7 +108,7 @@ public void deleteComment(DeleteCommentRequest request) {
Request req = new Request("DELETE", path);
ApiClient.setQuery(req, request);
req.withHeader("Accept", "application/json");
- apiClient.execute(req, DeleteCommentResponse.class);
+ apiClient.execute(req, Void.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
@@ -121,7 +121,7 @@ public void deleteModel(DeleteModelRequest request) {
Request req = new Request("DELETE", path);
ApiClient.setQuery(req, request);
req.withHeader("Accept", "application/json");
- apiClient.execute(req, DeleteModelResponse.class);
+ apiClient.execute(req, Void.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
@@ -134,7 +134,7 @@ public void deleteModelTag(DeleteModelTagRequest request) {
Request req = new Request("DELETE", path);
ApiClient.setQuery(req, request);
req.withHeader("Accept", "application/json");
- apiClient.execute(req, DeleteModelTagResponse.class);
+ apiClient.execute(req, Void.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
@@ -147,7 +147,7 @@ public void deleteModelVersion(DeleteModelVersionRequest request) {
Request req = new Request("DELETE", path);
ApiClient.setQuery(req, request);
req.withHeader("Accept", "application/json");
- apiClient.execute(req, DeleteModelVersionResponse.class);
+ apiClient.execute(req, Void.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
@@ -160,20 +160,21 @@ public void deleteModelVersionTag(DeleteModelVersionTagRequest request) {
Request req = new Request("DELETE", path);
ApiClient.setQuery(req, request);
req.withHeader("Accept", "application/json");
- apiClient.execute(req, DeleteModelVersionTagResponse.class);
+ apiClient.execute(req, Void.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
}
@Override
- public void deleteTransitionRequest(DeleteTransitionRequestRequest request) {
+ public DeleteTransitionRequestResponse deleteTransitionRequest(
+ DeleteTransitionRequestRequest request) {
String path = "/api/2.0/mlflow/transition-requests/delete";
try {
Request req = new Request("DELETE", path);
ApiClient.setQuery(req, request);
req.withHeader("Accept", "application/json");
- apiClient.execute(req, DeleteTransitionRequestResponse.class);
+ return apiClient.execute(req, DeleteTransitionRequestResponse.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
@@ -186,7 +187,7 @@ public void deleteWebhook(DeleteWebhookRequest request) {
Request req = new Request("DELETE", path);
ApiClient.setQuery(req, request);
req.withHeader("Accept", "application/json");
- apiClient.execute(req, DeleteWebhookResponse.class);
+ apiClient.execute(req, Void.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
@@ -379,7 +380,7 @@ public void setModelTag(SetModelTagRequest request) {
ApiClient.setQuery(req, request);
req.withHeader("Accept", "application/json");
req.withHeader("Content-Type", "application/json");
- apiClient.execute(req, SetModelTagResponse.class);
+ apiClient.execute(req, Void.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
@@ -393,7 +394,7 @@ public void setModelVersionTag(SetModelVersionTagRequest request) {
ApiClient.setQuery(req, request);
req.withHeader("Accept", "application/json");
req.withHeader("Content-Type", "application/json");
- apiClient.execute(req, SetModelVersionTagResponse.class);
+ apiClient.execute(req, Void.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
@@ -457,28 +458,28 @@ public UpdateCommentResponse updateComment(UpdateComment request) {
}
@Override
- public void updateModel(UpdateModelRequest request) {
+ public UpdateModelResponse updateModel(UpdateModelRequest request) {
String path = "/api/2.0/mlflow/registered-models/update";
try {
Request req = new Request("PATCH", path, apiClient.serialize(request));
ApiClient.setQuery(req, request);
req.withHeader("Accept", "application/json");
req.withHeader("Content-Type", "application/json");
- apiClient.execute(req, UpdateModelResponse.class);
+ return apiClient.execute(req, UpdateModelResponse.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
}
@Override
- public void updateModelVersion(UpdateModelVersionRequest request) {
+ public UpdateModelVersionResponse updateModelVersion(UpdateModelVersionRequest request) {
String path = "/api/2.0/mlflow/model-versions/update";
try {
Request req = new Request("PATCH", path, apiClient.serialize(request));
ApiClient.setQuery(req, request);
req.withHeader("Accept", "application/json");
req.withHeader("Content-Type", "application/json");
- apiClient.execute(req, UpdateModelVersionResponse.class);
+ return apiClient.execute(req, UpdateModelVersionResponse.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
@@ -500,14 +501,14 @@ public RegisteredModelPermissions updatePermissions(RegisteredModelPermissionsRe
}
@Override
- public void updateWebhook(UpdateRegistryWebhook request) {
+ public UpdateWebhookResponse updateWebhook(UpdateRegistryWebhook request) {
String path = "/api/2.0/mlflow/registry-webhooks/update";
try {
Request req = new Request("PATCH", path, apiClient.serialize(request));
ApiClient.setQuery(req, request);
req.withHeader("Accept", "application/json");
req.withHeader("Content-Type", "application/json");
- apiClient.execute(req, UpdateWebhookResponse.class);
+ return apiClient.execute(req, UpdateWebhookResponse.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelRegistryService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelRegistryService.java
index ece320439..416afd402 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelRegistryService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelRegistryService.java
@@ -30,9 +30,8 @@ ApproveTransitionRequestResponse approveTransitionRequest(
CreateCommentResponse createComment(CreateComment createComment);
/**
- * Creates a new registered model with the name specified in the request body.
- *
- * Throws `RESOURCE_ALREADY_EXISTS` if a registered model with the given name exists.
+ * Creates a new registered model with the name specified in the request body. Throws
+ * `RESOURCE_ALREADY_EXISTS` if a registered model with the given name exists.
*/
CreateModelResponse createModel(CreateModelRequest createModelRequest);
@@ -44,11 +43,7 @@ CreateModelVersionResponse createModelVersion(
CreateTransitionRequestResponse createTransitionRequest(
CreateTransitionRequest createTransitionRequest);
- /**
- * **NOTE**: This endpoint is in Public Preview.
- *
- * Creates a registry webhook.
- */
+ /** **NOTE:** This endpoint is in Public Preview. Creates a registry webhook. */
CreateWebhookResponse createWebhook(CreateRegistryWebhook createRegistryWebhook);
/** Deletes a comment on a model version. */
@@ -67,13 +62,10 @@ CreateTransitionRequestResponse createTransitionRequest(
void deleteModelVersionTag(DeleteModelVersionTagRequest deleteModelVersionTagRequest);
/** Cancels a model version stage transition request. */
- void deleteTransitionRequest(DeleteTransitionRequestRequest deleteTransitionRequestRequest);
+ DeleteTransitionRequestResponse deleteTransitionRequest(
+ DeleteTransitionRequestRequest deleteTransitionRequestRequest);
- /**
- * **NOTE:** This endpoint is in Public Preview.
- *
- * Deletes a registry webhook.
- */
+ /** **NOTE:** This endpoint is in Public Preview. Deletes a registry webhook. */
void deleteWebhook(DeleteWebhookRequest deleteWebhookRequest);
/** Gets the latest version of a registered model. */
@@ -113,11 +105,7 @@ RegisteredModelPermissions getPermissions(
ListTransitionRequestsResponse listTransitionRequests(
ListTransitionRequestsRequest listTransitionRequestsRequest);
- /**
- * **NOTE:** This endpoint is in Public Preview.
- *
- * Lists all registry webhooks.
- */
+ /** **NOTE:** This endpoint is in Public Preview. Lists all registry webhooks. */
ListRegistryWebhooks listWebhooks(ListWebhooksRequest listWebhooksRequest);
/** Rejects a model version stage transition request. */
@@ -147,17 +135,13 @@ SearchModelVersionsResponse searchModelVersions(
RegisteredModelPermissions setPermissions(
RegisteredModelPermissionsRequest registeredModelPermissionsRequest);
- /**
- * **NOTE:** This endpoint is in Public Preview.
- *
- * Tests a registry webhook.
- */
+ /** **NOTE:** This endpoint is in Public Preview. Tests a registry webhook. */
TestRegistryWebhookResponse testRegistryWebhook(
TestRegistryWebhookRequest testRegistryWebhookRequest);
/**
* Transition a model version's stage. This is a Databricks workspace version of the [MLflow
- * endpoint] that also accepts a comment associated with the transition to be recorded.",
+ * endpoint] that also accepts a comment associated with the transition to be recorded.
*
* [MLflow endpoint]:
* https://www.mlflow.org/docs/latest/rest-api.html#transition-modelversion-stage
@@ -169,10 +153,11 @@ TransitionStageResponse transitionStage(
UpdateCommentResponse updateComment(UpdateComment updateComment);
/** Updates a registered model. */
- void updateModel(UpdateModelRequest updateModelRequest);
+ UpdateModelResponse updateModel(UpdateModelRequest updateModelRequest);
/** Updates the model version. */
- void updateModelVersion(UpdateModelVersionRequest updateModelVersionRequest);
+ UpdateModelVersionResponse updateModelVersion(
+ UpdateModelVersionRequest updateModelVersionRequest);
/**
* Updates the permissions on a registered model. Registered models can inherit permissions from
@@ -181,10 +166,6 @@ TransitionStageResponse transitionStage(
RegisteredModelPermissions updatePermissions(
RegisteredModelPermissionsRequest registeredModelPermissionsRequest);
- /**
- * **NOTE:** This endpoint is in Public Preview.
- *
- * Updates a registry webhook.
- */
- void updateWebhook(UpdateRegistryWebhook updateRegistryWebhook);
+ /** **NOTE:** This endpoint is in Public Preview. Updates a registry webhook. */
+ UpdateWebhookResponse updateWebhook(UpdateRegistryWebhook updateRegistryWebhook);
}
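
An illustrative sketch of the stage-typing change: with the Stage and DeleteTransitionRequestStage enums removed, stage arguments are passed as plain strings such as "None", "Staging", "Production", or "Archived". A configured WorkspaceClient and the model name and version below are assumptions for illustration.

import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.ml.CreateTransitionRequestResponse;
import com.databricks.sdk.service.ml.TransitionStageResponse;

public class StageAsStringExample {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();

    // Request a transition to Staging (previously Stage.STAGING).
    CreateTransitionRequestResponse pending =
        w.modelRegistry().createTransitionRequest("example-model", "1", "Staging");
    System.out.println(pending);

    // Transition directly to Production, archiving existing versions in that stage.
    TransitionStageResponse moved =
        w.modelRegistry().transitionStage("example-model", "1", "Production", true);
    System.out.println(moved.getModelVersionDatabricks().getCurrentStage());
  }
}
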
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelTag.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelTag.java
index e96f3ad84..1731145a3 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelTag.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelTag.java
@@ -7,6 +7,7 @@
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;
+/** Tag for a registered model */
@Generated
public class ModelTag {
/** The tag key. */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelVersionDatabricks.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelVersionDatabricks.java
index 89602be70..b11b61291 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelVersionDatabricks.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelVersionDatabricks.java
@@ -14,24 +14,25 @@ public class ModelVersionDatabricks {
@JsonProperty("creation_timestamp")
private Long creationTimestamp;
- /**
- * Stage of the model version. Valid values are:
- *
- * * `None`: The initial stage of a model version.
- *
- * * `Staging`: Staging or pre-production stage.
- *
- * * `Production`: Production stage.
- *
- * * `Archived`: Archived stage.
- */
+ /** */
@JsonProperty("current_stage")
- private Stage currentStage;
+ private String currentStage;
/** User-specified description for the object. */
@JsonProperty("description")
private String description;
+ /**
+ * Email Subscription Status: This is the subscription status of the user to the model version
+ * Users get subscribed by interacting with the model version.
+ */
+ @JsonProperty("email_subscription_status")
+ private RegistryEmailSubscriptionType emailSubscriptionStatus;
+
+ /** Feature lineage of `model_version`. */
+ @JsonProperty("feature_list")
+ private FeatureList featureList;
+
/** Time of the object at last update, as a Unix timestamp in milliseconds. */
@JsonProperty("last_updated_timestamp")
private Long lastUpdatedTimestamp;
@@ -41,9 +42,13 @@ public class ModelVersionDatabricks {
private String name;
  /**
-   * Permission level of the requesting user on the object. For what is allowed at each level, see
-   * [MLflow Model permissions](..).
+   * Open requests for this `model_versions`. Gap in sequence number is intentional and is done in
+   * order to match field sequence numbers of `ModelVersion` proto message
   */
+  @JsonProperty("open_requests")
+  private Collection
[... remaining lines of this hunk truncated ...]
-   * * `FAILED_REGISTRATION`: Request to register a new model version has failed.
- *
- * * `READY`: Model version is ready for use.
- */
+ /** */
@JsonProperty("status")
private Status status;
@@ -102,12 +100,12 @@ public Long getCreationTimestamp() {
return creationTimestamp;
}
- public ModelVersionDatabricks setCurrentStage(Stage currentStage) {
+ public ModelVersionDatabricks setCurrentStage(String currentStage) {
this.currentStage = currentStage;
return this;
}
- public Stage getCurrentStage() {
+ public String getCurrentStage() {
return currentStage;
}
@@ -120,6 +118,25 @@ public String getDescription() {
return description;
}
+ public ModelVersionDatabricks setEmailSubscriptionStatus(
+ RegistryEmailSubscriptionType emailSubscriptionStatus) {
+ this.emailSubscriptionStatus = emailSubscriptionStatus;
+ return this;
+ }
+
+ public RegistryEmailSubscriptionType getEmailSubscriptionStatus() {
+ return emailSubscriptionStatus;
+ }
+
+ public ModelVersionDatabricks setFeatureList(FeatureList featureList) {
+ this.featureList = featureList;
+ return this;
+ }
+
+ public FeatureList getFeatureList() {
+ return featureList;
+ }
+
public ModelVersionDatabricks setLastUpdatedTimestamp(Long lastUpdatedTimestamp) {
this.lastUpdatedTimestamp = lastUpdatedTimestamp;
return this;
@@ -138,6 +155,15 @@ public String getName() {
    return name;
  }
+  public ModelVersionDatabricks setOpenRequests(Collection
[... remaining ModelVersionDatabricks.java hunks and the ModelVersionStatus.java diff header truncated ...]
+ * * `FAILED_REGISTRATION`: Request to register a new model version has failed.
+ *
+ * * `READY`: Model version is ready for use.
+ */
@Generated
public enum ModelVersionStatus {
- FAILED_REGISTRATION,
- PENDING_REGISTRATION,
- READY,
+ FAILED_REGISTRATION, // Request to register a new model version has failed.
+ PENDING_REGISTRATION, // Request to register a new model version is pending as server performs
+ // background tasks.
+ READY, // Model version is ready for use.
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/PermissionLevel.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/PermissionLevel.java
index 1ff6988ed..d4aa69a14 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/PermissionLevel.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/PermissionLevel.java
@@ -10,6 +10,7 @@
*/
@Generated
public enum PermissionLevel {
+ CAN_CREATE_REGISTERED_MODEL,
CAN_EDIT,
CAN_MANAGE,
CAN_MANAGE_PRODUCTION_VERSIONS,
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/PublishSpec.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/PublishSpec.java
index 6444b7a8a..16874b850 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/PublishSpec.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/PublishSpec.java
@@ -13,10 +13,7 @@ public class PublishSpec {
@JsonProperty("online_store")
private String onlineStore;
- /**
- * The full three-part (catalog, schema, table) name of the online table. Auto-generated if not
- * specified.
- */
+ /** The full three-part (catalog, schema, table) name of the online table. */
@JsonProperty("online_table_name")
private String onlineTableName;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RegisteredModelAccessControlRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RegisteredModelAccessControlRequest.java
index ec6ad3f55..586d99242 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RegisteredModelAccessControlRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RegisteredModelAccessControlRequest.java
@@ -13,7 +13,7 @@ public class RegisteredModelAccessControlRequest {
@JsonProperty("group_name")
private String groupName;
- /** Permission level */
+ /** */
@JsonProperty("permission_level")
private RegisteredModelPermissionLevel permissionLevel;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RegisteredModelPermission.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RegisteredModelPermission.java
index 3bc0130f2..034aeff12 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RegisteredModelPermission.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RegisteredModelPermission.java
@@ -18,7 +18,7 @@ public class RegisteredModelPermission {
@JsonProperty("inherited_from_object")
  private Collection
[... remaining RegisteredModelPermission.java hunks and later file diffs truncated ...]
-   * `DISABLED`: Webhook is not triggered.
- *
- * * `TEST_MODE`: Webhook can be triggered through the test endpoint, but is not triggered on a
- * real event.
- */
+ /** */
@JsonProperty("status")
private RegistryWebhookStatus status;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RejectTransitionRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RejectTransitionRequest.java
index c781e08a7..eabdba926 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RejectTransitionRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RejectTransitionRequest.java
@@ -7,6 +7,7 @@
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;
+/** Details required to identify and reject a model version stage transition request. */
@Generated
public class RejectTransitionRequest {
/** User-provided comment on the action. */
@@ -29,7 +30,7 @@ public class RejectTransitionRequest {
* * `Archived`: Archived stage.
*/
@JsonProperty("stage")
- private Stage stage;
+ private String stage;
/** Version of the model. */
@JsonProperty("version")
@@ -53,12 +54,12 @@ public String getName() {
return name;
}
- public RejectTransitionRequest setStage(Stage stage) {
+ public RejectTransitionRequest setStage(String stage) {
this.stage = stage;
return this;
}
- public Stage getStage() {
+ public String getStage() {
return stage;
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RejectTransitionRequestResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RejectTransitionRequestResponse.java
index 8a568ffba..94daacea7 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RejectTransitionRequestResponse.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RejectTransitionRequestResponse.java
@@ -9,7 +9,7 @@
@Generated
public class RejectTransitionRequestResponse {
- /** Activity recorded for the action. */
+ /** New activity generated as a result of this operation. */
@JsonProperty("activity")
private Activity activity;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RestoreExperimentResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RestoreExperimentResponse.java
deleted file mode 100755
index eb0e4f4e3..000000000
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RestoreExperimentResponse.java
+++ /dev/null
@@ -1,28 +0,0 @@
-// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-package com.databricks.sdk.service.ml;
-
-import com.databricks.sdk.support.Generated;
-import com.databricks.sdk.support.ToStringer;
-import java.util.Objects;
-
-@Generated
-public class RestoreExperimentResponse {
-
- @Override
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
- return true;
- }
-
- @Override
- public int hashCode() {
- return Objects.hash();
- }
-
- @Override
- public String toString() {
- return new ToStringer(RestoreExperimentResponse.class).toString();
- }
-}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RestoreRunResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RestoreRunResponse.java
deleted file mode 100755
index 3fa8ef75f..000000000
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RestoreRunResponse.java
+++ /dev/null
@@ -1,28 +0,0 @@
-// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-package com.databricks.sdk.service.ml;
-
-import com.databricks.sdk.support.Generated;
-import com.databricks.sdk.support.ToStringer;
-import java.util.Objects;
-
-@Generated
-public class RestoreRunResponse {
-
- @Override
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
- return true;
- }
-
- @Override
- public int hashCode() {
- return Objects.hash();
- }
-
- @Override
- public String toString() {
- return new ToStringer(RestoreRunResponse.class).toString();
- }
-}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetExperimentTagResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetExperimentTagResponse.java
deleted file mode 100755
index 2f62954b2..000000000
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetExperimentTagResponse.java
+++ /dev/null
@@ -1,28 +0,0 @@
-// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-package com.databricks.sdk.service.ml;
-
-import com.databricks.sdk.support.Generated;
-import com.databricks.sdk.support.ToStringer;
-import java.util.Objects;
-
-@Generated
-public class SetExperimentTagResponse {
-
- @Override
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
- return true;
- }
-
- @Override
- public int hashCode() {
- return Objects.hash();
- }
-
- @Override
- public String toString() {
- return new ToStringer(SetExperimentTagResponse.class).toString();
- }
-}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetLoggedModelTagsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetLoggedModelTagsResponse.java
deleted file mode 100755
index 924dacc20..000000000
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetLoggedModelTagsResponse.java
+++ /dev/null
@@ -1,28 +0,0 @@
-// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-package com.databricks.sdk.service.ml;
-
-import com.databricks.sdk.support.Generated;
-import com.databricks.sdk.support.ToStringer;
-import java.util.Objects;
-
-@Generated
-public class SetLoggedModelTagsResponse {
-
- @Override
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
- return true;
- }
-
- @Override
- public int hashCode() {
- return Objects.hash();
- }
-
- @Override
- public String toString() {
- return new ToStringer(SetLoggedModelTagsResponse.class).toString();
- }
-}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetModelTagResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetModelTagResponse.java
deleted file mode 100755
index a741183c3..000000000
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetModelTagResponse.java
+++ /dev/null
@@ -1,28 +0,0 @@
-// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-package com.databricks.sdk.service.ml;
-
-import com.databricks.sdk.support.Generated;
-import com.databricks.sdk.support.ToStringer;
-import java.util.Objects;
-
-@Generated
-public class SetModelTagResponse {
-
- @Override
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
- return true;
- }
-
- @Override
- public int hashCode() {
- return Objects.hash();
- }
-
- @Override
- public String toString() {
- return new ToStringer(SetModelTagResponse.class).toString();
- }
-}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetModelVersionTagResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetModelVersionTagResponse.java
deleted file mode 100755
index 26d6245bf..000000000
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetModelVersionTagResponse.java
+++ /dev/null
@@ -1,28 +0,0 @@
-// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-package com.databricks.sdk.service.ml;
-
-import com.databricks.sdk.support.Generated;
-import com.databricks.sdk.support.ToStringer;
-import java.util.Objects;
-
-@Generated
-public class SetModelVersionTagResponse {
-
- @Override
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
- return true;
- }
-
- @Override
- public int hashCode() {
- return Objects.hash();
- }
-
- @Override
- public String toString() {
- return new ToStringer(SetModelVersionTagResponse.class).toString();
- }
-}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetTagResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetTagResponse.java
deleted file mode 100755
index 89d485ce0..000000000
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetTagResponse.java
+++ /dev/null
@@ -1,28 +0,0 @@
-// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-package com.databricks.sdk.service.ml;
-
-import com.databricks.sdk.support.Generated;
-import com.databricks.sdk.support.ToStringer;
-import java.util.Objects;
-
-@Generated
-public class SetTagResponse {
-
- @Override
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
- return true;
- }
-
- @Override
- public int hashCode() {
- return Objects.hash();
- }
-
- @Override
- public String toString() {
- return new ToStringer(SetTagResponse.class).toString();
- }
-}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/Stage.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/Stage.java
deleted file mode 100755
index 7810e70c6..000000000
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/Stage.java
+++ /dev/null
@@ -1,32 +0,0 @@
-// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-package com.databricks.sdk.service.ml;
-
-import com.databricks.sdk.support.Generated;
-import com.fasterxml.jackson.annotation.JsonProperty;
-
-/**
- * Stage of the model version. Valid values are:
- *
- * * `None`: The initial stage of a model version.
- *
- * * `Staging`: Staging or pre-production stage.
- *
- * * `Production`: Production stage.
- *
- * * `Archived`: Archived stage.
- */
-@Generated
-public enum Stage {
- @JsonProperty("Archived")
- ARCHIVED, // Archived stage.
-
- @JsonProperty("None")
- NONE, // The initial stage of a model version.
-
- @JsonProperty("Production")
- PRODUCTION, // Production stage.
-
- @JsonProperty("Staging")
- STAGING, // Staging or pre-production stage.
-}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/TestRegistryWebhook.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/TestRegistryWebhook.java
deleted file mode 100755
index 16729c8e2..000000000
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/TestRegistryWebhook.java
+++ /dev/null
@@ -1,59 +0,0 @@
-// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-package com.databricks.sdk.service.ml;
-
-import com.databricks.sdk.support.Generated;
-import com.databricks.sdk.support.ToStringer;
-import com.fasterxml.jackson.annotation.JsonProperty;
-import java.util.Objects;
-
-/** Test webhook response object. */
-@Generated
-public class TestRegistryWebhook {
- /** Body of the response from the webhook URL */
- @JsonProperty("body")
- private String body;
-
- /** Status code returned by the webhook URL */
- @JsonProperty("status_code")
- private Long statusCode;
-
- public TestRegistryWebhook setBody(String body) {
- this.body = body;
- return this;
- }
-
- public String getBody() {
- return body;
- }
-
- public TestRegistryWebhook setStatusCode(Long statusCode) {
- this.statusCode = statusCode;
- return this;
- }
-
- public Long getStatusCode() {
- return statusCode;
- }
-
- @Override
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
- TestRegistryWebhook that = (TestRegistryWebhook) o;
- return Objects.equals(body, that.body) && Objects.equals(statusCode, that.statusCode);
- }
-
- @Override
- public int hashCode() {
- return Objects.hash(body, statusCode);
- }
-
- @Override
- public String toString() {
- return new ToStringer(TestRegistryWebhook.class)
- .add("body", body)
- .add("statusCode", statusCode)
- .toString();
- }
-}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/TestRegistryWebhookRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/TestRegistryWebhookRequest.java
index be3a7d261..22e497c89 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/TestRegistryWebhookRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/TestRegistryWebhookRequest.java
@@ -7,6 +7,7 @@
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;
+/** Details required to test a registry webhook. */
@Generated
public class TestRegistryWebhookRequest {
/**
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/TestRegistryWebhookResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/TestRegistryWebhookResponse.java
index f74b558bb..cf3bda13c 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/TestRegistryWebhookResponse.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/TestRegistryWebhookResponse.java
@@ -9,17 +9,30 @@
@Generated
public class TestRegistryWebhookResponse {
- /** Test webhook response object. */
- @JsonProperty("webhook")
- private TestRegistryWebhook webhook;
+ /** Body of the response from the webhook URL */
+ @JsonProperty("body")
+ private String body;
- public TestRegistryWebhookResponse setWebhook(TestRegistryWebhook webhook) {
- this.webhook = webhook;
+ /** Status code returned by the webhook URL */
+ @JsonProperty("status_code")
+ private Long statusCode;
+
+ public TestRegistryWebhookResponse setBody(String body) {
+ this.body = body;
+ return this;
+ }
+
+ public String getBody() {
+ return body;
+ }
+
+ public TestRegistryWebhookResponse setStatusCode(Long statusCode) {
+ this.statusCode = statusCode;
return this;
}
- public TestRegistryWebhook getWebhook() {
- return webhook;
+ public Long getStatusCode() {
+ return statusCode;
}
@Override
@@ -27,16 +40,19 @@ public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
TestRegistryWebhookResponse that = (TestRegistryWebhookResponse) o;
- return Objects.equals(webhook, that.webhook);
+ return Objects.equals(body, that.body) && Objects.equals(statusCode, that.statusCode);
}
@Override
public int hashCode() {
- return Objects.hash(webhook);
+ return Objects.hash(body, statusCode);
}
@Override
public String toString() {
- return new ToStringer(TestRegistryWebhookResponse.class).add("webhook", webhook).toString();
+ return new ToStringer(TestRegistryWebhookResponse.class)
+ .add("body", body)
+ .add("statusCode", statusCode)
+ .toString();
}
}
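
A sketch of reading the flattened response: body and status_code now live directly on TestRegistryWebhookResponse instead of a nested TestRegistryWebhook object. The WorkspaceClient, the webhook ID, and the setId(...) setter on TestRegistryWebhookRequest are assumptions for illustration; only the response accessors come from this diff.

import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.ml.TestRegistryWebhookRequest;
import com.databricks.sdk.service.ml.TestRegistryWebhookResponse;

public class TestWebhookResponseExample {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();

    // setId(...) is assumed from the wider request type; "<webhook-id>" is a placeholder.
    TestRegistryWebhookResponse resp =
        w.modelRegistry()
            .testRegistryWebhook(new TestRegistryWebhookRequest().setId("<webhook-id>"));

    // Previously resp.getWebhook().getStatusCode() / getBody(); now read directly.
    System.out.println("HTTP " + resp.getStatusCode() + ": " + resp.getBody());
  }
}
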
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/TransitionModelVersionStageDatabricks.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/TransitionModelVersionStageDatabricks.java
index 0e69b1bf3..01b3c0fb8 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/TransitionModelVersionStageDatabricks.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/TransitionModelVersionStageDatabricks.java
@@ -7,6 +7,7 @@
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;
+/** Details required to transition a model version's stage. */
@Generated
public class TransitionModelVersionStageDatabricks {
/** Specifies whether to archive all current model versions in the target stage. */
@@ -33,7 +34,7 @@ public class TransitionModelVersionStageDatabricks {
* * `Archived`: Archived stage.
*/
@JsonProperty("stage")
- private Stage stage;
+ private String stage;
/** Version of the model. */
@JsonProperty("version")
@@ -67,12 +68,12 @@ public String getName() {
return name;
}
- public TransitionModelVersionStageDatabricks setStage(Stage stage) {
+ public TransitionModelVersionStageDatabricks setStage(String stage) {
this.stage = stage;
return this;
}
- public Stage getStage() {
+ public String getStage() {
return stage;
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/TransitionRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/TransitionRequest.java
index ebfb7c60e..7296f391a 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/TransitionRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/TransitionRequest.java
@@ -8,14 +8,17 @@
import java.util.Collection;
import java.util.Objects;
-/** Transition request details. */
+/**
+ * For activities, this contains the activity recorded for the action. For comments, this contains
+ * the comment details. For transition requests, this contains the transition request details.
+ */
@Generated
public class TransitionRequest {
/** Array of actions on the activity allowed for the current viewer. */
@JsonProperty("available_actions")
  private Collection
[... remaining lines of this hunk truncated ...]
   * * `Archived`: Archived stage.
*/
@JsonProperty("to_stage")
- private Stage toStage;
+ private String toStage;
/** The username of the user that created the object. */
@JsonProperty("user_id")
@@ -68,12 +71,12 @@ public Long getCreationTimestamp() {
return creationTimestamp;
}
- public TransitionRequest setToStage(Stage toStage) {
+ public TransitionRequest setToStage(String toStage) {
this.toStage = toStage;
return this;
}
- public Stage getToStage() {
+ public String getToStage() {
return toStage;
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/TransitionStageResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/TransitionStageResponse.java
index 22e210045..7a488f16b 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/TransitionStageResponse.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/TransitionStageResponse.java
@@ -9,17 +9,18 @@
@Generated
public class TransitionStageResponse {
- /** */
- @JsonProperty("model_version")
- private ModelVersionDatabricks modelVersion;
+ /** Updated model version */
+ @JsonProperty("model_version_databricks")
+ private ModelVersionDatabricks modelVersionDatabricks;
- public TransitionStageResponse setModelVersion(ModelVersionDatabricks modelVersion) {
- this.modelVersion = modelVersion;
+ public TransitionStageResponse setModelVersionDatabricks(
+ ModelVersionDatabricks modelVersionDatabricks) {
+ this.modelVersionDatabricks = modelVersionDatabricks;
return this;
}
- public ModelVersionDatabricks getModelVersion() {
- return modelVersion;
+ public ModelVersionDatabricks getModelVersionDatabricks() {
+ return modelVersionDatabricks;
}
@Override
@@ -27,18 +28,18 @@ public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
TransitionStageResponse that = (TransitionStageResponse) o;
- return Objects.equals(modelVersion, that.modelVersion);
+ return Objects.equals(modelVersionDatabricks, that.modelVersionDatabricks);
}
@Override
public int hashCode() {
- return Objects.hash(modelVersion);
+ return Objects.hash(modelVersionDatabricks);
}
@Override
public String toString() {
return new ToStringer(TransitionStageResponse.class)
- .add("modelVersion", modelVersion)
+ .add("modelVersionDatabricks", modelVersionDatabricks)
.toString();
}
}
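The two hunks above change how stage transitions are expressed: the target stage is now a plain string instead of the Stage enum, and the transition response exposes the updated version under model_version_databricks. A minimal sketch of calling code after this change, assuming a configured WorkspaceClient named w, a hypothetical model name, and that the ModelRegistry client's transitionStage method accepts this request type as generated elsewhere in the SDK:

    TransitionStageResponse resp =
        w.modelRegistry()
            .transitionStage(
                new TransitionModelVersionStageDatabricks()
                    .setName("my-model")   // hypothetical model name
                    .setVersion("1")
                    .setStage("Staging")   // now a plain string, previously the Stage enum
                    .setArchiveExistingVersions(false));
    // The updated version is read through the renamed accessor.
    ModelVersionDatabricks mv = resp.getModelVersionDatabricks();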
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateComment.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateComment.java
index 22cec7325..b51ece0fd 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateComment.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateComment.java
@@ -7,6 +7,7 @@
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;
+/** Details required to edit a comment on a model version. */
@Generated
public class UpdateComment {
/** User-provided comment on the action. */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateCommentResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateCommentResponse.java
index e453a67b6..ebfcdafba 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateCommentResponse.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateCommentResponse.java
@@ -9,7 +9,7 @@
@Generated
public class UpdateCommentResponse {
- /** Comment details. */
+ /** Updated comment object */
@JsonProperty("comment")
private CommentObject comment;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateExperimentResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateExperimentResponse.java
deleted file mode 100755
index 4ee79bb74..000000000
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateExperimentResponse.java
+++ /dev/null
@@ -1,28 +0,0 @@
-// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-package com.databricks.sdk.service.ml;
-
-import com.databricks.sdk.support.Generated;
-import com.databricks.sdk.support.ToStringer;
-import java.util.Objects;
-
-@Generated
-public class UpdateExperimentResponse {
-
- @Override
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
- return true;
- }
-
- @Override
- public int hashCode() {
- return Objects.hash();
- }
-
- @Override
- public String toString() {
- return new ToStringer(UpdateExperimentResponse.class).toString();
- }
-}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateFeatureTagRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateFeatureTagRequest.java
index a651bd1cf..3e8c0091e 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateFeatureTagRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateFeatureTagRequest.java
@@ -14,7 +14,7 @@ public class UpdateFeatureTagRequest {
/** */
@JsonIgnore private String featureName;
- /** Represents a tag on a feature in a feature table. */
+ /** */
@JsonProperty("feature_tag")
private FeatureTag featureTag;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateModelResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateModelResponse.java
index 759e6d5e7..bbf1fc01d 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateModelResponse.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateModelResponse.java
@@ -4,25 +4,41 @@
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;
@Generated
public class UpdateModelResponse {
+ /** */
+ @JsonProperty("registered_model")
+ private Model registeredModel;
+
+ public UpdateModelResponse setRegisteredModel(Model registeredModel) {
+ this.registeredModel = registeredModel;
+ return this;
+ }
+
+ public Model getRegisteredModel() {
+ return registeredModel;
+ }
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
- return true;
+ UpdateModelResponse that = (UpdateModelResponse) o;
+ return Objects.equals(registeredModel, that.registeredModel);
}
@Override
public int hashCode() {
- return Objects.hash();
+ return Objects.hash(registeredModel);
}
@Override
public String toString() {
- return new ToStringer(UpdateModelResponse.class).toString();
+ return new ToStringer(UpdateModelResponse.class)
+ .add("registeredModel", registeredModel)
+ .toString();
}
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateModelVersionResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateModelVersionResponse.java
index acdc9d1cc..524f42913 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateModelVersionResponse.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateModelVersionResponse.java
@@ -4,25 +4,41 @@
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;
@Generated
public class UpdateModelVersionResponse {
+ /** Return new version number generated for this model in registry. */
+ @JsonProperty("model_version")
+ private ModelVersion modelVersion;
+
+ public UpdateModelVersionResponse setModelVersion(ModelVersion modelVersion) {
+ this.modelVersion = modelVersion;
+ return this;
+ }
+
+ public ModelVersion getModelVersion() {
+ return modelVersion;
+ }
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
- return true;
+ UpdateModelVersionResponse that = (UpdateModelVersionResponse) o;
+ return Objects.equals(modelVersion, that.modelVersion);
}
@Override
public int hashCode() {
- return Objects.hash();
+ return Objects.hash(modelVersion);
}
@Override
public String toString() {
- return new ToStringer(UpdateModelVersionResponse.class).toString();
+ return new ToStringer(UpdateModelVersionResponse.class)
+ .add("modelVersion", modelVersion)
+ .toString();
}
}
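Both update responses above move from empty bodies to carrying the updated object. A minimal sketch of reading the model-version case, assuming a configured WorkspaceClient w and that the generated ModelRegistry updateModelVersion method now returns this response type; the model name is illustrative:

    UpdateModelVersionResponse versionResp =
        w.modelRegistry()
            .updateModelVersion(
                new UpdateModelVersion()
                    .setName("my-model")   // hypothetical model name
                    .setVersion("1")
                    .setDescription("updated notes"));
    ModelVersion updated = versionResp.getModelVersion();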
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateOnlineStoreRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateOnlineStoreRequest.java
index d6daac367..09868bdf0 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateOnlineStoreRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateOnlineStoreRequest.java
@@ -14,7 +14,7 @@ public class UpdateOnlineStoreRequest {
/** The name of the online store. This is the unique identifier for the online store. */
@JsonIgnore private String name;
- /** An OnlineStore is a logical database instance that stores and serves features online. */
+ /** Online store to update. */
@JsonProperty("online_store")
private OnlineStore onlineStore;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateRegistryWebhook.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateRegistryWebhook.java
index 285496290..01f993e99 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateRegistryWebhook.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateRegistryWebhook.java
@@ -8,6 +8,10 @@
import java.util.Collection;
import java.util.Objects;
+/**
+ * Details required to update a registry webhook. Only the fields that need to be updated should be
+ * specified, and both `http_url_spec` and `job_spec` should not be specified in the same request.
+ */
@Generated
public class UpdateRegistryWebhook {
/** User-specified description for the webhook. */
@@ -60,15 +64,7 @@ public class UpdateRegistryWebhook {
@JsonProperty("job_spec")
private JobSpec jobSpec;
- /**
- * Enable or disable triggering the webhook, or put the webhook into test mode. The default is
- * `ACTIVE`: * `ACTIVE`: Webhook is triggered when an associated event happens.
- *
- * * `DISABLED`: Webhook is not triggered.
- *
- * * `TEST_MODE`: Webhook can be triggered through the test endpoint, but is not triggered on a
- * real event.
- */
+ /** */
@JsonProperty("status")
private RegistryWebhookStatus status;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateWebhookResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateWebhookResponse.java
index 64b5de737..1003dd8f3 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateWebhookResponse.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateWebhookResponse.java
@@ -4,25 +4,39 @@
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;
@Generated
public class UpdateWebhookResponse {
+ /** */
+ @JsonProperty("webhook")
+ private RegistryWebhook webhook;
+
+ public UpdateWebhookResponse setWebhook(RegistryWebhook webhook) {
+ this.webhook = webhook;
+ return this;
+ }
+
+ public RegistryWebhook getWebhook() {
+ return webhook;
+ }
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
- return true;
+ UpdateWebhookResponse that = (UpdateWebhookResponse) o;
+ return Objects.equals(webhook, that.webhook);
}
@Override
public int hashCode() {
- return Objects.hash();
+ return Objects.hash(webhook);
}
@Override
public String toString() {
- return new ToStringer(UpdateWebhookResponse.class).toString();
+ return new ToStringer(UpdateWebhookResponse.class).add("webhook", webhook).toString();
}
}
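The registry webhook update response likewise now carries the updated webhook. A minimal sketch of inspecting it, assuming an UpdateWebhookResponse named resp obtained from the ModelRegistry update-webhook call:

    RegistryWebhook hook = resp.getWebhook();
    if (hook != null) {
      System.out.println("Webhook " + hook.getId() + " status: " + hook.getStatus());
    }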
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/AccountFederationPolicyImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/AccountFederationPolicyImpl.java
index e61b618f6..193bd3d5a 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/AccountFederationPolicyImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/AccountFederationPolicyImpl.java
@@ -41,7 +41,7 @@ public void delete(DeleteAccountFederationPolicyRequest request) {
Request req = new Request("DELETE", path);
ApiClient.setQuery(req, request);
req.withHeader("Accept", "application/json");
- apiClient.execute(req, DeleteResponse.class);
+ apiClient.execute(req, Void.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
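This is the first of many Impl changes below that follow the same pattern: endpoints whose responses have no body are now deserialized into Void instead of a dedicated empty response class, which is what allows classes such as DeleteResponse and the various *Output placeholders to be deleted. Only the target type passed to the API client changes; the request shape stays the same, roughly:

    Request req = new Request("DELETE", path);
    ApiClient.setQuery(req, request);
    req.withHeader("Accept", "application/json");
    apiClient.execute(req, Void.class); // previously an empty generated response class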
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/CustomAppIntegrationImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/CustomAppIntegrationImpl.java
index 3aa7acba1..b15f132b7 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/CustomAppIntegrationImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/CustomAppIntegrationImpl.java
@@ -42,7 +42,7 @@ public void delete(DeleteCustomAppIntegrationRequest request) {
Request req = new Request("DELETE", path);
ApiClient.setQuery(req, request);
req.withHeader("Accept", "application/json");
- apiClient.execute(req, DeleteCustomAppIntegrationOutput.class);
+ apiClient.execute(req, Void.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
@@ -90,7 +90,7 @@ public void update(UpdateCustomAppIntegration request) {
ApiClient.setQuery(req, request);
req.withHeader("Accept", "application/json");
req.withHeader("Content-Type", "application/json");
- apiClient.execute(req, UpdateCustomAppIntegrationOutput.class);
+ apiClient.execute(req, Void.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/DeleteCustomAppIntegrationOutput.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/DeleteCustomAppIntegrationOutput.java
deleted file mode 100755
index 9739ec0e8..000000000
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/DeleteCustomAppIntegrationOutput.java
+++ /dev/null
@@ -1,28 +0,0 @@
-// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-package com.databricks.sdk.service.oauth2;
-
-import com.databricks.sdk.support.Generated;
-import com.databricks.sdk.support.ToStringer;
-import java.util.Objects;
-
-@Generated
-public class DeleteCustomAppIntegrationOutput {
-
- @Override
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
- return true;
- }
-
- @Override
- public int hashCode() {
- return Objects.hash();
- }
-
- @Override
- public String toString() {
- return new ToStringer(DeleteCustomAppIntegrationOutput.class).toString();
- }
-}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/DeletePublishedAppIntegrationOutput.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/DeletePublishedAppIntegrationOutput.java
deleted file mode 100755
index cbc352c7d..000000000
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/DeletePublishedAppIntegrationOutput.java
+++ /dev/null
@@ -1,28 +0,0 @@
-// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-package com.databricks.sdk.service.oauth2;
-
-import com.databricks.sdk.support.Generated;
-import com.databricks.sdk.support.ToStringer;
-import java.util.Objects;
-
-@Generated
-public class DeletePublishedAppIntegrationOutput {
-
- @Override
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
- return true;
- }
-
- @Override
- public int hashCode() {
- return Objects.hash();
- }
-
- @Override
- public String toString() {
- return new ToStringer(DeletePublishedAppIntegrationOutput.class).toString();
- }
-}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/DeleteResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/DeleteResponse.java
deleted file mode 100755
index 6c5276a49..000000000
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/DeleteResponse.java
+++ /dev/null
@@ -1,28 +0,0 @@
-// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-package com.databricks.sdk.service.oauth2;
-
-import com.databricks.sdk.support.Generated;
-import com.databricks.sdk.support.ToStringer;
-import java.util.Objects;
-
-@Generated
-public class DeleteResponse {
-
- @Override
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
- return true;
- }
-
- @Override
- public int hashCode() {
- return Objects.hash();
- }
-
- @Override
- public String toString() {
- return new ToStringer(DeleteResponse.class).toString();
- }
-}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/FederationPolicy.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/FederationPolicy.java
index 18db0601f..842c5bea1 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/FederationPolicy.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/FederationPolicy.java
@@ -29,7 +29,7 @@ public class FederationPolicy {
@JsonProperty("name")
private String name;
- /** Specifies the policy to use for validating OIDC claims in your federated tokens. */
+ /** */
@JsonProperty("oidc_policy")
private OidcFederationPolicy oidcPolicy;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/PublishedAppIntegrationImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/PublishedAppIntegrationImpl.java
index e2592779a..84fba336f 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/PublishedAppIntegrationImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/PublishedAppIntegrationImpl.java
@@ -43,7 +43,7 @@ public void delete(DeletePublishedAppIntegrationRequest request) {
Request req = new Request("DELETE", path);
ApiClient.setQuery(req, request);
req.withHeader("Accept", "application/json");
- apiClient.execute(req, DeletePublishedAppIntegrationOutput.class);
+ apiClient.execute(req, Void.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
@@ -92,7 +92,7 @@ public void update(UpdatePublishedAppIntegration request) {
ApiClient.setQuery(req, request);
req.withHeader("Accept", "application/json");
req.withHeader("Content-Type", "application/json");
- apiClient.execute(req, UpdatePublishedAppIntegrationOutput.class);
+ apiClient.execute(req, Void.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ServicePrincipalFederationPolicyImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ServicePrincipalFederationPolicyImpl.java
index 322518bc9..3d1eb0349 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ServicePrincipalFederationPolicyImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ServicePrincipalFederationPolicyImpl.java
@@ -45,7 +45,7 @@ public void delete(DeleteServicePrincipalFederationPolicyRequest request) {
Request req = new Request("DELETE", path);
ApiClient.setQuery(req, request);
req.withHeader("Accept", "application/json");
- apiClient.execute(req, DeleteResponse.class);
+ apiClient.execute(req, Void.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ServicePrincipalSecretsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ServicePrincipalSecretsImpl.java
index 3579430b5..75cee9e08 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ServicePrincipalSecretsImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ServicePrincipalSecretsImpl.java
@@ -44,7 +44,7 @@ public void delete(DeleteServicePrincipalSecretRequest request) {
try {
Request req = new Request("DELETE", path);
ApiClient.setQuery(req, request);
- apiClient.execute(req, DeleteResponse.class);
+ apiClient.execute(req, Void.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/UpdateCustomAppIntegrationOutput.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/UpdateCustomAppIntegrationOutput.java
deleted file mode 100755
index 45f262320..000000000
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/UpdateCustomAppIntegrationOutput.java
+++ /dev/null
@@ -1,28 +0,0 @@
-// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-package com.databricks.sdk.service.oauth2;
-
-import com.databricks.sdk.support.Generated;
-import com.databricks.sdk.support.ToStringer;
-import java.util.Objects;
-
-@Generated
-public class UpdateCustomAppIntegrationOutput {
-
- @Override
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
- return true;
- }
-
- @Override
- public int hashCode() {
- return Objects.hash();
- }
-
- @Override
- public String toString() {
- return new ToStringer(UpdateCustomAppIntegrationOutput.class).toString();
- }
-}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/UpdatePublishedAppIntegrationOutput.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/UpdatePublishedAppIntegrationOutput.java
deleted file mode 100755
index efa8717d1..000000000
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/UpdatePublishedAppIntegrationOutput.java
+++ /dev/null
@@ -1,28 +0,0 @@
-// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-package com.databricks.sdk.service.oauth2;
-
-import com.databricks.sdk.support.Generated;
-import com.databricks.sdk.support.ToStringer;
-import java.util.Objects;
-
-@Generated
-public class UpdatePublishedAppIntegrationOutput {
-
- @Override
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
- return true;
- }
-
- @Override
- public int hashCode() {
- return Objects.hash();
- }
-
- @Override
- public String toString() {
- return new ToStringer(UpdatePublishedAppIntegrationOutput.class).toString();
- }
-}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/CreatePipeline.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/CreatePipeline.java
index 85f4d3dbc..167282b32 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/CreatePipeline.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/CreatePipeline.java
@@ -115,14 +115,7 @@ public class CreatePipeline {
@JsonProperty("root_path")
private String rootPath;
- /**
- * Write-only setting, available only in Create/Update calls. Specifies the user or service
- * principal that the pipeline runs as. If not specified, the pipeline runs as the user who
- * created the pipeline.
- *
- * Only `user_name` or `service_principal_name` can be specified. If both are specified, an
- * error is thrown.
- */
+ /** */
@JsonProperty("run_as")
private RunAs runAs;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/DeletePipelineResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/DeletePipelineResponse.java
deleted file mode 100755
index 103293d0f..000000000
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/DeletePipelineResponse.java
+++ /dev/null
@@ -1,28 +0,0 @@
-// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-package com.databricks.sdk.service.pipelines;
-
-import com.databricks.sdk.support.Generated;
-import com.databricks.sdk.support.ToStringer;
-import java.util.Objects;
-
-@Generated
-public class DeletePipelineResponse {
-
- @Override
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
- return true;
- }
-
- @Override
- public int hashCode() {
- return Objects.hash();
- }
-
- @Override
- public String toString() {
- return new ToStringer(DeletePipelineResponse.class).toString();
- }
-}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/EditPipeline.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/EditPipeline.java
index add58b4bd..5b8e3c4f2 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/EditPipeline.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/EditPipeline.java
@@ -124,14 +124,7 @@ public class EditPipeline {
@JsonProperty("root_path")
private String rootPath;
- /**
- * Write-only setting, available only in Create/Update calls. Specifies the user or service
- * principal that the pipeline runs as. If not specified, the pipeline runs as the user who
- * created the pipeline.
- *
- * Only `user_name` or `service_principal_name` can be specified. If both are specified, an
- * error is thrown.
- */
+ /** */
@JsonProperty("run_as")
private RunAs runAs;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/EditPipelineResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/EditPipelineResponse.java
deleted file mode 100755
index 2bb8b38a5..000000000
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/EditPipelineResponse.java
+++ /dev/null
@@ -1,28 +0,0 @@
-// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-package com.databricks.sdk.service.pipelines;
-
-import com.databricks.sdk.support.Generated;
-import com.databricks.sdk.support.ToStringer;
-import java.util.Objects;
-
-@Generated
-public class EditPipelineResponse {
-
- @Override
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
- return true;
- }
-
- @Override
- public int hashCode() {
- return Objects.hash();
- }
-
- @Override
- public String toString() {
- return new ToStringer(EditPipelineResponse.class).toString();
- }
-}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/GetPipelineResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/GetPipelineResponse.java
index 0654879e3..ff158fa65 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/GetPipelineResponse.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/GetPipelineResponse.java
@@ -46,6 +46,14 @@ public class GetPipelineResponse {
@JsonProperty("pipeline_id")
private String pipelineId;
+ /**
+ * The user or service principal that the pipeline runs as, if specified in the request. This
+ * field indicates the explicit configuration of `run_as` for the pipeline. To find the value in
+ * all cases, explicit or implicit, use `run_as_user_name`.
+ */
+ @JsonProperty("run_as")
+ private RunAs runAs;
+
/** Username of the user that the pipeline will run on behalf of. */
@JsonProperty("run_as_user_name")
private String runAsUserName;
@@ -139,6 +147,15 @@ public String getPipelineId() {
return pipelineId;
}
+ public GetPipelineResponse setRunAs(RunAs runAs) {
+ this.runAs = runAs;
+ return this;
+ }
+
+ public RunAs getRunAs() {
+ return runAs;
+ }
+
public GetPipelineResponse setRunAsUserName(String runAsUserName) {
this.runAsUserName = runAsUserName;
return this;
@@ -180,6 +197,7 @@ public boolean equals(Object o) {
&& Objects.equals(latestUpdates, that.latestUpdates)
&& Objects.equals(name, that.name)
&& Objects.equals(pipelineId, that.pipelineId)
+ && Objects.equals(runAs, that.runAs)
&& Objects.equals(runAsUserName, that.runAsUserName)
&& Objects.equals(spec, that.spec)
&& Objects.equals(state, that.state);
@@ -197,6 +215,7 @@ public int hashCode() {
latestUpdates,
name,
pipelineId,
+ runAs,
runAsUserName,
spec,
state);
@@ -214,6 +233,7 @@ public String toString() {
.add("latestUpdates", latestUpdates)
.add("name", name)
.add("pipelineId", pipelineId)
+ .add("runAs", runAs)
.add("runAsUserName", runAsUserName)
.add("spec", spec)
.add("state", state)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionSourceType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionSourceType.java
index 272a8235d..bf68ec227 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionSourceType.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionSourceType.java
@@ -6,6 +6,7 @@
@Generated
public enum IngestionSourceType {
+ BIGQUERY,
DYNAMICS365,
GA4_RAW_DATA,
MANAGED_POSTGRESQL,
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineAccessControlRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineAccessControlRequest.java
index 27b567277..4aa4ca42f 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineAccessControlRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineAccessControlRequest.java
@@ -13,7 +13,7 @@ public class PipelineAccessControlRequest {
@JsonProperty("group_name")
private String groupName;
- /** Permission level */
+ /** */
@JsonProperty("permission_level")
private PipelinePermissionLevel permissionLevel;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinePermission.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinePermission.java
index 3911806c8..880e47d08 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinePermission.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinePermission.java
@@ -18,7 +18,7 @@ public class PipelinePermission {
@JsonProperty("inherited_from_object")
 private Collection<String> inheritedFromObject;
 - * [AWS PrivateLink]: https://aws.amazon.com/privatelink/
- */
+ /** */
@JsonProperty("vpc_endpoints")
private NetworkVpcEndpoints vpcEndpoints;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateStorageConfigurationRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateStorageConfigurationRequest.java
index fdc409d47..17bbcebc8 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateStorageConfigurationRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateStorageConfigurationRequest.java
@@ -9,7 +9,7 @@
@Generated
public class CreateStorageConfigurationRequest {
- /** Root S3 bucket information. */
+ /** */
@JsonProperty("root_bucket_info")
private RootBucketInfo rootBucketInfo;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateVpcEndpointRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateVpcEndpointRequest.java
index 1d87036a1..00ed46856 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateVpcEndpointRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateVpcEndpointRequest.java
@@ -13,7 +13,7 @@ public class CreateVpcEndpointRequest {
@JsonProperty("aws_vpc_endpoint_id")
private String awsVpcEndpointId;
- /** The Google Cloud specific information for this Private Service Connect endpoint. */
+ /** */
@JsonProperty("gcp_vpc_endpoint_info")
private GcpVpcEndpointInfo gcpVpcEndpointInfo;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateWorkspaceRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateWorkspaceRequest.java
index 2cc6ec80a..31d107a91 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateWorkspaceRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateWorkspaceRequest.java
@@ -21,7 +21,7 @@ public class CreateWorkspaceRequest {
@JsonProperty("cloud")
private String cloud;
- /** The general workspace configurations that are specific to cloud providers. */
+ /** */
@JsonProperty("cloud_resource_container")
private CloudResourceContainer cloudResourceContainer;
@@ -65,31 +65,11 @@ public class CreateWorkspaceRequest {
@JsonProperty("deployment_name")
private String deploymentName;
- /**
- * The network settings for the workspace. The configurations are only for Databricks-managed
- * VPCs. It is ignored if you specify a customer-managed VPC in the `network_id` field.", All the
- * IP range configurations must be mutually exclusive. An attempt to create a workspace fails if
- * Databricks detects an IP range overlap.
- *
- * Specify custom IP ranges in CIDR format. The IP ranges for these fields must not overlap,
- * and all IP addresses must be entirely within the following ranges: `10.0.0.0/8`,
- * `100.64.0.0/10`, `172.16.0.0/12`, `192.168.0.0/16`, and `240.0.0.0/4`.
- *
- * The sizes of these IP ranges affect the maximum number of nodes for the workspace.
- *
- * **Important**: Confirm the IP ranges used by your Databricks workspace before creating the
- * workspace. You cannot change them after your workspace is deployed. If the IP address ranges
- * for your Databricks are too small, IP exhaustion can occur, causing your Databricks jobs to
- * fail. To determine the address range sizes that you need, Databricks provides a calculator as a
- * Microsoft Excel spreadsheet. See [calculate subnet sizes for a new workspace].
- *
- * [calculate subnet sizes for a new workspace]:
- * https://docs.gcp.databricks.com/administration-guide/cloud-configurations/gcp/network-sizing.html
- */
+ /** */
@JsonProperty("gcp_managed_network_config")
private GcpManagedNetworkConfig gcpManagedNetworkConfig;
- /** The configurations for the GKE cluster of a Databricks workspace. */
+ /** */
@JsonProperty("gke_config")
private GkeConfig gkeConfig;
@@ -117,11 +97,7 @@ public class CreateWorkspaceRequest {
@JsonProperty("network_id")
private String networkId;
- /**
- * The pricing tier of the workspace. For pricing tier information, see [AWS Pricing].
- *
- * [AWS Pricing]: https://databricks.com/product/aws-pricing
- */
+ /** */
@JsonProperty("pricing_tier")
private PricingTier pricingTier;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CredentialsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CredentialsImpl.java
index 581cd163a..4aca2d8bb 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CredentialsImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CredentialsImpl.java
@@ -42,7 +42,7 @@ public void delete(DeleteCredentialRequest request) {
Request req = new Request("DELETE", path);
ApiClient.setQuery(req, request);
req.withHeader("Accept", "application/json");
- apiClient.execute(req, DeleteResponse.class);
+ apiClient.execute(req, Void.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/DeleteResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/DeleteResponse.java
deleted file mode 100755
index 053e59bd5..000000000
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/DeleteResponse.java
+++ /dev/null
@@ -1,28 +0,0 @@
-// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-package com.databricks.sdk.service.provisioning;
-
-import com.databricks.sdk.support.Generated;
-import com.databricks.sdk.support.ToStringer;
-import java.util.Objects;
-
-@Generated
-public class DeleteResponse {
-
- @Override
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
- return true;
- }
-
- @Override
- public int hashCode() {
- return Objects.hash();
- }
-
- @Override
- public String toString() {
- return new ToStringer(DeleteResponse.class).toString();
- }
-}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/EncryptionKeysImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/EncryptionKeysImpl.java
index 8e103d747..20424aec0 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/EncryptionKeysImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/EncryptionKeysImpl.java
@@ -43,7 +43,7 @@ public void delete(DeleteEncryptionKeyRequest request) {
Request req = new Request("DELETE", path);
ApiClient.setQuery(req, request);
req.withHeader("Accept", "application/json");
- apiClient.execute(req, DeleteResponse.class);
+ apiClient.execute(req, Void.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/Network.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/Network.java
index 428d49863..b149a4d48 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/Network.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/Network.java
@@ -22,10 +22,7 @@ public class Network {
@JsonProperty("error_messages")
 private Collection<NetworkHealth> errorMessages;
 - * [AWS PrivateLink]: https://aws.amazon.com/privatelink/
- */
+ /** */
@JsonProperty("vpc_endpoints")
private NetworkVpcEndpoints vpcEndpoints;
@@ -61,10 +53,7 @@ public class Network {
@JsonProperty("vpc_id")
private String vpcId;
- /**
- * The status of this network configuration object in terms of its use in a workspace: *
- * `UNATTACHED`: Unattached. * `VALID`: Valid. * `BROKEN`: Broken. * `WARNED`: Warned.
- */
+ /** */
@JsonProperty("vpc_status")
private VpcStatus vpcStatus;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/NetworkHealth.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/NetworkHealth.java
index 8d00d7c3e..45b0dc60a 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/NetworkHealth.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/NetworkHealth.java
@@ -13,10 +13,7 @@ public class NetworkHealth {
@JsonProperty("error_message")
private String errorMessage;
- /**
- * The AWS resource associated with this error: credentials, VPC, subnet, security group, or
- * network ACL.
- */
+ /** */
@JsonProperty("error_type")
private ErrorType errorType;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/NetworkWarning.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/NetworkWarning.java
index 8db624574..8e7d14a8a 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/NetworkWarning.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/NetworkWarning.java
@@ -13,7 +13,7 @@ public class NetworkWarning {
@JsonProperty("warning_message")
private String warningMessage;
- /** The AWS resource associated with this warning: a subnet or a security group. */
+ /** */
@JsonProperty("warning_type")
private WarningType warningType;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/NetworksImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/NetworksImpl.java
index cdd5f594b..5a6c8d710 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/NetworksImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/NetworksImpl.java
@@ -41,7 +41,7 @@ public void delete(DeleteNetworkRequest request) {
Request req = new Request("DELETE", path);
ApiClient.setQuery(req, request);
req.withHeader("Accept", "application/json");
- apiClient.execute(req, DeleteResponse.class);
+ apiClient.execute(req, Void.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/PrivateAccessImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/PrivateAccessImpl.java
index 5fd0babfc..948896927 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/PrivateAccessImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/PrivateAccessImpl.java
@@ -43,7 +43,7 @@ public void delete(DeletePrivateAccesRequest request) {
Request req = new Request("DELETE", path);
ApiClient.setQuery(req, request);
req.withHeader("Accept", "application/json");
- apiClient.execute(req, DeleteResponse.class);
+ apiClient.execute(req, Void.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
@@ -86,7 +86,7 @@ public void replace(UpsertPrivateAccessSettingsRequest request) {
ApiClient.setQuery(req, request);
req.withHeader("Accept", "application/json");
req.withHeader("Content-Type", "application/json");
- apiClient.execute(req, ReplaceResponse.class);
+ apiClient.execute(req, Void.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/PrivateAccessSettings.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/PrivateAccessSettings.java
index 7be0b8b37..03c466d78 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/PrivateAccessSettings.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/PrivateAccessSettings.java
@@ -18,13 +18,7 @@ public class PrivateAccessSettings {
@JsonProperty("allowed_vpc_endpoint_ids")
 private Collection<String> allowedVpcEndpointIds;
 - * [endpoint service]: https://docs.aws.amazon.com/vpc/latest/privatelink/endpoint-service.html
- */
+ /** */
@JsonProperty("use_case")
private EndpointUseCase useCase;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/VpcEndpointsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/VpcEndpointsImpl.java
index 68a709bc6..c6141bb98 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/VpcEndpointsImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/VpcEndpointsImpl.java
@@ -42,7 +42,7 @@ public void delete(DeleteVpcEndpointRequest request) {
Request req = new Request("DELETE", path);
ApiClient.setQuery(req, request);
req.withHeader("Accept", "application/json");
- apiClient.execute(req, DeleteResponse.class);
+ apiClient.execute(req, Void.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/Workspace.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/Workspace.java
index 4d6b61c9d..ea983a894 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/Workspace.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/Workspace.java
@@ -26,7 +26,7 @@ public class Workspace {
@JsonProperty("cloud")
private String cloud;
- /** The general workspace configurations that are specific to cloud providers. */
+ /** */
@JsonProperty("cloud_resource_container")
private CloudResourceContainer cloudResourceContainer;
@@ -62,31 +62,11 @@ public class Workspace {
@JsonProperty("external_customer_info")
private ExternalCustomerInfo externalCustomerInfo;
- /**
- * The network settings for the workspace. The configurations are only for Databricks-managed
- * VPCs. It is ignored if you specify a customer-managed VPC in the `network_id` field.", All the
- * IP range configurations must be mutually exclusive. An attempt to create a workspace fails if
- * Databricks detects an IP range overlap.
- *
- * Specify custom IP ranges in CIDR format. The IP ranges for these fields must not overlap,
- * and all IP addresses must be entirely within the following ranges: `10.0.0.0/8`,
- * `100.64.0.0/10`, `172.16.0.0/12`, `192.168.0.0/16`, and `240.0.0.0/4`.
- *
- * The sizes of these IP ranges affect the maximum number of nodes for the workspace.
- *
- * **Important**: Confirm the IP ranges used by your Databricks workspace before creating the
- * workspace. You cannot change them after your workspace is deployed. If the IP address ranges
- * for your Databricks are too small, IP exhaustion can occur, causing your Databricks jobs to
- * fail. To determine the address range sizes that you need, Databricks provides a calculator as a
- * Microsoft Excel spreadsheet. See [calculate subnet sizes for a new workspace].
- *
- * [calculate subnet sizes for a new workspace]:
- * https://docs.gcp.databricks.com/administration-guide/cloud-configurations/gcp/network-sizing.html
- */
+ /** */
@JsonProperty("gcp_managed_network_config")
private GcpManagedNetworkConfig gcpManagedNetworkConfig;
- /** The configurations for the GKE cluster of a Databricks workspace. */
+ /** */
@JsonProperty("gke_config")
private GkeConfig gkeConfig;
@@ -112,11 +92,7 @@ public class Workspace {
@JsonProperty("network_id")
private String networkId;
- /**
- * The pricing tier of the workspace. For pricing tier information, see [AWS Pricing].
- *
- * [AWS Pricing]: https://databricks.com/product/aws-pricing
- */
+ /** */
@JsonProperty("pricing_tier")
private PricingTier pricingTier;
@@ -150,10 +126,7 @@ public class Workspace {
@JsonProperty("workspace_name")
private String workspaceName;
- /**
- * The status of the workspace. For workspace creation, usually it is set to `PROVISIONING`
- * initially. Continue to check the status until the status is `RUNNING`.
- */
+ /** */
@JsonProperty("workspace_status")
private WorkspaceStatus workspaceStatus;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/WorkspacesImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/WorkspacesImpl.java
index e700dac93..61557b7dd 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/WorkspacesImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/WorkspacesImpl.java
@@ -41,7 +41,7 @@ public void delete(DeleteWorkspaceRequest request) {
Request req = new Request("DELETE", path);
ApiClient.setQuery(req, request);
req.withHeader("Accept", "application/json");
- apiClient.execute(req, DeleteResponse.class);
+ apiClient.execute(req, Void.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
@@ -82,7 +82,7 @@ public void update(UpdateWorkspaceRequest request) {
ApiClient.setQuery(req, request);
req.withHeader("Accept", "application/json");
req.withHeader("Content-Type", "application/json");
- apiClient.execute(req, UpdateResponse.class);
+ apiClient.execute(req, Void.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/QualityMonitorV2Impl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/QualityMonitorV2Impl.java
index 0880dbd86..eea8ba539 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/QualityMonitorV2Impl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/QualityMonitorV2Impl.java
@@ -39,7 +39,7 @@ public void deleteQualityMonitor(DeleteQualityMonitorRequest request) {
Request req = new Request("DELETE", path);
ApiClient.setQuery(req, request);
req.withHeader("Accept", "application/json");
- apiClient.execute(req, DeleteQualityMonitorResponse.class);
+ apiClient.execute(req, Void.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AiGatewayRateLimit.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AiGatewayRateLimit.java
index 5c1066f20..c39679117 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AiGatewayRateLimit.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AiGatewayRateLimit.java
@@ -14,12 +14,19 @@ public class AiGatewayRateLimit {
private Long calls;
/**
- * Key field for a rate limit. Currently, only 'user' and 'endpoint' are supported, with
- * 'endpoint' being the default if not specified.
+ * Key field for a rate limit. Currently, 'user', 'user_group', 'service_principal', and 'endpoint'
+ * are supported, with 'endpoint' being the default if not specified.
*/
@JsonProperty("key")
private AiGatewayRateLimitKey key;
+ /**
+ * Principal field for a user, user group, or service principal to apply rate limiting to. Accepts
+ * a user email, group name, or service principal application ID.
+ */
+ @JsonProperty("principal")
+ private String principal;
+
/** Renewal period field for a rate limit. Currently, only 'minute' is supported. */
@JsonProperty("renewal_period")
private AiGatewayRateLimitRenewalPeriod renewalPeriod;
@@ -42,6 +49,15 @@ public AiGatewayRateLimitKey getKey() {
return key;
}
+ public AiGatewayRateLimit setPrincipal(String principal) {
+ this.principal = principal;
+ return this;
+ }
+
+ public String getPrincipal() {
+ return principal;
+ }
+
public AiGatewayRateLimit setRenewalPeriod(AiGatewayRateLimitRenewalPeriod renewalPeriod) {
this.renewalPeriod = renewalPeriod;
return this;
@@ -58,12 +74,13 @@ public boolean equals(Object o) {
AiGatewayRateLimit that = (AiGatewayRateLimit) o;
return Objects.equals(calls, that.calls)
&& Objects.equals(key, that.key)
+ && Objects.equals(principal, that.principal)
&& Objects.equals(renewalPeriod, that.renewalPeriod);
}
@Override
public int hashCode() {
- return Objects.hash(calls, key, renewalPeriod);
+ return Objects.hash(calls, key, principal, renewalPeriod);
}
@Override
@@ -71,6 +88,7 @@ public String toString() {
return new ToStringer(AiGatewayRateLimit.class)
.add("calls", calls)
.add("key", key)
+ .add("principal", principal)
.add("renewalPeriod", renewalPeriod)
.toString();
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AiGatewayRateLimitKey.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AiGatewayRateLimitKey.java
index a2870c1f5..3b731847b 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AiGatewayRateLimitKey.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AiGatewayRateLimitKey.java
@@ -10,6 +10,12 @@ public enum AiGatewayRateLimitKey {
@JsonProperty("endpoint")
ENDPOINT,
+ @JsonProperty("service_principal")
+ SERVICE_PRINCIPAL,
+
@JsonProperty("user")
USER,
+
+ @JsonProperty("user_group")
+ USER_GROUP,
}
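The new key values and the principal field make it possible to scope a rate limit to a specific identity rather than the whole endpoint. A minimal sketch of constructing such a limit; the application ID is illustrative:

    AiGatewayRateLimit perPrincipalLimit =
        new AiGatewayRateLimit()
            .setKey(AiGatewayRateLimitKey.SERVICE_PRINCIPAL)
            .setPrincipal("1234abcd-app-id") // hypothetical service principal application ID
            .setCalls(100L)
            .setRenewalPeriod(AiGatewayRateLimitRenewalPeriod.MINUTE);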
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/DeleteResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/DeleteResponse.java
deleted file mode 100755
index 94d8eb1f5..000000000
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/DeleteResponse.java
+++ /dev/null
@@ -1,28 +0,0 @@
-// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-package com.databricks.sdk.service.serving;
-
-import com.databricks.sdk.support.Generated;
-import com.databricks.sdk.support.ToStringer;
-import java.util.Objects;
-
-@Generated
-public class DeleteResponse {
-
- @Override
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
- return true;
- }
-
- @Override
- public int hashCode() {
- return Objects.hash();
- }
-
- @Override
- public String toString() {
- return new ToStringer(DeleteResponse.class).toString();
- }
-}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedEntityOutput.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedEntityOutput.java
index 129841ac9..b2764b5c1 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedEntityOutput.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedEntityOutput.java
@@ -52,10 +52,7 @@ public class ServedEntityOutput {
@JsonProperty("external_model")
private ExternalModel externalModel;
- /**
- * All fields are not sensitive as they are hard-coded in the system and made available to
- * customers.
- */
+ /** */
@JsonProperty("foundation_model")
private FoundationModel foundationModel;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedEntitySpec.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedEntitySpec.java
index 8ed57eb23..ec9eb293f 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedEntitySpec.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedEntitySpec.java
@@ -21,10 +21,7 @@ public class ServedEntitySpec {
@JsonProperty("external_model")
private ExternalModel externalModel;
- /**
- * All fields are not sensitive as they are hard-coded in the system and made available to
- * customers.
- */
+ /** */
@JsonProperty("foundation_model")
private FoundationModel foundationModel;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointAccessControlRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointAccessControlRequest.java
index a1c66a4f3..7b120c29b 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointAccessControlRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointAccessControlRequest.java
@@ -13,7 +13,7 @@ public class ServingEndpointAccessControlRequest {
@JsonProperty("group_name")
private String groupName;
- /** Permission level */
+ /** */
@JsonProperty("permission_level")
private ServingEndpointPermissionLevel permissionLevel;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointPermission.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointPermission.java
index 5f2f50a35..eb682a764 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointPermission.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointPermission.java
@@ -18,7 +18,7 @@ public class ServingEndpointPermission {
@JsonProperty("inherited_from_object")
 private Collection<String> inheritedFromObject;
 - * * `ALLOW`: An allow list. Include this IP or range. * `BLOCK`: A block list. Exclude this IP
- * or range. IP addresses in the block list are excluded even if they are included in an allow
- * list.
- */
+ /** */
@JsonProperty("list_type")
private ListType listType;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateIpAccessListResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateIpAccessListResponse.java
index dba141ff5..8d51f57fc 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateIpAccessListResponse.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateIpAccessListResponse.java
@@ -10,7 +10,7 @@
/** An IP access list was successfully created. */
@Generated
public class CreateIpAccessListResponse {
- /** Definition of an IP Access list */
+ /** */
@JsonProperty("ip_access_list")
private IpAccessListInfo ipAccessList;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateNetworkConnectivityConfigRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateNetworkConnectivityConfigRequest.java
index 85ca6762f..6a4aa431c 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateNetworkConnectivityConfigRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateNetworkConnectivityConfigRequest.java
@@ -9,7 +9,7 @@
@Generated
public class CreateNetworkConnectivityConfigRequest {
- /** Properties of the new network connectivity configuration. */
+ /** */
@JsonProperty("network_connectivity_config")
private CreateNetworkConnectivityConfiguration networkConnectivityConfig;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateNetworkPolicyRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateNetworkPolicyRequest.java
index dc6876eb9..1c33e6191 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateNetworkPolicyRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateNetworkPolicyRequest.java
@@ -9,7 +9,7 @@
@Generated
public class CreateNetworkPolicyRequest {
- /** */
+ /** Network policy configuration details. */
@JsonProperty("network_policy")
private AccountNetworkPolicy networkPolicy;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreatePrivateEndpointRuleRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreatePrivateEndpointRuleRequest.java
index fc369b6c2..57ce53f8a 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreatePrivateEndpointRuleRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreatePrivateEndpointRuleRequest.java
@@ -13,10 +13,7 @@ public class CreatePrivateEndpointRuleRequest {
/** Your Network Connectivity Configuration ID. */
@JsonIgnore private String networkConnectivityConfigId;
- /**
- * Properties of the new private endpoint rule. Note that you must approve the endpoint in Azure
- * portal after initialization.
- */
+ /** */
@JsonProperty("private_endpoint_rule")
private CreatePrivateEndpointRule privateEndpointRule;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CspEnablementAccountSetting.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CspEnablementAccountSetting.java
index 83ee147bf..7697448b5 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CspEnablementAccountSetting.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CspEnablementAccountSetting.java
@@ -9,7 +9,7 @@
@Generated
public class CspEnablementAccountSetting {
- /** Account level policy for CSP */
+ /** */
@JsonProperty("csp_enablement_account")
private CspEnablementAccount cspEnablementAccount;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DefaultWarehouseId.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DefaultWarehouseId.java
new file mode 100755
index 000000000..d92f69459
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DefaultWarehouseId.java
@@ -0,0 +1,86 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.settings;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class DefaultWarehouseId {
+ /**
+ * etag used for versioning. The response is at least as fresh as the eTag provided. This is used
+ * for optimistic concurrency control as a way to help prevent simultaneous writes of a setting
+ * overwriting each other. It is strongly suggested that systems make use of the etag in the read
+ * -> update pattern to perform setting updates in order to avoid race conditions. That is, get an
+ * etag from a GET request, and pass it with the PATCH request to identify the setting version you
+ * are updating.
+ */
+ @JsonProperty("etag")
+ private String etag;
+
+ /**
+ * Name of the corresponding setting. This field is populated in the response, but it will not be
+ * respected even if it's set in the request body. The setting name in the path parameter will be
+ * respected instead. Setting name is required to be 'default' if the setting only has one
+ * instance per workspace.
+ */
+ @JsonProperty("setting_name")
+ private String settingName;
+
+ /** */
+ @JsonProperty("string_val")
+ private StringMessage stringVal;
+
+ public DefaultWarehouseId setEtag(String etag) {
+ this.etag = etag;
+ return this;
+ }
+
+ public String getEtag() {
+ return etag;
+ }
+
+ public DefaultWarehouseId setSettingName(String settingName) {
+ this.settingName = settingName;
+ return this;
+ }
+
+ public String getSettingName() {
+ return settingName;
+ }
+
+ public DefaultWarehouseId setStringVal(StringMessage stringVal) {
+ this.stringVal = stringVal;
+ return this;
+ }
+
+ public StringMessage getStringVal() {
+ return stringVal;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ DefaultWarehouseId that = (DefaultWarehouseId) o;
+ return Objects.equals(etag, that.etag)
+ && Objects.equals(settingName, that.settingName)
+ && Objects.equals(stringVal, that.stringVal);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(etag, settingName, stringVal);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(DefaultWarehouseId.class)
+ .add("etag", etag)
+ .add("settingName", settingName)
+ .add("stringVal", stringVal)
+ .toString();
+ }
+}
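The new DefaultWarehouseId payload above pairs a setting name with a StringMessage value. A minimal sketch of building it, assuming StringMessage exposes a setValue(String) setter as the other string-valued settings messages in this package do; the warehouse ID is a made-up placeholder:

import com.databricks.sdk.service.settings.DefaultWarehouseId;
import com.databricks.sdk.service.settings.StringMessage;

public class DefaultWarehouseIdPayloadSketch {
  public static void main(String[] args) {
    // Hypothetical SQL warehouse ID; substitute a real one from the workspace.
    DefaultWarehouseId setting =
        new DefaultWarehouseId()
            .setSettingName("default") // single-instance settings use the name "default"
            .setStringVal(new StringMessage().setValue("1234567890abcdef"));
    System.out.println(setting);
  }
}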
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DefaultWarehouseIdAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DefaultWarehouseIdAPI.java
new file mode 100755
index 000000000..41ea96633
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DefaultWarehouseIdAPI.java
@@ -0,0 +1,56 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.settings;
+
+import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.support.Generated;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Warehouse to be selected by default for users in this workspace. Covers SQL workloads only and
+ * can be overridden by users.
+ */
+@Generated
+public class DefaultWarehouseIdAPI {
+ private static final Logger LOG = LoggerFactory.getLogger(DefaultWarehouseIdAPI.class);
+
+ private final DefaultWarehouseIdService impl;
+
+ /** Regular-use constructor */
+ public DefaultWarehouseIdAPI(ApiClient apiClient) {
+ impl = new DefaultWarehouseIdImpl(apiClient);
+ }
+
+ /** Constructor for mocks */
+ public DefaultWarehouseIdAPI(DefaultWarehouseIdService mock) {
+ impl = mock;
+ }
+
+ /** Reverts the Default Warehouse Id setting to its default value. */
+ public DeleteDefaultWarehouseIdResponse delete(DeleteDefaultWarehouseIdRequest request) {
+ return impl.delete(request);
+ }
+
+ /** Gets the Default Warehouse Id setting. */
+ public DefaultWarehouseId get(GetDefaultWarehouseIdRequest request) {
+ return impl.get(request);
+ }
+
+ public DefaultWarehouseId update(
+ boolean allowMissing, DefaultWarehouseId setting, String fieldMask) {
+ return update(
+ new UpdateDefaultWarehouseIdRequest()
+ .setAllowMissing(allowMissing)
+ .setSetting(setting)
+ .setFieldMask(fieldMask));
+ }
+
+ /** Updates the Default Warehouse Id setting. */
+ public DefaultWarehouseId update(UpdateDefaultWarehouseIdRequest request) {
+ return impl.update(request);
+ }
+
+ public DefaultWarehouseIdService impl() {
+ return impl;
+ }
+}
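A hedged usage sketch of the read -> update pattern the etag documentation describes, wired through the DefaultWarehouseId() accessor that SettingsAPI registers later in this diff. The "string_val.value" field mask and the StringMessage.setValue call are assumptions, not taken from this patch:

import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.settings.DefaultWarehouseId;
import com.databricks.sdk.service.settings.GetDefaultWarehouseIdRequest;
import com.databricks.sdk.service.settings.StringMessage;

public class DefaultWarehouseIdReadUpdateSketch {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();

    // GET first to obtain the current etag for optimistic concurrency control.
    DefaultWarehouseId current =
        w.settings().DefaultWarehouseId().get(new GetDefaultWarehouseIdRequest());

    // Pass the etag from the GET back with the PATCH so the update targets the version just read.
    DefaultWarehouseId updated =
        w.settings()
            .DefaultWarehouseId()
            .update(
                true, // allowMissing
                new DefaultWarehouseId()
                    .setEtag(current.getEtag())
                    .setStringVal(new StringMessage().setValue("1234567890abcdef")),
                "string_val.value"); // assumed field mask path
    System.out.println(updated.getEtag());
  }
}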
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DefaultWarehouseIdImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DefaultWarehouseIdImpl.java
new file mode 100755
index 000000000..b88356619
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DefaultWarehouseIdImpl.java
@@ -0,0 +1,58 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.settings;
+
+import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.http.Request;
+import com.databricks.sdk.support.Generated;
+import java.io.IOException;
+
+/** Package-local implementation of DefaultWarehouseId */
+@Generated
+class DefaultWarehouseIdImpl implements DefaultWarehouseIdService {
+ private final ApiClient apiClient;
+
+ public DefaultWarehouseIdImpl(ApiClient apiClient) {
+ this.apiClient = apiClient;
+ }
+
+ @Override
+ public DeleteDefaultWarehouseIdResponse delete(DeleteDefaultWarehouseIdRequest request) {
+ String path = "/api/2.0/settings/types/default_warehouse_id/names/default";
+ try {
+ Request req = new Request("DELETE", path);
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ return apiClient.execute(req, DeleteDefaultWarehouseIdResponse.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public DefaultWarehouseId get(GetDefaultWarehouseIdRequest request) {
+ String path = "/api/2.0/settings/types/default_warehouse_id/names/default";
+ try {
+ Request req = new Request("GET", path);
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ return apiClient.execute(req, DefaultWarehouseId.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public DefaultWarehouseId update(UpdateDefaultWarehouseIdRequest request) {
+ String path = "/api/2.0/settings/types/default_warehouse_id/names/default";
+ try {
+ Request req = new Request("PATCH", path, apiClient.serialize(request));
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ req.withHeader("Content-Type", "application/json");
+ return apiClient.execute(req, DefaultWarehouseId.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DefaultWarehouseIdService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DefaultWarehouseIdService.java
new file mode 100755
index 000000000..fdc487b50
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DefaultWarehouseIdService.java
@@ -0,0 +1,25 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.settings;
+
+import com.databricks.sdk.support.Generated;
+
+/**
+ * Warehouse to be selected by default for users in this workspace. Covers SQL workloads only and
+ * can be overridden by users.
+ *
+ * This is the high-level interface, that contains generated methods.
+ *
+ * Evolving: this interface is under development. Method signatures may change.
+ */
+@Generated
+public interface DefaultWarehouseIdService {
+ /** Reverts the Default Warehouse Id setting to its default value. */
+ DeleteDefaultWarehouseIdResponse delete(
+ DeleteDefaultWarehouseIdRequest deleteDefaultWarehouseIdRequest);
+
+ /** Gets the Default Warehouse Id setting. */
+ DefaultWarehouseId get(GetDefaultWarehouseIdRequest getDefaultWarehouseIdRequest);
+
+ /** Updates the Default Warehouse Id setting. */
+ DefaultWarehouseId update(UpdateDefaultWarehouseIdRequest updateDefaultWarehouseIdRequest);
+}
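Since DefaultWarehouseIdAPI offers a constructor for mocks, a test can stub this interface directly; the returned values below are canned examples:

import com.databricks.sdk.service.settings.*;

public class DefaultWarehouseIdMockSketch {
  public static void main(String[] args) {
    DefaultWarehouseIdService stub =
        new DefaultWarehouseIdService() {
          @Override
          public DeleteDefaultWarehouseIdResponse delete(DeleteDefaultWarehouseIdRequest request) {
            return new DeleteDefaultWarehouseIdResponse().setEtag("etag-after-delete");
          }

          @Override
          public DefaultWarehouseId get(GetDefaultWarehouseIdRequest request) {
            return new DefaultWarehouseId().setEtag("etag-1").setSettingName("default");
          }

          @Override
          public DefaultWarehouseId update(UpdateDefaultWarehouseIdRequest request) {
            return request.getSetting().setEtag("etag-2");
          }
        };

    // The API wrapper accepts the stub through its mock constructor.
    DefaultWarehouseIdAPI api = new DefaultWarehouseIdAPI(stub);
    System.out.println(api.get(new GetDefaultWarehouseIdRequest()).getSettingName());
  }
}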
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDefaultWarehouseIdRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDefaultWarehouseIdRequest.java
new file mode 100755
index 000000000..9867c4f60
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDefaultWarehouseIdRequest.java
@@ -0,0 +1,51 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.settings;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+@Generated
+public class DeleteDefaultWarehouseIdRequest {
+ /**
+ * etag used for versioning. The response is at least as fresh as the eTag provided. This is used
+ * for optimistic concurrency control as a way to help prevent simultaneous writes of a setting
+ * overwriting each other. It is strongly suggested that systems make use of the etag in the read
+ * -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get
+ * an etag from a GET request, and pass it with the DELETE request to identify the rule set
+ * version you are deleting.
+ */
+ @JsonIgnore
+ @QueryParam("etag")
+ private String etag;
+
+ public DeleteDefaultWarehouseIdRequest setEtag(String etag) {
+ this.etag = etag;
+ return this;
+ }
+
+ public String getEtag() {
+ return etag;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ DeleteDefaultWarehouseIdRequest that = (DeleteDefaultWarehouseIdRequest) o;
+ return Objects.equals(etag, that.etag);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(etag);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(DeleteDefaultWarehouseIdRequest.class).add("etag", etag).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDefaultWarehouseIdResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDefaultWarehouseIdResponse.java
new file mode 100755
index 000000000..8da0557b1
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDefaultWarehouseIdResponse.java
@@ -0,0 +1,50 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.settings;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+/** The etag is returned. */
+@Generated
+public class DeleteDefaultWarehouseIdResponse {
+ /**
+ * etag used for versioning. The response is at least as fresh as the eTag provided. This is used
+ * for optimistic concurrency control as a way to help prevent simultaneous writes of a setting
+ * overwriting each other. It is strongly suggested that systems make use of the etag in the read
+ * -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get
+ * an etag from a GET request, and pass it with the DELETE request to identify the rule set
+ * version you are deleting.
+ */
+ @JsonProperty("etag")
+ private String etag;
+
+ public DeleteDefaultWarehouseIdResponse setEtag(String etag) {
+ this.etag = etag;
+ return this;
+ }
+
+ public String getEtag() {
+ return etag;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ DeleteDefaultWarehouseIdResponse that = (DeleteDefaultWarehouseIdResponse) o;
+ return Objects.equals(etag, that.etag);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(etag);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(DeleteDefaultWarehouseIdResponse.class).add("etag", etag).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteResponse.java
deleted file mode 100755
index b8bc53d67..000000000
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteResponse.java
+++ /dev/null
@@ -1,28 +0,0 @@
-// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-package com.databricks.sdk.service.settings;
-
-import com.databricks.sdk.support.Generated;
-import com.databricks.sdk.support.ToStringer;
-import java.util.Objects;
-
-@Generated
-public class DeleteResponse {
-
- @Override
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
- return true;
- }
-
- @Override
- public int hashCode() {
- return Objects.hash();
- }
-
- @Override
- public String toString() {
- return new ToStringer(DeleteResponse.class).toString();
- }
-}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyInternetAccessPolicy.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyInternetAccessPolicy.java
index 37029d2c5..82c1fd5fc 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyInternetAccessPolicy.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyInternetAccessPolicy.java
@@ -24,13 +24,7 @@ public class EgressNetworkPolicyInternetAccessPolicy {
@JsonProperty("log_only_mode")
private EgressNetworkPolicyInternetAccessPolicyLogOnlyMode logOnlyMode;
- /**
- * At which level can Databricks and Databricks managed compute access Internet. FULL_ACCESS:
- * Databricks can access Internet. No blocking rules will apply. RESTRICTED_ACCESS: Databricks can
- * only access explicitly allowed internet and storage destinations, as well as UC connections and
- * external locations. PRIVATE_ACCESS_ONLY (not used): Databricks can only access destinations via
- * private link.
- */
+ /** */
@JsonProperty("restriction_mode")
private EgressNetworkPolicyInternetAccessPolicyRestrictionMode restrictionMode;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyInternetAccessPolicyInternetDestination.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyInternetAccessPolicyInternetDestination.java
index 6c97950e2..5fa93fd1c 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyInternetAccessPolicyInternetDestination.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyInternetAccessPolicyInternetDestination.java
@@ -18,12 +18,7 @@ public class EgressNetworkPolicyInternetAccessPolicyInternetDestination {
@JsonProperty("destination")
private String destination;
- /**
- * The filtering protocol used by the DP. For private and public preview, SEG will only support
- * TCP filtering (i.e. DNS based filtering, filtering by destination IP address), so protocol will
- * be set to TCP by default and hidden from the user. In the future, users may be able to select
- * HTTP filtering (i.e. SNI based filtering, filtering by FQDN).
- */
+ /** */
@JsonProperty("protocol")
private
EgressNetworkPolicyInternetAccessPolicyInternetDestinationInternetDestinationFilteringProtocol
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/Empty.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/Empty.java
deleted file mode 100755
index 43f12c991..000000000
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/Empty.java
+++ /dev/null
@@ -1,28 +0,0 @@
-// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-package com.databricks.sdk.service.settings;
-
-import com.databricks.sdk.support.Generated;
-import com.databricks.sdk.support.ToStringer;
-import java.util.Objects;
-
-@Generated
-public class Empty {
-
- @Override
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
- return true;
- }
-
- @Override
- public int hashCode() {
- return Objects.hash();
- }
-
- @Override
- public String toString() {
- return new ToStringer(Empty.class).toString();
- }
-}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnhancedSecurityMonitoringSetting.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnhancedSecurityMonitoringSetting.java
index c0b0a4544..5a7f6e6ae 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnhancedSecurityMonitoringSetting.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnhancedSecurityMonitoringSetting.java
@@ -9,7 +9,7 @@
@Generated
public class EnhancedSecurityMonitoringSetting {
- /** SHIELD feature: ESM */
+ /** */
@JsonProperty("enhanced_security_monitoring_workspace")
private EnhancedSecurityMonitoring enhancedSecurityMonitoringWorkspace;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EsmEnablementAccountSetting.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EsmEnablementAccountSetting.java
index 9dddd8ab0..999819258 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EsmEnablementAccountSetting.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EsmEnablementAccountSetting.java
@@ -9,7 +9,7 @@
@Generated
public class EsmEnablementAccountSetting {
- /** Account level policy for ESM */
+ /** */
@JsonProperty("esm_enablement_account")
private EsmEnablementAccount esmEnablementAccount;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/FetchIpAccessListResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/FetchIpAccessListResponse.java
index 45e0c9c3a..4e74c0b8d 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/FetchIpAccessListResponse.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/FetchIpAccessListResponse.java
@@ -10,7 +10,7 @@
/** An IP access list was successfully returned. */
@Generated
public class FetchIpAccessListResponse {
- /** Definition of an IP Access list */
+ /** */
@JsonProperty("ip_access_list")
private IpAccessListInfo ipAccessList;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetDefaultWarehouseIdRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetDefaultWarehouseIdRequest.java
new file mode 100755
index 000000000..9e2b62400
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetDefaultWarehouseIdRequest.java
@@ -0,0 +1,51 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.settings;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+@Generated
+public class GetDefaultWarehouseIdRequest {
+ /**
+ * etag used for versioning. The response is at least as fresh as the eTag provided. This is used
+ * for optimistic concurrency control as a way to help prevent simultaneous writes of a setting
+ * overwriting each other. It is strongly suggested that systems make use of the etag in the read
+ * -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get
+ * an etag from a GET request, and pass it with the DELETE request to identify the rule set
+ * version you are deleting.
+ */
+ @JsonIgnore
+ @QueryParam("etag")
+ private String etag;
+
+ public GetDefaultWarehouseIdRequest setEtag(String etag) {
+ this.etag = etag;
+ return this;
+ }
+
+ public String getEtag() {
+ return etag;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ GetDefaultWarehouseIdRequest that = (GetDefaultWarehouseIdRequest) o;
+ return Objects.equals(etag, that.etag);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(etag);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(GetDefaultWarehouseIdRequest.class).add("etag", etag).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetIpAccessListResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetIpAccessListResponse.java
index 88afa428f..47925b4bd 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetIpAccessListResponse.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetIpAccessListResponse.java
@@ -9,7 +9,7 @@
@Generated
public class GetIpAccessListResponse {
- /** Definition of an IP Access list */
+ /** */
@JsonProperty("ip_access_list")
private IpAccessListInfo ipAccessList;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/IpAccessListInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/IpAccessListInfo.java
index c2ac51992..439e4055e 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/IpAccessListInfo.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/IpAccessListInfo.java
@@ -39,13 +39,7 @@ public class IpAccessListInfo {
@JsonProperty("list_id")
private String listId;
- /**
- * Type of IP access list. Valid values are as follows and are case-sensitive:
- *
- * * `ALLOW`: An allow list. Include this IP or range. * `BLOCK`: A block list. Exclude this IP
- * or range. IP addresses in the block list are excluded even if they are included in an allow
- * list.
- */
+ /** */
@JsonProperty("list_type")
private ListType listType;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/IpAccessListsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/IpAccessListsImpl.java
index b9cca1598..f70522d2a 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/IpAccessListsImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/IpAccessListsImpl.java
@@ -36,7 +36,7 @@ public void delete(DeleteIpAccessListRequest request) {
try {
Request req = new Request("DELETE", path);
ApiClient.setQuery(req, request);
- apiClient.execute(req, DeleteResponse.class);
+ apiClient.execute(req, Void.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
@@ -74,7 +74,7 @@ public void replace(ReplaceIpAccessList request) {
Request req = new Request("PUT", path, apiClient.serialize(request));
ApiClient.setQuery(req, request);
req.withHeader("Content-Type", "application/json");
- apiClient.execute(req, ReplaceResponse.class);
+ apiClient.execute(req, Void.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
@@ -87,7 +87,7 @@ public void update(UpdateIpAccessList request) {
Request req = new Request("PATCH", path, apiClient.serialize(request));
ApiClient.setQuery(req, request);
req.withHeader("Content-Type", "application/json");
- apiClient.execute(req, UpdateResponse.class);
+ apiClient.execute(req, Void.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccEgressDefaultRules.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccEgressDefaultRules.java
index e46162f5d..d4f40b77f 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccEgressDefaultRules.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccEgressDefaultRules.java
@@ -10,17 +10,11 @@
/** Default rules don't have specific targets. */
@Generated
public class NccEgressDefaultRules {
- /**
- * The stable AWS IP CIDR blocks. You can use these to configure the firewall of your resources to
- * allow traffic from your Databricks workspace.
- */
+ /** */
@JsonProperty("aws_stable_ip_rule")
private NccAwsStableIpRule awsStableIpRule;
- /**
- * The stable Azure service endpoints. You can configure the firewall of your Azure resources to
- * allow traffic from your Databricks serverless compute resources.
- */
+ /** */
@JsonProperty("azure_service_endpoint_rule")
private NccAzureServiceEndpointRule azureServiceEndpointRule;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityImpl.java
index 4bd996e8a..7884cf74f 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityImpl.java
@@ -64,7 +64,7 @@ public void deleteNetworkConnectivityConfiguration(
Request req = new Request("DELETE", path);
ApiClient.setQuery(req, request);
req.withHeader("Accept", "application/json");
- apiClient.execute(req, DeleteNetworkConnectivityConfigurationResponse.class);
+ apiClient.execute(req, Void.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkPoliciesImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkPoliciesImpl.java
index 1f9f29054..ee4e9a8ba 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkPoliciesImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkPoliciesImpl.java
@@ -41,7 +41,7 @@ public void deleteNetworkPolicyRpc(DeleteNetworkPolicyRequest request) {
Request req = new Request("DELETE", path);
ApiClient.setQuery(req, request);
req.withHeader("Accept", "application/json");
- apiClient.execute(req, DeleteNetworkPolicyRpcResponse.class);
+ apiClient.execute(req, Void.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NotificationDestinationsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NotificationDestinationsImpl.java
index 498afdf6b..ab692d437 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NotificationDestinationsImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NotificationDestinationsImpl.java
@@ -37,7 +37,7 @@ public void delete(DeleteNotificationDestinationRequest request) {
Request req = new Request("DELETE", path);
ApiClient.setQuery(req, request);
req.withHeader("Accept", "application/json");
- apiClient.execute(req, Empty.class);
+ apiClient.execute(req, Void.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/PersonalComputeMessage.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/PersonalComputeMessage.java
index 5dfe405f2..b0167e23d 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/PersonalComputeMessage.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/PersonalComputeMessage.java
@@ -9,13 +9,7 @@
@Generated
public class PersonalComputeMessage {
- /**
- * ON: Grants all users in all workspaces access to the Personal Compute default policy, allowing
- * all users to create single-machine compute resources. DELEGATE: Moves access control for the
- * Personal Compute default policy to individual workspaces and requires a workspace’s users or
- * groups to be added to the ACLs of that workspace’s Personal Compute default policy before they
- * will be able to create compute resources through that policy.
- */
+ /** */
@JsonProperty("value")
private PersonalComputeMessageEnum value;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ReplaceIpAccessList.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ReplaceIpAccessList.java
index b7c9d65e1..82be25f3a 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ReplaceIpAccessList.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ReplaceIpAccessList.java
@@ -27,13 +27,7 @@ public class ReplaceIpAccessList {
@JsonProperty("label")
private String label;
- /**
- * Type of IP access list. Valid values are as follows and are case-sensitive:
- *
- * * `ALLOW`: An allow list. Include this IP or range. * `BLOCK`: A block list. Exclude this IP
- * or range. IP addresses in the block list are excluded even if they are included in an allow
- * list.
- */
+ /** */
@JsonProperty("list_type")
private ListType listType;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ReplaceResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ReplaceResponse.java
deleted file mode 100755
index 5a3ba7e51..000000000
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ReplaceResponse.java
+++ /dev/null
@@ -1,28 +0,0 @@
-// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-package com.databricks.sdk.service.settings;
-
-import com.databricks.sdk.support.Generated;
-import com.databricks.sdk.support.ToStringer;
-import java.util.Objects;
-
-@Generated
-public class ReplaceResponse {
-
- @Override
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
- return true;
- }
-
- @Override
- public int hashCode() {
- return Objects.hash();
- }
-
- @Override
- public String toString() {
- return new ToStringer(ReplaceResponse.class).toString();
- }
-}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/RevokeTokenResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/RevokeTokenResponse.java
deleted file mode 100755
index 63ac738c7..000000000
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/RevokeTokenResponse.java
+++ /dev/null
@@ -1,28 +0,0 @@
-// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-package com.databricks.sdk.service.settings;
-
-import com.databricks.sdk.support.Generated;
-import com.databricks.sdk.support.ToStringer;
-import java.util.Objects;
-
-@Generated
-public class RevokeTokenResponse {
-
- @Override
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
- return true;
- }
-
- @Override
- public int hashCode() {
- return Objects.hash();
- }
-
- @Override
- public String toString() {
- return new ToStringer(RevokeTokenResponse.class).toString();
- }
-}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SetStatusResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SetStatusResponse.java
deleted file mode 100755
index 6d4a07fab..000000000
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SetStatusResponse.java
+++ /dev/null
@@ -1,28 +0,0 @@
-// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-package com.databricks.sdk.service.settings;
-
-import com.databricks.sdk.support.Generated;
-import com.databricks.sdk.support.ToStringer;
-import java.util.Objects;
-
-@Generated
-public class SetStatusResponse {
-
- @Override
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
- return true;
- }
-
- @Override
- public int hashCode() {
- return Objects.hash();
- }
-
- @Override
- public String toString() {
- return new ToStringer(SetStatusResponse.class).toString();
- }
-}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SettingsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SettingsAPI.java
index 16fa226ef..6e7933a7c 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SettingsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SettingsAPI.java
@@ -25,6 +25,8 @@ public class SettingsAPI {
private DefaultNamespaceAPI defaultNamespaceAPI;
+ private DefaultWarehouseIdAPI defaultWarehouseIdAPI;
+
private DisableLegacyAccessAPI disableLegacyAccessAPI;
private DisableLegacyDbfsAPI disableLegacyDbfsAPI;
@@ -60,6 +62,8 @@ public SettingsAPI(ApiClient apiClient) {
defaultNamespaceAPI = new DefaultNamespaceAPI(apiClient);
+ defaultWarehouseIdAPI = new DefaultWarehouseIdAPI(apiClient);
+
disableLegacyAccessAPI = new DisableLegacyAccessAPI(apiClient);
disableLegacyDbfsAPI = new DisableLegacyDbfsAPI(apiClient);
@@ -123,6 +127,11 @@ public DefaultNamespaceAPI DefaultNamespace() {
return defaultNamespaceAPI;
}
+ /** Warehouse to be selected by default for users in this workspace. */
+ public DefaultWarehouseIdAPI DefaultWarehouseId() {
+ return defaultWarehouseIdAPI;
+ }
+
/** 'Disabling legacy access' has the following impacts: 1. */
public DisableLegacyAccessAPI DisableLegacyAccess() {
return disableLegacyAccessAPI;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenAccessControlRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenAccessControlRequest.java
index 692f87aee..48af63a1d 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenAccessControlRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenAccessControlRequest.java
@@ -13,7 +13,7 @@ public class TokenAccessControlRequest {
@JsonProperty("group_name")
private String groupName;
- /** Permission level */
+ /** */
@JsonProperty("permission_level")
private TokenPermissionLevel permissionLevel;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenManagementImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenManagementImpl.java
index 8264976e5..0303b550d 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenManagementImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenManagementImpl.java
@@ -36,7 +36,7 @@ public void delete(DeleteTokenManagementRequest request) {
try {
Request req = new Request("DELETE", path);
ApiClient.setQuery(req, request);
- apiClient.execute(req, DeleteResponse.class);
+ apiClient.execute(req, Void.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenPermission.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenPermission.java
index 013077476..470a7e2e4 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenPermission.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenPermission.java
@@ -18,7 +18,7 @@ public class TokenPermission {
@JsonProperty("inherited_from_object")
private Collection<String> inheritedFromObject;

- /** Permission level */
+ /** */
@JsonProperty("permission_level")
private TokenPermissionLevel permissionLevel;

diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateDefaultWarehouseIdRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateDefaultWarehouseIdRequest.java
new file mode 100755
+@Generated
+public class UpdateDefaultWarehouseIdRequest {
+ /**
+ * A field mask of `*` indicates full replacement. It’s recommended to always explicitly list
+ * the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if
+ * the API changes in the future.
+ */
+ @JsonProperty("field_mask")
+ private String fieldMask;
+
+ /** */
+ @JsonProperty("setting")
+ private DefaultWarehouseId setting;
+
+ public UpdateDefaultWarehouseIdRequest setAllowMissing(Boolean allowMissing) {
+ this.allowMissing = allowMissing;
+ return this;
+ }
+
+ public Boolean getAllowMissing() {
+ return allowMissing;
+ }
+
+ public UpdateDefaultWarehouseIdRequest setFieldMask(String fieldMask) {
+ this.fieldMask = fieldMask;
+ return this;
+ }
+
+ public String getFieldMask() {
+ return fieldMask;
+ }
+
+ public UpdateDefaultWarehouseIdRequest setSetting(DefaultWarehouseId setting) {
+ this.setting = setting;
+ return this;
+ }
+
+ public DefaultWarehouseId getSetting() {
+ return setting;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ UpdateDefaultWarehouseIdRequest that = (UpdateDefaultWarehouseIdRequest) o;
+ return Objects.equals(allowMissing, that.allowMissing)
+ && Objects.equals(fieldMask, that.fieldMask)
+ && Objects.equals(setting, that.setting);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(allowMissing, fieldMask, setting);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(UpdateDefaultWarehouseIdRequest.class)
+ .add("allowMissing", allowMissing)
+ .add("fieldMask", fieldMask)
+ .add("setting", setting)
+ .toString();
+ }
+}
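Following the field-mask guidance above, a caller might build the request explicitly instead of using the convenience overload; "string_val.value" is an assumed example path rather than something stated in this patch:

import com.databricks.sdk.service.settings.DefaultWarehouseId;
import com.databricks.sdk.service.settings.StringMessage;
import com.databricks.sdk.service.settings.UpdateDefaultWarehouseIdRequest;

public class UpdateDefaultWarehouseIdRequestSketch {
  public static void main(String[] args) {
    UpdateDefaultWarehouseIdRequest request =
        new UpdateDefaultWarehouseIdRequest()
            .setAllowMissing(true)
            // List the updated fields explicitly rather than using the `*` wildcard.
            .setFieldMask("string_val.value")
            .setSetting(
                new DefaultWarehouseId()
                    .setStringVal(new StringMessage().setValue("1234567890abcdef")));
    System.out.println(request);
  }
}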
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateIpAccessList.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateIpAccessList.java
index 20c17976f..f636af0ce 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateIpAccessList.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateIpAccessList.java
@@ -27,13 +27,7 @@ public class UpdateIpAccessList {
@JsonProperty("label")
private String label;
- /**
- * Type of IP access list. Valid values are as follows and are case-sensitive:
- *
- * * `ALLOW`: An allow list. Include this IP or range. * `BLOCK`: A block list. Exclude this IP
- * or range. IP addresses in the block list are excluded even if they are included in an allow
- * list.
- */
+ /** */
@JsonProperty("list_type")
private ListType listType;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateNccPrivateEndpointRuleRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateNccPrivateEndpointRuleRequest.java
index a8b5df2c4..defd2840b 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateNccPrivateEndpointRuleRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateNccPrivateEndpointRuleRequest.java
@@ -17,10 +17,7 @@ public class UpdateNccPrivateEndpointRuleRequest {
*/
@JsonIgnore private String networkConnectivityConfigId;
- /**
- * Properties of the new private endpoint rule. Note that you must approve the endpoint in Azure
- * portal after initialization.
- */
+ /** */
@JsonProperty("private_endpoint_rule")
private UpdatePrivateEndpointRule privateEndpointRule;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateNetworkPolicyRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateNetworkPolicyRequest.java
index 54e9f3e6f..708f50a89 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateNetworkPolicyRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateNetworkPolicyRequest.java
@@ -10,7 +10,7 @@
@Generated
public class UpdateNetworkPolicyRequest {
- /** */
+ /** Updated network policy configuration details. */
@JsonProperty("network_policy")
private AccountNetworkPolicy networkPolicy;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateResponse.java
deleted file mode 100755
index 9ea1a5162..000000000
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateResponse.java
+++ /dev/null
@@ -1,28 +0,0 @@
-// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-package com.databricks.sdk.service.settings;
-
-import com.databricks.sdk.support.Generated;
-import com.databricks.sdk.support.ToStringer;
-import java.util.Objects;
-
-@Generated
-public class UpdateResponse {
-
- @Override
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
- return true;
- }
-
- @Override
- public int hashCode() {
- return Objects.hash();
- }
-
- @Override
- public String toString() {
- return new ToStringer(UpdateResponse.class).toString();
- }
-}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateWorkspaceNetworkOptionRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateWorkspaceNetworkOptionRequest.java
index 708394972..51d55de20 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateWorkspaceNetworkOptionRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateWorkspaceNetworkOptionRequest.java
@@ -13,7 +13,7 @@ public class UpdateWorkspaceNetworkOptionRequest {
/** The workspace ID. */
@JsonIgnore private Long workspaceId;
- /** */
+ /** The network option details for the workspace. */
@JsonProperty("workspace_network_option")
private WorkspaceNetworkOption workspaceNetworkOption;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/WorkspaceConfImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/WorkspaceConfImpl.java
index b0f3313e7..542281a3c 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/WorkspaceConfImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/WorkspaceConfImpl.java
@@ -32,7 +32,7 @@ public void setStatus(Map<String, String> request) {
- apiClient.execute(req, SetStatusResponse.class);
+ apiClient.execute(req, Void.class);
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/SecretsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/SecretsAPI.java
+ * The scope name must consist of alphanumeric characters, dashes, underscores, and periods,
+ * and may not exceed 128 characters.
+ *
+ * Example request:
+ *
+ * .. code::
+ *
+ * { "scope": "my-simple-databricks-scope", "initial_manage_principal": "users"
+ * "scope_backend_type": "databricks|azure_keyvault", # below is only required if scope type is
+ * azure_keyvault "backend_azure_keyvault": { "resource_id":
+ * "/subscriptions/xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx/resourceGroups/xxxx/providers/Microsoft.KeyVault/vaults/xxxx",
+ * "tenant_id": "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx", "dns_name":
+ * "https://xxxx.vault.azure.net/", } }
+ *
+ * If ``initial_manage_principal`` is specified, the initial ACL applied to the scope is
+ * applied to the supplied principal (user or group) with ``MANAGE`` permissions. The only
+ * supported principal for this option is the group ``users``, which contains all users in the
+ * workspace. If ``initial_manage_principal`` is not specified, the initial ACL with ``MANAGE``
+ * permission applied to the scope is assigned to the API request issuer's user identity.
+ *
+ * If ``scope_backend_type`` is ``azure_keyvault``, a secret scope is created with secrets from
+ * a given Azure KeyVault. The caller must provide the keyvault_resource_id and the tenant_id for
+ * the key vault. If ``scope_backend_type`` is ``databricks`` or is unspecified, an empty secret
+ * scope is created and stored in Databricks's own storage.
+ *
+ * Throws ``RESOURCE_ALREADY_EXISTS`` if a scope with the given name already exists. Throws
+ * ``RESOURCE_LIMIT_EXCEEDED`` if maximum number of scopes in the workspace is exceeded. Throws
+ * ``INVALID_PARAMETER_VALUE`` if the scope name is invalid. Throws ``BAD_REQUEST`` if request
+ * violated constraints. Throws ``CUSTOMER_UNAUTHORIZED`` if normal user attempts to create a
+ * scope with name reserved for databricks internal usage. Throws ``UNAUTHENTICATED`` if unable to
+ * verify user access permission on Azure KeyVault
*/
public void createScope(CreateScope request) {
impl.createScope(request);
@@ -53,9 +84,17 @@ public void deleteAcl(String scope, String principal) {
/**
* Deletes the given ACL on the given scope.
*
- * Users must have the `MANAGE` permission to invoke this API. Throws `RESOURCE_DOES_NOT_EXIST`
- * if no such secret scope, principal, or ACL exists. Throws `PERMISSION_DENIED` if the user does
- * not have permission to make this API call.
+ * Users must have the ``MANAGE`` permission to invoke this API.
+ *
+ * Example request:
+ *
+ * .. code::
+ *
+ * { "scope": "my-secret-scope", "principal": "data-scientists" }
+ *
+ * Throws ``RESOURCE_DOES_NOT_EXIST`` if no such secret scope, principal, or ACL exists. Throws
+ * ``PERMISSION_DENIED`` if the user does not have permission to make this API call. Throws
+ * ``INVALID_PARAMETER_VALUE`` if the permission or principal is invalid.
*/
public void deleteAcl(DeleteAcl request) {
impl.deleteAcl(request);
@@ -68,8 +107,15 @@ public void deleteScope(String scope) {
/**
* Deletes a secret scope.
*
- * Throws `RESOURCE_DOES_NOT_EXIST` if the scope does not exist. Throws `PERMISSION_DENIED` if
- * the user does not have permission to make this API call.
+ * Example request:
+ *
+ * .. code::
+ *
+ * { "scope": "my-secret-scope" }
+ *
+ * Throws ``RESOURCE_DOES_NOT_EXIST`` if the scope does not exist. Throws ``PERMISSION_DENIED``
+ * if the user does not have permission to make this API call. Throws ``BAD_REQUEST`` if system
+ * user attempts to delete internal secret scope.
*/
public void deleteScope(DeleteScope request) {
impl.deleteScope(request);
@@ -80,11 +126,19 @@ public void deleteSecret(String scope, String key) {
}
/**
- * Deletes the secret stored in this secret scope. You must have `WRITE` or `MANAGE` permission on
- * the secret scope.
+ * Deletes the secret stored in this secret scope. You must have ``WRITE`` or ``MANAGE``
+ * permission on the Secret Scope.
+ *
+ * Example request:
*
- * Throws `RESOURCE_DOES_NOT_EXIST` if no such secret scope or secret exists. Throws
- * `PERMISSION_DENIED` if the user does not have permission to make this API call.
+ * .. code::
+ *
+ * { "scope": "my-secret-scope", "key": "my-secret-key" }
+ *
+ * Throws ``RESOURCE_DOES_NOT_EXIST`` if no such secret scope or secret exists. Throws
+ * ``PERMISSION_DENIED`` if the user does not have permission to make this API call. Throws
+ * ``BAD_REQUEST`` if system user attempts to delete an internal secret, or request is made
+ * against Azure KeyVault backed scope.
*/
public void deleteSecret(DeleteSecret request) {
impl.deleteSecret(request);
@@ -95,11 +149,19 @@ public AclItem getAcl(String scope, String principal) {
}
/**
- * Gets the details about the given ACL, such as the group and permission. Users must have the
- * `MANAGE` permission to invoke this API.
+ * Describes the details about the given ACL, such as the group and permission.
+ *
+ * Users must have the ``MANAGE`` permission to invoke this API.
*
- * Throws `RESOURCE_DOES_NOT_EXIST` if no such secret scope exists. Throws `PERMISSION_DENIED`
- * if the user does not have permission to make this API call.
+ * Example response:
+ *
+ * .. code::
+ *
+ * { "principal": "data-scientists", "permission": "READ" }
+ *
+ * Throws ``RESOURCE_DOES_NOT_EXIST`` if no such secret scope exists. Throws
+ * ``PERMISSION_DENIED`` if the user does not have permission to make this API call. Throws
+ * ``INVALID_PARAMETER_VALUE`` if the permission or principal is invalid.
*/
public AclItem getAcl(GetAclRequest request) {
return impl.getAcl(request);
@@ -110,15 +172,30 @@ public GetSecretResponse getSecret(String scope, String key) {
}
/**
- * Gets the bytes representation of a secret value for the specified scope and key.
+ * Gets a secret for a given key and scope. This API can only be called from the DBUtils
+ * interface. Users need the READ permission to make this call.
+ *
+ * Example response:
+ *
+ * .. code::
*
- * Users need the READ permission to make this call.
+ * { "key": "my-string-key", "value": Note that the secret value returned is in bytes. The interpretation of the bytes is
* determined by the caller in DBUtils and the type the data is decoded into.
*
- * Throws ``PERMISSION_DENIED`` if the user does not have permission to make this API call.
- * Throws ``RESOURCE_DOES_NOT_EXIST`` if no such secret or secret scope exists.
+ * Throws ``RESOURCE_DOES_NOT_EXIST`` if no such secret or secret scope exists. Throws
+ * ``PERMISSION_DENIED`` if the user does not have permission to make this API call.
+ *
+ * Note: This is explicitly an undocumented API. It also doesn't need to be supported for the
+ * /preview prefix, because it's not a customer-facing API (i.e. only used for DBUtils SecretUtils
+ * to fetch secrets).
+ *
+ * Throws ``RESOURCE_DOES_NOT_EXIST`` if no such secret scope or secret exists. Throws
+ * ``BAD_REQUEST`` if normal user calls get secret outside of a notebook. AKV specific errors:
+ * Throws ``INVALID_PARAMETER_VALUE`` if secret name is not alphanumeric or too long. Throws
+ * ``PERMISSION_DENIED`` if secret manager cannot access AKV with 403 error Throws
+ * ``MALFORMED_REQUEST`` if secret manager cannot access AKV with any other 4xx error
*/
public GetSecretResponse getSecret(GetSecretRequest request) {
return impl.getSecret(request);
@@ -129,11 +206,19 @@ public Iterable<AclItem> listAcls(String scope) {
+ * Users must have the ``MANAGE`` permission to invoke this API.
+ *
+ * Example response:
+ *
+ * .. code::
*
- * Throws `RESOURCE_DOES_NOT_EXIST` if no such secret scope exists. Throws `PERMISSION_DENIED`
- * if the user does not have permission to make this API call.
+ * { "acls": [{ "principal": "admins", "permission": "MANAGE" },{ "principal":
+ * "data-scientists", "permission": "READ" }] }
+ *
+ * Throws ``RESOURCE_DOES_NOT_EXIST`` if no such secret scope exists. Throws
+ * ``PERMISSION_DENIED`` if the user does not have permission to make this API call.
*/
public Iterable<AclItem> listAcls(ListAclsRequest request) {
- * Throws `PERMISSION_DENIED` if the user does not have permission to make this API call.
+ * Example response:
+ *
+ * .. code::
+ *
+ * { "scopes": [{ "name": "my-databricks-scope", "backend_type": "DATABRICKS" },{ "name":
+ * "mount-points", "backend_type": "DATABRICKS" }] }
+ *
+ * Throws ``PERMISSION_DENIED`` if the user does not have permission to make this API call.
*/
public Iterable<SecretScope> listScopes() {
- * The lastUpdatedTimestamp returned is in milliseconds since epoch. Throws
- * `RESOURCE_DOES_NOT_EXIST` if no such secret scope exists. Throws `PERMISSION_DENIED` if the
- * user does not have permission to make this API call.
+ * Example response:
+ *
+ * .. code::
+ *
+ * { "secrets": [ { "key": "my-string-key"", "last_updated_timestamp": "1520467595000" }, {
+ * "key": "my-byte-key", "last_updated_timestamp": "1520467595000" }, ] }
+ *
+ * The lastUpdatedTimestamp returned is in milliseconds since epoch.
+ *
+ * Throws ``RESOURCE_DOES_NOT_EXIST`` if no such secret scope exists. Throws
+ * ``PERMISSION_DENIED`` if the user does not have permission to make this API call.
*/
public Iterable<SecretMetadata> listSecrets(ListSecretsRequest request) {
- * In general, a user or group will use the most powerful permission available to them, and
- * permissions are ordered as follows:
+ * Creates or overwrites the ACL associated with the given principal (user or group) on the
+ * specified scope point. In general, a user or group will use the most powerful permission
+ * available to them, and permissions are ordered as follows:
*
- * * `MANAGE` - Allowed to change ACLs, and read and write to this secret scope. * `WRITE` -
- * Allowed to read and write to this secret scope. * `READ` - Allowed to read this secret scope
- * and list what secrets are available.
+ * * ``MANAGE`` - Allowed to change ACLs, and read and write to this secret scope. * ``WRITE``
+ * - Allowed to read and write to this secret scope. * ``READ`` - Allowed to read this secret
+ * scope and list what secrets are available.
*
* Note that in general, secret values can only be read from within a command on a cluster (for
* example, through a notebook). There is no API to read the actual secret value material outside
* of a cluster. However, the user's permission will be applied based on who is executing the
* command, and they must have at least READ permission.
*
- * Users must have the `MANAGE` permission to invoke this API.
+ * Users must have the ``MANAGE`` permission to invoke this API.
+ *
+ * Example request:
+ *
+ * .. code::
+ *
+ * { "scope": "my-secret-scope", "principal": "data-scientists", "permission": "READ" }
*
* The principal is a user or group name corresponding to an existing Databricks principal to
* be granted or revoked access.
*
- * Throws `RESOURCE_DOES_NOT_EXIST` if no such secret scope exists. Throws
- * `RESOURCE_ALREADY_EXISTS` if a permission for the principal already exists. Throws
- * `INVALID_PARAMETER_VALUE` if the permission or principal is invalid. Throws `PERMISSION_DENIED`
- * if the user does not have permission to make this API call.
+ * Throws ``RESOURCE_DOES_NOT_EXIST`` if no such secret scope exists. Throws
+ * ``RESOURCE_ALREADY_EXISTS`` if a permission for the principal already exists. Throws
+ * ``INVALID_PARAMETER_VALUE`` if the permission or principal is invalid. Throws
+ * ``PERMISSION_DENIED`` if the user does not have permission to make this API call.
*/
public void putAcl(PutAcl request) {
impl.putAcl(request);
@@ -207,20 +311,28 @@ public void putSecret(String scope, String key) {
/**
* Inserts a secret under the provided scope with the given name. If a secret already exists with
* the same name, this command overwrites the existing secret's value. The server encrypts the
- * secret using the secret scope's encryption settings before storing it.
+ * secret using the secret scope's encryption settings before storing it. You must have ``WRITE``
+ * or ``MANAGE`` permission on the secret scope.
+ *
+ * The secret key must consist of alphanumeric characters, dashes, underscores, and periods,
+ * and cannot exceed 128 characters. The maximum allowed secret value size is 128 KB. The maximum
+ * number of secrets in a given scope is 1000.
+ *
+ * Example request:
+ *
+ * .. code::
*
- * You must have `WRITE` or `MANAGE` permission on the secret scope. The secret key must
- * consist of alphanumeric characters, dashes, underscores, and periods, and cannot exceed 128
- * characters. The maximum allowed secret value size is 128 KB. The maximum number of secrets in a
- * given scope is 1000.
+ * { "scope": "my-databricks-scope", "key": "my-string-key", "string_value": "foobar" }
*
* The input fields "string_value" or "bytes_value" specify the type of the secret, which will
* determine the value returned when the secret value is requested. Exactly one must be specified.
*
- * Throws `RESOURCE_DOES_NOT_EXIST` if no such secret scope exists. Throws
- * `RESOURCE_LIMIT_EXCEEDED` if maximum number of secrets in scope is exceeded. Throws
- * `INVALID_PARAMETER_VALUE` if the key name or value length is invalid. Throws
- * `PERMISSION_DENIED` if the user does not have permission to make this API call.
+ * Throws ``RESOURCE_DOES_NOT_EXIST`` if no such secret scope exists. Throws
+ * ``RESOURCE_LIMIT_EXCEEDED`` if maximum number of secrets in scope is exceeded. Throws
+ * ``INVALID_PARAMETER_VALUE`` if the request parameters are invalid. Throws ``PERMISSION_DENIED``
+ * if the user does not have permission to make this API call. Throws ``MALFORMED_REQUEST`` if
+ * request is incorrectly formatted or conflicting. Throws ``BAD_REQUEST`` if request is made
+ * against Azure KeyVault backed scope.
*/
public void putSecret(PutSecret request) {
impl.putSecret(request);
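
The putAcl and putSecret documentation above describes the request shapes but not how they are built from Java. The following is a minimal usage sketch using the SDK's WorkspaceClient wrapper; it assumes the PutSecret/PutAcl setters and the AclPermission enum from this workspace package, and the scope, key, and principal values are placeholders:

    import com.databricks.sdk.WorkspaceClient;
    import com.databricks.sdk.service.workspace.AclPermission;
    import com.databricks.sdk.service.workspace.PutAcl;
    import com.databricks.sdk.service.workspace.PutSecret;

    public class PutSecretExample {
      public static void main(String[] args) {
        WorkspaceClient w = new WorkspaceClient(); // resolves authentication from the environment

        // Store a string-valued secret (exactly one of string_value / bytes_value may be set).
        w.secrets()
            .putSecret(
                new PutSecret()
                    .setScope("my-databricks-scope") // placeholder scope name
                    .setKey("my-string-key")
                    .setStringValue("foobar"));

        // Grant the data-scientists group READ on the scope (the caller needs MANAGE on the scope).
        w.secrets()
            .putAcl(
                new PutAcl()
                    .setScope("my-databricks-scope")
                    .setPrincipal("data-scientists")
                    .setPermission(AclPermission.READ));
      }
    }
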
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/SecretsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/SecretsImpl.java
index 78a15a19c..80e812a95 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/SecretsImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/SecretsImpl.java
@@ -22,9 +22,8 @@ public void createScope(CreateScope request) {
try {
Request req = new Request("POST", path, apiClient.serialize(request));
ApiClient.setQuery(req, request);
- req.withHeader("Accept", "application/json");
req.withHeader("Content-Type", "application/json");
- apiClient.execute(req, CreateScopeResponse.class);
+ apiClient.execute(req, Void.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
@@ -36,9 +35,8 @@ public void deleteAcl(DeleteAcl request) {
try {
Request req = new Request("POST", path, apiClient.serialize(request));
ApiClient.setQuery(req, request);
- req.withHeader("Accept", "application/json");
req.withHeader("Content-Type", "application/json");
- apiClient.execute(req, DeleteAclResponse.class);
+ apiClient.execute(req, Void.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
@@ -50,9 +48,8 @@ public void deleteScope(DeleteScope request) {
try {
Request req = new Request("POST", path, apiClient.serialize(request));
ApiClient.setQuery(req, request);
- req.withHeader("Accept", "application/json");
req.withHeader("Content-Type", "application/json");
- apiClient.execute(req, DeleteScopeResponse.class);
+ apiClient.execute(req, Void.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
@@ -66,7 +63,7 @@ public void deleteSecret(DeleteSecret request) {
ApiClient.setQuery(req, request);
req.withHeader("Accept", "application/json");
req.withHeader("Content-Type", "application/json");
- apiClient.execute(req, DeleteSecretResponse.class);
+ apiClient.execute(req, Void.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
@@ -142,9 +139,8 @@ public void putAcl(PutAcl request) {
try {
Request req = new Request("POST", path, apiClient.serialize(request));
ApiClient.setQuery(req, request);
- req.withHeader("Accept", "application/json");
req.withHeader("Content-Type", "application/json");
- apiClient.execute(req, PutAclResponse.class);
+ apiClient.execute(req, Void.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
@@ -156,9 +152,8 @@ public void putSecret(PutSecret request) {
try {
Request req = new Request("POST", path, apiClient.serialize(request));
ApiClient.setQuery(req, request);
- req.withHeader("Accept", "application/json");
req.withHeader("Content-Type", "application/json");
- apiClient.execute(req, PutSecretResponse.class);
+ apiClient.execute(req, Void.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
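
To make the repeated hunk pattern above concrete, this is roughly what the regenerated putSecret method in SecretsImpl is assumed to look like after the change; the endpoint path is inferred from the public Secrets API and is not shown in this diff:

    public void putSecret(PutSecret request) {
      String path = "/api/2.0/secrets/put"; // assumed endpoint path; not part of this hunk
      try {
        Request req = new Request("POST", path, apiClient.serialize(request));
        ApiClient.setQuery(req, request);
        // No Accept header: the endpoint returns an empty body, so the response is
        // deserialized into Void.class instead of the removed PutSecretResponse class.
        req.withHeader("Content-Type", "application/json");
        apiClient.execute(req, Void.class);
      } catch (IOException e) {
        throw new DatabricksException("IO error: " + e.getMessage(), e);
      }
    }
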
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/SecretsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/SecretsService.java
index f8de5787c..d189d8f7e 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/SecretsService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/SecretsService.java
@@ -21,72 +21,164 @@
@Generated
public interface SecretsService {
/**
- * The scope name must consist of alphanumeric characters, dashes, underscores, and periods, and
- * may not exceed 128 characters.
+ * Creates a new secret scope.
+ *
+ * The scope name must consist of alphanumeric characters, dashes, underscores, and periods,
+ * and may not exceed 128 characters.
+ *
+ * Example request:
+ *
+ * .. code::
+ *
+ * { "scope": "my-simple-databricks-scope", "initial_manage_principal": "users"
+ * "scope_backend_type": "databricks|azure_keyvault", # below is only required if scope type is
+ * azure_keyvault "backend_azure_keyvault": { "resource_id":
+ * "/subscriptions/xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx/resourceGroups/xxxx/providers/Microsoft.KeyVault/vaults/xxxx",
+ * "tenant_id": "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx", "dns_name":
+ * "https://xxxx.vault.azure.net/", } }
+ *
+ * If ``initial_manage_principal`` is specified, the initial ACL applied to the scope is
+ * applied to the supplied principal (user or group) with ``MANAGE`` permissions. The only
+ * supported principal for this option is the group ``users``, which contains all users in the
+ * workspace. If ``initial_manage_principal`` is not specified, the initial ACL with ``MANAGE``
+ * permission applied to the scope is assigned to the API request issuer's user identity.
+ *
+ * If ``scope_backend_type`` is ``azure_keyvault``, a secret scope is created with secrets from
+ * a given Azure KeyVault. The caller must provide the keyvault_resource_id and the tenant_id for
+ * the key vault. If ``scope_backend_type`` is ``databricks`` or is unspecified, an empty secret
+ * scope is created and stored in Databricks's own storage.
+ *
+ * Throws ``RESOURCE_ALREADY_EXISTS`` if a scope with the given name already exists. Throws
+ * ``RESOURCE_LIMIT_EXCEEDED`` if maximum number of scopes in the workspace is exceeded. Throws
+ * ``INVALID_PARAMETER_VALUE`` if the scope name is invalid. Throws ``BAD_REQUEST`` if request
+ * violated constraints. Throws ``CUSTOMER_UNAUTHORIZED`` if normal user attempts to create a
+ * scope with name reserved for databricks internal usage. Throws ``UNAUTHENTICATED`` if unable to
+ * verify user access permission on Azure KeyVault
*/
void createScope(CreateScope createScope);
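
A hedged sketch of exercising both backend types described in this javadoc via the SDK wrapper, assuming the CreateScope, ScopeBackendType, and AzureKeyVaultSecretScopeMetadata classes from this workspace package; the Azure resource identifiers are placeholders:

    import com.databricks.sdk.WorkspaceClient;
    import com.databricks.sdk.service.workspace.AzureKeyVaultSecretScopeMetadata;
    import com.databricks.sdk.service.workspace.CreateScope;
    import com.databricks.sdk.service.workspace.ScopeBackendType;

    public class CreateScopeExample {
      public static void main(String[] args) {
        WorkspaceClient w = new WorkspaceClient();

        // Databricks-backed scope: secrets are stored in Databricks-managed storage.
        w.secrets()
            .createScope(
                new CreateScope()
                    .setScope("my-simple-databricks-scope")
                    .setInitialManagePrincipal("users")); // only the "users" group is supported here

        // Azure Key Vault-backed scope: secrets are read from an existing Key Vault.
        w.secrets()
            .createScope(
                new CreateScope()
                    .setScope("my-akv-scope")
                    .setScopeBackendType(ScopeBackendType.AZURE_KEYVAULT)
                    .setBackendAzureKeyvault(
                        new AzureKeyVaultSecretScopeMetadata()
                            .setResourceId("/subscriptions/<sub-id>/resourceGroups/<rg>/providers/Microsoft.KeyVault/vaults/<vault>")
                            .setTenantId("<tenant-id>")
                            .setDnsName("https://<vault>.vault.azure.net/")));
      }
    }
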
/**
* Deletes the given ACL on the given scope.
*
- * Users must have the `MANAGE` permission to invoke this API. Throws `RESOURCE_DOES_NOT_EXIST`
- * if no such secret scope, principal, or ACL exists. Throws `PERMISSION_DENIED` if the user does
- * not have permission to make this API call.
+ * Users must have the ``MANAGE`` permission to invoke this API.
+ *
+ * Example request:
+ *
+ * .. code::
+ *
+ * { "scope": "my-secret-scope", "principal": "data-scientists" }
+ *
+ * Throws ``RESOURCE_DOES_NOT_EXIST`` if no such secret scope, principal, or ACL exists. Throws
+ * ``PERMISSION_DENIED`` if the user does not have permission to make this API call. Throws
+ * ``INVALID_PARAMETER_VALUE`` if the permission or principal is invalid.
*/
void deleteAcl(DeleteAcl deleteAcl);
/**
* Deletes a secret scope.
*
- * Throws `RESOURCE_DOES_NOT_EXIST` if the scope does not exist. Throws `PERMISSION_DENIED` if
- * the user does not have permission to make this API call.
+ * Example request:
+ *
+ * .. code::
+ *
+ * { "scope": "my-secret-scope" }
+ *
+ * Throws ``RESOURCE_DOES_NOT_EXIST`` if the scope does not exist. Throws ``PERMISSION_DENIED``
+ * if the user does not have permission to make this API call. Throws ``BAD_REQUEST`` if system
+ * user attempts to delete internal secret scope.
*/
void deleteScope(DeleteScope deleteScope);
/**
- * Deletes the secret stored in this secret scope. You must have `WRITE` or `MANAGE` permission on
- * the secret scope.
+ * Deletes the secret stored in this secret scope. You must have ``WRITE`` or ``MANAGE``
+ * permission on the Secret Scope.
+ *
+ * Example request:
*
- * Throws `RESOURCE_DOES_NOT_EXIST` if no such secret scope or secret exists. Throws
- * `PERMISSION_DENIED` if the user does not have permission to make this API call.
+ * .. code::
+ *
+ * { "scope": "my-secret-scope", "key": "my-secret-key" }
+ *
+ * Throws ``RESOURCE_DOES_NOT_EXIST`` if no such secret scope or secret exists. Throws
+ * ``PERMISSION_DENIED`` if the user does not have permission to make this API call. Throws
+ * ``BAD_REQUEST`` if system user attempts to delete an internal secret, or request is made
+ * against Azure KeyVault backed scope.
*/
void deleteSecret(DeleteSecret deleteSecret);
/**
- * Gets the details about the given ACL, such as the group and permission. Users must have the
- * `MANAGE` permission to invoke this API.
+ * Describes the details about the given ACL, such as the group and permission.
+ *
+ * Users must have the ``MANAGE`` permission to invoke this API.
*
- * Throws `RESOURCE_DOES_NOT_EXIST` if no such secret scope exists. Throws `PERMISSION_DENIED`
- * if the user does not have permission to make this API call.
+ * Example response:
+ *
+ * .. code::
+ *
+ * { "principal": "data-scientists", "permission": "READ" }
+ *
+ * Throws ``RESOURCE_DOES_NOT_EXIST`` if no such secret scope exists. Throws
+ * ``PERMISSION_DENIED`` if the user does not have permission to make this API call. Throws
+ * ``INVALID_PARAMETER_VALUE`` if the permission or principal is invalid.
*/
AclItem getAcl(GetAclRequest getAclRequest);
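
A short hedged sketch of reading an ACL back through the wrapper API, assuming the GetAclRequest setters used elsewhere in this package; the scope and principal are placeholders:

    import com.databricks.sdk.WorkspaceClient;
    import com.databricks.sdk.service.workspace.AclItem;
    import com.databricks.sdk.service.workspace.GetAclRequest;

    public class GetAclExample {
      public static void main(String[] args) {
        WorkspaceClient w = new WorkspaceClient();

        // Read the ACL for a single principal on a scope (requires MANAGE on the scope).
        AclItem acl =
            w.secrets()
                .getAcl(new GetAclRequest().setScope("my-secret-scope").setPrincipal("data-scientists"));

        System.out.println(acl.getPrincipal() + " -> " + acl.getPermission());
      }
    }
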
/**
- * Gets the bytes representation of a secret value for the specified scope and key.
+ * Gets a secret for a given key and scope. This API can only be called from the DBUtils
+ * interface. Users need the READ permission to make this call.
+ *
+ * Example response:
+ *
+ * .. code::
*
- * Users need the READ permission to make this call.
+ * { "key": "my-string-key", "value": Note that the secret value returned is in bytes. The interpretation of the bytes is
* determined by the caller in DBUtils and the type the data is decoded into.
*
- * Throws ``PERMISSION_DENIED`` if the user does not have permission to make this API call.
- * Throws ``RESOURCE_DOES_NOT_EXIST`` if no such secret or secret scope exists.
+ * Throws ``RESOURCE_DOES_NOT_EXIST`` if no such secret or secret scope exists. Throws
+ * ``PERMISSION_DENIED`` if the user does not have permission to make this API call.
+ *
+ * Note: This is explicitly an undocumented API. It also doesn't need to be supported for the
+ * /preview prefix, because it's not a customer-facing API (i.e. only used for DBUtils SecretUtils
+ * to fetch secrets).
+ *
+ * Throws ``RESOURCE_DOES_NOT_EXIST`` if no such secret scope or secret exists. Throws
+ * ``BAD_REQUEST`` if normal user calls get secret outside of a notebook. AKV specific errors:
+ * Throws ``INVALID_PARAMETER_VALUE`` if secret name is not alphanumeric or too long. Throws
+ * ``PERMISSION_DENIED`` if secret manager cannot access AKV with a 403 error. Throws
+ * ``MALFORMED_REQUEST`` if secret manager cannot access AKV with any other 4xx error.
*/
GetSecretResponse getSecret(GetSecretRequest getSecretRequest);
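
Because the javadoc stresses that the returned value is bytes whose interpretation is left to the caller, here is a hedged decoding sketch; it assumes GetSecretResponse exposes the value as a base64-encoded string, per the Secrets 2.0 REST documentation:

    import com.databricks.sdk.WorkspaceClient;
    import com.databricks.sdk.service.workspace.GetSecretRequest;
    import com.databricks.sdk.service.workspace.GetSecretResponse;
    import java.nio.charset.StandardCharsets;
    import java.util.Base64;

    public class GetSecretExample {
      public static void main(String[] args) {
        WorkspaceClient w = new WorkspaceClient();

        GetSecretResponse resp =
            w.secrets()
                .getSecret(new GetSecretRequest().setScope("my-secret-scope").setKey("my-string-key"));

        // The value field carries base64-encoded bytes; decoding to UTF-8 assumes the
        // secret was originally written as string_value.
        byte[] raw = Base64.getDecoder().decode(resp.getValue());
        System.out.println(new String(raw, StandardCharsets.UTF_8));
      }
    }
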
/**
- * List the ACLs for a given secret scope. Users must have the `MANAGE` permission to invoke this
- * API.
+ * Lists the ACLs set on the given scope.
+ *
+ * Users must have the ``MANAGE`` permission to invoke this API.
+ *
+ * Example response:
+ *
+ * .. code::
*
- * Throws `RESOURCE_DOES_NOT_EXIST` if no such secret scope exists. Throws `PERMISSION_DENIED`
- * if the user does not have permission to make this API call.
+ * { "acls": [{ "principal": "admins", "permission": "MANAGE" },{ "principal":
+ * "data-scientists", "permission": "READ" }] }
+ *
+ * Throws ``RESOURCE_DOES_NOT_EXIST`` if no such secret scope exists. Throws
+ * ``PERMISSION_DENIED`` if the user does not have permission to make this API call.
*/
ListAclsResponse listAcls(ListAclsRequest listAclsRequest);
/**
* Lists all secret scopes available in the workspace.
*
- * Throws `PERMISSION_DENIED` if the user does not have permission to make this API call.
+ * Example response:
+ *
+ * .. code::
+ *
+ * { "scopes": [{ "name": "my-databricks-scope", "backend_type": "DATABRICKS" },{ "name":
+ * "mount-points", "backend_type": "DATABRICKS" }] }
+ *
+ * Throws ``PERMISSION_DENIED`` if the user does not have permission to make this API call.
*/
ListScopesResponse listScopes();
@@ -94,57 +186,77 @@ public interface SecretsService {
* Lists the secret keys that are stored at this scope. This is a metadata-only operation; secret
* data cannot be retrieved using this API. Users need the READ permission to make this call.
*
- * The lastUpdatedTimestamp returned is in milliseconds since epoch. Throws
- * `RESOURCE_DOES_NOT_EXIST` if no such secret scope exists. Throws `PERMISSION_DENIED` if the
- * user does not have permission to make this API call.
+ * Example response:
+ *
+ * .. code::
+ *
+ * { "secrets": [ { "key": "my-string-key"", "last_updated_timestamp": "1520467595000" }, {
+ * "key": "my-byte-key", "last_updated_timestamp": "1520467595000" }, ] }
+ *
+ * The lastUpdatedTimestamp returned is in milliseconds since epoch.
+ *
+ * Throws ``RESOURCE_DOES_NOT_EXIST`` if no such secret scope exists. Throws
+ * ``PERMISSION_DENIED`` if the user does not have permission to make this API call.
*/
ListSecretsResponse listSecrets(ListSecretsRequest listSecretsRequest);
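
A hedged sketch of listing scopes and secret metadata through the wrapper API, assuming the SecretScope and SecretMetadata accessors from this package; as noted above, the timestamp is milliseconds since epoch:

    import com.databricks.sdk.WorkspaceClient;
    import com.databricks.sdk.service.workspace.ListSecretsRequest;
    import com.databricks.sdk.service.workspace.SecretMetadata;
    import com.databricks.sdk.service.workspace.SecretScope;
    import java.time.Instant;

    public class ListSecretsExample {
      public static void main(String[] args) {
        WorkspaceClient w = new WorkspaceClient();

        // Enumerate scopes, then the secret keys in one of them (metadata only; no values).
        for (SecretScope scope : w.secrets().listScopes()) {
          System.out.println(scope.getName() + " (" + scope.getBackendType() + ")");
        }
        for (SecretMetadata meta :
            w.secrets().listSecrets(new ListSecretsRequest().setScope("my-databricks-scope"))) {
          // last_updated_timestamp is milliseconds since epoch.
          System.out.println(
              meta.getKey() + " updated at " + Instant.ofEpochMilli(meta.getLastUpdatedTimestamp()));
        }
      }
    }
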
/**
- * Creates or overwrites the Access Control List (ACL) associated with the given principal (user
- * or group) on the specified scope point.
- *
- * In general, a user or group will use the most powerful permission available to them, and
- * permissions are ordered as follows:
+ * Creates or overwrites the ACL associated with the given principal (user or group) on the
+ * specified scope point. In general, a user or group will use the most powerful permission
+ * available to them, and permissions are ordered as follows:
*
- * * `MANAGE` - Allowed to change ACLs, and read and write to this secret scope. * `WRITE` -
- * Allowed to read and write to this secret scope. * `READ` - Allowed to read this secret scope
- * and list what secrets are available.
+ * * ``MANAGE`` - Allowed to change ACLs, and read and write to this secret scope. * ``WRITE``
+ * - Allowed to read and write to this secret scope. * ``READ`` - Allowed to read this secret
+ * scope and list what secrets are available.
*
* Note that in general, secret values can only be read from within a command on a cluster (for
* example, through a notebook). There is no API to read the actual secret value material outside
* of a cluster. However, the user's permission will be applied based on who is executing the
* command, and they must have at least READ permission.
*
- * Users must have the `MANAGE` permission to invoke this API.
+ * Users must have the ``MANAGE`` permission to invoke this API.
+ *
+ * Example request:
+ *
+ * .. code::
+ *
+ * { "scope": "my-secret-scope", "principal": "data-scientists", "permission": "READ" }
*
* The principal is a user or group name corresponding to an existing Databricks principal to
* be granted or revoked access.
*
- * Throws `RESOURCE_DOES_NOT_EXIST` if no such secret scope exists. Throws
- * `RESOURCE_ALREADY_EXISTS` if a permission for the principal already exists. Throws
- * `INVALID_PARAMETER_VALUE` if the permission or principal is invalid. Throws `PERMISSION_DENIED`
- * if the user does not have permission to make this API call.
+ * Throws ``RESOURCE_DOES_NOT_EXIST`` if no such secret scope exists. Throws
+ * ``RESOURCE_ALREADY_EXISTS`` if a permission for the principal already exists. Throws
+ * ``INVALID_PARAMETER_VALUE`` if the permission or principal is invalid. Throws
+ * ``PERMISSION_DENIED`` if the user does not have permission to make this API call.
*/
void putAcl(PutAcl putAcl);
/**
* Inserts a secret under the provided scope with the given name. If a secret already exists with
* the same name, this command overwrites the existing secret's value. The server encrypts the
- * secret using the secret scope's encryption settings before storing it.
+ * secret using the secret scope's encryption settings before storing it. You must have ``WRITE``
+ * or ``MANAGE`` permission on the secret scope.
+ *
+ * The secret key must consist of alphanumeric characters, dashes, underscores, and periods,
+ * and cannot exceed 128 characters. The maximum allowed secret value size is 128 KB. The maximum
+ * number of secrets in a given scope is 1000.
+ *
+ * Example request:
+ *
+ * .. code::
*
- * You must have `WRITE` or `MANAGE` permission on the secret scope. The secret key must
- * consist of alphanumeric characters, dashes, underscores, and periods, and cannot exceed 128
- * characters. The maximum allowed secret value size is 128 KB. The maximum number of secrets in a
- * given scope is 1000.
+ * { "scope": "my-databricks-scope", "key": "my-string-key", "string_value": "foobar" }
*
* The input fields "string_value" or "bytes_value" specify the type of the secret, which will
* determine the value returned when the secret value is requested. Exactly one must be specified.
*
- * Throws `RESOURCE_DOES_NOT_EXIST` if no such secret scope exists. Throws
- * `RESOURCE_LIMIT_EXCEEDED` if maximum number of secrets in scope is exceeded. Throws
- * `INVALID_PARAMETER_VALUE` if the key name or value length is invalid. Throws
- * `PERMISSION_DENIED` if the user does not have permission to make this API call.
+ * Throws ``RESOURCE_DOES_NOT_EXIST`` if no such secret scope exists. Throws
+ * ``RESOURCE_LIMIT_EXCEEDED`` if maximum number of secrets in scope is exceeded. Throws
+ * ``INVALID_PARAMETER_VALUE`` if the request parameters are invalid. Throws ``PERMISSION_DENIED``
+ * if the user does not have permission to make this API call. Throws ``MALFORMED_REQUEST`` if
+ * request is incorrectly formatted or conflicting. Throws ``BAD_REQUEST`` if request is made
+ * against Azure KeyVault backed scope.
*/
void putSecret(PutSecret putSecret);
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/UpdateCredentialsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/UpdateCredentialsResponse.java
deleted file mode 100755
index 20e001bd3..000000000
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/UpdateCredentialsResponse.java
+++ /dev/null
@@ -1,28 +0,0 @@
-// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-package com.databricks.sdk.service.workspace;
-
-import com.databricks.sdk.support.Generated;
-import com.databricks.sdk.support.ToStringer;
-import java.util.Objects;
-
-@Generated
-public class UpdateCredentialsResponse {
-
- @Override
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
- return true;
- }
-
- @Override
- public int hashCode() {
- return Objects.hash();
- }
-
- @Override
- public String toString() {
- return new ToStringer(UpdateCredentialsResponse.class).toString();
- }
-}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/UpdateRepoResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/UpdateRepoResponse.java
deleted file mode 100755
index c7d596164..000000000
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/UpdateRepoResponse.java
+++ /dev/null
@@ -1,28 +0,0 @@
-// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-package com.databricks.sdk.service.workspace;
-
-import com.databricks.sdk.support.Generated;
-import com.databricks.sdk.support.ToStringer;
-import java.util.Objects;
-
-@Generated
-public class UpdateRepoResponse {
-
- @Override
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
- return true;
- }
-
- @Override
- public int hashCode() {
- return Objects.hash();
- }
-
- @Override
- public String toString() {
- return new ToStringer(UpdateRepoResponse.class).toString();
- }
-}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceImpl.java
index 477ceb249..adb7adb44 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceImpl.java
@@ -24,7 +24,7 @@ public void delete(Delete request) {
ApiClient.setQuery(req, request);
req.withHeader("Accept", "application/json");
req.withHeader("Content-Type", "application/json");
- apiClient.execute(req, DeleteResponse.class);
+ apiClient.execute(req, Void.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
@@ -97,7 +97,7 @@ public void importContent(Import request) {
ApiClient.setQuery(req, request);
req.withHeader("Accept", "application/json");
req.withHeader("Content-Type", "application/json");
- apiClient.execute(req, ImportResponse.class);
+ apiClient.execute(req, Void.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
@@ -124,7 +124,7 @@ public void mkdirs(Mkdirs request) {
ApiClient.setQuery(req, request);
req.withHeader("Accept", "application/json");
req.withHeader("Content-Type", "application/json");
- apiClient.execute(req, MkdirsResponse.class);
+ apiClient.execute(req, Void.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceObjectAccessControlRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceObjectAccessControlRequest.java
index edb8f3b3e..de751831a 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceObjectAccessControlRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceObjectAccessControlRequest.java
@@ -13,7 +13,7 @@ public class WorkspaceObjectAccessControlRequest {
@JsonProperty("group_name")
private String groupName;
- /** Permission level */
+ /** */
@JsonProperty("permission_level")
private WorkspaceObjectPermissionLevel permissionLevel;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceObjectPermission.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceObjectPermission.java
index 748bd2854..0fb785b84 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceObjectPermission.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceObjectPermission.java
@@ -18,7 +18,7 @@ public class WorkspaceObjectPermission {
@JsonProperty("inherited_from_object")
private Collection